fix: logprobs from openai (#85)

pull/86/head v0.1.4
Laurel Orr 1 year ago committed by GitHub
parent d7401c6ec5
commit e559c8fa59

@@ -107,6 +107,27 @@ class OpenAIClient(Client):
         """
         return {"model_name": self.NAME, "engine": getattr(self, "engine")}
 
+    def validate_response(self, response: Dict, request: Dict) -> Dict[str, Any]:
+        """
+        Validate response as dict.
+
+        Args:
+            response: response
+            request: request
+
+        Return:
+            response as dict
+        """
+        validated_response = super().validate_response(response, request)
+        # Handle logprobs
+        for choice in validated_response["choices"]:
+            if "logprobs" in choice:
+                logprobs = choice.pop("logprobs")
+                if logprobs and "token_logprobs" in logprobs:
+                    choice["token_logprobs"] = logprobs["token_logprobs"]
+                    choice["tokens"] = logprobs["tokens"]
+        return validated_response
+
     def split_usage(self, request: Dict, choices: List[str]) -> List[Dict[str, int]]:
         """Split usage into list of usages for each prompt."""
         try:
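For context, a minimal standalone sketch of the flattening the new validate_response performs. The function name flatten_logprobs and the payload values are illustrative only; the payload shape mirrors an OpenAI completions response with logprobs requested.

from typing import Any, Dict


def flatten_logprobs(response: Dict[str, Any]) -> Dict[str, Any]:
    """Pop the nested OpenAI 'logprobs' block and promote its fields onto each choice."""
    for choice in response["choices"]:
        if "logprobs" in choice:
            logprobs = choice.pop("logprobs")
            if logprobs and "token_logprobs" in logprobs:
                choice["token_logprobs"] = logprobs["token_logprobs"]
                choice["tokens"] = logprobs["tokens"]
    return response


# Illustrative payload shaped like an OpenAI completions response (values made up).
raw = {
    "choices": [
        {
            "text": " hello world",
            "logprobs": {
                "tokens": [" hello", " world"],
                "token_logprobs": [-0.12, -0.34],
            },
        }
    ]
}

print(flatten_logprobs(raw))
# {'choices': [{'text': ' hello world',
#               'token_logprobs': [-0.12, -0.34],
#               'tokens': [' hello', ' world']}]}

Note that the tokens OpenAI reports here are token strings rather than integer ids, which is what the second hunk below corrects in LMModelChoice.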

@@ -52,7 +52,7 @@ class LMModelChoice(BaseModel):
 
     text: str
     token_logprobs: Optional[List[float]] = None
-    tokens: Optional[List[int]] = None
+    tokens: Optional[List[str]] = None
 
 
 class ArrayModelChoice(BaseModel):

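To illustrate why the field type changes from List[int] to List[str]: the tokens carried over from OpenAI's logprobs block are token strings, not integer ids. A minimal sketch of the corrected model, assuming pydantic is available; the example values are made up.

from typing import List, Optional

from pydantic import BaseModel


class LMModelChoice(BaseModel):
    """Sketch of the corrected model choice from above."""

    text: str
    token_logprobs: Optional[List[float]] = None
    tokens: Optional[List[str]] = None


choice = LMModelChoice(
    text=" hello world",
    token_logprobs=[-0.12, -0.34],
    tokens=[" hello", " world"],  # token strings, as returned by OpenAI
)
print(choice.tokens)  # [' hello', ' world']

With the old List[int] annotation, pydantic would reject (or try to coerce) these string tokens, so the annotation now matches the data produced by validate_response.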