fix(openai): logprobs when echo is enabled (#761)
Signed-off-by: Aaron <[email protected]>
aarnphm authored Dec 10, 2023
1 parent c3a0b5c commit 0811441
Showing 1 changed file with 7 additions and 3 deletions.
10 changes: 7 additions & 3 deletions openllm-python/src/openllm/entrypoints/openai.py
@@ -298,18 +298,22 @@ async def completion_stream_generator():
       i = output.index
       delta_text = output.text
       token_ids = output.token_ids
-      top_logprobs = output.logprobs[previous_num_tokens[i]:]
       logprobs = None
+      top_logprobs = None
+      if request.logprobs is not None:
+        top_logprobs = output.logprobs[previous_num_tokens[i]:]

       if request.echo and not previous_echo[i]:
         if not echo_without_generation:
           delta_text = res.prompt + delta_text
           token_ids = res.prompt_token_ids + token_ids
-          top_logprobs = res.prompt_logprobs + top_logprobs
+          if top_logprobs:
+            top_logprobs = res.prompt_logprobs + top_logprobs
         else:
           delta_text = res.prompt
           token_ids = res.prompt_token_ids
-          top_logprobs = res.prompt_logprobs
+          if top_logprobs:
+            top_logprobs = res.prompt_logprobs
         previous_echo[i] = True
       if request.logprobs is not None:
         logprobs = create_logprobs(output.token_ids, output.logprobs[previous_num_tokens[i]:], request.logprobs, len(previous_texts[i]), llm=llm)
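The change gates every use of logprobs behind the client's request: top_logprobs is only populated when request.logprobs is set, and the prompt logprobs are only prepended when there is a generation-side list to extend. Previously the code sliced output.logprobs and concatenated res.prompt_logprobs unconditionally, which breaks when those values are None (e.g. when echo is enabled but logprobs were not requested). Below is a minimal, self-contained sketch of that guard pattern; the names (merge_echo_logprobs, requested_logprobs) are hypothetical and are not part of the OpenLLM codebase.

from typing import Optional

def merge_echo_logprobs(requested_logprobs: Optional[int], prompt_logprobs: Optional[list], generation_logprobs: Optional[list]) -> Optional[list]:
  # Hypothetical helper illustrating the commit's guard pattern: only touch
  # logprobs when the client requested them, and only prepend prompt logprobs
  # when there is a generation-side list to prepend them to.
  top_logprobs = None
  if requested_logprobs is not None:
    top_logprobs = generation_logprobs  # the real code slices output.logprobs here
  if top_logprobs:
    top_logprobs = (prompt_logprobs or []) + top_logprobs
  return top_logprobs

# Echo enabled, logprobs not requested: returns None instead of failing.
assert merge_echo_logprobs(None, [{'the': -0.1}], None) is None
# Echo enabled, logprobs requested: prompt and generation logprobs are joined.
assert merge_echo_logprobs(1, [{'the': -0.1}], [{'cat': -0.4}]) == [{'the': -0.1}, {'cat': -0.4}]

The same check appears twice in the diff because the echo path has separate branches for echo-with-generation and echo-only responses.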
