[plugins/openai_plugin] Minor bugfixes after plugin refactor (#64)
* fix: stop_reason was always None

The stop reason was being checked on the preprocessed token list, so
deepget would always return None (a short sketch of this behavior follows
below, just before the diff).

* fix: drop unnecessary tokens before timeout check

* fix: typo
sjmonson authored Oct 18, 2024
1 parent 2fafa69 commit 955c120
Showing 1 changed file with 5 additions and 9 deletions.
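
To illustrate the first fix, here is a minimal sketch of the failure mode. It assumes deepget is a safe nested lookup that returns None when any key or index along the path is missing (the real helper is defined elsewhere in this repository, so this is an approximation), and it uses simplified token and message dicts built from the fields visible in the diff:

# Sketch only: the real deepget lives elsewhere in the repo; this assumes it is
# a safe nested lookup that returns None on any missing key or index.
def deepget(obj, *path):
    for key in path:
        try:
            obj = obj[key]
        except (KeyError, IndexError, TypeError):
            return None
    return obj

# Preprocessed token entries only carry fields like 'text', 'time', and 'count',
# so looking up "choices" on one of them always misses:
token = {"text": "Hello", "time": 1729.25, "count": 1}
print(deepget(token, "choices", 0, "finish_reason"))    # None -> stop_reason stayed None

# The raw streamed message keeps the OpenAI-style structure, so the same lookup
# succeeds on the chunk that carries finish_reason:
message = {"choices": [{"delta": {"content": ""}, "finish_reason": "stop"}]}
print(deepget(message, "choices", 0, "finish_reason"))  # "stop"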
14 changes: 5 additions & 9 deletions plugins/openai_plugin.py
@@ -328,7 +328,11 @@ def streaming_request_http(self, query: dict, user_id: int, test_end_time: float
             token['lat'] = token['time'] - prev_time
             prev_time = token['time']
 
-            # Append our vaild token
+            # Find the last response with finish_reason set.
+            if deepget(message, "choices", 0, "finish_reason"):
+                result.stop_reason = deepget(message, "choices", 0, "finish_reason")
+
+            # Append our valid token
             tokens.append(token)
 
         # First chunk may not be a token, just a connection ack
@@ -341,9 +345,6 @@ def streaming_request_http(self, query: dict, user_id: int, test_end_time: float
         # the current token.
         result.output_tokens_before_timeout = sum(t['count'] for t in tokens if t['time'] <= test_end_time)
 
-        # Last token comes with finish_reason set.
-        result.stop_reason = deepget(resps[-1], "choices", 0, "finish_reason")
-
         # Full response received, return
         result.output_text = "".join([token['text'] for token in tokens])
 
@@ -355,10 +356,5 @@ def streaming_request_http(self, query: dict, user_id: int, test_end_time: float
         if expected_output_tokens and result.output_tokens != expected_output_tokens:
             logger.warning(f"Received {result.output_tokens} tokens but expected {expected_output_tokens} tokens")
 
-        # If test duration timeout didn't happen before the last token is received,
-        # total tokens before the timeout will be equal to the total tokens in the response.
-        if not result.output_tokens_before_timeout:
-            result.output_tokens_before_timeout = result.output_tokens
-
         result.calculate_results()
         return result
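
For reference, a small standalone illustration of the per-token sum kept above, using made-up timestamps: when the test deadline falls after the last token, the sum already equals the full token count; when it falls before any token arrived, the sum is zero, a case the removed fallback would have overwritten with the full count.

# Made-up token timestamps to show how the per-token sum behaves.
tokens = [{"count": 1, "time": 10.0}, {"count": 1, "time": 10.5}, {"count": 1, "time": 11.0}]

# Deadline after the last token: every token counts, so the sum equals the total.
print(sum(t['count'] for t in tokens if t['time'] <= 12.0))  # 3

# Deadline before any token: the sum is 0; the removed fallback would have
# replaced this 0 with the full token count.
print(sum(t['count'] for t in tokens if t['time'] <= 9.0))   # 0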
