diff --git a/llm/utils.py b/llm/utils.py
index a47984d2..e9853185 100644
--- a/llm/utils.py
+++ b/llm/utils.py
@@ -147,9 +147,9 @@ def remove_empty_and_zero(obj):
 def token_usage_string(input_tokens, output_tokens, token_details) -> str:
     bits = []
     if input_tokens is not None:
-        bits.append(f"{input_tokens} input")
+        bits.append(f"{input_tokens:,} input")
     if output_tokens is not None:
-        bits.append(f"{output_tokens} output")
+        bits.append(f"{output_tokens:,} output")
     if token_details:
         bits.append(json.dumps(token_details))
     return ", ".join(bits)
diff --git a/tests/test_cli_openai_models.py b/tests/test_cli_openai_models.py
index 8bd45338..3d0a7c16 100644
--- a/tests/test_cli_openai_models.py
+++ b/tests/test_cli_openai_models.py
@@ -174,8 +174,8 @@ def test_gpt4o_mini_sync_and_async(monkeypatch, tmpdir, httpx_mock, async_, usag
                 }
             ],
             "usage": {
-                "prompt_tokens": 10,
-                "completion_tokens": 2,
-                "total_tokens": 12,
+                "prompt_tokens": 1000,
+                "completion_tokens": 2000,
+                "total_tokens": 3000,
             },
             "system_fingerprint": "fp_49254d0e9b",
@@ -192,7 +192,7 @@ def test_gpt4o_mini_sync_and_async(monkeypatch, tmpdir, httpx_mock, async_, usag
         assert result.exit_code == 0
         assert result.output == "Ho ho ho\n"
         if usage:
-            assert result.stderr == "Token usage: 10 input, 2 output\n"
+            assert result.stderr == "Token usage: 1,000 input, 2,000 output\n"
         # Confirm it was correctly logged
         assert log_db.exists()
         db = sqlite_utils.Database(str(log_db))