From b17aaaf1c38cba5c7c998b64acc94e3df8c9d483 Mon Sep 17 00:00:00 2001
From: charles
Date: Sat, 30 Aug 2025 23:54:46 -0700
Subject: [PATCH] Fix the duplicate tracking

---
 app/services/provider_service.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/app/services/provider_service.py b/app/services/provider_service.py
index ef50406..1fd211e 100644
--- a/app/services/provider_service.py
+++ b/app/services/provider_service.py
@@ -742,16 +742,16 @@ async def token_counting_stream() -> AsyncGenerator[bytes, None]:
                                 f"Found usage data in chunk: {data['usage']}"
                             )
                             usage = data.get("usage", {})
-                            input_tokens += (
+                            input_tokens = (
                                 usage.get("prompt_tokens", 0) or 0
                             )
-                            output_tokens += (
+                            output_tokens = (
                                 usage.get("completion_tokens", 0) or 0
                             )
                             prompt_tokens_details = usage.get("prompt_tokens_details", {}) or {}
                             completion_tokens_details = usage.get("completion_tokens_details", {}) or {}
-                            cached_tokens += prompt_tokens_details.get("cached_tokens", 0)
-                            reasoning_tokens += completion_tokens_details.get("reasoning_tokens", 0)
+                            cached_tokens = prompt_tokens_details.get("cached_tokens", 0)
+                            reasoning_tokens = completion_tokens_details.get("reasoning_tokens", 0)
 
                         # Extract content from the chunk based on OpenAI format
                         if "choices" in data:
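
A minimal standalone sketch of why assignment replaces "+=" above, assuming an OpenAI-style stream where the usage object carries cumulative totals and may appear in more than one chunk; the chunk payloads and loop below are invented for illustration and are not code from app/services/provider_service.py:

    # Hypothetical streamed chunks: the provider repeats a cumulative "usage"
    # object, so accumulating it with "+=" would double-count every total.
    chunks = [
        {"choices": [{"delta": {"content": "Hello"}}]},
        {
            "choices": [],
            "usage": {
                "prompt_tokens": 12,
                "completion_tokens": 5,
                "prompt_tokens_details": {"cached_tokens": 4},
                "completion_tokens_details": {"reasoning_tokens": 2},
            },
        },
        {
            "choices": [],
            "usage": {
                "prompt_tokens": 12,
                "completion_tokens": 7,
                "prompt_tokens_details": {"cached_tokens": 4},
                "completion_tokens_details": {"reasoning_tokens": 2},
            },
        },
    ]

    input_tokens = output_tokens = cached_tokens = reasoning_tokens = 0
    for data in chunks:
        usage = data.get("usage") or {}
        if not usage:
            continue
        # Assignment keeps the latest cumulative totals; "+=" here would have
        # inflated the counts whenever usage shows up in more than one chunk.
        input_tokens = usage.get("prompt_tokens", 0) or 0
        output_tokens = usage.get("completion_tokens", 0) or 0
        prompt_tokens_details = usage.get("prompt_tokens_details", {}) or {}
        completion_tokens_details = usage.get("completion_tokens_details", {}) or {}
        cached_tokens = prompt_tokens_details.get("cached_tokens", 0)
        reasoning_tokens = completion_tokens_details.get("reasoning_tokens", 0)

    print(input_tokens, output_tokens, cached_tokens, reasoning_tokens)  # 12 7 4 2

With the old "+=" logic the same run would report 24 input tokens and 12 output tokens, which is the duplicate tracking the patch removes.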