From bd589e50e22090f873d6d537582e7e7a4b675f47 Mon Sep 17 00:00:00 2001
From: shaohuzhang1 <80892890+shaohuzhang1@users.noreply.github.com>
Date: Wed, 6 Aug 2025 11:00:43 +0800
Subject: [PATCH] fix: Token usage cannot be obtained when calling the model in
 non-stream mode (#3826)

---
 apps/models_provider/impl/base_chat_open_ai.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/apps/models_provider/impl/base_chat_open_ai.py b/apps/models_provider/impl/base_chat_open_ai.py
index c96bfacf2..626a751f7 100644
--- a/apps/models_provider/impl/base_chat_open_ai.py
+++ b/apps/models_provider/impl/base_chat_open_ai.py
@@ -99,7 +99,7 @@ class BaseChatOpenAI(ChatOpenAI):
         except Exception as e:
             tokenizer = TokenizerManage.get_tokenizer()
             return sum([len(tokenizer.encode(get_buffer_string([m]))) for m in messages])
-        return self.usage_metadata.get('input_tokens', 0)
+        return self.usage_metadata.get('input_tokens', self.usage_metadata.get('prompt_tokens', 0))
 
     def get_num_tokens(self, text: str) -> int:
         if self.usage_metadata is None or self.usage_metadata == {}:
@@ -108,7 +108,8 @@ class BaseChatOpenAI(ChatOpenAI):
         except Exception as e:
             tokenizer = TokenizerManage.get_tokenizer()
             return len(tokenizer.encode(text))
-        return self.get_last_generation_info().get('output_tokens', 0)
+        return self.get_last_generation_info().get('output_tokens',
+                                                   self.get_last_generation_info().get('completion_tokens', 0))
 
     def _stream(self, *args: Any, **kwargs: Any) -> Iterator[ChatGenerationChunk]:
         kwargs['stream_usage'] = True
@@ -133,7 +134,7 @@ class BaseChatOpenAI(ChatOpenAI):
         )
 
         usage_metadata: Optional[UsageMetadata] = (
-            _create_usage_metadata(token_usage) if token_usage else None
+            _create_usage_metadata(token_usage) if token_usage and token_usage.get("prompt_tokens") else None
         )
         if len(choices) == 0:
             # logprobs is implicitly None