fix: Non-streaming sessions cannot count token consumption #3635 (#3709)

This commit is contained in:
shaohuzhang1 2025-07-22 18:43:10 +08:00 committed by GitHub
parent 14d011d61d
commit bc6a5a8869
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -99,7 +99,7 @@ class BaseChatOpenAI(ChatOpenAI):
except Exception as e:
tokenizer = TokenizerManage.get_tokenizer()
return sum([len(tokenizer.encode(get_buffer_string([m]))) for m in messages])
return self.usage_metadata.get('input_tokens', 0)
return self.usage_metadata.get('input_tokens', self.usage_metadata.get('prompt_tokens', 0))
def get_num_tokens(self, text: str) -> int:
if self.usage_metadata is None or self.usage_metadata == {}:
@@ -108,7 +108,8 @@ class BaseChatOpenAI(ChatOpenAI):
except Exception as e:
tokenizer = TokenizerManage.get_tokenizer()
return len(tokenizer.encode(text))
return self.get_last_generation_info().get('output_tokens', 0)
return self.get_last_generation_info().get('output_tokens',
self.get_last_generation_info().get('completion_tokens', 0))
def _stream(self, *args: Any, **kwargs: Any) -> Iterator[ChatGenerationChunk]:
kwargs['stream_usage'] = True