fix: fix bug where vLLM models could not generate output correctly

CaptainB 2024-10-18 11:54:44 +08:00 committed by 刘瑞斌
parent 12d348439c
commit cb3c064463


@@ -54,5 +54,5 @@ class BaseChatOpenAI(ChatOpenAI):
                 **kwargs,
             ).generations[0][0],
         ).message
-        self.usage_metadata = chat_result.response_metadata['token_usage']
+        self.usage_metadata = chat_result.response_metadata['token_usage'] if 'token_usage' in chat_result.response_metadata else chat_result.usage_metadata
         return chat_result
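
For context, the pre-fix line indexed response_metadata['token_usage'] directly, which raises a KeyError when an OpenAI-compatible backend such as a vLLM server does not report token usage in response_metadata; the new expression falls back to the message's usage_metadata instead. Below is a minimal standalone sketch of the same fallback pattern. FakeMessage and extract_usage are hypothetical stand-ins for illustration, not LangChain's actual message class or the method changed in this commit.

# Sketch of the key-presence fallback used in the fix above.
from dataclasses import dataclass, field


@dataclass
class FakeMessage:
    # response_metadata may omit 'token_usage' for some OpenAI-compatible
    # backends (assumption: a vLLM server behaves this way), which previously
    # caused a KeyError on direct indexing.
    response_metadata: dict = field(default_factory=dict)
    usage_metadata: dict = field(default_factory=dict)


def extract_usage(message: FakeMessage) -> dict:
    """Prefer response_metadata['token_usage'], otherwise fall back to usage_metadata."""
    if 'token_usage' in message.response_metadata:
        return message.response_metadata['token_usage']
    return message.usage_metadata


# OpenAI-style response: token_usage is present in response_metadata.
print(extract_usage(FakeMessage(
    response_metadata={'token_usage': {'total_tokens': 42}})))

# Response without token_usage: no KeyError, falls back to usage_metadata.
print(extract_usage(FakeMessage(
    usage_metadata={'total_tokens': 42})))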