From 30b1bdfe5ee15f92349b935ff66d661adf56602c Mon Sep 17 00:00:00 2001
From: shaohuzhang1
Date: Thu, 29 Aug 2024 17:21:19 +0800
Subject: [PATCH] fix: Fix openai token calculation error
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

(cherry picked from commit 3d1b3ea8d5d1066677f99b0b6e26656c95a57829)
---
 .../models_provider/impl/openai_model_provider/model/llm.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/apps/setting/models_provider/impl/openai_model_provider/model/llm.py b/apps/setting/models_provider/impl/openai_model_provider/model/llm.py
index ce9d624c2..cdd187d9c 100644
--- a/apps/setting/models_provider/impl/openai_model_provider/model/llm.py
+++ b/apps/setting/models_provider/impl/openai_model_provider/model/llm.py
@@ -46,14 +46,14 @@ class OpenAIChatModel(MaxKBBaseModel, ChatOpenAI):
 
     def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
         try:
-            super().get_num_tokens_from_messages(messages)
+            return super().get_num_tokens_from_messages(messages)
         except Exception as e:
             tokenizer = TokenizerManage.get_tokenizer()
             return sum([len(tokenizer.encode(get_buffer_string([m]))) for m in messages])
 
     def get_num_tokens(self, text: str) -> int:
         try:
-            super().get_num_tokens(text)
+            return super().get_num_tokens(text)
         except Exception as e:
             tokenizer = TokenizerManage.get_tokenizer()
             return len(tokenizer.encode(text))
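
Context for the change: before this patch, both methods called the parent
ChatOpenAI token counter inside the try block but discarded its result, so on
the success path they implicitly returned None; the TokenizerManage fallback
only ran when the parent call raised. The sketch below is a minimal standalone
illustration of the corrected pattern, using hypothetical stand-in classes
rather than the real MaxKBBaseModel/ChatOpenAI/TokenizerManage types from the
patched module.

# Hypothetical stand-ins, for illustration only.
class ParentCounter:
    # stands in for ChatOpenAI's built-in token counting
    def get_num_tokens(self, text: str) -> int:
        return len(text.split())


class OpenAIChatModelSketch(ParentCounter):
    def get_num_tokens(self, text: str) -> int:
        try:
            # the fix: return the parent's result; previously the value was
            # computed and thrown away, so the method returned None
            return super().get_num_tokens(text)
        except Exception:
            # fallback path, analogous to the TokenizerManage-based counting
            return len(text)


print(OpenAIChatModelSketch().get_num_tokens("hello world"))  # 2, not None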