fix: Remove vllm image cache

--bug=1052365 --user=刘瑞斌 【github#2353】Changing max tokens on the vllm vision model does not take effect https://www.tapd.cn/57709429/s/1657667
Author: CaptainB (committed by 刘瑞斌)
Date:   2025-02-24 16:29:20 +08:00
Parent: fa1886a17e
Commit: 3c6b65baa1


@@ -18,3 +18,6 @@ class VllmImage(MaxKBBaseModel, BaseChatOpenAI):
             stream_usage=True,
             **optional_params,
         )
+
+    def is_cache_model(self):
+        return False
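
Why this fixes the bug: a cached VllmImage instance keeps the parameters it was built with, so a user's updated max tokens setting never reached the model. Overriding is_cache_model() to return False opts the vllm vision model out of caching, so each request rebuilds it with the current settings. Below is a minimal sketch of how a cache layer that consults this flag might look; get_model and _model_cache are hypothetical illustrations, not MaxKB's actual implementation:

    # Hypothetical sketch of a model cache keyed on is_cache_model().
    # get_model and _model_cache are illustrative names only, not
    # MaxKB's real API.
    _model_cache = {}

    def get_model(model_class, cache_key, **params):
        instance = model_class(**params)  # built with current settings
        if not instance.is_cache_model():
            # VllmImage now returns False, so the vision model is rebuilt
            # on every call and a changed max_tokens applies immediately.
            return instance
        # Cache-friendly models are stored once and reused by key, which
        # is why stale parameters survived before this commit.
        if cache_key not in _model_cache:
            _model_cache[cache_key] = instance
        return _model_cache[cache_key]

The trade-off is a small re-instantiation cost per request in exchange for correctness: settings edits such as max tokens now always reach the vision model.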