From 3c6b65baa1060ae8efdd1e494bca9fa1346c25db Mon Sep 17 00:00:00 2001
From: CaptainB
Date: Mon, 24 Feb 2025 16:29:20 +0800
Subject: [PATCH] fix: Remove vllm image cache
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

--bug=1052365 --user=刘瑞斌 [github#2353] Changing max tokens on vllm vision models does not take effect https://www.tapd.cn/57709429/s/1657667
---
 .../models_provider/impl/vllm_model_provider/model/image.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/apps/setting/models_provider/impl/vllm_model_provider/model/image.py b/apps/setting/models_provider/impl/vllm_model_provider/model/image.py
index f3b69a382..75eafd80b 100644
--- a/apps/setting/models_provider/impl/vllm_model_provider/model/image.py
+++ b/apps/setting/models_provider/impl/vllm_model_provider/model/image.py
@@ -18,3 +18,6 @@ class VllmImage(MaxKBBaseModel, BaseChatOpenAI):
             stream_usage=True,
             **optional_params,
         )
+
+    def is_cache_model(self):
+        return False
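
Note: returning False from is_cache_model() opts VllmImage out of instance
caching, so each request rebuilds the model with the current settings and an
updated max_tokens is no longer ignored. Below is a minimal sketch (not
MaxKB's actual code) of the presumed caching pattern that such a flag would
gate; get_model and _model_cache are hypothetical names for illustration.

    from typing import Callable, Dict

    _model_cache: Dict[str, object] = {}  # hypothetical cache keyed by model id

    def get_model(model_id: str, new_instance: Callable, **model_kwargs):
        # Reuse a cached instance only when the model opts in to caching.
        cached = _model_cache.get(model_id)
        if cached is not None and cached.is_cache_model():
            # A cached instance keeps the kwargs it was built with, so a
            # changed setting such as max_tokens would never be applied --
            # the behavior this patch avoids for vllm vision models.
            return cached
        instance = new_instance(**model_kwargs)
        if instance.is_cache_model():
            _model_cache[model_id] = instance
        return instance

With is_cache_model() returning False, the sketch above always falls through
to new_instance(**model_kwargs), rebuilding VllmImage with the latest
parameters at the cost of losing instance reuse.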