Mirror of https://github.com/1Panel-dev/MaxKB.git, synced 2025-12-26 01:33:05 +00:00
fix: Remove vllm image cache
--bug=1052365 --user=刘瑞斌 [github#2353] Changing max tokens for the vllm vision model does not take effect https://www.tapd.cn/57709429/s/1657667
This commit is contained in:
parent fa1886a17e
commit 3c6b65baa1
@@ -18,3 +18,6 @@ class VllmImage(MaxKBBaseModel, BaseChatOpenAI):
             stream_usage=True,
             **optional_params,
         )
+
+    def is_cache_model(self):
+        return False
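
The added override makes VllmImage opt out of model-instance caching, so a fresh instance is built with the updated parameters (such as max_tokens) instead of reusing a stale cached one. A minimal sketch of how such a cache check could work, assuming a provider-side cache keyed per model; the names _model_cache, get_model, build_model and cache_key are hypothetical and not MaxKB's actual API:

    _model_cache = {}

    def get_model(cache_key, build_model):
        # Reuse the cached instance only if the model allows caching.
        cached = _model_cache.get(cache_key)
        if cached is not None and cached.is_cache_model():
            return cached
        # Rebuild so updated parameters such as max_tokens take effect.
        model = build_model()
        if model.is_cache_model():
            _model_cache[cache_key] = model
        return model

With is_cache_model() returning False, the instance is never stored or reused, which is why the max_tokens change now takes effect for the vllm vision model.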