From 19d48693ec3c1a28f9fb570cb927816fe759e227 Mon Sep 17 00:00:00 2001
From: wxg0103 <727495428@qq.com>
Date: Fri, 20 Sep 2024 19:12:36 +0800
Subject: [PATCH] =?UTF-8?q?refactor:=20=E4=BC=98=E5=8C=96=E6=A8=A1?=
 =?UTF-8?q?=E5=9E=8B=E6=9C=80=E5=A4=A7tokens?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../impl/aws_bedrock_model_provider/credential/llm.py | 2 +-
 .../impl/azure_model_provider/credential/llm.py | 2 +-
 .../impl/deepseek_model_provider/credential/llm.py | 2 +-
 .../impl/gemini_model_provider/credential/llm.py | 2 +-
 .../impl/kimi_model_provider/credential/llm.py | 2 +-
 .../impl/ollama_model_provider/credential/llm.py | 2 +-
 .../impl/openai_model_provider/credential/llm.py | 2 +-
 .../impl/qwen_model_provider/credential/llm.py | 2 +-
 .../impl/vllm_model_provider/credential/llm.py | 2 +-
 .../impl/volcanic_engine_model_provider/credential/llm.py | 2 +-
 .../impl/wenxin_model_provider/credential/llm.py | 2 +-
 .../models_provider/impl/xf_model_provider/credential/llm.py | 4 ++--
 .../impl/xinference_model_provider/credential/llm.py | 2 +-
 .../impl/zhipu_model_provider/credential/llm.py | 2 +-
 14 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py b/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py
index e64d8b282..8c4d6834d 100644
--- a/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py
@@ -22,7 +22,7 @@ class BedrockLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=1024,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/azure_model_provider/credential/llm.py b/apps/setting/models_provider/impl/azure_model_provider/credential/llm.py
index b9e730aa0..09e51dca6 100644
--- a/apps/setting/models_provider/impl/azure_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/azure_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class AzureLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=800,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/deepseek_model_provider/credential/llm.py b/apps/setting/models_provider/impl/deepseek_model_provider/credential/llm.py
index ee2279bbc..9739b71ac 100644
--- a/apps/setting/models_provider/impl/deepseek_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/deepseek_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class DeepSeekLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=800,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/gemini_model_provider/credential/llm.py b/apps/setting/models_provider/impl/gemini_model_provider/credential/llm.py
index 2612205d4..4cacbe12f 100644
--- a/apps/setting/models_provider/impl/gemini_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/gemini_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class GeminiLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=800,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/kimi_model_provider/credential/llm.py b/apps/setting/models_provider/impl/kimi_model_provider/credential/llm.py
index 1ee2fcee1..a6d06a894 100644
--- a/apps/setting/models_provider/impl/kimi_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/kimi_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class KimiLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=1024,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/ollama_model_provider/credential/llm.py b/apps/setting/models_provider/impl/ollama_model_provider/credential/llm.py
index 5558bcab4..33f6d8c26 100644
--- a/apps/setting/models_provider/impl/ollama_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/ollama_model_provider/credential/llm.py
@@ -26,7 +26,7 @@ class OllamaLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=1024,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py b/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
index 58dfc1308..755f9558a 100644
--- a/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class OpenAILLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=800,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/qwen_model_provider/credential/llm.py b/apps/setting/models_provider/impl/qwen_model_provider/credential/llm.py
index a8177c545..b9bb45ea9 100644
--- a/apps/setting/models_provider/impl/qwen_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/qwen_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class QwenModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=800,
         _min=1,
-        _max=2048,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py b/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py
index 97c6217c3..0e100d742 100644
--- a/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py
@@ -22,7 +22,7 @@ class VLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=800,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/volcanic_engine_model_provider/credential/llm.py b/apps/setting/models_provider/impl/volcanic_engine_model_provider/credential/llm.py
index 15fffec2c..48c434b4b 100644
--- a/apps/setting/models_provider/impl/volcanic_engine_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/volcanic_engine_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class VolcanicEngineLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=1024,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/wenxin_model_provider/credential/llm.py b/apps/setting/models_provider/impl/wenxin_model_provider/credential/llm.py
index a77a6303f..342cb2e08 100644
--- a/apps/setting/models_provider/impl/wenxin_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/wenxin_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class WenxinLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=1024,
         _min=2,
-        _max=2048,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/xf_model_provider/credential/llm.py b/apps/setting/models_provider/impl/xf_model_provider/credential/llm.py
index 0a6d9a0ac..8ec12e308 100644
--- a/apps/setting/models_provider/impl/xf_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/xf_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class XunFeiLLMModelGeneralParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=4096,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

@@ -45,7 +45,7 @@ class XunFeiLLMModelProParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=4096,
         _min=1,
-        _max=8192,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/xinference_model_provider/credential/llm.py b/apps/setting/models_provider/impl/xinference_model_provider/credential/llm.py
index 6317ff663..8a6ad4958 100644
--- a/apps/setting/models_provider/impl/xinference_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/xinference_model_provider/credential/llm.py
@@ -22,7 +22,7 @@ class XinferenceLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=800,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)

diff --git a/apps/setting/models_provider/impl/zhipu_model_provider/credential/llm.py b/apps/setting/models_provider/impl/zhipu_model_provider/credential/llm.py
index dc1d1f191..48c1194ee 100644
--- a/apps/setting/models_provider/impl/zhipu_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/zhipu_model_provider/credential/llm.py
@@ -28,7 +28,7 @@ class ZhiPuLLMModelParams(BaseForm):
         TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
         required=True, default_value=1024,
         _min=1,
-        _max=4096,
+        _max=100000,
         _step=1,
         precision=0)
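
For context, every hunk above edits the same "输出最大Tokens" (max output tokens) slider definition in a provider's credential/llm.py, raising the per-provider `_max` cap (2048 / 4096 / 8192) to a uniform 100000 while leaving defaults, `_min`, `_step`, and `precision` unchanged. The sketch below shows the shape of one such field after the patch. It is illustrative only: the class name and keyword arguments come from the OpenAI hunk, but the `max_tokens` field name, the `forms.SliderField` call, and the import paths are assumptions about the surrounding MaxKB code and do not appear in the hunks themselves.

```python
# Illustrative sketch only -- not part of the patch.
# Assumed: the `common.forms` import paths, the `max_tokens` field name,
# and the `forms.SliderField` wrapper; only the keyword arguments below
# (default_value, _min, _max, _step, precision) are visible in the diff.
from common import forms
from common.forms import BaseForm, TooltipLabel


class OpenAILLMModelParams(BaseForm):
    # After this patch the slider accepts up to 100000 output tokens
    # (previously capped at 4096 for this provider).
    max_tokens = forms.SliderField(
        TooltipLabel('输出最大Tokens', '指定模型可生成的最大token个数'),
        required=True, default_value=800,
        _min=1,
        _max=100000,
        _step=1,
        precision=0)
```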