From 4f78382df4201ee5f03edb637da586695edc18e5 Mon Sep 17 00:00:00 2001 From: Joe Stone Date: Fri, 26 Apr 2024 18:05:16 +0800 Subject: [PATCH] =?UTF-8?q?feat:=20=20=E6=B7=BB=E5=8A=A0=E5=8D=83=E9=97=AE?= =?UTF-8?q?=E6=A8=A1=E5=9E=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../impl/ollama_model_provider/ollama_model_provider.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/apps/setting/models_provider/impl/ollama_model_provider/ollama_model_provider.py b/apps/setting/models_provider/impl/ollama_model_provider/ollama_model_provider.py index 2cc1601a3..4bbc2d96b 100644 --- a/apps/setting/models_provider/impl/ollama_model_provider/ollama_model_provider.py +++ b/apps/setting/models_provider/impl/ollama_model_provider/ollama_model_provider.py @@ -73,7 +73,11 @@ model_dict = { 'llama2-chinese:13b': ModelInfo( 'llama2-chinese:13b', '由于Llama2本身的中文对齐较弱,我们采用中文指令集,对meta-llama/Llama-2-13b-chat-hf进行LoRA微调,使其具备较强的中文对话能力。', - ModelTypeConst.LLM, ollama_llm_model_credential) + ModelTypeConst.LLM, ollama_llm_model_credential), + 'qwen:14b': ModelInfo( + 'qwen:14b', + 'qwen 1.5 14b 相较于以往版本,模型与人类偏好的对齐程度以及多语言处理能力上有显著增强。所有规模的模型都支持32768个tokens的上下文长度。', + ModelTypeConst.LLM, ollama_llm_model_credential), }