From 1a291997f3d720b6262646b1b027cd50c4df03e5 Mon Sep 17 00:00:00 2001 From: shaohuzhang1 <80892890+shaohuzhang1@users.noreply.github.com> Date: Thu, 18 Apr 2024 11:56:46 +0800 Subject: [PATCH] =?UTF-8?q?=20feat:=20=E6=94=AF=E6=8C=81=E9=98=BF=E9=87=8C?= =?UTF-8?q?=E9=80=9A=E4=B9=89=E5=A4=A7=E6=A8=A1=E5=9E=8B=20#42=20(#154)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: 支持阿里通义大模型 #42 --- .../constants/model_provider_constants.py | 2 + .../impl/qwen_model_provider/__init__.py | 8 ++ .../qwen_model_provider/icon/qwen_icon_svg | 1 + .../qwen_model_provider.py | 92 +++++++++++++++++++ pyproject.toml | 1 + 5 files changed, 104 insertions(+) create mode 100644 apps/setting/models_provider/impl/qwen_model_provider/__init__.py create mode 100644 apps/setting/models_provider/impl/qwen_model_provider/icon/qwen_icon_svg create mode 100644 apps/setting/models_provider/impl/qwen_model_provider/qwen_model_provider.py diff --git a/apps/setting/models_provider/constants/model_provider_constants.py b/apps/setting/models_provider/constants/model_provider_constants.py index 1fdb71e82..3816795e5 100644 --- a/apps/setting/models_provider/constants/model_provider_constants.py +++ b/apps/setting/models_provider/constants/model_provider_constants.py @@ -11,6 +11,7 @@ from enum import Enum from setting.models_provider.impl.azure_model_provider.azure_model_provider import AzureModelProvider from setting.models_provider.impl.ollama_model_provider.ollama_model_provider import OllamaModelProvider from setting.models_provider.impl.openai_model_provider.openai_model_provider import OpenAIModelProvider +from setting.models_provider.impl.qwen_model_provider.qwen_model_provider import QwenModelProvider from setting.models_provider.impl.wenxin_model_provider.wenxin_model_provider import WenxinModelProvider from setting.models_provider.impl.kimi_model_provider.kimi_model_provider import KimiModelProvider @@ -21,3 +22,4 @@ class ModelProvideConstants(Enum): model_ollama_provider = OllamaModelProvider() model_openai_provider = OpenAIModelProvider() model_kimi_provider = KimiModelProvider() + model_qwen_provider = QwenModelProvider() diff --git a/apps/setting/models_provider/impl/qwen_model_provider/__init__.py b/apps/setting/models_provider/impl/qwen_model_provider/__init__.py new file mode 100644 index 000000000..53b7001e5 --- /dev/null +++ b/apps/setting/models_provider/impl/qwen_model_provider/__init__.py @@ -0,0 +1,8 @@ +# coding=utf-8 +""" + @project: maxkb + @Author:虎 + @file: __init__.py.py + @date:2023/10/31 17:16 + @desc: +""" diff --git a/apps/setting/models_provider/impl/qwen_model_provider/icon/qwen_icon_svg b/apps/setting/models_provider/impl/qwen_model_provider/icon/qwen_icon_svg new file mode 100644 index 000000000..cb9a718af --- /dev/null +++ b/apps/setting/models_provider/impl/qwen_model_provider/icon/qwen_icon_svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/apps/setting/models_provider/impl/qwen_model_provider/qwen_model_provider.py b/apps/setting/models_provider/impl/qwen_model_provider/qwen_model_provider.py new file mode 100644 index 000000000..46ad1c6ec --- /dev/null +++ b/apps/setting/models_provider/impl/qwen_model_provider/qwen_model_provider.py @@ -0,0 +1,92 @@ +# coding=utf-8 +""" + @project: maxkb + @Author:虎 + @file: qwen_model_provider.py + @date:2023/10/31 16:19 + @desc: +""" +import os +from typing import Dict + +from langchain.schema import HumanMessage +from langchain_community.chat_models.tongyi import ChatTongyi + +from common 
import forms +from common.exception.app_exception import AppApiException +from common.forms import BaseForm +from common.util.file_util import get_file_content +from setting.models_provider.base_model_provider import ModelProvideInfo, ModelTypeConst, BaseModelCredential, \ + ModelInfo, IModelProvider, ValidCode +from smartdoc.conf import PROJECT_DIR + + +class OpenAILLMModelCredential(BaseForm, BaseModelCredential): + + def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], raise_exception=False): + model_type_list = QwenModelProvider().get_model_type_list() + if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))): + raise AppApiException(ValidCode.valid_error.value, f'{model_type} 模型类型不支持') + for key in ['api_key']: + if key not in model_credential: + if raise_exception: + raise AppApiException(ValidCode.valid_error.value, f'{key} 字段为必填字段') + else: + return False + try: + model = QwenModelProvider().get_model(model_type, model_name, model_credential) + model.invoke([HumanMessage(content='你好')]) + except Exception as e: + if isinstance(e, AppApiException): + raise e + if raise_exception: + raise AppApiException(ValidCode.valid_error.value, f'校验失败,请检查参数是否正确: {str(e)}') + else: + return False + return True + + def encryption_dict(self, model: Dict[str, object]): + return {**model, 'api_key': super().encryption(model.get('api_key', ''))} + + api_key = forms.PasswordInputField('API Key', required=True) + + +qwen_model_credential = OpenAILLMModelCredential() + +model_dict = { + 'qwen-turbo': ModelInfo('qwen-turbo', '', ModelTypeConst.LLM, qwen_model_credential), + 'qwen-plus': ModelInfo('qwen-plus', '', ModelTypeConst.LLM, qwen_model_credential), + 'qwen-max': ModelInfo('qwen-max', '', ModelTypeConst.LLM, qwen_model_credential) +} + + +class QwenModelProvider(IModelProvider): + + def get_dialogue_number(self): + return 3 + + def get_model(self, model_type, model_name, model_credential: Dict[str, object], **model_kwargs) -> ChatTongyi: + chat_tong_yi = ChatTongyi( + model_name=model_name, + dashscope_api_key=model_credential.get('api_key') + ) + return chat_tong_yi + + def get_model_credential(self, model_type, model_name): + if model_name in model_dict: + return model_dict.get(model_name).model_credential + return qwen_model_credential + + def get_model_provide_info(self): + return ModelProvideInfo(provider='model_qwen_provider', name='通义千问', icon=get_file_content( + os.path.join(PROJECT_DIR, "apps", "setting", 'models_provider', 'impl', 'qwen_model_provider', 'icon', + 'qwen_icon_svg'))) + + def get_model_list(self, model_type: str): + if model_type is None: + raise AppApiException(500, '模型类型不能为空') + return [model_dict.get(key).to_dict() for key in + list(filter(lambda key: model_dict.get(key).model_type == model_type, model_dict.keys()))] + + def get_model_type_list(self): + return [{'key': "大语言模型", 'value': "LLM"}] diff --git a/pyproject.toml b/pyproject.toml index cbe0e5dc2..d300f6195 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ django-apscheduler = "^0.6.2" pymupdf = "^1.24.0" python-docx = "^1.1.0" xlwt = "^1.3.0" +dashscope = "^1.17.0" [build-system] requires = ["poetry-core"]
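
Usage sketch (editor's addition, not part of the patch): the snippet below shows how application code could exercise the newly registered Tongyi Qianwen provider once this change lands. It assumes a configured MaxKB environment in which the `setting` app is importable; `DASHSCOPE_KEY` is a hypothetical environment variable standing in for a real DashScope API key, and `qwen-turbo` is one of the models registered in `model_dict`.

    # Minimal sketch, assuming a working MaxKB install and a valid DashScope key.
    import os

    from langchain.schema import HumanMessage

    from setting.models_provider.constants.model_provider_constants import ModelProvideConstants

    # Resolve the provider by its enum key, the same way the application maps a
    # stored provider name back to a provider instance.
    provider = ModelProvideConstants['model_qwen_provider'].value

    # 'DASHSCOPE_KEY' is a placeholder; the patch only requires an 'api_key' entry.
    credential = {'api_key': os.environ['DASHSCOPE_KEY']}

    # Optionally validate the credential first; raise_exception=True surfaces an
    # AppApiException with a readable message instead of returning False.
    credential_form = provider.get_model_credential('LLM', 'qwen-turbo')
    credential_form.is_valid('LLM', 'qwen-turbo', credential, raise_exception=True)

    # Build the ChatTongyi chat model and send a single-turn message.
    chat = provider.get_model('LLM', 'qwen-turbo', credential)
    print(chat.invoke([HumanMessage(content='Hello')]).content)

Design note: registering `model_qwen_provider` in `ModelProvideConstants` mirrors the existing OpenAI, Ollama and Kimi entries, which is what makes the provider resolvable from the name persisted with each model record; the new `dashscope` dependency in pyproject.toml is what `ChatTongyi` uses under the hood.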