feat: add qwen3 models to the Ali Bailian provider model list (#3026)

shaohuzhang1 2025-04-29 19:08:17 +08:00 committed by GitHub
parent 0c7cca035e
commit 79b2de8893
4 changed files with 44 additions and 1 deletion

View File

@@ -22,3 +22,4 @@ from .table_checkbox import *
 from .radio_card_field import *
 from .label import *
 from .slider_field import *
+from .switch_field import *
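An illustrative aside (not part of the diff): re-exporting switch_field from the forms package is what lets the credential form later in this commit refer to the toggle as forms.SwitchField. A minimal sketch, with the package import path assumed rather than taken from the diff:

# Illustrative sketch only; the import path of the forms package is assumed.
from common import forms

# The wildcard re-export above makes the toggle reachable via the package namespace.
assert hasattr(forms, 'SwitchField')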

View File

@@ -28,6 +28,6 @@ class SwitchField(BaseField):
         @param props_info:
         """
-        super().__init__('Switch', label, required, default_value, relation_show_field_dict,
+        super().__init__('SwitchInput', label, required, default_value, relation_show_field_dict,
                          {},
                          TriggerType.OPTION_LIST, attrs, props_info)
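The only change here is the first argument passed to BaseField: the field now identifies itself as 'SwitchInput' instead of 'Switch', presumably so it resolves to the front-end input component of that name. A minimal construction sketch under that assumption, mirroring the stream toggle added later in this commit:

# Illustrative sketch, not part of the diff; import locations are assumed.
from common.forms import SwitchField, TooltipLabel

# Assumes BaseField stores its first positional argument as the form item's
# input type for the front end, so this field now serializes as 'SwitchInput'
# rather than 'Switch'.
toggle = SwitchField(label=TooltipLabel('Is the answer in streaming mode',
                                        'Is the answer in streaming mode'),
                     required=True, default_value=True)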

View File

@@ -51,6 +51,23 @@ model_info_list = [ModelInfo('gte-rerank',
                              _("Universal text vector is Tongyi Lab's multi-language text unified vector model based on the LLM base. It provides high-level vector services for multiple mainstream languages around the world and helps developers quickly convert text data into high-quality vector data."),
                              ModelTypeConst.EMBEDDING, aliyun_bai_lian_embedding_model_credential,
                              AliyunBaiLianEmbedding),
+                   ModelInfo('qwen3-0.6b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
+                   ModelInfo('qwen3-1.7b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
+                   ModelInfo('qwen3-4b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
+                   ModelInfo('qwen3-8b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
+                   ModelInfo('qwen3-14b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
+                   ModelInfo('qwen3-32b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
+                   ModelInfo('qwen3-30b-a3b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
+                   ModelInfo('qwen3-235b-a22b', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
+                             BaiLianChatModel),
                    ModelInfo('qwen-turbo', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
                              BaiLianChatModel),
                    ModelInfo('qwen-plus', '', ModelTypeConst.LLM, aliyun_bai_lian_llm_model_credential,
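An illustrative aside (not part of the diff): each of the eight new qwen3 sizes is registered exactly like the existing qwen entries, with an empty description, the shared aliyun_bai_lian_llm_model_credential, and the BaiLianChatModel wrapper. A minimal sketch of how the new entries could be picked out of the list, assuming ModelInfo keeps the name passed as its first constructor argument in a name attribute:

# Illustrative sketch, not part of the diff; assumes ModelInfo exposes a .name
# attribute holding its first constructor argument.
qwen3_entries = [info for info in model_info_list if info.name.startswith('qwen3')]
# Expected: qwen3-0.6b, qwen3-1.7b, qwen3-4b, qwen3-8b, qwen3-14b, qwen3-32b,
# qwen3-30b-a3b and qwen3-235b-a22b, all of type ModelTypeConst.LLM.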

View File

@@ -30,6 +30,29 @@ class BaiLianLLMModelParams(BaseForm):
         precision=0)
+class BaiLianLLMStreamModelParams(BaseForm):
+    temperature = forms.SliderField(TooltipLabel(_('Temperature'),
+                                                 _('Higher values make the output more random, while lower values make it more focused and deterministic')),
+                                    required=True, default_value=0.7,
+                                    _min=0.1,
+                                    _max=1.0,
+                                    _step=0.01,
+                                    precision=2)
+    max_tokens = forms.SliderField(
+        TooltipLabel(_('Output the maximum Tokens'),
+                     _('Specify the maximum number of tokens that the model can generate')),
+        required=True, default_value=800,
+        _min=1,
+        _max=100000,
+        _step=1,
+        precision=0)
+    stream = forms.SwitchField(label=TooltipLabel(_('Is the answer in streaming mode'),
+                                                  _('Is the answer in streaming mode')),
+                               required=True, default_value=True)
 class BaiLianLLMModelCredential(BaseForm, BaseModelCredential):
     def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], model_params, provider,
@@ -72,4 +95,6 @@ class BaiLianLLMModelCredential(BaseForm, BaseModelCredential):
     api_key = forms.PasswordInputField('API Key', required=True)
     def get_model_params_setting_form(self, model_name):
+        if 'qwen3' in model_name:
+            return BaiLianLLMStreamModelParams()
         return BaiLianLLMModelParams()
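An illustrative aside (not part of the diff): the substring check routes every model whose name contains 'qwen3' to the new stream-aware parameter form, while all other names keep the original BaiLianLLMModelParams. A minimal usage sketch, assuming the credential class can be instantiated without arguments:

# Illustrative sketch, not part of the diff.
credential = BaiLianLLMModelCredential()
stream_form = credential.get_model_params_setting_form('qwen3-8b')   # BaiLianLLMStreamModelParams
plain_form = credential.get_model_params_setting_form('qwen-plus')   # BaiLianLLMModelParams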