From d1f7a826615185a4c6325bdce229b944058135fc Mon Sep 17 00:00:00 2001
From: shaohuzhang1 <80892890+shaohuzhang1@users.noreply.github.com>
Date: Tue, 21 Jan 2025 17:04:40 +0800
Subject: [PATCH] fix: Volcano engine large speech model cannot be added
 (#2058)

---
 .../impl/openai_model_provider/credential/llm.py  | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py b/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
index 68f12c77d..3d5d5c90c 100644
--- a/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/openai_model_provider/credential/llm.py
@@ -18,7 +18,8 @@ from setting.models_provider.base_model_provider import BaseModelCredential, Val
 
 
 class OpenAILLMModelParams(BaseForm):
-    temperature = forms.SliderField(TooltipLabel(_('Temperature'), _('Higher values make the output more random, while lower values make it more focused and deterministic')),
+    temperature = forms.SliderField(TooltipLabel(_('Temperature'),
+                                                 _('Higher values make the output more random, while lower values make it more focused and deterministic')),
                                     required=True, default_value=0.7,
                                     _min=0.1,
                                     _max=1.0,
@@ -26,7 +27,8 @@ class OpenAILLMModelParams(BaseForm):
                                     precision=2)
 
     max_tokens = forms.SliderField(
-        TooltipLabel(_('Output the maximum Tokens'), _('Specify the maximum number of tokens that the model can generate')),
+        TooltipLabel(_('Output the maximum Tokens'),
+                     _('Specify the maximum number of tokens that the model can generate')),
         required=True, default_value=800,
         _min=1,
         _max=100000,
@@ -40,7 +42,8 @@ class OpenAILLMModelCredential(BaseForm, BaseModelCredential):
                  raise_exception=False):
         model_type_list = provider.get_model_type_list()
         if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))):
-            raise AppApiException(ValidCode.valid_error.value, __('{model_type} Model type is not supported').format(model_type=model_type))
+            raise AppApiException(ValidCode.valid_error.value,
+                                  __('{model_type} Model type is not supported').format(model_type=model_type))
 
         for key in ['api_base', 'api_key']:
             if key not in model_credential:
@@ -51,12 +54,14 @@ class OpenAILLMModelCredential(BaseForm, BaseModelCredential):
 
         try:
             model = provider.get_model(model_type, model_name, model_credential, **model_params)
-            model.invoke([HumanMessage(content=_('Hello'))])
+            model.invoke([HumanMessage(content=__('Hello'))])
         except Exception as e:
             if isinstance(e, AppApiException):
                 raise e
             if raise_exception:
-                raise AppApiException(ValidCode.valid_error.value, __('Verification failed, please check whether the parameters are correct: {error}').format(error=str(e)))
+                raise AppApiException(ValidCode.valid_error.value,
+                                      __('Verification failed, please check whether the parameters are correct: {error}').format(
+                                          error=str(e)))
             else:
                 return False
         return True