From eb6f4e8cb8eda5962f501830d531795215366894 Mon Sep 17 00:00:00 2001
From: shaohuzhang1 <80892890+shaohuzhang1@users.noreply.github.com>
Date: Wed, 26 Feb 2025 12:04:25 +0800
Subject: [PATCH] fix: VLLM Supplier Dialogue Verification (#2412)

---
 .../impl/vllm_model_provider/credential/llm.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py b/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py
index ffe7a8d66..7c82505d6 100644
--- a/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py
+++ b/apps/setting/models_provider/impl/vllm_model_provider/credential/llm.py
@@ -48,9 +48,11 @@ class VLLMModelCredential(BaseForm, BaseModelCredential):
         model = provider.get_model(model_type, model_name, model_credential, **model_params)
         try:
             res = model.invoke([HumanMessage(content=gettext('Hello'))])
-            print(res)
         except Exception as e:
-            print(e)
+            raise AppApiException(ValidCode.valid_error.value,
+                                  gettext(
+                                      'Verification failed, please check whether the parameters are correct: {error}').format(
+                                      error=str(e)))
         return True
 
     def encryption_dict(self, model_info: Dict[str, object]):
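
Note: below is a minimal, self-contained sketch of the pattern this patch introduces, for anyone who wants to see the behavior change in isolation. AppApiException and ValidCode mirror the names used in the diff, but their definitions here (the numeric code, the is_valid name, the BrokenModel stub) are illustrative assumptions, not the project's actual code.

# Sketch: credential verification that raises instead of printing.
# AppApiException / ValidCode are stand-ins assumed from the diff context.
from enum import Enum


class ValidCode(Enum):
    valid_error = 500  # assumed value; the project defines its own codes


class AppApiException(Exception):
    def __init__(self, code: int, message: str):
        self.code = code
        self.message = message
        super().__init__(message)


def is_valid(model) -> bool:
    """Probe the model with a trivial message; surface failures to the caller."""
    try:
        # Pre-patch behavior: exceptions here were only print()ed, so the
        # method still returned True and invalid credentials passed checks.
        model.invoke('Hello')
    except Exception as e:
        # Post-patch behavior: convert the failure into an API error the
        # caller (and ultimately the UI) can act on.
        raise AppApiException(
            ValidCode.valid_error.value,
            'Verification failed, please check whether the parameters are '
            'correct: {error}'.format(error=str(e)))
    return True


if __name__ == '__main__':
    class BrokenModel:
        def invoke(self, message):
            raise ConnectionError('connection refused')

    try:
        is_valid(BrokenModel())
    except AppApiException as exc:
        print(exc.code, exc.message)

One side note on the diff itself: with print(res) removed, the assignment to res in the try block is no longer read; the check only cares about whether invoke() raises.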