diff --git a/apps/models_provider/impl/aliyun_bai_lian_model_provider/model/llm.py b/apps/models_provider/impl/aliyun_bai_lian_model_provider/model/llm.py
index 50b439c49..44b078c7d 100644
--- a/apps/models_provider/impl/aliyun_bai_lian_model_provider/model/llm.py
+++ b/apps/models_provider/impl/aliyun_bai_lian_model_provider/model/llm.py
@@ -14,11 +14,12 @@ class BaiLianChatModel(MaxKBBaseModel, BaseChatOpenAI):
     @staticmethod
     def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
         optional_params = MaxKBBaseModel.filter_optional_params(model_kwargs)
-        if 'qwen-omni-turbo' in model_name or 'qwq' in model_name:
-            optional_params['streaming'] = True
+        # if 'qwen-omni-turbo' in model_name or 'qwq' in model_name:
+        #     optional_params['streaming'] = True
         return BaiLianChatModel(
             model=model_name,
             openai_api_base=model_credential.get('api_base'),
             openai_api_key=model_credential.get('api_key'),
+            streaming=True,
             extra_body=optional_params
         )
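
For context, a minimal usage sketch of the patched factory method. The import path, credential values, and the qwen-max model name below are assumptions for illustration, not taken from this diff. After the change, streaming=True is passed for every Bailian model rather than only for qwen-omni-turbo / qwq, while other keyword arguments still flow through filter_optional_params into extra_body.

    # Hypothetical usage sketch; import path, credentials and model name are assumed.
    from models_provider.impl.aliyun_bai_lian_model_provider.model.llm import BaiLianChatModel

    chat_model = BaiLianChatModel.new_instance(
        model_type='LLM',          # assumed label; new_instance does not inspect it
        model_name='qwen-max',     # hypothetical model name
        model_credential={
            'api_base': 'https://dashscope.aliyuncs.com/compatible-mode/v1',  # assumed endpoint
            'api_key': 'sk-xxxx',  # placeholder key
        },
        temperature=0.7,           # forwarded via filter_optional_params into extra_body
    )
    # With this patch, chat_model is built with streaming=True regardless of model_name.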