From 6c7ae3f79fd16495d2b2365b1a8acca742104760 Mon Sep 17 00:00:00 2001
From: CaptainB
Date: Fri, 20 Dec 2024 12:33:08 +0800
Subject: [PATCH] chore: fix typo.

---
 .../impl/azure_model_provider/model/embedding.py  | 6 +-----
 .../impl/gemini_model_provider/model/embedding.py | 5 +----
 2 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/apps/setting/models_provider/impl/azure_model_provider/model/embedding.py b/apps/setting/models_provider/impl/azure_model_provider/model/embedding.py
index c7acf595c..f1b35db8e 100644
--- a/apps/setting/models_provider/impl/azure_model_provider/model/embedding.py
+++ b/apps/setting/models_provider/impl/azure_model_provider/model/embedding.py
@@ -16,14 +16,10 @@ from setting.models_provider.base_model_provider import MaxKBBaseModel
 class AzureOpenAIEmbeddingModel(MaxKBBaseModel, AzureOpenAIEmbeddings):
     @staticmethod
     def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
-        return AzureOpenAIEmbeddings(
+        return AzureOpenAIEmbeddingModel(
             model=model_name,
             openai_api_key=model_credential.get('api_key'),
             azure_endpoint=model_credential.get('api_base'),
             openai_api_version=model_credential.get('api_version'),
             openai_api_type="azure",
         )
-
-
-    def is_cache_model(self):
-        return True
\ No newline at end of file
diff --git a/apps/setting/models_provider/impl/gemini_model_provider/model/embedding.py b/apps/setting/models_provider/impl/gemini_model_provider/model/embedding.py
index 7dd09053d..5d82b07e9 100644
--- a/apps/setting/models_provider/impl/gemini_model_provider/model/embedding.py
+++ b/apps/setting/models_provider/impl/gemini_model_provider/model/embedding.py
@@ -16,10 +16,7 @@ from setting.models_provider.base_model_provider import MaxKBBaseModel
 class GeminiEmbeddingModel(MaxKBBaseModel, GoogleGenerativeAIEmbeddings):
     @staticmethod
     def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
-        return GoogleGenerativeAIEmbeddings(
+        return GeminiEmbeddingModel(
             google_api_key=model_credential.get('api_key'),
             model=model_name,
         )
-
-    def is_cache_model(self):
-        return False
\ No newline at end of file