feat: Support gemini embedding model

CaptainB 2024-12-19 18:39:48 +08:00 committed by 刘瑞斌
parent 24bb7d5cfa
commit f533bb5350
3 changed files with 82 additions and 0 deletions

File: setting/models_provider/impl/gemini_model_provider/credential/embedding.py (new file)

@@ -0,0 +1,45 @@
# coding=utf-8
"""
    @project: MaxKB
    @Author:
    @file: embedding.py
    @date: 2024/7/12 16:45
    @desc:
"""
from typing import Dict

from common import forms
from common.exception.app_exception import AppApiException
from common.forms import BaseForm
from setting.models_provider.base_model_provider import BaseModelCredential, ValidCode


class GeminiEmbeddingCredential(BaseForm, BaseModelCredential):

    def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], provider,
                 raise_exception=True):
        model_type_list = provider.get_model_type_list()
        if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))):
            raise AppApiException(ValidCode.valid_error.value, f'{model_type} model type is not supported')
        for key in ['api_key']:
            if key not in model_credential:
                if raise_exception:
                    raise AppApiException(ValidCode.valid_error.value, f'{key} is a required field')
                else:
                    return False
        try:
            model = provider.get_model(model_type, model_name, model_credential)
            model.embed_query('Hello')
        except Exception as e:
            if isinstance(e, AppApiException):
                raise e
            if raise_exception:
                raise AppApiException(ValidCode.valid_error.value,
                                      f'Validation failed, please check whether the parameters are correct: {str(e)}')
            else:
                return False
        return True

    def encryption_dict(self, model: Dict[str, object]):
        return {**model, 'api_key': super().encryption(model.get('api_key', ''))}

    api_key = forms.PasswordInputField('API Key', required=True)
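
For context, the validation in is_valid() amounts to building the embedding model from the supplied credential and probing it with a short embed_query call. The standalone sketch below (not part of the commit; the function name check_gemini_key and the probe text are illustrative) shows the same idea using langchain_google_genai directly:

from langchain_google_genai import GoogleGenerativeAIEmbeddings

def check_gemini_key(api_key: str, model_name: str = 'models/embedding-001') -> bool:
    # Build the embedding client from the credential and send a tiny probe query;
    # any exception (invalid key, unknown model name, network failure) marks the credential as invalid.
    try:
        embeddings = GoogleGenerativeAIEmbeddings(google_api_key=api_key, model=model_name)
        embeddings.embed_query('hello')
        return True
    except Exception:
        return False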

File: setting/models_provider/impl/gemini_model_provider/gemini_model_provider.py

@@ -11,9 +11,11 @@ import os
from common.util.file_util import get_file_content
from setting.models_provider.base_model_provider import IModelProvider, ModelProvideInfo, ModelInfo, ModelTypeConst, \
ModelInfoManage
from setting.models_provider.impl.gemini_model_provider.credential.embedding import GeminiEmbeddingCredential
from setting.models_provider.impl.gemini_model_provider.credential.image import GeminiImageModelCredential
from setting.models_provider.impl.gemini_model_provider.credential.llm import GeminiLLMModelCredential
from setting.models_provider.impl.gemini_model_provider.credential.stt import GeminiSTTModelCredential
from setting.models_provider.impl.gemini_model_provider.model.embedding import GeminiEmbeddingModel
from setting.models_provider.impl.gemini_model_provider.model.image import GeminiImage
from setting.models_provider.impl.gemini_model_provider.model.llm import GeminiChatModel
from setting.models_provider.impl.gemini_model_provider.model.stt import GeminiSpeechToText
@@ -22,6 +24,7 @@ from smartdoc.conf import PROJECT_DIR
gemini_llm_model_credential = GeminiLLMModelCredential()
gemini_image_model_credential = GeminiImageModelCredential()
gemini_stt_model_credential = GeminiSTTModelCredential()
gemini_embedding_model_credential = GeminiEmbeddingCredential()

model_info_list = [
    ModelInfo('gemini-1.0-pro', 'The latest Gemini 1.0 Pro model, updated as Google updates it',
@@ -56,14 +59,23 @@ model_stt_info_list = [
              GeminiSpeechToText),
]

model_embedding_info_list = [
    ModelInfo('models/embedding-001', '',
              ModelTypeConst.EMBEDDING,
              gemini_embedding_model_credential,
              GeminiEmbeddingModel),
]

model_info_manage = (
    ModelInfoManage.builder()
    .append_model_info_list(model_info_list)
    .append_model_info_list(model_image_info_list)
    .append_model_info_list(model_stt_info_list)
    .append_model_info_list(model_embedding_info_list)
    .append_default_model_info(model_info_list[0])
    .append_default_model_info(model_image_info_list[0])
    .append_default_model_info(model_stt_info_list[0])
    .append_default_model_info(model_embedding_info_list[0])
    .build()
)
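
The registration above also makes 'models/embedding-001' the provider's default embedding model via append_default_model_info. Additional Gemini embedding models would be registered the same way; the second entry below is a hypothetical illustration of the pattern, not part of this commit:

model_embedding_info_list = [
    ModelInfo('models/embedding-001', '', ModelTypeConst.EMBEDDING,
              gemini_embedding_model_credential, GeminiEmbeddingModel),
    # Hypothetical extra entry, shown only to illustrate how further models would be registered:
    ModelInfo('models/text-embedding-004', '', ModelTypeConst.EMBEDDING,
              gemini_embedding_model_credential, GeminiEmbeddingModel),
]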

File: setting/models_provider/impl/gemini_model_provider/model/embedding.py (new file)

@@ -0,0 +1,25 @@
# coding=utf-8
"""
    @project: MaxKB
    @Author:
    @file: embedding.py
    @date: 2024/7/12 17:44
    @desc:
"""
from typing import Dict

from langchain_google_genai import GoogleGenerativeAIEmbeddings

from setting.models_provider.base_model_provider import MaxKBBaseModel


class GeminiEmbeddingModel(MaxKBBaseModel, GoogleGenerativeAIEmbeddings):
    @staticmethod
    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
        return GeminiEmbeddingModel(
            google_api_key=model_credential.get('api_key'),
            model=model_name,
        )

    def is_cache_model(self):
        return False
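
A minimal usage sketch for the new wrapper (not part of the commit; the API key placeholder and the sample texts are illustrative, and the model_type argument is not used inside new_instance):

model = GeminiEmbeddingModel.new_instance(
    'EMBEDDING', 'models/embedding-001', {'api_key': 'YOUR_GOOGLE_API_KEY'})
query_vector = model.embed_query('hello world')                   # one text -> one vector
doc_vectors = model.embed_documents(['first doc', 'second doc'])   # batch -> list of vectors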