diff --git a/apps/setting/models_provider/impl/ollama_model_provider/model/llm.py b/apps/setting/models_provider/impl/ollama_model_provider/model/llm.py
index 7c98f7e5c..6cd291ff3 100644
--- a/apps/setting/models_provider/impl/ollama_model_provider/model/llm.py
+++ b/apps/setting/models_provider/impl/ollama_model_provider/model/llm.py
@@ -10,7 +10,8 @@ from typing import List, Dict
from urllib.parse import urlparse, ParseResult
from langchain_core.messages import BaseMessage, get_buffer_string
-from langchain_openai.chat_models import ChatOpenAI
+from langchain_ollama.chat_models import ChatOllama
+
from common.config.tokenizer_manage_config import TokenizerManage
from setting.models_provider.base_model_provider import MaxKBBaseModel
@@ -24,7 +25,7 @@ def get_base_url(url: str):
return result_url[:-1] if result_url.endswith("/") else result_url
-class OllamaChatModel(MaxKBBaseModel, ChatOpenAI):
+class OllamaChatModel(MaxKBBaseModel, ChatOllama):
@staticmethod
def is_cache_model():
return False
@@ -33,12 +34,10 @@ class OllamaChatModel(MaxKBBaseModel, ChatOpenAI):
def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
api_base = model_credential.get('api_base', '')
base_url = get_base_url(api_base)
- base_url = base_url if base_url.endswith('/v1') else (base_url + '/v1')
optional_params = MaxKBBaseModel.filter_optional_params(model_kwargs)
- return OllamaChatModel(model=model_name, openai_api_base=base_url,
- openai_api_key=model_credential.get('api_key'),
- stream_usage=True, **optional_params)
+ return OllamaChatModel(model=model_name, base_url=base_url,
+ stream=True, **optional_params)
def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
tokenizer = TokenizerManage.get_tokenizer()
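
Reviewer note, not part of the patch: a minimal sketch of how the rewritten factory is expected to behave after the move from ChatOpenAI to ChatOllama. The endpoint, model name, and 'LLM' model_type value are illustrative assumptions, and the call assumes new_instance keeps its usual staticmethod factory form; the point is that ChatOllama targets Ollama's native API, so the base URL is no longer rewritten to end in /v1 and no api_key is passed.

```python
# Illustrative sketch only -- the endpoint, model name and 'LLM' are assumptions,
# and the final invoke() needs a reachable Ollama server.
from langchain_core.messages import HumanMessage

from setting.models_provider.impl.ollama_model_provider.model.llm import OllamaChatModel

credential = {'api_base': 'http://127.0.0.1:11434/'}  # trailing '/' is stripped by get_base_url
chat_model = OllamaChatModel.new_instance('LLM', 'llama3', credential)

# Before this patch the URL would have been forced to end in '/v1' and an
# openai_api_key pulled from the credential; ChatOllama uses base_url as-is.
print(chat_model.invoke([HumanMessage(content='ping')]).content)
```
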
diff --git a/pyproject.toml b/pyproject.toml
index a5d175b85..8134688e7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,6 +28,7 @@ pycryptodome = "^3.19.0"
beautifulsoup4 = "^4.12.2"
html2text = "^2024.2.26"
langchain-openai = "^0.1.8"
+langchain-ollama = "0.1.3"
django-ipware = "^6.0.4"
django-apscheduler = "^0.6.2"
pymupdf = "1.24.9"
diff --git a/ui/src/components/ai-chat/ExecutionDetailDialog.vue b/ui/src/components/ai-chat/ExecutionDetailDialog.vue
index 76917fc9d..cfb6f54d0 100644
--- a/ui/src/components/ai-chat/ExecutionDetailDialog.vue
+++ b/ui/src/components/ai-chat/ExecutionDetailDialog.vue
@@ -244,6 +244,7 @@
editorId="preview-only"
:modelValue="item.answer"
style="background: none"
+ noImgZoomIn
/>
@@ -264,6 +265,7 @@
editorId="preview-only"
:modelValue="item.answer"
style="background: none"
+ noImgZoomIn
/>
@@ -300,6 +302,7 @@
editorId="preview-only"
:modelValue="file_content"
style="background: none"
+ noImgZoomIn
/>
@@ -351,6 +354,7 @@
editorId="preview-only"
:modelValue="file_content"
style="background: none"
+ noImgZoomIn
/>
@@ -374,6 +378,7 @@
editorId="preview-only"
:modelValue="item.content"
style="background: none"
+ noImgZoomIn
/>
@@ -573,6 +578,7 @@
editorId="preview-only"
:modelValue="item.answer"
style="background: none"
+ noImgZoomIn
/>
@@ -603,6 +609,7 @@
editorId="preview-only"
:modelValue="item.answer"
style="background: none"
+ noImgZoomIn
/>
diff --git a/ui/src/views/document/index.vue b/ui/src/views/document/index.vue
index 3b3b712dc..626282f36 100644
--- a/ui/src/views/document/index.vue
+++ b/ui/src/views/document/index.vue
@@ -64,12 +64,7 @@
:storeKey="storeKey"
>
-
+
-
+
{{ datetimeFormat(row.create_time) }}
@@ -829,7 +819,7 @@ function getList(bool?: boolean) {
const param = {
...(filterText.value && { name: filterText.value }),
...filterMethod.value,
- order_by: orderBy.value,
+ order_by: orderBy.value
}
documentApi
.getDocument(id as string, paginationConfig.value, param, bool ? undefined : loading)