Compare commits

...

87 Commits

Author SHA1 Message Date
shaohuzhang1 b9dcfaff92
fix: Application log permission error (#4087)
2025-09-23 14:53:39 +08:00
shaohuzhang1 2c697e8364
fix: Incomplete file upload resulted in Exception: 'file_id' error when answering (#4078)
2025-09-22 17:24:53 +08:00
shaohuzhang1 d2c7167623
fix: In the floating window embedding, the user in the conversation is displayed as a tourist #4019 (#4075) 2025-09-22 16:00:25 +08:00
shaohuzhang1 c9634f9950
fix: Judgment execution logic (#4072) 2025-09-22 15:06:04 +08:00
wxg0103 c88d90b2f5 refactor: remove unused 'status' field from API response schema 2025-09-22 14:39:14 +08:00
wxg0103 d405b06016 feat: implement RSA encryption for login data and add LDAP login support 2025-09-22 14:35:02 +08:00
wangdan-fit2cloud a5595bd840 fix: Modify the font color of the thinking process on the Q&A page. 2025-09-22 14:25:48 +08:00
shaohuzhang1 d9d7264542
fix: Modify document authentication method (#4006)
2025-09-09 11:29:41 +08:00
shaohuzhang1 b3cc8e24c6
fix: Unauthorized query of application dialogue logs (#4004) 2025-09-09 11:01:29 +08:00
wangdan-fit2cloud f1d7079605 fix: Fix form node drag-and-drop issues
2025-08-28 17:31:16 +08:00
shaohuzhang1 a0dc4c0648
fix: Return the link to the connection pool after the node execution is completed (#3919)
2025-08-22 17:44:10 +08:00
shaohuzhang1 4c878b011a feat: Add connection pool connection check parameters (#3905)
2025-08-21 13:40:06 +08:00
wxg0103 f79ae01a92 feat: expand permissions for application access token in chat views 2025-08-21 13:40:06 +08:00
shaohuzhang1 749c66fc35 fix: Internationalization Error (#3900) 2025-08-21 13:40:06 +08:00
shaohuzhang1 ff202361d5 fix: Internationalization Error (#3899) 2025-08-21 13:40:06 +08:00
shaohuzhang1 959187b5d2
fix: Some web pages are unable to be crawled (#3897) 2025-08-20 16:15:59 +08:00
CaptainB 76c0b66152 fix: update parameter name in drop_dataset_index call for clarity 2025-08-20 15:42:43 +08:00
shaohuzhang1 65dd3637ee
fix: Please parse the image content. This needs to be optimized to remove the automatically occurring problems. #3837 (#3895) 2025-08-20 14:54:20 +08:00
shaohuzhang1 141de353ea
fix: AI dialogue node calls MCP, AI's reply content is included in the code block of MCP call result, markdown parsing error #3846 (#3894) 2025-08-20 13:48:30 +08:00
shaohuzhang1 c74352046b
fix: The website knowledge base lacks comprehensive methods for determining files and folders, resulting in ineffective access to links within the final document #2935 (#3893) 2025-08-20 11:36:38 +08:00
CaptainB e910217850 fix: remove unnecessary tobytes() calls for file byte retrieval
--bug=1060633 --user=刘瑞斌 [Application] The image understanding node reports an error when parsing images https://www.tapd.cn/57709429/s/1758154
2025-08-20 11:17:55 +08:00
shaohuzhang1 79e4b902c1
fix: Fix form style issues (#3892)
Co-authored-by: wangdan-fit2cloud <dan.wang@fit2cloud.com>
2025-08-20 11:08:01 +08:00
shaohuzhang1 cd81992803
fix: Advanced Layout Canvas Editing - Special Editing Method will connect the exit of the current node to the exit of subsequent nodes when adding them (#3891) 2025-08-20 10:57:19 +08:00
CaptainB 655ca53419 fix: refactor collation refresh and reindex logic to improve error handling and connection management
2025-08-19 15:49:22 +08:00
shaohuzhang1 a3b14cb562
fix: The issue of prologue in the conversation log (#3880)
Co-authored-by: wangdan-fit2cloud <dan.wang@fit2cloud.com>
2025-08-19 14:54:05 +08:00
CaptainB b976273876 fix: update API request handling to use request body schema instead of query parameters 2025-08-19 14:40:27 +08:00
CaptainB 886c45fb1d fix: update Dockerfile to use Python 3.11 and PostgreSQL 15.14 images 2025-08-19 14:06:48 +08:00
CaptainB 87eb5c3789 fix: update Docker images to use latest versions and adjust permissions 2025-08-19 14:05:12 +08:00
CaptainB 0e1ae663b1 fix: add migration to refresh collation and reindex database 2025-08-19 13:55:10 +08:00
CaptainB a10991ad6d fix: update pypdf version to 6.0.0 2025-08-19 12:40:05 +08:00
CaptainB 6f69c8beb8 fix: add existence check for ProblemParagraphMapping association
2025-08-18 17:11:53 +08:00
shaohuzhang1 98b58e7fda
fix: Form collection, multiple dragging and dropping of parameters failed(#3745)
Co-authored-by: wangdan-fit2cloud <dan.wang@fit2cloud.com>
2025-08-18 16:01:16 +08:00
CaptainB 4c7cd248f8 fix: update base images to use bookworm variant 2025-08-18 15:31:04 +08:00
shaohuzhang1 ae75df098f
perf: Segmented search function in the knowledge base, clearing search content when switching search keywords(#3867) (#3872)
Co-authored-by: wangdan-fit2cloud <dan.wang@fit2cloud.com>
2025-08-18 15:12:32 +08:00
CaptainB 979a0d8f41 fix: update checkout reference to v1 in build-and-push workflow 2025-08-18 15:08:42 +08:00
CaptainB 3aa8054055 fix: update PostgreSQL version in Dockerfile 2025-08-18 14:56:55 +08:00
shaohuzhang1 67ec325dd6
perf: Upgrade markdown editor version(#3322)
* perf: Optimize login loading issues

* perf: Upgrade markdown editor version(#3322)

---------

Co-authored-by: wangdan-fit2cloud <dan.wang@fit2cloud.com>
2025-08-18 11:28:49 +08:00
shaohuzhang1 3d6bcb99d0
fix: Upgrading psycopg (#3869) 2025-08-18 10:39:21 +08:00
wxg0103 8031b0a2fa refactor: simplify success message handling in dataset update process
2025-08-06 17:25:10 +08:00
CaptainB 9d4679a835 fix: improve charset detection in HTML parsing
2025-07-31 13:35:11 +08:00
CaptainB 37100281ee fix: improve speech synthesis handling in ChatOperationButton
--bug=1059740 --user=刘瑞斌 [github#3769] With browser speech playback enabled for an application, playback cannot be restarted on the Q&A page after it has been paused https://www.tapd.cn/57709429/s/1744903
2025-07-30 11:15:41 +08:00
CaptainB 2ff63d930b feat: add dataset index creation and deletion functions
2025-07-25 11:45:55 +08:00
shaohuzhang1 0651f3d68c
fix: Voice to text, text to voice form cannot obtain data after collecting nodes (#3742)
2025-07-25 11:44:51 +08:00
shaohuzhang1 ceea85e2d8
fix: MCP calls node, workflow error after form recall (#3738)
2025-07-25 10:34:51 +08:00
wangdan-fit2cloud 5f50059443 fix: Adjust application's similarity text
2025-07-24 10:33:34 +08:00
shaohuzhang1 d5148ddadf
fix: When the execution parameter of the function library is not None, no verification is performed (#3729)
2025-07-23 18:59:46 +08:00
shaohuzhang1 b838a14bd8
fix: Session timeout setting (#3728)
2025-07-23 18:12:40 +08:00
shaohuzhang1 5ba725ba18
refactor: recycle db connection to avoid "the connection is closed" exception. (#3726) (#3727) 2025-07-23 17:40:39 +08:00
shaohuzhang1 d1cd01f555
refactor: recycle db connection to avoid "the connection is closed" exception. (#3726) 2025-07-23 17:15:38 +08:00
wangdan-fit2cloud 6f4df54917 fix: draggable issue 2025-07-23 16:46:36 +08:00
shaohuzhang1 9d790f1eda
fix: When referencing workflow fields, if the node is not executed, return None data (#3724) 2025-07-23 16:42:36 +08:00
zhangzhanwei dd5622d2bb fix: Swagger doc 2025-07-23 16:35:46 +08:00
zhangzhanwei 7eaf860869 fix: Swagger 2025-07-23 15:56:22 +08:00
CaptainB bd0f44efd1 fix: validate required fields in FunctionNodeForm
--bug=1055176 --user=刘瑞斌 [github#2996] When a function references a preceding node's parameter and that node is deleted, the application can still be published with the function parameter left empty https://www.tapd.cn/57709429/s/1739785
2025-07-23 15:15:32 +08:00
shaohuzhang1 23fcb0e94e
fix: After the application opens a session, after a period of time, the conversation record ID will report an error (#3712) 2025-07-23 13:56:27 +08:00
wangdan-fit2cloud 3083d48dff perf: Similarity copywriting optimization 2025-07-23 13:02:06 +08:00
shaohuzhang1 ef549c7c89
fix: Anchor point positioning error of the discriminator node (#3710)
2025-07-22 19:00:35 +08:00
shaohuzhang1 bc6a5a8869
fix: Non streaming sessions cannot count token consumption #3635 (#3709) 2025-07-22 18:43:10 +08:00
wangdan-fit2cloud 14d011d61d fix: Firefox browser compatibility with drag and drop upload issues
2025-07-22 17:36:54 +08:00
wangdan-fit2cloud 753bf5f777 fix: hit test title style optimize(#3533) 2025-07-22 17:13:07 +08:00
shaohuzhang1 8e3e46a96d
fix: Replace replaceAll with the replace function (#3706) #3656 2025-07-22 17:08:57 +08:00
shaohuzhang1 33762f26bf
fix: Password change prompt (#3705) 2025-07-22 16:23:15 +08:00
wangdan-fit2cloud 2adb872cdf fix: Application dialogue history error(#3223) 2025-07-22 16:05:07 +08:00
shaohuzhang1 a0b6aaa568
fix: Specifying a specific form parameter for the reply output will result in an error #3309 (#3703) 2025-07-22 15:49:59 +08:00
shaohuzhang1 01075166b8
docs: Missing thinking process related parameters in model_setting #3134 (#3700) 2025-07-22 12:05:24 +08:00
shaohuzhang1 ae30052dae
docs: Delete the conversation log history_day parameter #3159 (#3699) 2025-07-22 11:52:04 +08:00
shaohuzhang1 55a7d73f98
fix: The thinking process information of AI dialogue nodes is lost (#3698) 2025-07-22 11:26:52 +08:00
shaohuzhang1 0531a6ecc8
feat: Support session_timeout parameter (#3697) 2025-07-22 10:35:20 +08:00
shaohuzhang1 1ee0eac455
build: locales (#3696) 2025-07-22 10:20:46 +08:00
CaptainB 0ba9b97752 feat: add MySQL and PostgreSQL query templates with JSON serialization
2025-07-21 17:45:20 +08:00
wangdan-fit2cloud 02d6239a71 fix: Quick question in the opening statement, English word breaks.(#3158)
2025-07-21 16:45:47 +08:00
shaohuzhang1 8d3b3f8121
fix: Quick question in the opening statement, English word breaks.(#3158)
Co-authored-by: wangdan-fit2cloud <dan.wang@fit2cloud.com>
2025-07-21 16:20:38 +08:00
shaohuzhang1 4013606a93
fix: The password rule prompt is unclear #3547 (#3689) 2025-07-21 16:12:47 +08:00
shaohuzhang1 40be71d765
fix: Create document prompt error #3527 (#3688) 2025-07-21 15:24:49 +08:00
shaohuzhang1 8ecf5b52ed
fix: Interface permission verification error #3309 (#3687) 2025-07-21 15:18:40 +08:00
shaohuzhang1 abe51dc30c
fix: Quick question in the opening statement, English word breaks.(#3158)
Co-authored-by: wangdan-fit2cloud <dan.wang@fit2cloud.com>
2025-07-21 12:34:14 +08:00
CaptainB b7ba9fdf67 fix: change HitTest action methods from GET to PUT in API views 2025-07-21 12:12:26 +08:00
shaohuzhang1 bca56af788
fix: Interface permission verification error #3343 (#3683) 2025-07-21 11:38:41 +08:00
wxg0103 622a8e525c refactor: add early return for invalid document type in document_serializers.py
--bug=1057562 --user=王孝刚 [Knowledge Base] Saving the hit-handling settings fails for a Feishu (Lark) knowledge base integration https://www.tapd.cn/57709429/s/1736833
2025-07-21 11:32:26 +08:00
wxg0103 f568c6800f refactor: add early return for invalid document type in document_serializers.py
--bug=1057562 --user=王孝刚 [Knowledge Base] Saving the hit-handling settings fails for a Feishu (Lark) knowledge base integration https://www.tapd.cn/57709429/s/1736833
2025-07-21 11:31:03 +08:00
CaptainB 93d1958fef fix: validate transport type in MCP server configuration
--bug=1056812 --user=刘瑞斌 [github#3232] [Application Orchestration] Wrong error message when the MCP node's transport is configured as http https://www.tapd.cn/57709429/s/1736690
2025-07-21 11:22:00 +08:00
wxg0103 90ee3c4d21 refactor: improve formatting in chat_serializers.py
--bug=1059060 --user=王孝刚 [github#3392] [Application] Exporting conversation logs with a custom time range fails with an error https://www.tapd.cn/57709429/s/1736692
2025-07-21 11:20:15 +08:00
liqiang-fit2cloud 30ddab322f
Update README_CN.md
2025-07-16 10:30:40 +08:00
maninhill ee83139b96
chore: Update README_CN.md (#3621)
2025-07-15 21:56:48 +08:00
maninhill 1268b2043a
chore: Update README_CN.md (#3619) 2025-07-15 21:35:03 +08:00
liqiang-fit2cloud f01a65f507 docs: Update README.md
2025-07-15 16:26:03 +08:00
liqiang-fit2cloud efa196c58b docs: Update README.md 2025-07-15 16:23:46 +08:00
86 changed files with 996 additions and 436 deletions

View File

@ -33,13 +33,13 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
with:
ref: main
ref: v1
- name: Prepare
id: prepare
run: |
DOCKER_IMAGE=ghcr.io/1panel-dev/maxkb-python-pg
DOCKER_PLATFORMS=${{ github.event.inputs.architecture }}
TAG_NAME=python3.11-pg15.8
TAG_NAME=python3.11-pg15.14
DOCKER_IMAGE_TAGS="--tag ${DOCKER_IMAGE}:${TAG_NAME} --tag ${DOCKER_IMAGE}:latest"
echo ::set-output name=docker_image::${DOCKER_IMAGE}
echo ::set-output name=version::${TAG_NAME}

View File

@ -24,7 +24,7 @@ MaxKB = Max Knowledge Brain, it is an open-source platform for building enterpri
Execute the script below to start a MaxKB container using Docker:
```bash
docker run -d --name=maxkb --restart=always -p 8080:8080 -v ~/.maxkb:/var/lib/postgresql/data -v ~/.python-packages:/opt/maxkb/app/sandbox/python-packages 1panel/maxkb
docker run -d --name=maxkb --restart=always -p 8080:8080 -v ~/.maxkb:/var/lib/postgresql/data -v ~/.python-packages:/opt/maxkb/app/sandbox/python-packages 1panel/maxkb:v1
```
Access MaxKB web interface at `http://your_server_ip:8080` with default admin credentials:
@ -32,7 +32,7 @@ Access MaxKB web interface at `http://your_server_ip:8080` with default admin cr
- username: admin
- password: MaxKB@123..
中国用户如遇到 Docker 镜像 Pull 失败问题,请参照该 [离线安装文档](https://maxkb.cn/docs/installation/offline_installtion/) 进行安装。
中国用户如遇到 Docker 镜像 Pull 失败问题,请参照该 [离线安装文档](https://maxkb.cn/docs/v1/installation/offline_installtion/) 进行安装。
## Screenshots

View File

@ -14,12 +14,12 @@
</p>
<hr/>
MaxKB = Max Knowledge Brain,是一款强大易用的企业级智能体平台,支持 RAG 检索增强生成、工作流编排、MCP 工具调用能力。MaxKB 支持对接各种主流大语言模型,广泛应用于智能客服、企业内部知识库问答、员工助手、学术研究与教育等场景。
MaxKB = Max Knowledge Brain,是一个强大易用的企业级智能体平台,致力于解决企业 AI 落地面临的技术门槛高、部署成本高、迭代周期长等问题,助力企业在人工智能时代赢得先机。秉承“开箱即用,伴随成长”的设计理念,MaxKB 支持企业快速接入主流大模型,高效构建专属知识库,并提供从基础问答(RAG)、复杂流程自动化(工作流)到智能体(Agent)的渐进式升级路径,全面赋能智能客服、智能办公助手等多种应用场景。
- **RAG 检索增强生成**:高效搭建本地 AI 知识库,支持直接上传文档 / 自动爬取在线文档,支持文本自动拆分、向量化,有效减少大模型幻觉,提升问答效果;
- **灵活编排**:内置强大的工作流引擎、函数库和 MCP 工具调用能力,支持编排 AI 工作过程,满足复杂业务场景下的需求;
- **无缝嵌入**:支持零编码快速嵌入到第三方业务系统,让已有系统快速拥有智能问答能力,提高用户满意度;
- **模型中立**:支持对接各种大模型,包括本地私有大模型(DeepSeek R1 / Llama 3 / Qwen 2 等)、国内公共大模型(通义千问 / 腾讯混元 / 字节豆包 / 百度千帆 / 智谱 AI / Kimi 等)和国外公共大模型(OpenAI / Claude / Gemini 等)。
- **模型中立**:支持对接各种大模型,包括本地私有大模型(DeepSeek R1 / Qwen 3 等)、国内公共大模型(通义千问 / 腾讯混元 / 字节豆包 / 百度千帆 / 智谱 AI / Kimi 等)和国外公共大模型(OpenAI / Claude / Gemini 等)。
MaxKB 三分钟视频介绍:https://www.bilibili.com/video/BV18JypYeEkj/
@ -27,10 +27,10 @@ MaxKB 三分钟视频介绍https://www.bilibili.com/video/BV18JypYeEkj/
```
# Linux 机器
docker run -d --name=maxkb --restart=always -p 8080:8080 -v ~/.maxkb:/var/lib/postgresql/data -v ~/.python-packages:/opt/maxkb/app/sandbox/python-packages registry.fit2cloud.com/maxkb/maxkb
docker run -d --name=maxkb --restart=always -p 8080:8080 -v ~/.maxkb:/var/lib/postgresql/data -v ~/.python-packages:/opt/maxkb/app/sandbox/python-packages registry.fit2cloud.com/maxkb/maxkb:v1
# Windows 机器
docker run -d --name=maxkb --restart=always -p 8080:8080 -v C:/maxkb:/var/lib/postgresql/data -v C:/python-packages:/opt/maxkb/app/sandbox/python-packages registry.fit2cloud.com/maxkb/maxkb
docker run -d --name=maxkb --restart=always -p 8080:8080 -v C:/maxkb:/var/lib/postgresql/data -v C:/python-packages:/opt/maxkb/app/sandbox/python-packages registry.fit2cloud.com/maxkb/maxkb:v1
# 用户名: admin
# 密码: MaxKB@123..
@ -38,8 +38,8 @@ docker run -d --name=maxkb --restart=always -p 8080:8080 -v C:/maxkb:/var/lib/po
- 你也可以通过 [1Panel 应用商店](https://apps.fit2cloud.com/1panel) 快速部署 MaxKB
- 如果是内网环境,推荐使用 [离线安装包](https://community.fit2cloud.com/#/products/maxkb/downloads) 进行安装部署;
- MaxKB 产品版本分为社区版和专业版,详情请参见:[MaxKB 产品版本对比](https://maxkb.cn/pricing.html)
- 如果您需要向团队介绍 MaxKB,可以使用这个 [官方 PPT 材料](https://maxkb.cn/download/introduce-maxkb_202503.pdf)。
- MaxKB 不同产品版本的对比请参见:[MaxKB 产品版本对比](https://maxkb.cn/price)
- 如果您需要向团队介绍 MaxKB,可以使用这个 [官方 PPT 材料](https://fit2cloud.com/maxkb/download/introduce-maxkb_202507.pdf)。
如你有更多问题,可以查看使用手册,或者通过论坛与我们交流。

View File

@ -11,7 +11,6 @@ import json
import re
import time
from functools import reduce
from types import AsyncGeneratorType
from typing import List, Dict
from django.db.models import QuerySet
@ -33,13 +32,25 @@ tool_message_template = """
<strong>Called MCP Tool: <em>%s</em></strong>
</summary>
```json
%s
```
</details>
"""
tool_message_json_template = """
```json
%s
```
"""
def generate_tool_message_template(name, context):
if '```' in context:
return tool_message_template % (name, context)
else:
return tool_message_template % (name, tool_message_json_template % (context))
def _write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow, answer: str,
reasoning_content: str):
@ -109,7 +120,7 @@ async def _yield_mcp_response(chat_model, message_list, mcp_servers):
response = agent.astream({"messages": message_list}, stream_mode='messages')
async for chunk in response:
if isinstance(chunk[0], ToolMessage):
content = tool_message_template % (chunk[0].name, chunk[0].content)
content = generate_tool_message_template(chunk[0].name, chunk[0].content)
chunk[0].content = content
yield chunk[0]
if isinstance(chunk[0], AIMessageChunk):
@ -188,6 +199,7 @@ class BaseChatNode(IChatNode):
self.context['answer'] = details.get('answer')
self.context['question'] = details.get('question')
self.context['reasoning_content'] = details.get('reasoning_content')
self.context['model_setting'] = details.get('model_setting')
if self.node_params.get('is_result', False):
self.answer_text = details.get('answer')
@ -274,6 +286,7 @@ class BaseChatNode(IChatNode):
"index": index,
'run_time': self.context.get('run_time'),
'system': self.context.get('system'),
'model_setting': self.context.get('model_setting'),
'history_message': [{'content': message.content, 'role': message.type} for message in
(self.context.get('history_message') if self.context.get(
'history_message') is not None else [])],
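The `generate_tool_message_template` helper added in this diff only wraps the MCP tool output in its own ```json fence when the content does not already contain a fence, which avoids the nested-fence markdown breakage reported in #3846. A minimal standalone sketch of that behavior follows; the `<details>`/`<summary>` markup is abbreviated from the hunk above and the sample tool outputs are made up:

```python
CODE_FENCE = "```"  # three backticks, kept in a variable so this sketch stays readable

tool_message_template = (
    "<details>\n"
    "<summary><strong>Called MCP Tool: <em>%s</em></strong></summary>\n\n"
    "%s\n"
    "</details>\n"
)
tool_message_json_template = CODE_FENCE + "json\n%s\n" + CODE_FENCE


def generate_tool_message_template(name, context):
    # A tool result that already contains a fenced block is passed through as-is;
    # nesting it inside another json fence breaks markdown rendering (#3846).
    if CODE_FENCE in context:
        return tool_message_template % (name, context)
    return tool_message_template % (name, tool_message_json_template % context)


# Hypothetical tool outputs:
print(generate_tool_message_template("query_db", '{"rows": 3}'))  # wrapped in a json fence
print(generate_tool_message_template("run_code", CODE_FENCE + "python\nprint(1)\n" + CODE_FENCE))  # passed through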

View File

@ -66,7 +66,7 @@ class BaseDocumentExtractNode(IDocumentExtractNode):
for doc in document:
file = QuerySet(File).filter(id=doc['file_id']).first()
buffer = io.BytesIO(file.get_byte().tobytes())
buffer = io.BytesIO(file.get_byte())
buffer.name = doc['name'] # this is the important line
for split_handle in (parse_table_handle_list + split_handles):

View File

@ -45,6 +45,8 @@ def get_field_value(debug_field_list, name, is_required):
def valid_reference_value(_type, value, name):
if value is None:
return
if _type == 'int':
instance_type = int | float
elif _type == 'float':
@ -70,10 +72,17 @@ def convert_value(name: str, value, _type, is_required, source, node):
if not is_required and source == 'reference' and (value is None or len(value) == 0):
return None
if source == 'reference':
if value and isinstance(value, list) and len(value) == 0:
if not is_required:
return None
else:
raise Exception(f"字段:{name}类型:{_type}值:{value}必填参数")
value = node.workflow_manage.get_reference_field(
value[0],
value[1:])
valid_reference_value(_type, value, name)
if value is None:
return None
if _type == 'int':
return int(value)
if _type == 'float':

View File

@ -32,6 +32,8 @@ def write_context(step_variable: Dict, global_variable: Dict, node, workflow):
def valid_reference_value(_type, value, name):
if value is None:
return
if _type == 'int':
instance_type = int | float
elif _type == 'float':
@ -52,10 +54,17 @@ def convert_value(name: str, value, _type, is_required, source, node):
if not is_required and (value is None or (isinstance(value, str) and len(value) == 0)):
return None
if source == 'reference':
if value and isinstance(value, list) and len(value) == 0:
if not is_required:
return None
else:
raise Exception(f"字段:{name}类型:{_type}值:{value}必填参数")
value = node.workflow_manage.get_reference_field(
value[0],
value[1:])
valid_reference_value(_type, value, name)
if value is None:
return None
if _type == 'int':
return int(value)
if _type == 'float':

View File

@ -62,7 +62,7 @@ def file_id_to_base64(file_id: str):
file = QuerySet(File).filter(id=file_id).first()
file_bytes = file.get_byte()
base64_image = base64.b64encode(file_bytes).decode("utf-8")
return [base64_image, what(None, file_bytes.tobytes())]
return [base64_image, what(None, file_bytes)]
class BaseImageUnderstandNode(IImageUnderstandNode):
@ -172,7 +172,7 @@ class BaseImageUnderstandNode(IImageUnderstandNode):
file = QuerySet(File).filter(id=file_id).first()
image_bytes = file.get_byte()
base64_image = base64.b64encode(image_bytes).decode("utf-8")
image_format = what(None, image_bytes.tobytes())
image_format = what(None, image_bytes)
images.append({'type': 'image_url', 'image_url': {'url': f'data:image/{image_format};base64,{base64_image}'}})
messages = [HumanMessage(
content=[

View File

@ -14,8 +14,6 @@ class BaseMcpNode(IMcpNode):
self.context['result'] = details.get('result')
self.context['tool_params'] = details.get('tool_params')
self.context['mcp_tool'] = details.get('mcp_tool')
if self.node_params.get('is_result', False):
self.answer_text = details.get('result')
def execute(self, mcp_servers, mcp_server, mcp_tool, tool_params, **kwargs) -> NodeResult:
servers = json.loads(mcp_servers)

View File

@ -18,6 +18,7 @@ class BaseSpeechToTextNode(ISpeechToTextNode):
def save_context(self, details, workflow_manage):
self.context['answer'] = details.get('answer')
self.context['result'] = details.get('answer')
if self.node_params.get('is_result', False):
self.answer_text = details.get('answer')
@ -31,7 +32,7 @@ class BaseSpeechToTextNode(ISpeechToTextNode):
# Convert the file to mp3 format based on its file_name extension
file_format = file.file_name.split('.')[-1]
with tempfile.NamedTemporaryFile(delete=False, suffix=f'.{file_format}') as temp_file:
temp_file.write(file.get_byte().tobytes())
temp_file.write(file.get_byte())
temp_file_path = temp_file.name
with tempfile.NamedTemporaryFile(delete=False, suffix='.mp3') as temp_amr_file:
temp_mp3_path = temp_amr_file.name

View File

@ -37,6 +37,7 @@ def bytes_to_uploaded_file(file_bytes, file_name="generated_audio.mp3"):
class BaseTextToSpeechNode(ITextToSpeechNode):
def save_context(self, details, workflow_manage):
self.context['answer'] = details.get('answer')
self.context['result'] = details.get('result')
if self.node_params.get('is_result', False):
self.answer_text = details.get('answer')
@ -73,4 +74,5 @@ class BaseTextToSpeechNode(ITextToSpeechNode):
'content': self.context.get('content'),
'err_message': self.err_message,
'answer': self.context.get('answer'),
'result': self.context.get('result')
}

View File

@ -14,7 +14,7 @@ from concurrent.futures import ThreadPoolExecutor
from functools import reduce
from typing import List, Dict
from django.db import close_old_connections
from django.db import close_old_connections, connection
from django.db.models import QuerySet
from django.utils import translation
from django.utils.translation import get_language
@ -298,8 +298,8 @@ class WorkflowManage:
if global_fields is not None:
for global_field in global_fields:
global_field_list.append({**global_field, 'node_id': node_id, 'node_name': node_name})
field_list.sort(key=lambda f: len(f.get('node_name')), reverse=True)
global_field_list.sort(key=lambda f: len(f.get('node_name')), reverse=True)
field_list.sort(key=lambda f: len(f.get('node_name') + f.get('value')), reverse=True)
global_field_list.sort(key=lambda f: len(f.get('node_name') + f.get('value')), reverse=True)
self.field_list = field_list
self.global_field_list = global_field_list
@ -569,6 +569,8 @@ class WorkflowManage:
return None
finally:
current_node.node_chunk.end()
# Return the connection
connection.close()
def run_node_async(self, node):
future = executor.submit(self.run_node, node)
@ -678,10 +680,16 @@ class WorkflowManage:
return None
@staticmethod
def dependent_node(up_node_id, node):
def dependent_node(edge, node):
up_node_id = edge.sourceNodeId
if not node.node_chunk.is_end():
return False
if node.id == up_node_id:
if node.context.get('branch_id', None):
if edge.sourceAnchorId == f"{node.id}_{node.context.get('branch_id', None)}_right":
return True
else:
return False
if node.type == 'form-node':
if node.context.get('form_data', None) is not None:
return True
@ -694,9 +702,11 @@ class WorkflowManage:
@param node_id: ID of the node to check
@return:
"""
up_node_id_list = [edge.sourceNodeId for edge in self.flow.edges if edge.targetNodeId == node_id]
return all([any([self.dependent_node(up_node_id, node) for node in self.node_context]) for up_node_id in
up_node_id_list])
up_edge_list = [edge for edge in self.flow.edges if edge.targetNodeId == node_id]
return all(
[any([self.dependent_node(edge, node) for node in self.node_context if node.id == edge.sourceNodeId]) for
edge in
up_edge_list])
def get_up_node_id_list(self, node_id):
up_node_id_list = [edge.sourceNodeId for edge in self.flow.edges if edge.targetNodeId == node_id]
@ -755,7 +765,10 @@ class WorkflowManage:
if node_id == 'global':
return INode.get_field(self.context, fields)
else:
return self.get_node_by_id(node_id).get_reference_field(fields)
node = self.get_node_by_id(node_id)
if node:
return node.get_reference_field(fields)
return None
def get_workflow_content(self):
context = {
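The `connection.close()` added in the `finally` block above hands each worker thread's database connection back once a node finishes, instead of leaving it attached to the thread until it goes stale. A rough sketch of that pattern for a thread-pool worker, assuming a configured Django project (the node object and executor here are illustrative, not the project's actual classes):

```python
from concurrent.futures import ThreadPoolExecutor

from django.db import close_old_connections, connection

executor = ThreadPoolExecutor(max_workers=5)


def run_node(node):
    # Discard stale or broken connections left over from a previous task
    # before this worker thread touches the ORM.
    close_old_connections()
    try:
        return node.run()  # ORM queries happen inside the node implementation
    finally:
        # Close the thread's connection so long-lived workers do not hold
        # connections that the database server may have already dropped.
        connection.close()


# future = executor.submit(run_node, some_node)
```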

View File

@ -1,9 +1,8 @@
# Generated by Django 4.2.15 on 2024-09-18 16:14
import logging
import psycopg2
import psycopg
from django.db import migrations
from psycopg2 import extensions
from smartdoc.const import CONFIG
@ -17,7 +16,7 @@ def get_connect(db_name):
"port": CONFIG.get('DB_PORT')
}
# Establish the connection
connect = psycopg2.connect(**conn_params)
connect = psycopg.connect(**conn_params)
return connect
@ -28,7 +27,7 @@ def sql_execute(conn, reindex_sql: str, alter_database_sql: str):
@param conn:
@param alter_database_sql:
"""
conn.set_isolation_level(extensions.ISOLATION_LEVEL_AUTOCOMMIT)
conn.autocommit = True
with conn.cursor() as cursor:
cursor.execute(reindex_sql, [])
cursor.execute(alter_database_sql, [])

View File

@ -16,6 +16,7 @@ import re
import uuid
from functools import reduce
from typing import Dict, List
from django.contrib.postgres.fields import ArrayField
from django.core import cache, validators
from django.core import signing
@ -24,8 +25,8 @@ from django.db.models import QuerySet
from django.db.models.expressions import RawSQL
from django.http import HttpResponse
from django.template import Template, Context
from django.utils.translation import gettext_lazy as _, get_language, to_locale
from langchain_mcp_adapters.client import MultiServerMCPClient
from mcp.client.sse import sse_client
from rest_framework import serializers, status
from rest_framework.utils.formatting import lazy_format
@ -38,7 +39,7 @@ from common.config.embedding_config import VectorStore
from common.constants.authentication_type import AuthenticationType
from common.db.search import get_dynamics_model, native_search, native_page_search
from common.db.sql_execute import select_list
from common.exception.app_exception import AppApiException, NotFound404, AppUnauthorizedFailed, ChatException
from common.exception.app_exception import AppApiException, NotFound404, AppUnauthorizedFailed
from common.field.common import UploadedImageField, UploadedFileField
from common.models.db_model_manage import DBModelManage
from common.response import result
@ -57,7 +58,6 @@ from setting.models_provider.tools import get_model_instance_by_model_user_id
from setting.serializers.provider_serializers import ModelSerializer
from smartdoc.conf import PROJECT_DIR
from users.models import User
from django.utils.translation import gettext_lazy as _, get_language, to_locale
chat_cache = cache.caches['chat_cache']
@ -322,6 +322,7 @@ class ApplicationSerializer(serializers.Serializer):
def get_query_api_input(self, application, params):
query = ''
is_asker = False
if application.work_flow is not None:
work_flow = application.work_flow
if work_flow is not None:
@ -333,8 +334,10 @@ class ApplicationSerializer(serializers.Serializer):
if input_field_list is not None:
for field in input_field_list:
if field['assignment_method'] == 'api_input' and field['variable'] in params:
if field['variable'] == 'asker':
is_asker = True
query += f"&{field['variable']}={params[field['variable']]}"
if 'asker' in params:
if 'asker' in params and not is_asker:
query += f"&asker={params.get('asker')}"
return query
@ -1328,6 +1331,9 @@ class ApplicationSerializer(serializers.Serializer):
if '"stdio"' in self.data.get('mcp_servers'):
raise AppApiException(500, _('stdio is not supported'))
servers = json.loads(self.data.get('mcp_servers'))
for server, config in servers.items():
if config.get('transport') not in ['sse', 'streamable_http']:
raise AppApiException(500, _('Only support transport=sse or transport=streamable_http'))
async def get_mcp_tools(servers):
async with MultiServerMCPClient(servers) as client:

View File

@ -395,13 +395,14 @@ class ChatMessageSerializer(serializers.Serializer):
work_flow_manage = WorkflowManage(Flow.new_instance(chat_info.work_flow_version.work_flow),
{'history_chat_record': history_chat_record, 'question': message,
'chat_id': chat_info.chat_id, 'chat_record_id': str(
uuid.uuid1()) if chat_record is None else chat_record.id,
uuid.uuid1()) if chat_record is None else str(chat_record.id),
'stream': stream,
're_chat': re_chat,
'client_id': client_id,
'client_type': client_type,
'user_id': user_id}, WorkFlowPostHandler(chat_info, client_id, client_type),
base_to_response, form_data, image_list, document_list, audio_list, other_list,
base_to_response, form_data, image_list, document_list, audio_list,
other_list,
self.data.get('runtime_node_id'),
self.data.get('node_data'), chat_record, self.data.get('child_node'))
r = work_flow_manage.run()

View File

@ -13,8 +13,9 @@ import uuid
from functools import reduce
from io import BytesIO
from typing import Dict
import pytz
import openpyxl
import pytz
from django.core import validators
from django.core.cache import caches
from django.db import transaction, models
@ -34,7 +35,7 @@ from application.serializers.application_serializers import ModelDatasetAssociat
from application.serializers.chat_message_serializers import ChatInfo
from common.constants.permission_constants import RoleConstants
from common.db.search import native_search, native_page_search, page_search, get_dynamics_model
from common.exception.app_exception import AppApiException
from common.exception.app_exception import AppApiException, AppUnauthorizedFailed
from common.util.common import post
from common.util.field_message import ErrMessage
from common.util.file_util import get_file_content
@ -222,7 +223,8 @@ class ChatSerializers(serializers.Serializer):
reference_paragraph,
"\n".join([
f"{improve_paragraph_list[index].get('title')}\n{improve_paragraph_list[index].get('content')}"
for index in range(len(improve_paragraph_list))]),
for index in range(len(improve_paragraph_list))
]) if improve_paragraph_list is not None else "",
row.get('asker').get('user_name'),
row.get('message_tokens') + row.get('answer_tokens'), row.get('run_time'),
str(row.get('create_time').astimezone(pytz.timezone(TIME_ZONE)).strftime('%Y-%m-%d %H:%M:%S')
@ -483,6 +485,13 @@ class ChatRecordSerializer(serializers.Serializer):
chat_id = serializers.UUIDField(required=True)
order_asc = serializers.BooleanField(required=False, allow_null=True)
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
exist = QuerySet(Chat).filter(id=self.data.get("chat_id"),
application_id=self.data.get("application_id")).exists()
if not exist:
raise AppUnauthorizedFailed(403, _('No permission to access'))
def list(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)

View File

@ -61,8 +61,6 @@ class ApplicationApi(ApiMixin):
'user_id': openapi.Schema(type=openapi.TYPE_STRING, title=_("Affiliation user"),
description=_("Affiliation user")),
'status': openapi.Schema(type=openapi.TYPE_BOOLEAN, title=_("Is publish"), description=_('Is publish')),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_("Creation time"),
description=_('Creation time')),
@ -302,7 +300,19 @@ class ApplicationApi(ApiMixin):
'no_references_prompt': openapi.Schema(type=openapi.TYPE_STRING,
title=_("No citation segmentation prompt"),
default="{question}",
description=_("No citation segmentation prompt"))
description=_("No citation segmentation prompt")),
'reasoning_content_enable': openapi.Schema(type=openapi.TYPE_BOOLEAN,
title=_("Reasoning enable"),
default=False,
description=_("Reasoning enable")),
'reasoning_content_end': openapi.Schema(type=openapi.TYPE_STRING,
title=_("Reasoning end tag"),
default="</think>",
description=_("Reasoning end tag")),
"reasoning_content_start": openapi.Schema(type=openapi.TYPE_STRING,
title=_("Reasoning start tag"),
default="<think>",
description=_("Reasoning start tag"))
}
)

View File

@ -326,11 +326,6 @@ class ChatApi(ApiMixin):
type=openapi.TYPE_STRING,
required=True,
description=_('Application ID')),
openapi.Parameter(name='history_day',
in_=openapi.IN_QUERY,
type=openapi.TYPE_NUMBER,
required=True,
description=_('Historical days')),
openapi.Parameter(name='abstract', in_=openapi.IN_QUERY, type=openapi.TYPE_STRING, required=False,
description=_("abstract")),
openapi.Parameter(name='min_star', in_=openapi.IN_QUERY, type=openapi.TYPE_INTEGER, required=False,

View File

@ -48,7 +48,11 @@ class ApplicationVersionView(APIView):
ApplicationVersionApi.Query.get_request_params_api()),
responses=result.get_page_api_response(ApplicationVersionApi.get_response_body_api()),
tags=[_('Application/Version')])
@has_permissions(PermissionConstants.APPLICATION_READ, compare=CompareConstants.AND)
@has_permissions(PermissionConstants.APPLICATION_READ,
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND), compare=CompareConstants.AND)
def get(self, request: Request, application_id: str, current_page: int, page_size: int):
return result.success(
ApplicationVersionSerializer.Query(
@ -65,7 +69,14 @@ class ApplicationVersionView(APIView):
manual_parameters=ApplicationVersionApi.Operate.get_request_params_api(),
responses=result.get_api_response(ApplicationVersionApi.get_response_body_api()),
tags=[_('Application/Version')])
@has_permissions(PermissionConstants.APPLICATION_READ, compare=CompareConstants.AND)
@has_permissions(PermissionConstants.APPLICATION_READ, ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(
group=Group.APPLICATION,
operate=Operate.USE,
dynamic_tag=keywords.get(
'application_id'))],
compare=CompareConstants.AND),
compare=CompareConstants.AND)
def get(self, request: Request, application_id: str, work_flow_version_id: str):
return result.success(
ApplicationVersionSerializer.Operate(

View File

@ -7,16 +7,6 @@
@desc:
"""
from django.core import cache
from django.http import HttpResponse
from django.utils.translation import gettext_lazy as _, gettext
from drf_yasg.utils import swagger_auto_schema
from langchain_core.prompts import PromptTemplate
from rest_framework.decorators import action
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.views import APIView
from application.serializers.application_serializers import ApplicationSerializer
from application.serializers.application_statistics_serializers import ApplicationStatisticsSerializer
from application.swagger_api.application_api import ApplicationApi
@ -31,6 +21,14 @@ from common.response import result
from common.swagger_api.common_api import CommonApi
from common.util.common import query_params_to_single_dict
from dataset.serializers.dataset_serializers import DataSetSerializers
from django.core import cache
from django.http import HttpResponse
from django.utils.translation import gettext_lazy as _
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import action
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.views import APIView
chat_cache = cache.caches['chat_cache']
@ -494,7 +492,7 @@ class Application(APIView):
class HitTest(APIView):
authentication_classes = [TokenAuth]
@action(methods="GET", detail=False)
@action(methods="PUT", detail=False)
@swagger_auto_schema(operation_summary=_("Hit Test List"), operation_id=_("Hit Test List"),
manual_parameters=CommonApi.HitTestApi.get_request_params_api(),
responses=result.get_api_array_response(CommonApi.HitTestApi.get_response_body_api()),
@ -505,15 +503,15 @@ class Application(APIView):
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND))
def get(self, request: Request, application_id: str):
return result.success(
ApplicationSerializer.HitTest(data={'id': application_id, 'user_id': request.user.id,
"query_text": request.query_params.get("query_text"),
"top_number": request.query_params.get("top_number"),
'similarity': request.query_params.get('similarity'),
'search_mode': request.query_params.get(
'search_mode')}).hit_test(
))
def put(self, request: Request, application_id: str):
return result.success(ApplicationSerializer.HitTest(data={
'id': application_id,
'user_id': request.user.id,
"query_text": request.data.get("query_text"),
"top_number": request.data.get("top_number"),
'similarity': request.data.get('similarity'),
'search_mode': request.data.get('search_mode')}
).hit_test())
class Publish(APIView):
authentication_classes = [TokenAuth]

View File

@ -59,7 +59,8 @@ class ChatView(APIView):
@has_permissions(
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND)
)
@log(menu='Conversation Log', operate="Export conversation",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')))
@ -164,7 +165,9 @@ class ChatView(APIView):
@has_permissions(
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND
)
)
def get(self, request: Request, application_id: str):
return result.success(ChatSerializers.Query(
@ -182,8 +185,7 @@ class ChatView(APIView):
[RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.MANAGE,
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND),
compare=CompareConstants.AND)
compare=CompareConstants.AND))
@log(menu='Conversation Log', operate="Delete a conversation",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')))
def delete(self, request: Request, application_id: str, chat_id: str):
@ -206,7 +208,8 @@ class ChatView(APIView):
@has_permissions(
ViewPermission([RoleConstants.APPLICATION_ACCESS_TOKEN],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND)
)
def get(self, request: Request, application_id: str, current_page: int, page_size: int):
return result.success(ChatSerializers.ClientChatHistory(
@ -241,7 +244,7 @@ class ChatView(APIView):
request_body=ChatClientHistoryApi.Operate.ReAbstract.get_request_body_api(),
tags=[_("Application/Conversation Log")])
@has_permissions(ViewPermission(
[RoleConstants.APPLICATION_ACCESS_TOKEN],
[RoleConstants.APPLICATION_ACCESS_TOKEN, RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND),
@ -267,7 +270,8 @@ class ChatView(APIView):
@has_permissions(
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND)
)
def get(self, request: Request, application_id: str, current_page: int, page_size: int):
return result.success(ChatSerializers.Query(
@ -292,7 +296,8 @@ class ChatView(APIView):
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY,
RoleConstants.APPLICATION_ACCESS_TOKEN],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND)
)
def get(self, request: Request, application_id: str, chat_id: str, chat_record_id: str):
return result.success(ChatRecordSerializer.Operate(
@ -310,7 +315,8 @@ class ChatView(APIView):
@has_permissions(
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND)
)
def get(self, request: Request, application_id: str, chat_id: str):
return result.success(ChatRecordSerializer.Query(
@ -329,9 +335,11 @@ class ChatView(APIView):
tags=[_("Application/Conversation Log")]
)
@has_permissions(
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY],
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY,
RoleConstants.APPLICATION_ACCESS_TOKEN],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND)
)
def get(self, request: Request, application_id: str, chat_id: str, current_page: int, page_size: int):
return result.success(ChatRecordSerializer.Query(
@ -354,7 +362,8 @@ class ChatView(APIView):
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY,
RoleConstants.APPLICATION_ACCESS_TOKEN],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND)
)
@log(menu='Conversation Log', operate="Like, Dislike",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')))
@ -377,7 +386,7 @@ class ChatView(APIView):
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))]
))
, compare=CompareConstants.AND))
def get(self, request: Request, application_id: str, chat_id: str, chat_record_id: str):
return result.success(ChatRecordSerializer.ChatRecordImprove(
data={'chat_id': chat_id, 'chat_record_id': chat_record_id}).get())
@ -397,7 +406,7 @@ class ChatView(APIView):
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND
), ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.DATASET,
operate=Operate.MANAGE,
@ -424,6 +433,7 @@ class ChatView(APIView):
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND
), ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.DATASET,
@ -451,6 +461,7 @@ class ChatView(APIView):
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))],
compare=CompareConstants.AND
), ViewPermission([RoleConstants.ADMIN, RoleConstants.USER],
[lambda r, keywords: Permission(group=Group.DATASET,
@ -499,7 +510,8 @@ class ChatView(APIView):
ViewPermission([RoleConstants.ADMIN, RoleConstants.USER, RoleConstants.APPLICATION_KEY,
RoleConstants.APPLICATION_ACCESS_TOKEN],
[lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.USE,
dynamic_tag=keywords.get('application_id'))])
dynamic_tag=keywords.get('application_id'))]
, compare=CompareConstants.AND)
)
def post(self, request: Request, application_id: str, chat_id: str):
files = request.FILES.getlist('file')

View File

@ -6,18 +6,18 @@
@date:2024/3/14 03:02
@desc: User authentication
"""
from django.core import cache
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from common.auth.handle.auth_base_handle import AuthBaseHandle
from common.constants.authentication_type import AuthenticationType
from common.constants.permission_constants import RoleConstants, get_permission_list_by_role, Auth
from common.exception.app_exception import AppAuthenticationFailed
from smartdoc.settings import JWT_AUTH
from smartdoc.const import CONFIG
from users.models import User
from django.core import cache
from users.models.user import get_user_dynamics_permission
from django.utils.translation import gettext_lazy as _
token_cache = cache.caches['token_cache']
@ -35,7 +35,7 @@ class UserToken(AuthBaseHandle):
auth_details = get_token_details()
user = QuerySet(User).get(id=auth_details['id'])
# Renew the token
token_cache.touch(token, timeout=JWT_AUTH['JWT_EXPIRATION_DELTA'].total_seconds())
token_cache.touch(token, timeout=CONFIG.get_session_timeout())
rule = RoleConstants[user.role]
permission_list = get_permission_list_by_role(RoleConstants[user.role])
# 获取用户的应用和知识库的权限

View File

@ -24,6 +24,7 @@ from common.util.file_util import get_file_content
from common.util.lock import try_lock, un_lock
from common.util.page_utils import page_desc
from dataset.models import Paragraph, Status, Document, ProblemParagraphMapping, TaskType, State
from dataset.serializers.common_serializers import create_dataset_index
from embedding.models import SourceType, SearchMode
from smartdoc.conf import PROJECT_DIR
from django.utils.translation import gettext_lazy as _
@ -281,6 +282,8 @@ class ListenerManagement:
ListenerManagement.get_aggregation_document_status(
document_id)),
is_the_task_interrupted)
# Check whether the index already exists
create_dataset_index(document_id=document_id)
except Exception as e:
max_kb_error.error(_('Vectorized document: {document_id} error {error} {traceback}').format(
document_id=document_id, error=str(e), traceback=traceback.format_exc()))

View File

@ -9,43 +9,102 @@
from django.http import HttpResponse
from django.utils.deprecation import MiddlewareMixin
from common.auth import handles, TokenDetails
content = """
<!doctype html>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Document</title>
<script>
window.onload = () => {
var xhr = new XMLHttpRequest()
xhr.open('GET', '/api/user', true)
xhr.setRequestHeader('Content-Type', 'application/json')
const token = localStorage.getItem('token')
const pathname = window.location.pathname
if (token) {
xhr.setRequestHeader('Authorization', token)
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
window.location.href = pathname
}
if (xhr.status === 401) {
window.location.href = '/ui/login'
}
}
}
xhr.send()
} else {
window.location.href = '/ui/login'
}
}
</script>
</head>
<body></body>
<style>
/* Modal content styles */
.modal-content {
background-color: #fefefe;
margin: 15% auto; /* 15% from the top, horizontally centered */
padding: 20px;
border: 1px solid #888;
width: 80%; /* width */
}
</style>
<body>
<div class="modal-content">
<input type="text" id="auth-input" />
<button id="auth">认证</button>
<button id="goLogin">去登录</button>
</div>
<script>
const setCookie = (name, value, days) => {
var expires = "";
if (days) {
var date = new Date();
date.setTime(date.getTime() + days * 2);
expires = "; expires=" + date.toUTCString();
}
document.cookie = name + "=" + (value || "") + expires + "; path=/";
};
const authToken = (token) => {
return new Promise((resolve, reject) => {
try {
var xhr = new XMLHttpRequest();
xhr.open("GET", "/api/user", true);
xhr.setRequestHeader("Content-Type", "application/json");
const pathname = window.location.pathname;
if (token) {
xhr.setRequestHeader("Authorization", token);
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
resolve(true);
} else {
reject(true);
}
}
};
xhr.send();
}
} catch (e) {
reject(false);
}
});
};
window.onload = () => {
const token = localStorage.getItem("token");
authToken(token)
.then(() => {
setCookie("Authorization", token);
window.location.href = window.location.pathname;
})
.catch((e) => {});
};
// Get the elements
const auth = document.getElementById("auth");
const goLogin = document.getElementById("goLogin");
// Open the modal
auth.onclick = ()=> {
const authInput = document.getElementById("auth-input");
const token = authInput.value
authToken(token)
.then(() => {
setCookie("Authorization", token);
window.location.href = window.location.pathname;
})
.catch((e) => {
alert("令牌错误");
});
};
// Go to the system login page
goLogin.onclick = ()=> {
window.location.href = "/ui/login";
};
</script>
</body>
</html>
"""
@ -54,9 +113,18 @@ content = """
class DocHeadersMiddleware(MiddlewareMixin):
def process_response(self, request, response):
if request.path.startswith('/doc/') or request.path.startswith('/doc/chat/'):
HTTP_REFERER = request.META.get('HTTP_REFERER')
if HTTP_REFERER is None:
auth = request.COOKIES.get('Authorization')
if auth is None:
return HttpResponse(content)
if HTTP_REFERER == request._current_scheme_host + request.path:
return response
else:
try:
token = auth
token_details = TokenDetails(token)
for handle in handles:
if handle.support(request, token, token_details.get_token_details):
handle.handle(request, token, token_details.get_token_details)
return response
return HttpResponse(content)
except Exception as e:
return HttpResponse(content)
return response
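With the middleware change above, the /doc/ and /doc/chat/ pages are no longer gated on the Referer header; the middleware now looks for an `Authorization` cookie and validates it with the existing auth handlers, otherwise it returns the embedded token-entry page. A hedged client-side sketch of accessing the docs (host, path, and token value are placeholders; only the cookie name is taken from the diff):

```python
import requests

# Placeholder host and token; the "Authorization" cookie name matches the
# middleware diff above, which runs the token through the configured handlers.
token = "<a valid MaxKB token>"
resp = requests.get(
    "http://your_server_ip:8080/doc/",
    cookies={"Authorization": token},
)
# Without a valid cookie, the middleware returns the token-entry HTML page
# instead of the API documentation.
print(resp.status_code)
```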

View File

@ -15,33 +15,21 @@ from django.utils.translation import gettext_lazy as _
class CommonApi:
class HitTestApi(ApiMixin):
@staticmethod
def get_request_params_api():
return [
openapi.Parameter(name='query_text',
in_=openapi.IN_QUERY,
type=openapi.TYPE_STRING,
required=True,
description=_('query text')),
openapi.Parameter(name='top_number',
in_=openapi.IN_QUERY,
type=openapi.TYPE_NUMBER,
default=10,
required=True,
description='topN'),
openapi.Parameter(name='similarity',
in_=openapi.IN_QUERY,
type=openapi.TYPE_NUMBER,
default=0.6,
required=True,
description=_('similarity')),
openapi.Parameter(name='search_mode',
in_=openapi.IN_QUERY,
type=openapi.TYPE_STRING,
default="embedding",
required=True,
description=_('Retrieval pattern embedding|keywords|blend')
)
]
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['query_text', 'top_number', 'similarity', 'search_mode'],
properties={
'query_text': openapi.Schema(type=openapi.TYPE_STRING, title=_('query text'),
description=_('query text')),
'top_number': openapi.Schema(type=openapi.TYPE_NUMBER, title=_('top number'),
description=_('top number')),
'similarity': openapi.Schema(type=openapi.TYPE_NUMBER, title=_('similarity'),
description=_('similarity')),
'search_mode': openapi.Schema(type=openapi.TYPE_STRING, title=_('search mode'),
description=_('search mode'))
}
)
@staticmethod
def get_response_body_api():
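Together with the HitTest view change earlier in this compare (the action moved from GET to PUT and now reads `request.data`), the hit-test parameters are sent in the request body as defined by the schema above. A hedged client sketch follows; the URL path and auth header format are assumptions, while the body fields mirror the schema:

```python
import requests

application_id = "00000000-0000-0000-0000-000000000000"  # placeholder id
resp = requests.put(
    f"http://your_server_ip:8080/api/application/{application_id}/hit_test",  # assumed path
    headers={"Authorization": "<token>"},
    json={
        "query_text": "How do I reset my password?",
        "top_number": 10,
        "similarity": 0.6,
        "search_mode": "embedding",  # embedding | keywords | blend
    },
)
print(resp.json())
```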

View File

@ -3,6 +3,7 @@ import logging
import re
import traceback
from functools import reduce
from pathlib import Path
from typing import List, Set
from urllib.parse import urljoin, urlparse, ParseResult, urlsplit, urlunparse
@ -52,6 +53,28 @@ def remove_fragment(url: str) -> str:
return urlunparse(modified_url)
def remove_last_path_robust(url):
"""健壮地删除URL的最后一个路径部分"""
parsed = urlparse(url)
# 分割路径并过滤空字符串
paths = [p for p in parsed.path.split('/') if p]
if paths:
paths.pop() # 移除最后一个路径
# 重建路径
new_path = '/' + '/'.join(paths) if paths else '/'
# 重建URL
return urlunparse((
parsed.scheme,
parsed.netloc,
new_path,
parsed.params,
parsed.query,
parsed.fragment
))
class Fork:
class Response:
def __init__(self, content: str, child_link_list: List[ChildLink], status, message: str):
@ -70,6 +93,8 @@ class Fork:
def __init__(self, base_fork_url: str, selector_list: List[str]):
base_fork_url = remove_fragment(base_fork_url)
if any([True for end_str in ['index.html', '.htm', '.html'] if base_fork_url.endswith(end_str)]):
base_fork_url =remove_last_path_robust(base_fork_url)
self.base_fork_url = urljoin(base_fork_url if base_fork_url.endswith("/") else base_fork_url + '/', '.')
parsed = urlsplit(base_fork_url)
query = parsed.query
@ -137,18 +162,30 @@ class Fork:
html_content = response.content.decode(encoding)
beautiful_soup = BeautifulSoup(html_content, "html.parser")
meta_list = beautiful_soup.find_all('meta')
charset_list = [meta.attrs.get('charset') for meta in meta_list if
meta.attrs is not None and 'charset' in meta.attrs]
charset_list = Fork.get_charset_list(meta_list)
if len(charset_list) > 0:
charset = charset_list[0]
if charset != encoding:
try:
html_content = response.content.decode(charset)
html_content = response.content.decode(charset, errors='replace')
except Exception as e:
logging.getLogger("max_kb").error(f'{e}')
logging.getLogger("max_kb").error(f'{e}: {traceback.format_exc()}')
return BeautifulSoup(html_content, "html.parser")
return beautiful_soup
@staticmethod
def get_charset_list(meta_list):
charset_list = []
for meta in meta_list:
if meta.attrs is not None:
if 'charset' in meta.attrs:
charset_list.append(meta.attrs.get('charset'))
elif meta.attrs.get('http-equiv', '').lower() == 'content-type' and 'content' in meta.attrs:
match = re.search(r'charset=([^\s;]+)', meta.attrs['content'], re.I)
if match:
charset_list.append(match.group(1))
return charset_list
def fork(self):
try:
@ -175,4 +212,4 @@ class Fork:
def handler(base_url, response: Fork.Response):
print(base_url.url, base_url.tag.text if base_url.tag else None, response.content)
# ForkManage('https://bbs.fit2cloud.com/c/de/6', ['.md-content']).fork(3, set(), handler)
# ForkManage('https://hzqcgc.htc.edu.cn/jxky.htm', ['.md-content']).fork(3, set(), handler)

View File

@ -18,13 +18,13 @@ from rest_framework import serializers
from common.config.embedding_config import ModelManage
from common.db.search import native_search
from common.db.sql_execute import update_execute
from common.db.sql_execute import update_execute, sql_execute
from common.exception.app_exception import AppApiException
from common.mixins.api_mixin import ApiMixin
from common.util.field_message import ErrMessage
from common.util.file_util import get_file_content
from common.util.fork import Fork
from dataset.models import Paragraph, Problem, ProblemParagraphMapping, DataSet, File, Image
from dataset.models import Paragraph, Problem, ProblemParagraphMapping, DataSet, File, Image, Document
from setting.models_provider import get_model
from smartdoc.conf import PROJECT_DIR
from django.utils.translation import gettext_lazy as _
@ -224,6 +224,46 @@ def get_embedding_model_id_by_dataset_id_list(dataset_id_list: List):
return str(dataset_list[0].embedding_mode_id)
def create_dataset_index(dataset_id=None, document_id=None):
if dataset_id is None and document_id is None:
raise AppApiException(500, _('Dataset ID or Document ID must be provided'))
if dataset_id is not None:
k_id = dataset_id
else:
document = QuerySet(Document).filter(id=document_id).first()
k_id = document.dataset_id
sql = f"SELECT indexname, indexdef FROM pg_indexes WHERE tablename = 'embedding' AND indexname = 'embedding_hnsw_idx_{k_id}'"
index = sql_execute(sql, [])
if not index:
sql = f"SELECT vector_dims(embedding) AS dims FROM embedding WHERE dataset_id = '{k_id}' LIMIT 1"
result = sql_execute(sql, [])
if len(result) == 0:
return
dims = result[0]['dims']
sql = f"""CREATE INDEX "embedding_hnsw_idx_{k_id}" ON embedding USING hnsw ((embedding::vector({dims})) vector_cosine_ops) WHERE dataset_id = '{k_id}'"""
update_execute(sql, [])
def drop_dataset_index(dataset_id=None, document_id=None):
if dataset_id is None and document_id is None:
raise AppApiException(500, _('Dataset ID or Document ID must be provided'))
if dataset_id is not None:
k_id = dataset_id
else:
document = QuerySet(Document).filter(id=document_id).first()
k_id = document.dataset_id
sql = f"SELECT indexname, indexdef FROM pg_indexes WHERE tablename = 'embedding' AND indexname = 'embedding_hnsw_idx_{k_id}'"
index = sql_execute(sql, [])
if index:
sql = f'DROP INDEX "embedding_hnsw_idx_{k_id}"'
update_execute(sql, [])
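Each knowledge base now gets its own partial HNSW index, sized to that dataset's embedding dimension and created only when pg_indexes does not already list it. For orientation, these are the statements create_dataset_index and drop_dataset_index end up issuing for a hypothetical dataset (the id and dimension below are made up, not repository values):

dataset_id = "3f1c0d8e-0000-0000-0000-000000000000"  # illustrative only
dims = 1024                                          # whatever vector_dims() reports for this dataset

# Built by create_dataset_index when no embedding_hnsw_idx_<id> exists yet:
create_sql = (
    f'CREATE INDEX "embedding_hnsw_idx_{dataset_id}" ON embedding '
    f'USING hnsw ((embedding::vector({dims})) vector_cosine_ops) '
    f"WHERE dataset_id = '{dataset_id}'"
)
# Issued by drop_dataset_index before re-vectorizing or deleting the dataset:
drop_sql = f'DROP INDEX "embedding_hnsw_idx_{dataset_id}"'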
class GenerateRelatedSerializer(ApiMixin, serializers.Serializer):
model_id = serializers.UUIDField(required=True, error_messages=ErrMessage.uuid(_('Model id')))
prompt = serializers.CharField(required=True, error_messages=ErrMessage.uuid(_('Prompt word')))

View File

@ -44,7 +44,7 @@ from dataset.models.data_set import DataSet, Document, Paragraph, Problem, Type,
State, File, Image
from dataset.serializers.common_serializers import list_paragraph, MetaSerializer, ProblemParagraphManage, \
get_embedding_model_by_dataset_id, get_embedding_model_id_by_dataset_id, write_image, zip_dir, \
GenerateRelatedSerializer
GenerateRelatedSerializer, drop_dataset_index
from dataset.serializers.document_serializers import DocumentSerializers, DocumentInstanceSerializer
from dataset.task import sync_web_dataset, sync_replace_web_dataset, generate_related_by_dataset_id
from embedding.models import SearchMode
@ -526,7 +526,7 @@ class DataSetSerializers(serializers.ModelSerializer):
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['name', 'desc'],
required=['name', 'desc', 'embedding_mode_id'],
properties={
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name')),
@ -788,6 +788,7 @@ class DataSetSerializers(serializers.ModelSerializer):
QuerySet(ProblemParagraphMapping).filter(dataset=dataset).delete()
QuerySet(Paragraph).filter(dataset=dataset).delete()
QuerySet(Problem).filter(dataset=dataset).delete()
drop_dataset_index(dataset_id=dataset.id)
dataset.delete()
delete_embedding_by_dataset(self.data.get('id'))
return True

View File

@ -141,7 +141,8 @@ class DocumentEditInstanceSerializer(ApiMixin, serializers.Serializer):
if 'meta' in self.data and self.data.get('meta') is not None:
dataset_meta_valid_map = self.get_meta_valid_map()
valid_class = dataset_meta_valid_map.get(document.type)
valid_class(data=self.data.get('meta')).is_valid(raise_exception=True)
if valid_class is not None:
valid_class(data=self.data.get('meta')).is_valid(raise_exception=True)
class DocumentWebInstanceSerializer(ApiMixin, serializers.Serializer):
@ -808,27 +809,40 @@ class DocumentSerializers(ApiMixin, serializers.Serializer):
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'name', 'char_length', 'user_id', 'paragraph_count', 'is_active'
'update_time', 'create_time'],
required=['create_time', 'update_time', 'id', 'name', 'char_length', 'status', 'is_active',
'type', 'meta', 'dataset_id', 'hit_handling_method', 'directly_return_similarity',
'status_meta', 'paragraph_count'],
properties={
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('create time'),
description=_('create time'),
default="1970-01-01 00:00:00"),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('update time'),
description=_('update time'),
default="1970-01-01 00:00:00"),
'id': openapi.Schema(type=openapi.TYPE_STRING, title="id",
description="id", default="xx"),
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('name'),
description=_('name'), default="xx"),
'char_length': openapi.Schema(type=openapi.TYPE_INTEGER, title=_('char length'),
description=_('char length'), default=10),
'user_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('user id'), description=_('user id')),
'paragraph_count': openapi.Schema(type=openapi.TYPE_INTEGER, title="_('document count')",
description="_('document count')", default=1),
'status':openapi.Schema(type=openapi.TYPE_STRING, title=_('status'),
description=_('status'), default="xx"),
'is_active': openapi.Schema(type=openapi.TYPE_BOOLEAN, title=_('Is active'),
description=_('Is active'), default=True),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('update time'),
description=_('update time'),
default="1970-01-01 00:00:00"),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('create time'),
description=_('create time'),
default="1970-01-01 00:00:00"
)
'type': openapi.Schema(type=openapi.TYPE_STRING, title=_('type'),
description=_('type'), default="xx"),
'meta': openapi.Schema(type=openapi.TYPE_OBJECT, title=_('meta'),
description=_('meta'), default="{}"),
'dataset_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset_id'),
description=_('dataset_id'), default="xx"),
'hit_handling_method': openapi.Schema(type=openapi.TYPE_STRING, title=_('hit_handling_method'),
description=_('hit_handling_method'), default="xx"),
'directly_return_similarity': openapi.Schema(type=openapi.TYPE_NUMBER, title=_('directly_return_similarity'),
description=_('directly_return_similarity'), default="xx"),
'status_meta': openapi.Schema(type=openapi.TYPE_OBJECT, title=_('status_meta'),
description=_('status_meta'), default="{}"),
'paragraph_count': openapi.Schema(type=openapi.TYPE_INTEGER, title="_('document count')",
description="_('document count')", default=1),
}
)
@ -855,7 +869,7 @@ class DocumentSerializers(ApiMixin, serializers.Serializer):
class Create(ApiMixin, serializers.Serializer):
dataset_id = serializers.UUIDField(required=True, error_messages=ErrMessage.char(
_('document id')))
_('dataset id')))
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
@ -983,7 +997,7 @@ class DocumentSerializers(ApiMixin, serializers.Serializer):
in_=openapi.IN_PATH,
type=openapi.TYPE_STRING,
required=True,
description=_('document id'))
description=_('dataset id'))
]
class Split(ApiMixin, serializers.Serializer):

View File

@ -226,6 +226,14 @@ class ParagraphSerializers(ApiMixin, serializers.Serializer):
def association(self, with_valid=True, with_embedding=True):
if with_valid:
self.is_valid(raise_exception=True)
# 已关联则直接返回
if QuerySet(ProblemParagraphMapping).filter(
dataset_id=self.data.get('dataset_id'),
document_id=self.data.get('document_id'),
paragraph_id=self.data.get('paragraph_id'),
problem_id=self.data.get('problem_id')
).exists():
return True
problem = QuerySet(Problem).filter(id=self.data.get("problem_id")).first()
problem_paragraph_mapping = ProblemParagraphMapping(id=uuid.uuid1(),
document_id=self.data.get('document_id'),

View File

@ -7,13 +7,13 @@
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import action
from rest_framework.parsers import MultiPartParser
from rest_framework.views import APIView
from rest_framework.views import Request
import dataset.models
from common.auth import TokenAuth, has_permissions
from common.constants.permission_constants import PermissionConstants, CompareConstants, Permission, Group, Operate, \
ViewPermission, RoleConstants
@ -25,7 +25,6 @@ from dataset.serializers.common_serializers import GenerateRelatedSerializer
from dataset.serializers.dataset_serializers import DataSetSerializers
from dataset.views.common import get_dataset_operation_object
from setting.serializers.provider_serializers import ModelSerializer
from django.utils.translation import gettext_lazy as _
class Dataset(APIView):
@ -141,21 +140,22 @@ class Dataset(APIView):
class HitTest(APIView):
authentication_classes = [TokenAuth]
@action(methods="GET", detail=False)
@action(methods="PUT", detail=False)
@swagger_auto_schema(operation_summary=_('Hit test list'), operation_id=_('Hit test list'),
manual_parameters=CommonApi.HitTestApi.get_request_params_api(),
request_body=CommonApi.HitTestApi.get_request_body_api(),
responses=result.get_api_array_response(CommonApi.HitTestApi.get_response_body_api()),
tags=[_('Knowledge Base')])
@has_permissions(lambda r, keywords: Permission(group=Group.DATASET, operate=Operate.USE,
dynamic_tag=keywords.get('dataset_id')))
def get(self, request: Request, dataset_id: str):
return result.success(
DataSetSerializers.HitTest(data={'id': dataset_id, 'user_id': request.user.id,
"query_text": request.query_params.get("query_text"),
"top_number": request.query_params.get("top_number"),
'similarity': request.query_params.get('similarity'),
'search_mode': request.query_params.get('search_mode')}).hit_test(
))
def put(self, request: Request, dataset_id: str):
return result.success(DataSetSerializers.HitTest(data={
'id': dataset_id,
'user_id': request.user.id,
"query_text": request.data.get("query_text"),
"top_number": request.data.get("top_number"),
'similarity': request.data.get('similarity'),
'search_mode': request.data.get('search_mode')}
).hit_test())
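The hit test endpoint now takes its parameters from the request body via PUT instead of the query string, and the UI switches from get() to put() to match. A rough sketch of the new call shape, assuming a locally running instance, a dataset API prefix, and a token in an AUTHORIZATION header (URL, header name, and values are all assumptions for illustration):

import requests

base = "http://localhost:8080/api"           # illustrative host and prefix
headers = {"AUTHORIZATION": "<user token>"}  # illustrative auth header

resp = requests.put(
    f"{base}/dataset/<dataset_id>/hit_test",  # dataset id is illustrative
    headers=headers,
    json={
        "query_text": "how do I reset my password",
        "top_number": 10,
        "similarity": 0.6,
        "search_mode": "embedding",
    },
)
print(resp.json())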
class Embedding(APIView):
authentication_classes = [TokenAuth]

View File

@ -5,15 +5,17 @@ SELECT
FROM
(
SELECT DISTINCT ON
( "paragraph_id" ) ( similarity ),* ,
similarity AS comprehensive_score
( "paragraph_id" ) ( 1 - distince + ts_similarity ) as similarity, *,
(1 - distince + ts_similarity) AS comprehensive_score
FROM
(
SELECT
*,
(( 1 - ( embedding.embedding <=> %s ) )+ts_rank_cd( embedding.search_vector, websearch_to_tsquery('simple', %s ), 32 )) AS similarity
(embedding.embedding::vector(%s) <=> %s) as distince,
(ts_rank_cd( embedding.search_vector, websearch_to_tsquery('simple', %s ), 32 )) AS ts_similarity
FROM
embedding ${embedding_query}
ORDER BY distince
) TEMP
ORDER BY
paragraph_id,

View File

@ -5,12 +5,12 @@ SELECT
FROM
(
SELECT DISTINCT ON
("paragraph_id") ( similarity ),* ,similarity AS comprehensive_score
("paragraph_id") ( 1 - distince ),* ,(1 - distince) AS comprehensive_score
FROM
( SELECT *, ( 1 - ( embedding.embedding <=> %s ) ) AS similarity FROM embedding ${embedding_query}) TEMP
( SELECT *, ( embedding.embedding::vector(%s) <=> %s ) AS distince FROM embedding ${embedding_query} ORDER BY distince) TEMP
ORDER BY
paragraph_id,
similarity DESC
distince
) DISTINCT_TEMP
WHERE comprehensive_score>%s
ORDER BY comprehensive_score DESC
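Both search SQL hunks above now compute the pgvector cosine distance (distince) first and derive the score from it, so the threshold comparison keeps its original direction. A quick check of the arithmetic with illustrative numbers:

distance = 0.18                      # embedding <=> query (cosine distance)
ts_similarity = 0.07                 # ts_rank_cd score, used in the blend query only
print(1 - distance)                  # 0.82 -> comprehensive_score in the plain embedding query
print(1 - distance + ts_similarity)  # 0.89 -> comprehensive_score in the blend query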

View File

@ -17,6 +17,7 @@ from common.config.embedding_config import ModelManage
from common.event import ListenerManagement, UpdateProblemArgs, UpdateEmbeddingDatasetIdArgs, \
UpdateEmbeddingDocumentIdArgs
from dataset.models import Document, TaskType, State
from dataset.serializers.common_serializers import drop_dataset_index
from ops import celery_app
from setting.models import Model
from setting.models_provider import get_model
@ -110,6 +111,7 @@ def embedding_by_dataset(dataset_id, model_id):
max_kb.info(_('Start--->Vectorized dataset: {dataset_id}').format(dataset_id=dataset_id))
try:
ListenerManagement.delete_embedding_by_dataset(dataset_id)
drop_dataset_index(dataset_id=dataset_id)
document_list = QuerySet(Document).filter(dataset_id=dataset_id)
max_kb.info(_('Dataset documentation: {document_names}').format(
document_names=", ".join([d.name for d in document_list])))

View File

@ -12,7 +12,6 @@ import uuid
from abc import ABC, abstractmethod
from typing import Dict, List
import jieba
from django.contrib.postgres.search import SearchVector
from django.db.models import QuerySet, Value
from langchain_core.embeddings import Embeddings
@ -169,8 +168,13 @@ class EmbeddingSearch(ISearch):
os.path.join(PROJECT_DIR, "apps", "embedding", 'sql',
'embedding_search.sql')),
with_table_name=True)
embedding_model = select_list(exec_sql,
[json.dumps(query_embedding), *exec_params, similarity, top_number])
embedding_model = select_list(exec_sql, [
len(query_embedding),
json.dumps(query_embedding),
*exec_params,
similarity,
top_number
])
return embedding_model
def support(self, search_mode: SearchMode):
@ -190,8 +194,12 @@ class KeywordsSearch(ISearch):
os.path.join(PROJECT_DIR, "apps", "embedding", 'sql',
'keywords_search.sql')),
with_table_name=True)
embedding_model = select_list(exec_sql,
[to_query(query_text), *exec_params, similarity, top_number])
embedding_model = select_list(exec_sql, [
to_query(query_text),
*exec_params,
similarity,
top_number
])
return embedding_model
def support(self, search_mode: SearchMode):
@ -211,9 +219,14 @@ class BlendSearch(ISearch):
os.path.join(PROJECT_DIR, "apps", "embedding", 'sql',
'blend_search.sql')),
with_table_name=True)
embedding_model = select_list(exec_sql,
[json.dumps(query_embedding), to_query(query_text), *exec_params, similarity,
top_number])
embedding_model = select_list(exec_sql, [
len(query_embedding),
json.dumps(query_embedding),
to_query(query_text),
*exec_params,
similarity,
top_number
])
return embedding_model
def support(self, search_mode: SearchMode):
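The extra leading parameter in each select_list call feeds the new embedding::vector(%s) cast in the SQL, so placeholder order now matters: vector dimension first, then the serialized query vector, then the ${embedding_query} filter params, the similarity threshold, and finally top_number. A minimal sketch of how the embedding-search parameters line up (values are illustrative):

import json

query_embedding = [0.12, -0.03, 0.48]     # illustrative query vector
exec_params = ["<dataset filter param>"]  # whatever ${embedding_query} expands to

params = [
    len(query_embedding),          # -> embedding::vector(%s)
    json.dumps(query_embedding),   # -> ... <=> %s
    *exec_params,                  # -> ${embedding_query} placeholders
    0.6,                           # -> comprehensive_score > %s
    10,                            # -> top_number
]
print(params)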

View File

@ -0,0 +1,127 @@
# Generated by Django 4.2.15 on 2025-03-13 07:21
from django.db import migrations
from django.db.models import Q
mysql_template = """
def query_mysql(host,port, user, password, database, sql):
import pymysql
import json
from pymysql.cursors import DictCursor
from datetime import datetime, date
def default_serializer(obj):
from decimal import Decimal
if isinstance(obj, (datetime, date)):
return obj.isoformat() # 将 datetime/date 转换为 ISO 格式字符串
elif isinstance(obj, Decimal):
return float(obj) # 将 Decimal 转换为 float
raise TypeError(f"Type {type(obj)} not serializable")
try:
# 创建连接
db = pymysql.connect(
host=host,
port=int(port),
user=user,
password=password,
database=database,
cursorclass=DictCursor # 使用字典游标
)
# 使用 cursor() 方法创建一个游标对象 cursor
cursor = db.cursor()
# 使用 execute() 方法执行 SQL 查询
cursor.execute(sql)
# 使用 fetchall() 方法获取所有数据
data = cursor.fetchall()
# 处理 bytes 类型的数据
for row in data:
for key, value in row.items():
if isinstance(value, bytes):
row[key] = value.decode("utf-8") # 转换为字符串
# 将数据序列化为 JSON
json_data = json.dumps(data, default=default_serializer, ensure_ascii=False)
return json_data
# 关闭数据库连接
db.close()
except Exception as e:
print(f"Error while connecting to MySQL: {e}")
raise e
"""
pgsql_template = """
def queryPgSQL(database, user, password, host, port, query):
import psycopg2
import json
from datetime import datetime
# 自定义 JSON 序列化函数
def default_serializer(obj):
from decimal import Decimal
if isinstance(obj, datetime):
return obj.isoformat() # 将 datetime 转换为 ISO 格式字符串
elif isinstance(obj, Decimal):
return float(obj) # 将 Decimal 转换为 float
raise TypeError(f"Type {type(obj)} not serializable")
# 数据库连接信息
conn_params = {
"dbname": database,
"user": user,
"password": password,
"host": host,
"port": port
}
try:
# 建立连接
conn = psycopg2.connect(**conn_params)
print("连接成功!")
# 创建游标对象
cursor = conn.cursor()
# 执行查询语句
cursor.execute(query)
# 获取查询结果
rows = cursor.fetchall()
# 处理 bytes 类型的数据
columns = [desc[0] for desc in cursor.description]
result = [dict(zip(columns, row)) for row in rows]
# 转换为 JSON 格式
json_result = json.dumps(result, default=default_serializer, ensure_ascii=False)
return json_result
except Exception as e:
print(f"发生错误:{e}")
raise e
finally:
# 关闭游标和连接
if cursor:
cursor.close()
if conn:
conn.close()
"""
def fix_type(apps, schema_editor):
FunctionLib = apps.get_model('function_lib', 'FunctionLib')
FunctionLib.objects.filter(
Q(id='22c21b76-0308-11f0-9694-5618c4394482') | Q(template_id='22c21b76-0308-11f0-9694-5618c4394482')
).update(code=mysql_template)
FunctionLib.objects.filter(
Q(id='bd1e8b88-0302-11f0-87bb-5618c4394482') | Q(template_id='bd1e8b88-0302-11f0-87bb-5618c4394482')
).update(code=pgsql_template)
class Migration(migrations.Migration):
dependencies = [
('function_lib', '0003_functionlib_function_type_functionlib_icon_and_more'),
]
operations = [
migrations.RunPython(fix_type)
]

View File

@ -33,11 +33,13 @@ from smartdoc.const import CONFIG
function_executor = FunctionExecutor(CONFIG.get('SANDBOX'))
class FlibInstance:
def __init__(self, function_lib: dict, version: str):
self.function_lib = function_lib
self.version = version
def encryption(message: str):
"""
加密敏感字段数据 加密方式是 如果密码是 1234567890 那么给前端则是 123******890
@ -68,7 +70,8 @@ def encryption(message: str):
class FunctionLibModelSerializer(serializers.ModelSerializer):
class Meta:
model = FunctionLib
fields = ['id', 'name', 'icon', 'desc', 'code', 'input_field_list','init_field_list', 'init_params', 'permission_type', 'is_active', 'user_id', 'template_id',
fields = ['id', 'name', 'icon', 'desc', 'code', 'input_field_list', 'init_field_list', 'init_params',
'permission_type', 'is_active', 'user_id', 'template_id',
'create_time', 'update_time']
@ -148,7 +151,6 @@ class FunctionLibSerializer(serializers.Serializer):
select_user_id = serializers.CharField(required=False, allow_null=True, allow_blank=True)
function_type = serializers.CharField(required=False, allow_null=True, allow_blank=True)
def get_query_set(self):
query_set = QuerySet(FunctionLib).filter(
(Q(user_id=self.data.get('user_id')) | Q(permission_type='PUBLIC')))
@ -269,7 +271,7 @@ class FunctionLibSerializer(serializers.Serializer):
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
if not QuerySet(FunctionLib).filter(id=self.data.get('id')).exists():
if not QuerySet(FunctionLib).filter(user_id=self.data.get('user_id'), id=self.data.get('id')).exists():
raise AppApiException(500, _('Function does not exist'))
def delete(self, with_valid=True):
@ -285,7 +287,8 @@ class FunctionLibSerializer(serializers.Serializer):
if with_valid:
self.is_valid(raise_exception=True)
EditFunctionLib(data=instance).is_valid(raise_exception=True)
edit_field_list = ['name', 'desc', 'code', 'icon', 'input_field_list', 'init_field_list', 'init_params', 'permission_type', 'is_active']
edit_field_list = ['name', 'desc', 'code', 'icon', 'input_field_list', 'init_field_list', 'init_params',
'permission_type', 'is_active']
edit_dict = {field: instance.get(field) for field in edit_field_list if (
field in instance and instance.get(field) is not None)}
@ -317,7 +320,8 @@ class FunctionLibSerializer(serializers.Serializer):
if function_lib.init_params:
function_lib.init_params = json.loads(rsa_long_decrypt(function_lib.init_params))
if function_lib.init_field_list:
password_fields = [i["field"] for i in function_lib.init_field_list if i.get("input_type") == "PasswordInput"]
password_fields = [i["field"] for i in function_lib.init_field_list if
i.get("input_type") == "PasswordInput"]
if function_lib.init_params:
for k in function_lib.init_params:
if k in password_fields and function_lib.init_params[k]:

View File

@ -7238,7 +7238,7 @@ msgstr ""
msgid ""
"The confirmation password must be 6-20 characters long and must be a "
"combination of letters, numbers, and special characters."
msgstr ""
msgstr "The confirmation password must be 6-20 characters long and must be a combination of letters, numbers, and special characters.(Special character support:_、!、@、#、$、(、) ……)"
#: community/apps/users/serializers/user_serializers.py:380
#, python-brace-format
@ -7499,4 +7499,13 @@ msgid "Captcha code error or expiration"
msgstr ""
msgid "captcha"
msgstr ""
msgid "Reasoning enable"
msgstr ""
msgid "Reasoning start tag"
msgstr ""
msgid "Reasoning end tag"
msgstr ""

View File

@ -7395,7 +7395,7 @@ msgstr "语言只支持:"
msgid ""
"The confirmation password must be 6-20 characters long and must be a "
"combination of letters, numbers, and special characters."
msgstr "确认密码长度6-20个字符必须字母、数字、特殊字符组合"
msgstr "确认密码长度6-20个字符必须字母、数字、特殊字符组合特殊字符支持_、!、@、#、$、(、) ……)"
#: community/apps/users/serializers/user_serializers.py:380
#, python-brace-format
@ -7662,4 +7662,13 @@ msgid "Captcha code error or expiration"
msgstr "验证码错误或过期"
msgid "captcha"
msgstr "验证码"
msgstr "验证码"
msgid "Reasoning enable"
msgstr "开启思考过程"
msgid "Reasoning start tag"
msgstr "思考过程开始标签"
msgid "Reasoning end tag"
msgstr "思考过程结束标签"

View File

@ -7405,7 +7405,7 @@ msgstr "語言只支持:"
msgid ""
"The confirmation password must be 6-20 characters long and must be a "
"combination of letters, numbers, and special characters."
msgstr "確認密碼長度6-20個字符必須字母、數字、特殊字符組合"
msgstr "確認密碼長度6-20個字符必須字母、數字、特殊字符組合特殊字元支持_、!、@、#、$、(、) ……)"
#: community/apps/users/serializers/user_serializers.py:380
#, python-brace-format
@ -7672,4 +7672,13 @@ msgid "Captcha code error or expiration"
msgstr "驗證碼錯誤或過期"
msgid "captcha"
msgstr "驗證碼"
msgstr "驗證碼"
msgid "Reasoning enable"
msgstr "開啟思考過程"
msgid "Reasoning start tag"
msgstr "思考過程開始標籤"
msgid "Reasoning end tag"
msgstr "思考過程結束標籤"

View File

@ -0,0 +1,61 @@
import logging
import psycopg
from django.db import migrations
from smartdoc.const import CONFIG
def get_connect(db_name):
conn_params = {
"dbname": db_name,
"user": CONFIG.get('DB_USER'),
"password": CONFIG.get('DB_PASSWORD'),
"host": CONFIG.get('DB_HOST'),
"port": CONFIG.get('DB_PORT')
}
# 建立连接
connect = psycopg.connect(**conn_params)
return connect
def sql_execute(conn, reindex_sql: str, alter_database_sql: str):
"""
执行一条sql
@param reindex_sql:
@param conn:
@param alter_database_sql:
"""
conn.autocommit = True
with conn.cursor() as cursor:
cursor.execute(reindex_sql, [])
cursor.execute(alter_database_sql, [])
cursor.close()
def re_index(apps, schema_editor):
app_db_name = CONFIG.get('DB_NAME')
try:
re_index_database(app_db_name)
except Exception as e:
logging.error(f'reindex database {app_db_name}发送错误:{str(e)}')
try:
re_index_database('root')
except Exception as e:
logging.error(f'reindex database root 发送错误:{str(e)}')
def re_index_database(db_name):
db_conn = get_connect(db_name)
sql_execute(db_conn, f'REINDEX DATABASE "{db_name}";', f'ALTER DATABASE "{db_name}" REFRESH COLLATION VERSION;')
db_conn.close()
class Migration(migrations.Migration):
dependencies = [
('setting', '0010_log'),
]
operations = [
migrations.RunPython(re_index, atomic=False)
]

View File

@ -99,7 +99,7 @@ class BaseChatOpenAI(ChatOpenAI):
except Exception as e:
tokenizer = TokenizerManage.get_tokenizer()
return sum([len(tokenizer.encode(get_buffer_string([m]))) for m in messages])
return self.usage_metadata.get('input_tokens', 0)
return self.usage_metadata.get('input_tokens', self.usage_metadata.get('prompt_tokens', 0))
def get_num_tokens(self, text: str) -> int:
if self.usage_metadata is None or self.usage_metadata == {}:
@ -108,7 +108,8 @@ class BaseChatOpenAI(ChatOpenAI):
except Exception as e:
tokenizer = TokenizerManage.get_tokenizer()
return len(tokenizer.encode(text))
return self.get_last_generation_info().get('output_tokens', 0)
return self.get_last_generation_info().get('output_tokens',
self.get_last_generation_info().get('completion_tokens', 0))
def _stream(self, *args: Any, **kwargs: Any) -> Iterator[ChatGenerationChunk]:
kwargs['stream_usage'] = True

View File

@ -7,6 +7,7 @@
2. 程序需要, 用户不需要更改的写到settings中
3. 程序需要, 用户需要更改的写到本config中
"""
import datetime
import errno
import logging
import os
@ -112,13 +113,19 @@ class Config(dict):
"USER": self.get('DB_USER'),
"PASSWORD": self.get('DB_PASSWORD'),
"ENGINE": self.get('DB_ENGINE'),
"CONN_MAX_AGE": 0,
"POOL_OPTIONS": {
"POOL_SIZE": 20,
"MAX_OVERFLOW": int(self.get('DB_MAX_OVERFLOW')),
'RECYCLE': 30 * 60
"RECYCLE": 1800,
"TIMEOUT": 30,
'PRE_PING': True
}
}
def get_session_timeout(self):
return datetime.timedelta(seconds=int(self.get('SESSION_TIMEOUT', 60 * 60 * 2)))
def get_language_code(self):
return self.get('LANGUAGE_CODE', 'zh-CN')

View File

@ -8,6 +8,7 @@
"""
import base64
import datetime
import json
import os
import random
import re
@ -37,6 +38,7 @@ from common.response.result import get_api_response
from common.util.common import valid_license, get_random_chars
from common.util.field_message import ErrMessage
from common.util.lock import lock
from common.util.rsa_util import decrypt, get_key_pair_by_sql
from dataset.models import DataSet, Document, Paragraph, Problem, ProblemParagraphMapping
from embedding.task import delete_embedding_by_dataset_id_list
from function_lib.models.function import FunctionLib
@ -75,7 +77,8 @@ class SystemSerializer(ApiMixin, serializers.Serializer):
xpack_cache = DBModelManage.get_model('xpack_cache')
return {'version': version, 'IS_XPACK': hasattr(settings, 'IS_XPACK'),
'XPACK_LICENSE_IS_VALID': False if xpack_cache is None else xpack_cache.get('XPACK_LICENSE_IS_VALID',
False)}
False),
'ras': get_key_pair_by_sql().get('key')}
@staticmethod
def get_response_body_api():
@ -96,35 +99,13 @@ class LoginSerializer(ApiMixin, serializers.Serializer):
password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Password")))
captcha = serializers.CharField(required=True, error_messages=ErrMessage.char(_("captcha")))
encryptedData = serializers.CharField(required=False, label=_('encryptedData'), allow_null=True,
allow_blank=True)
def is_valid(self, *, raise_exception=False):
def get_user_token(self, user):
"""
校验参数
:param raise_exception: Whether to throw an exception can only be True
:return: User information
"""
super().is_valid(raise_exception=True)
captcha = self.data.get('captcha')
captcha_value = captcha_cache.get(f"LOGIN:{captcha.lower()}")
if captcha_value is None:
raise AppApiException(1005, _("Captcha code error or expiration"))
username = self.data.get("username")
password = password_encrypt(self.data.get("password"))
user = QuerySet(User).filter(Q(username=username,
password=password) | Q(email=username,
password=password)).first()
if user is None:
raise ExceptionCodeConstants.INCORRECT_USERNAME_AND_PASSWORD.value.to_app_api_exception()
if not user.is_active:
raise AppApiException(1005, _("The user has been disabled, please contact the administrator!"))
return user
def get_user_token(self):
"""
Get user token
:return: User Token (authentication information)
"""
user = self.is_valid()
token = signing.dumps({'username': user.username, 'id': str(user.id), 'email': user.email,
'type': AuthenticationType.USER.value})
return token
@ -136,11 +117,13 @@ class LoginSerializer(ApiMixin, serializers.Serializer):
def get_request_body_api(self):
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['username', 'password'],
required=['username', 'encryptedData'],
properties={
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"), description=_("Username")),
'password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Password"), description=_("Password")),
'captcha': openapi.Schema(type=openapi.TYPE_STRING, title=_("captcha"), description=_("captcha"))
'captcha': openapi.Schema(type=openapi.TYPE_STRING, title=_("captcha"), description=_("captcha")),
'encryptedData': openapi.Schema(type=openapi.TYPE_STRING, title=_("encryptedData"),
description=_("encryptedData"))
}
)
@ -152,6 +135,29 @@ class LoginSerializer(ApiMixin, serializers.Serializer):
description="认证token"
))
@staticmethod
def login(instance):
username = instance.get("username", "")
encryptedData = instance.get("encryptedData", "")
if encryptedData:
json_data = json.loads(decrypt(encryptedData))
instance.update(json_data)
LoginSerializer(data=instance).is_valid(raise_exception=True)
password = instance.get("password")
captcha = instance.get("captcha", "")
captcha_value = captcha_cache.get(f"LOGIN:{captcha.lower()}")
if captcha_value is None:
raise AppApiException(1005, _("Captcha code error or expiration"))
user = QuerySet(User).filter(Q(username=username,
password=password_encrypt(password)) | Q(email=username,
password=password_encrypt(
password))).first()
if user is None:
raise ExceptionCodeConstants.INCORRECT_USERNAME_AND_PASSWORD.value.to_app_api_exception()
if not user.is_active:
raise AppApiException(1005, _("The user has been disabled, please contact the administrator!"))
return user
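With this change the browser sends the login fields as a single RSA-encrypted, base64-encoded JSON blob (node-forge, RSAES-PKCS1-V1_5) in encryptedData, and login() decrypts it and merges the result back into the request data before validation. The repository's decrypt helper in common.util.rsa_util is not part of this diff; the following is a minimal, purely illustrative equivalent using the cryptography package, not the project's actual implementation:

import base64
import json

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding


def decrypt_login_payload(encrypted_b64: str, private_key_pem: bytes) -> dict:
    # Reverse of the frontend flow: base64-decode, RSA PKCS#1 v1.5 decrypt, parse JSON.
    private_key = serialization.load_pem_private_key(private_key_pem, password=None)
    plaintext = private_key.decrypt(base64.b64decode(encrypted_b64), padding.PKCS1v15())
    return json.loads(plaintext)  # e.g. {"username": ..., "password": ..., "captcha": ...}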
class RegisterSerializer(ApiMixin, serializers.Serializer):
"""

View File

@ -22,7 +22,7 @@ from common.constants.permission_constants import PermissionConstants, CompareCo
from common.log.log import log
from common.response import result
from common.util.common import encryption
from smartdoc.settings import JWT_AUTH
from smartdoc.const import CONFIG
from users.serializers.user_serializers import RegisterSerializer, LoginSerializer, CheckCodeSerializer, \
RePasswordSerializer, \
SendEmailSerializer, UserProfile, UserSerializer, UserManageSerializer, UserInstanceSerializer, SystemSerializer, \
@ -84,7 +84,7 @@ class SwitchUserLanguageView(APIView):
description=_("language")),
}
),
responses=RePasswordSerializer().get_response_body_api(),
responses=result.get_default_response(),
tags=[_("User management")])
@log(menu='User management', operate='Switch Language',
get_operation_object=lambda r, k: {'name': r.user.username})
@ -111,7 +111,7 @@ class ResetCurrentUserPasswordView(APIView):
description=_("Password"))
}
),
responses=RePasswordSerializer().get_response_body_api(),
responses=result.get_default_response(),
tags=[_("User management")])
@log(menu='User management', operate='Modify current user password',
get_operation_object=lambda r, k: {'name': r.user.username},
@ -195,11 +195,9 @@ class Login(APIView):
get_details=_get_details,
get_operation_object=lambda r, k: {'name': r.data.get('username')})
def post(self, request: Request):
login_request = LoginSerializer(data=request.data)
# 校验请求参数
user = login_request.is_valid(raise_exception=True)
token = login_request.get_user_token()
token_cache.set(token, user, timeout=JWT_AUTH['JWT_EXPIRATION_DELTA'])
user = LoginSerializer().login(request.data)
token = LoginSerializer().get_user_token(user)
token_cache.set(token, user, timeout=CONFIG.get_session_timeout())
return result.success(token)

View File

@ -5,7 +5,7 @@ RUN cd ui && \
npm install && \
npm run build && \
rm -rf ./node_modules
FROM ghcr.io/1panel-dev/maxkb-python-pg:python3.11-pg15.8 AS stage-build
FROM ghcr.io/1panel-dev/maxkb-python-pg:python3.11-pg15.14 AS stage-build
ARG DEPENDENCIES=" \
python3-pip"
@ -29,7 +29,7 @@ RUN python3 -m venv /opt/py3 && \
poetry install && \
export MAXKB_CONFIG_TYPE=ENV && python3 /opt/maxkb/app/apps/manage.py compilemessages
FROM ghcr.io/1panel-dev/maxkb-python-pg:python3.11-pg15.8
FROM ghcr.io/1panel-dev/maxkb-python-pg:python3.11-pg15.14
ARG DOCKER_IMAGE_TAG=dev \
BUILD_AT \
GITHUB_COMMIT
@ -70,6 +70,7 @@ RUN chmod 755 /opt/maxkb/app/installer/run-maxkb.sh && \
useradd --no-create-home --home /opt/maxkb/app/sandbox sandbox -g root && \
chown -R sandbox:root /opt/maxkb/app/sandbox && \
chmod g-x /usr/local/bin/* /usr/bin/* /bin/* /usr/sbin/* /sbin/* /usr/lib/postgresql/15/bin/* && \
chmod g+xr /usr/bin/ld.so && \
chmod g+x /usr/local/bin/python* && \
find /etc/ -type f ! -path '/etc/resolv.conf' ! -path '/etc/hosts' | xargs chmod g-rx

View File

@ -1,5 +1,5 @@
FROM python:3.11-slim-bullseye AS python-stage
FROM postgres:15.8-bullseye
FROM python:3.11-slim-trixie AS python-stage
FROM postgres:15.14-trixie
ARG DEPENDENCIES=" \
libexpat1-dev \

View File

@ -23,7 +23,7 @@ langchain-huggingface = "0.1.2"
langchain-ollama = "0.3.2"
langgraph = "0.3.27"
mcp = "1.8.0"
psycopg2-binary = "2.9.10"
psycopg = { extras = ["binary"], version = "3.2.9" }
jieba = "0.42.1"
diskcache = "5.6.3"
pillow = "10.4.0"
@ -39,7 +39,7 @@ html2text = "2024.2.26"
django-ipware = "6.0.5"
django-apscheduler = "0.6.2"
pymupdf = "1.24.9"
pypdf = "4.3.1"
pypdf = "6.0.0"
rapidocr-onnxruntime = "1.3.24"
python-docx = "1.1.2"
xlwt = "1.3.0"
@ -64,7 +64,7 @@ pylint = "3.3.6"
pydub = "0.25.1"
cffi = "1.17.1"
pysilk = "0.0.1"
django-db-connection-pool = "1.2.5"
django-db-connection-pool = "1.2.6"
opencv-python-headless = "4.11.0.86"
pymysql = "1.1.1"
accelerate = "1.6.0"

View File

@ -34,10 +34,12 @@
"katex": "^0.16.10",
"lodash": "^4.17.21",
"marked": "^12.0.2",
"md-editor-v3": "^4.16.7",
"md-editor-v3": "^5.8.4",
"mermaid": "^10.9.0",
"mitt": "^3.0.0",
"moment": "^2.30.1",
"nanoid": "^5.1.5",
"node-forge": "^1.3.1",
"npm": "^10.2.4",
"nprogress": "^0.2.0",
"pinia": "^2.1.6",
@ -53,8 +55,7 @@
"vue-draggable-plus": "^0.6.0",
"vue-i18n": "^9.13.1",
"vue-router": "^4.2.4",
"vue3-menus": "^1.1.2",
"vuedraggable": "^4.1.0"
"vue3-menus": "^1.1.2"
},
"devDependencies": {
"@rushstack/eslint-patch": "^1.3.2",
@ -62,6 +63,7 @@
"@types/file-saver": "^2.0.7",
"@types/jsdom": "^21.1.1",
"@types/node": "^18.17.5",
"@types/node-forge": "^1.3.14",
"@types/nprogress": "^0.2.0",
"@vitejs/plugin-vue": "^4.3.1",
"@vue/eslint-config-prettier": "^8.0.0",

View File

@ -227,7 +227,7 @@ const getApplicationHitTest: (
data: any,
loading?: Ref<boolean>
) => Promise<Result<Array<any>>> = (application_id, data, loading) => {
return get(`${prefix}/${application_id}/hit_test`, data, loading)
return put(`${prefix}/${application_id}/hit_test`, data, undefined, loading)
}
/**

View File

@ -186,7 +186,7 @@ const getDatasetHitTest: (
data: any,
loading?: Ref<boolean>
) => Promise<Result<Array<any>>> = (dataset_id, data, loading) => {
return get(`${prefix}/${dataset_id}/hit_test`, data, loading)
return put(`${prefix}/${dataset_id}/hit_test`, data, undefined, loading)
}
/**

View File

@ -41,6 +41,7 @@ interface LoginRequest {
*
*/
captcha: string
encryptedData?: string
}
interface RegisterRequest {

View File

@ -10,23 +10,20 @@ import type {
} from '@/api/type/user'
import type { Ref } from 'vue'
/**
*
* @param auth_type
* @param request
* @param loading
* @returns
*/
const login: (
auth_type: string,
request: LoginRequest,
loading?: Ref<boolean>
) => Promise<Result<string>> = (auth_type, request, loading) => {
if (auth_type !== '') {
return post(`/${auth_type}/login`, request, undefined, loading)
}
const login: (request: LoginRequest, loading?: Ref<boolean>) => Promise<Result<any>> = (
request,
loading
) => {
return post('/user/login', request, undefined, loading)
}
const ldapLogin: (request: LoginRequest, loading?: Ref<boolean>) => Promise<Result<any>> = (
request,
loading
) => {
return post('/ldap/login', request, undefined, loading)
}
/**
*
* @returns
@ -234,5 +231,6 @@ export default {
getDingOauth2Callback,
getlarkCallback,
getQrSource,
getCaptcha
getCaptcha,
ldapLogin
}

View File

@ -163,7 +163,7 @@
@TouchEnd="TouchEnd"
:time="recorderTime"
:start="recorderStatus === 'START'"
:disabled="loading"
:disabled="localLoading"
/>
<el-input
v-else
@ -182,6 +182,7 @@
@keydown.enter="sendChatHandle($event)"
@paste="handlePaste"
@drop="handleDrop"
@dragover.prevent="handleDragOver"
/>
<div class="operate flex align-center">
@ -198,7 +199,7 @@
</span>
<span class="flex align-center" v-else>
<el-button
:disabled="loading"
:disabled="localLoading"
text
@click="startRecording"
v-if="recorderStatus === 'STOP'"
@ -251,7 +252,7 @@
}}{{ getAcceptList().replace(/\./g, '').replace(/,/g, '、').toUpperCase() }}
</div>
</template>
<el-button text :disabled="checkMaxFilesLimit() || loading" class="mt-4">
<el-button text :disabled="checkMaxFilesLimit() || localLoading" class="mt-4">
<el-icon><Paperclip /></el-icon>
</el-button>
</el-tooltip>
@ -267,11 +268,11 @@
<el-button
text
class="sent-button"
:disabled="isDisabledChat || loading"
:disabled="isDisabledChat || localLoading"
@click="sendChatHandle"
>
<img v-show="isDisabledChat || loading" src="@/assets/icon_send.svg" alt="" />
<SendIcon v-show="!isDisabledChat && !loading" />
<img v-show="isDisabledChat || localLoading" src="@/assets/icon_send.svg" alt="" />
<SendIcon v-show="!isDisabledChat && !localLoading" />
</el-button>
</template>
</div>
@ -287,7 +288,7 @@
</div>
</template>
<script setup lang="ts">
import { ref, computed, onMounted, nextTick, watch } from 'vue'
import { ref, computed, onMounted, nextTick, watch, reactive } from 'vue'
import Recorder from 'recorder-core'
import TouchChat from './TouchChat.vue'
import applicationApi from '@/api/application'
@ -338,9 +339,13 @@ const chatId_context = computed({
emit('update:chatId', v)
}
})
const uploadLoading = computed(() => {
return Object.values(filePromisionDict.value).length > 0
})
const localLoading = computed({
get: () => {
return props.loading
return props.loading || uploadLoading.value
},
set: (v) => {
emit('update:loading', v)
@ -392,17 +397,7 @@ const checkMaxFilesLimit = () => {
uploadOtherList.value.length
)
}
const file_name_eq = (str: string, str1: string) => {
return (
str.replaceAll(' ', '') === str1.replaceAll(' ', '') ||
decodeHtmlEntities(str) === decodeHtmlEntities(str1)
)
}
function decodeHtmlEntities(str: string) {
const tempDiv = document.createElement('div')
tempDiv.innerHTML = str
return tempDiv.textContent || tempDiv.innerText || ''
}
const filePromisionDict: any = ref<any>({})
const uploadFile = async (file: any, fileList: any) => {
const { maxFiles, fileLimit } = props.applicationDetails.file_upload_setting
//
@ -423,10 +418,11 @@ const uploadFile = async (file: any, fileList: any) => {
fileList.splice(0, fileList.length)
return
}
filePromisionDict.value[file.uid] = false
const formData = new FormData()
formData.append('file', file.raw, file.name)
//
file = reactive(file)
const extension = file.name.split('.').pop().toUpperCase() //
if (imageExtensions.includes(extension)) {
uploadImageList.value.push(file)
@ -460,44 +456,9 @@ const uploadFile = async (file: any, fileList: any) => {
)
.then((response) => {
fileList.splice(0, fileList.length)
uploadImageList.value.forEach((file: any) => {
const f = response.data.filter((f: any) => file_name_eq(f.name, file.name))
if (f.length > 0) {
file.url = f[0].url
file.file_id = f[0].file_id
}
})
uploadDocumentList.value.forEach((file: any) => {
const f = response.data.filter((f: any) => file_name_eq(f.name, file.name))
if (f.length > 0) {
file.url = f[0].url
file.file_id = f[0].file_id
}
})
uploadAudioList.value.forEach((file: any) => {
const f = response.data.filter((f: any) => file_name_eq(f.name, file.name))
if (f.length > 0) {
file.url = f[0].url
file.file_id = f[0].file_id
}
})
uploadVideoList.value.forEach((file: any) => {
const f = response.data.filter((f: any) => file_name_eq(f.name, file.name))
if (f.length > 0) {
file.url = f[0].url
file.file_id = f[0].file_id
}
})
uploadOtherList.value.forEach((file: any) => {
const f = response.data.filter((f: any) => file_name_eq(f.name, file.name))
if (f.length > 0) {
file.url = f[0].url
file.file_id = f[0].file_id
}
})
if (!inputValue.value && uploadImageList.value.length > 0) {
inputValue.value = t('chat.uploadFile.imageMessage')
}
file.url = response.data[0].url
file.file_id = response.data[0].file_id
delete filePromisionDict.value[file.uid]
})
}
//
@ -529,6 +490,7 @@ const handlePaste = (event: ClipboardEvent) => {
//
event.preventDefault()
}
//
const handleDrop = (event: DragEvent) => {
if (!props.applicationDetails.file_upload_enable) return
@ -548,6 +510,12 @@ const handleDrop = (event: DragEvent) => {
uploadFile(elFile, [elFile])
})
}
const handleDragOver = (event: DragEvent) => {
if (event.dataTransfer) {
event.dataTransfer.dropEffect = 'copy' // Firefox
}
}
// id
const intervalId = ref<any | null>(null)
//
@ -565,7 +533,16 @@ const uploadOtherList = ref<Array<any>>([])
const showDelete = ref('')
const isDisabledChat = computed(
() => !(inputValue.value.trim() && (props.appId || props.applicationDetails?.name))
() =>
!(
(inputValue.value.trim() ||
uploadImageList.value.length > 0 ||
uploadDocumentList.value.length > 0 ||
uploadVideoList.value.length > 0 ||
uploadAudioList.value.length > 0 ||
uploadOtherList.value.length > 0) &&
(props.appId || props.applicationDetails?.name)
)
)
//
const isMicrophone = ref(false)
@ -765,11 +742,34 @@ const stopTimer = () => {
}
}
const getQuestion = () => {
if (!inputValue.value.trim()) {
const fileLenth = [
uploadImageList.value.length > 0,
uploadDocumentList.value.length > 0,
uploadAudioList.value.length > 0,
uploadOtherList.value.length > 0
]
if (fileLenth.filter((f) => f).length > 1) {
return t('chat.uploadFile.otherMessage')
} else if (fileLenth[0]) {
return t('chat.uploadFile.imageMessage')
} else if (fileLenth[1]) {
return t('chat.uploadFile.documentMessage')
} else if (fileLenth[2]) {
return t('chat.uploadFile.audioMessage')
} else if (fileLenth[3]) {
return t('chat.uploadFile.otherMessage')
}
}
return inputValue.value.trim()
}
function autoSendMessage() {
props
.validate()
.then(() => {
props.sendMessage(inputValue.value, {
props.sendMessage(getQuestion(), {
image_list: uploadImageList.value,
document_list: uploadDocumentList.value,
audio_list: uploadAudioList.value,
@ -803,8 +803,15 @@ function sendChatHandle(event?: any) {
if (!event?.ctrlKey && !event?.shiftKey && !event?.altKey && !event?.metaKey) {
//
event?.preventDefault()
if (!isDisabledChat.value && !props.loading && !event?.isComposing) {
if (inputValue.value.trim()) {
if (!isDisabledChat.value && !localLoading.value && !event?.isComposing) {
if (
inputValue.value.trim() ||
uploadImageList.value.length > 0 ||
uploadDocumentList.value.length > 0 ||
uploadAudioList.value.length > 0 ||
uploadVideoList.value.length > 0 ||
uploadOtherList.value.length > 0
) {
autoSendMessage()
}
}

View File

@ -454,14 +454,22 @@ class AudioManage {
this.statusList[index] = AudioStatus.ERROR
}
} else {
if (window.speechSynthesis.paused) {
if (window.speechSynthesis.paused && self) {
window.speechSynthesis.resume()
this.statusList[index] = AudioStatus.PLAY_INT
} else {
if (window.speechSynthesis.pending) {
//
if (window.speechSynthesis.speaking) {
window.speechSynthesis.cancel()
}
speechSynthesis.speak(audioElement)
this.statusList[index] = AudioStatus.PLAY_INT
//
setTimeout(() => {
if (speechSynthesis.speaking) {
return
}
speechSynthesis.speak(audioElement)
this.statusList[index] = AudioStatus.PLAY_INT
}, 500)
}
}
}
@ -482,11 +490,6 @@ class AudioManage {
this.statusList[index] = AudioStatus.READY
if (self) {
window.speechSynthesis.pause()
nextTick(() => {
if (!window.speechSynthesis.paused) {
window.speechSynthesis.cancel()
}
})
} else {
window.speechSynthesis.cancel()
}

View File

@ -17,6 +17,7 @@
:source="prologue"
:send-message="sendMessage"
reasoning_content=""
:type="type"
></MdRenderer>
</el-card>
</div>
@ -58,7 +59,7 @@ const prologue = computed(() => {
]
let _temp = temp
for (const index in tag_list) {
_temp = _temp.replaceAll(tag_list[index], '')
_temp = _temp.replace(new RegExp(tag_list[index], 'g'), '')
}
const quick_question_list = _temp.match(/-\s.+/g)
let result = temp

View File

@ -163,13 +163,13 @@ const initialApiFormData = ref({})
const isUserInput = computed(
() =>
props.applicationDetails.work_flow?.nodes?.filter((v: any) => v.id === 'base-node')[0]
.properties.user_input_field_list.length > 0
?.properties.user_input_field_list.length > 0
)
const isAPIInput = computed(
() =>
props.type === 'debug-ai-chat' &&
props.applicationDetails.work_flow?.nodes?.filter((v: any) => v.id === 'base-node')[0]
.properties.api_input_field_list.length > 0
?.properties.api_input_field_list.length > 0
)
const showUserInputContent = computed(() => {
return (

View File

@ -5,11 +5,13 @@
<template v-for="(item, index) in md_view_list" :key="index">
<div
v-if="item.type === 'question'"
@click="sendMessage ? sendMessage(item.content, 'new') : (content: string) => {}"
@click="
sendMessage && type !== 'log' ? sendMessage(item.content, 'new') : (content: string) => {}
"
class="problem-button mt-4 mb-4 flex"
:class="sendMessage ? 'cursor' : 'disabled'"
:class="sendMessage && type !== 'log' ? 'cursor' : 'disabled'"
>
<el-icon class="mr-8" style="margin-top: 2px;">
<el-icon class="mr-8" style="margin-top: 2px">
<EditPen />
</el-icon>
{{ item.content }}
@ -76,6 +78,7 @@ const props = withDefaults(
chat_record_id?: string
runtime_node_id?: string
disabled?: boolean
type?: 'log' | 'ai-chat' | 'debug-ai-chat'
}>(),
{
source: '',
@ -237,7 +240,7 @@ const split_form_rander_ = (source: string, type: string) => {
padding: 12px;
box-sizing: border-box;
color: var(--el-text-color-regular);
word-break: break-all;
word-break: break-word;
&:hover {
background: var(--el-color-primary-light-9);

View File

@ -29,7 +29,7 @@ const showThink = ref<boolean>(true)
}
.reasoning-md {
padding-left: 8px;
--md-color: var(--app-text-color-secondary) !important;
--md-color: var(--app-input-color-placeholder) !important;
}
}
</style>

View File

@ -72,7 +72,7 @@
<SelectProviderDialog
v-if="showFooter"
ref="selectProviderRef"
@change="(provider, modelType) => openCreateModel(provider, modelType)"
@change="(provider: any, modelType: any) => openCreateModel(provider, modelType)"
/>
</div>
</template>
@ -82,8 +82,6 @@ import type { Provider } from '@/api/type/model'
import { relatedObject } from '@/utils/utils'
import CreateModelDialog from '@/views/template/component/CreateModelDialog.vue'
import SelectProviderDialog from '@/views/template/component/SelectProviderDialog.vue'
import { t } from '@/locales'
import useStore from '@/stores'
defineOptions({ name: 'ModelSelect' })

View File

@ -63,6 +63,9 @@ export default {
limitMessage2: 'files',
sizeLimit: 'Each file must not exceed',
imageMessage: 'Please process the image content',
documentMessage: 'Please understand the content of the document',
audioMessage: 'Please understand the audio content',
otherMessage: 'Please understand the file content',
errorMessage: 'Upload Failed'
},
executionDetails: {

View File

@ -139,7 +139,7 @@ Response requirements:
hybridSearch: 'Hybrid Search',
hybridSearchTooltip:
'Hybrid search is a retrieval method based on both vector and text similarity, suitable for medium data volumes in the knowledge.',
similarityThreshold: 'Similarity higher than',
similarityThreshold: 'Similarity not lower than',
similarityTooltip: 'The higher the similarity, the stronger the correlation.',
topReferences: 'Top N Segments',
maxCharacters: 'Maximum Characters per Reference',

View File

@ -149,7 +149,7 @@ export default {
tooltip: 'When user asks a question, handle matched segments according to the set method.'
},
similarity: {
label: 'Similarity Higher Than',
label: 'Similarity not lower than',
placeholder: 'Directly return segment content',
requiredMessage: 'Please enter similarity value'
}

View File

@ -61,6 +61,9 @@ export default {
limitMessage2: '个文件',
sizeLimit: '单个文件大小不能超过',
imageMessage: '请解析图片内容',
documentMessage: '请理解文档内容',
audioMessage: '请理解音频内容',
otherMessage: '请理解文件内容',
errorMessage: '上传失败'
},
executionDetails: {

View File

@ -130,7 +130,7 @@ export default {
hybridSearch: '混合检索',
hybridSearchTooltip:
'混合检索是一种基于向量和文本相似度的检索方式,适用于知识库中的中等数据量场景。',
similarityThreshold: '相似度于',
similarityThreshold: '相似度不低于',
similarityTooltip: '相似度越高相关性越强。',
topReferences: '引用分段数 TOP',
maxCharacters: '最多引用字符数',

View File

@ -147,7 +147,7 @@ export default {
tooltip: '用户提问时,命中文档下的分段时按照设置的方式进行处理。'
},
similarity: {
label: '相似度于',
label: '相似度不低于',
placeholder: '直接返回分段内容',
requiredMessage: '请输入相似度'
}

View File

@ -61,6 +61,9 @@ export default {
limitMessage2: '個文件',
sizeLimit: '單個文件大小不能超過',
imageMessage: '請解析圖片內容',
documentMessage: '請理解檔案內容',
audioMessage: '請理解音訊內容',
otherMessage: '請理解檔案內容',
errorMessage: '上傳失敗'
},
executionDetails: {

View File

@ -129,7 +129,7 @@ export default {
hybridSearch: '混合檢索',
hybridSearchTooltip:
'混合檢索是一種基於向量和文本相似度的檢索方式,適用於知識庫中的中等數據量場景。',
similarityThreshold: '相似度於',
similarityThreshold: '相似度不低於',
similarityTooltip: '相似度越高相關性越強。',
topReferences: '引用分段數 TOP',
maxCharacters: '最多引用字元數',

View File

@ -146,7 +146,7 @@ export default {
tooltip: '用戶提問時,命中文檔下的分段時按照設置的方式進行處理。'
},
similarity: {
label: '相似度高于',
label: '相似度不低於',
placeholder: '直接返回分段内容',
requiredMessage: '请输入相似度'
}

View File

@ -8,6 +8,7 @@ import { useElementPlusTheme } from 'use-element-plus-theme'
import { defaultPlatformSetting } from '@/utils/theme'
import { useLocalStorage } from '@vueuse/core'
import { localeConfigKey, getBrowserLang } from '@/locales/index'
export interface userStateTypes {
userType: number // 1 系统操作者 2 对话用户
userInfo: User | null
@ -17,6 +18,7 @@ export interface userStateTypes {
XPACK_LICENSE_IS_VALID: false
isXPack: false
themeInfo: any
rasKey: string
}
const useUserStore = defineStore({
@ -29,7 +31,8 @@ const useUserStore = defineStore({
userAccessToken: '',
XPACK_LICENSE_IS_VALID: false,
isXPack: false,
themeInfo: null
themeInfo: null,
rasKey: ''
}),
actions: {
getLanguage() {
@ -100,6 +103,7 @@ const useUserStore = defineStore({
this.version = ok.data?.version || '-'
this.isXPack = ok.data?.IS_XPACK
this.XPACK_LICENSE_IS_VALID = ok.data?.XPACK_LICENSE_IS_VALID
this.rasKey = ok.data?.ras || ''
if (this.isEnterprise()) {
await this.theme()
@ -135,8 +139,15 @@ const useUserStore = defineStore({
})
},
async login(auth_type: string, username: string, password: string, captcha: string) {
return UserApi.login(auth_type, { username, password, captcha }).then((ok) => {
async login(data: any, loading?: Ref<boolean>) {
return UserApi.login(data).then((ok) => {
this.token = ok.data
localStorage.setItem('token', ok.data)
return this.profile()
})
},
async asyncLdapLogin(data: any, loading?: Ref<boolean>) {
return UserApi.ldapLogin(data).then((ok) => {
this.token = ok.data
localStorage.setItem('token', ok.data)
return this.profile()

View File

@ -62,7 +62,7 @@
}
.el-form-item__label {
font-weight: 400;
width: 100%;
width: 100% !important;
}
.el-form-item__error {

View File

@ -1,5 +1,5 @@
import { MsgError } from '@/utils/message'
import { nanoid } from 'nanoid'
export function toThousands(num: any) {
return num?.toString().replace(/\d+/, function (n: any) {
return n.replace(/(\d)(?=(?:\d{3})+$)/g, '$1,')
@ -25,7 +25,7 @@ export function filesize(size: number) {
id
*/
export const randomId = function () {
return Math.floor(Math.random() * 10000) + ''
return nanoid()
}
/*
@ -48,7 +48,9 @@ const typeList: any = {
export function getImgUrl(name: string) {
const list = Object.values(typeList).flat()
const type = list.includes(fileType(name).toLowerCase()) ? fileType(name).toLowerCase() : 'unknown'
const type = list.includes(fileType(name).toLowerCase())
? fileType(name).toLowerCase()
: 'unknown'
return new URL(`../assets/fileType/${type}-icon.svg`, import.meta.url).href
}
// 是否是白名单后缀

View File

@ -3,7 +3,7 @@
<div class="header border-b flex-between p-12-24">
<div class="flex align-center">
<back-button @click="back"></back-button>
<h4>{{ detail?.name }}</h4>
<h4 class="ellipsis" style="max-width: 270px" :title="detail?.name">{{ detail?.name }}</h4>
<div v-if="showHistory && disablePublic">
<el-text type="info" class="ml-16 color-secondary"
>{{ $t('views.applicationWorkflow.info.previewVersion') }}
@ -101,7 +101,7 @@
/>
</div>
<h4>
<h4 class="ellipsis" style="max-width: 270px" :title="detail?.name">
{{ detail?.name || $t('views.application.applicationForm.form.appName.label') }}
</h4>
</div>
@ -279,7 +279,6 @@ async function publicHandle() {
return
}
applicationApi.putPublishApplication(id as String, obj, loading).then(() => {
application.asyncGetApplicationDetail(id, loading).then((res: any) => {
detail.value.name = res.data.name
MsgSuccess(t('views.applicationWorkflow.tip.publicSuccess'))

View File

@ -28,7 +28,9 @@
/>
</div>
<h4>{{ applicationDetail?.name }}</h4>
<h4 class="ellipsis-1" style="width: 50%" :title="applicationDetail?.name">
{{ applicationDetail?.name }}
</h4>
</div>
</div>
<div>
@ -263,7 +265,7 @@ function getChatRecord() {
currentChatId.value,
paginationConfig,
loading,
false
true
)
.then((res: any) => {
paginationConfig.total = res.data.total

View File

@ -27,7 +27,9 @@
/>
</div>
<h4>{{ applicationDetail?.name }}</h4>
<h4 class="ellipsis-1" style="width: 66%" :title="applicationDetail?.name">
{{ applicationDetail?.name }}
</h4>
</div>
</div>
<div>
@ -259,7 +261,7 @@ function getChatRecord() {
currentChatId.value,
paginationConfig,
loading,
false
true
)
.then((res: any) => {
paginationConfig.total = res.data.total

View File

@ -27,7 +27,9 @@
:size="32"
/>
</div>
<h4>{{ applicationDetail?.name }}</h4>
<h4 class="ellipsis-1" style="width: 66%" :title="applicationDetail?.name">
{{ applicationDetail?.name }}
</h4>
</div>
</div>
<div>
@ -313,7 +315,7 @@ function getChatRecord() {
currentChatId.value,
paginationConfig.value,
loading,
false
true
)
.then((res: any) => {
paginationConfig.value.total = res.data.total

View File

@ -256,9 +256,7 @@ async function submit() {
} else {
if (detail.value.type === '2') {
datasetApi.putLarkDataset(id, obj, loading).then((res) => {
datasetApi.putReEmbeddingDataset(id).then(() => {
MsgSuccess(t('common.saveSuccess'))
})
MsgSuccess(t('common.saveSuccess'))
})
} else {
datasetApi.putDataset(id, obj, loading).then((res) => {

View File

@ -8,15 +8,16 @@
</h4>
</template>
<div class="hit-test__main p-16" v-loading="loading">
<div class="question-title" :style="{ visibility: questionTitle ? 'visible' : 'hidden' }">
<div class="avatar">
<AppAvatar>
<img src="@/assets/user-icon.svg" style="width: 54%" alt="" />
</AppAvatar>
</div>
<div class="content">
<h4 class="text break-all">{{ questionTitle }}</h4>
</div>
<div
class="question-title flex align-center"
:style="{ visibility: questionTitle ? 'visible' : 'hidden' }"
>
<AppAvatar>
<img src="@/assets/user-icon.svg" style="width: 54%" alt="" />
</AppAvatar>
<h4 class="break-all ellipsis-1 ml-8" style="width: 66%" :title="questionTitle">
{{ questionTitle }}
</h4>
</div>
<el-scrollbar>
<div class="hit-test-height">
@ -349,20 +350,6 @@ onMounted(() => {})
</script>
<style lang="scss" scoped>
.hit-test {
.question-title {
.avatar {
float: left;
}
.content {
padding-left: 40px;
.text {
padding: 6px 0;
height: 34px;
box-sizing: border-box;
}
}
}
&__operate {
.operate-textarea {
box-shadow: 0px 6px 24px 0px rgba(31, 35, 41, 0.08);

View File

@ -45,7 +45,13 @@
>
</el-input>
<img :src="identifyCode" alt="" height="38" class="ml-8 cursor border border-r-4" @click="makeCode" />
<img
:src="identifyCode"
alt=""
height="38"
class="ml-8 cursor border border-r-4"
@click="makeCode"
/>
</div>
</el-form-item>
</div>
@ -124,27 +130,32 @@ import useStore from '@/stores'
import authApi from '@/api/auth-setting'
import useApi from '@/api/user'
import { MsgConfirm, MsgError, MsgSuccess } from '@/utils/message'
import { t, getBrowserLang } from '@/locales'
import QrCodeTab from '@/views/login/components/QrCodeTab.vue'
import { useI18n } from 'vue-i18n'
import * as dd from 'dingtalk-jsapi'
import { loadScript } from '@/utils/utils'
const { locale } = useI18n({ useScope: 'global' })
const loading = ref<boolean>(false)
const { user } = useStore()
const router = useRouter()
import forge from 'node-forge'
const loginForm = ref<LoginRequest>({
username: '',
password: '',
captcha: ''
captcha: '',
encryptedData: ''
})
const identifyCode = ref<string>('')
function makeCode() {
useApi.getCaptcha().then((res: any) => {
identifyCode.value = res.data
})
}
const rules = ref<FormRules<LoginRequest>>({
username: [
{
@ -259,20 +270,35 @@ function changeMode(val: string) {
}
const login = () => {
loginFormRef.value?.validate().then(() => {
loading.value = true
user
.login(
loginMode.value,
loginForm.value.username,
loginForm.value.password,
loginForm.value.captcha
)
.then(() => {
locale.value = localStorage.getItem('MaxKB-locale') || getBrowserLang() || 'en-US'
router.push({ name: 'home' })
})
.finally(() => (loading.value = false))
if (!loginFormRef.value) {
return
}
loginFormRef.value?.validate((valid) => {
if (valid) {
loading.value = true
if (loginMode.value === 'LDAP') {
user
.asyncLdapLogin(loginForm.value)
.then(() => {
locale.value = localStorage.getItem('MaxKB-locale') || getBrowserLang() || 'en-US'
router.push({ name: 'home' })
})
.catch(() => {
loading.value = false
})
} else {
const publicKey = forge.pki.publicKeyFromPem(user.rasKey)
const encrypted = publicKey.encrypt(JSON.stringify(loginForm.value), 'RSAES-PKCS1-V1_5')
const encryptedBase64 = forge.util.encode64(encrypted)
user
.login({ encryptedData: encryptedBase64, username: loginForm.value.username })
.then(() => {
locale.value = localStorage.getItem('MaxKB-locale') || getBrowserLang() || 'en-US'
router.push({ name: 'home' })
})
.finally(() => (loading.value = false))
}
}
})
}

View File

@ -44,7 +44,12 @@
clearable
>
<template #prepend>
<el-select v-model="searchType" placeholder="Select" style="width: 80px">
<el-select
v-model="searchType"
placeholder="Select"
style="width: 80px"
@change="searchTypeChange"
>
<el-option :label="$t('common.title')" value="title" />
<el-option :label="$t('common.content')" value="content" />
</el-select>
@ -133,9 +138,7 @@
<el-dropdown-menu>
<el-dropdown-item @click="openGenerateDialog(item)">
<el-icon><Connection /></el-icon>
{{
$t('views.document.generateQuestion.title')
}}</el-dropdown-item
{{ $t('views.document.generateQuestion.title') }}</el-dropdown-item
>
<el-dropdown-item @click="openSelectDocumentDialog(item)">
<AppIcon iconName="app-migrate"></AppIcon>
@ -207,6 +210,10 @@ const title = ref('')
const search = ref('')
const searchType = ref('title')
const searchTypeChange = () => {
search.value = ''
}
//
const isBatch = ref(false)
const multipleSelection = ref<any[]>([])
@ -313,7 +320,7 @@ function addParagraph() {
ParagraphDialogRef.value.open()
}
function editParagraph(row: any) {
title.value = t('views.paragraph.paragraphDetail')
title.value = t('views.paragraph.paragraphDetail')
ParagraphDialogRef.value.open(row)
}

View File

@ -301,7 +301,8 @@ function clickNodes(item: any) {
type: 'app-edge',
sourceNodeId: props.nodeModel.id,
sourceAnchorId: anchorData.value?.id,
targetNodeId: nodeModel.id
targetNodeId: nodeModel.id,
targetAnchorId: nodeModel.id + '_left'
})
closeNodeMenu()

View File

@ -13,7 +13,7 @@
:data="props.nodeModel.properties.api_input_field_list"
class="mb-16"
ref="tableRef"
row-key="field"
row-key="variable"
>
<el-table-column prop="variable" :label="$t('dynamicsForm.paramForm.field.label')">
<template #default="{ row }">

View File

@ -12,7 +12,6 @@
ref="el"
v-bind:modelValue="form_data.branch"
:disabled="form_data.branch === 2"
:filter="'.no-drag'"
handle=".handle"
:animation="150"
ghostClass="ghost"

View File

@ -264,7 +264,7 @@ function onDragHandle() {
onEnd: (evt) => {
if (evt.oldIndex === undefined || evt.newIndex === undefined) return
//
const items = [...form_data.value.form_field_list]
const items = cloneDeep([...form_data.value.form_field_list])
const [movedItem] = items.splice(evt.oldIndex, 1)
items.splice(evt.newIndex, 0, movedItem)
form_data.value.form_field_list = items

View File

@ -91,6 +91,7 @@ import { ref, computed, onMounted } from 'vue'
import { isLastNode } from '@/workflow/common/data'
import applicationApi from '@/api/application'
import { app } from '@/main'
import {t} from "@/locales";
const props = defineProps<{ nodeModel: any }>()
const nodeCascaderRef = ref()
@ -119,8 +120,16 @@ const chat_data = computed({
const FunctionNodeFormRef = ref<FormInstance>()
const validate = () => {
for (const item of chat_data.value.input_field_list) {
if (item.source === 'reference' && item.is_required && item.value[0] !== 'global') {
if (props.nodeModel.graphModel.nodes.filter((node: any) => node.id === item.value[0]).length === 0 ) {
item.value = []
return Promise.reject({node: props.nodeModel, errMessage: item.name + t('dynamicsForm.tip.requiredMessage')})
}
}
}
return FunctionNodeFormRef.value?.validate().catch((err) => {
return Promise.reject({ node: props.nodeModel, errMessage: err })
return Promise.reject({node: props.nodeModel, errMessage: err})
})
}

View File

@ -83,7 +83,6 @@
$t('views.application.applicationForm.form.prompt.tooltip')
}}</template>
<AppIcon iconName="app-warning" class="app-warning-icon"></AppIcon>
<el-icon><EditPen /></el-icon>
</el-tooltip>
</div>
</template>