V4.9.4 feature (#4470)

* Training status (#4424)

* dataset data training state (#4311)

* dataset data training state

* fix

* fix ts

* fix

* fix api format

* fix

* fix

* perf: count training

* format

* fix: dataset training state (#4417)

* fix

* add test

* fix

* fix

* fix test

* fix test

* perf: training count

* count

* loading status

---------

Co-authored-by: heheer <heheer@sealos.io>

* doc

* website sync feature (#4429)

* perf: introduce BullMQ for website sync (#4403)

* perf: introduce BullMQ for website sync

* feat: new redis module

* fix: remove graceful shutdown

* perf: improve UI in dataset detail

- Updated the "change" icon SVG file.
- Modified i18n strings.
- Added new i18n string "immediate_sync".
- Improved UI in dataset detail page, including button icons and
background colors.

* refactor: Add chunkSettings to DatasetSchema

* perf: website sync ux

* env template

* fix: clean up website dataset when updating chunk settings (#4420)

* perf: check setting updated

* perf: worker currency

* feat: init script for website sync refactor (#4425)

* website feature doc

---------

Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>

* pro migration (#4388) (#4433)

* pro migration

* reuse customPdfParseType

Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>

* perf: remove loading ui

* feat: config chat file expired time

* Redis cache (#4436)

* perf: add Redis cache for vector counting (#4432)

* feat: cache

* perf: get cache key

---------

Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>

* perf: mobile voice input (#4437)

* update:Mobile voice interaction (#4362)

* Add files via upload

* Add files via upload

* Update ollama.md

* Update ollama.md

* Add files via upload

* Update useSpeech.ts

* Update ChatInput.tsx

* Update useSpeech.ts

* Update ChatInput.tsx

* Update useSpeech.ts

* Update constants.ts

* Add files via upload

* Update ChatInput.tsx

* Update useSpeech.ts

* Update useSpeech.ts

* Update useSpeech.ts

* Update ChatInput.tsx

* Add files via upload

* Update common.json

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update VoiceInput.tsx

* Update useSpeech.ts

* Update useSpeech.ts

* Update common.json

* Update common.json

* Update common.json

* Update VoiceInput.tsx

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update useSpeech.ts

* Update common.json

* Update chat.json

* Update common.json

* Update chat.json

* Update common.json

* Update chat.json

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update useSpeech.ts

* Update VoiceInput.tsx

* speech ui

* 优化语音输入组件,调整输入框显示逻辑,修复语音输入遮罩层样式,更新画布背景透明度,增强用户交互体验。 (#4435)

* perf: mobil voice input

---------

Co-authored-by: dreamer6680 <1468683855@qq.com>

* Test completion v2 (#4438)

* add v2 completions (#4364)

* add v2 completions

* completion config

* config version

* fix

* frontend

* doc

* fix

* fix: completions v2 api

---------

Co-authored-by: heheer <heheer@sealos.io>

* package

* Test mongo log (#4443)

* feat: mongodb-log (#4426)

* perf: mongo log

* feat: completions stop reasoner

* mongo db log

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>

* update doc

* Update doc

* fix external var ui (#4444)

* action

* fix: ts (#4458)

* preview doc action

add docs preview permission

update preview action

udpate action

* update doc (#4460)

* update preview action

* update doc

* remove

* update

* schema

* update mq export;perf: redis cache  (#4465)

* perf: redis cache

* update mq export

* perf: website sync error tip

* add error worker

* website sync ui (#4466)

* Updated the dynamic display of the voice input pop-up (#4469)

* Update VoiceInput.tsx

* Update VoiceInput.tsx

* Update VoiceInput.tsx

* fix: voice input

---------

Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
Co-authored-by: dreamer6680 <1468683855@qq.com>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
This commit is contained in:
Archer 2025-04-08 12:05:04 +08:00 committed by GitHub
parent 5839325f77
commit f642c9603b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
151 changed files with 5434 additions and 1354 deletions

View File

@ -10,6 +10,12 @@ on:
jobs:
# This workflow contains jobs "deploy-production"
deploy-preview:
permissions:
contents: read
packages: write
attestations: write
id-token: write
pull-requests: write
# The environment this job references
environment:
name: Preview
@ -32,6 +38,7 @@ jobs:
repository: ${{ github.event.pull_request.head.repo.full_name }}
submodules: recursive # Fetch submodules
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
token: ${{ secrets.GITHUB_TOKEN }}
# Step 2 Detect changes to Docs Content
- name: Detect changes in doc content
@ -58,36 +65,38 @@ jobs:
- name: Build
run: cd docSite && hugo mod get -u github.com/colinwilson/lotusdocs@6d0568e && hugo -v --minify
# Step 5 - Push our generated site to Vercel
- name: Deploy to Vercel
uses: amondnet/vercel-action@v25
id: vercel-action
- name: Test
run: ls ./docSite/public
# Step 5 - Push our generated site to Cloudflare
- name: Deploy to Cloudflare Pages
id: deploy
uses: cloudflare/wrangler-action@v3
with:
vercel-token: ${{ secrets.VERCEL_TOKEN }} # Required
vercel-org-id: ${{ secrets.VERCEL_ORG_ID }} #Required
vercel-project-id: ${{ secrets.VERCEL_PROJECT_ID }} #Required
github-comment: false
vercel-args: '--local-config ../vercel.json' # Optional
working-directory: docSite/public
alias-domains: | #Optional
fastgpt-staging.vercel.app
docsOutput:
permissions:
pull-requests: write
needs: [deploy-preview]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy ./docSite/public --project-name=fastgpt-doc
packageManager: npm
- name: Create deployment status comment
if: always()
env:
JOB_STATUS: ${{ job.status }}
PREVIEW_URL: ${{ steps.deploy.outputs.deployment-url }}
uses: actions/github-script@v6
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
- uses: actions/github-script@v7
with:
github-token: ${{secrets.GITHUB_TOKEN}}
token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: '[👀 Visit preview document](${{ needs.deploy-preview.outputs.url }})'
})
const success = process.env.JOB_STATUS === 'success';
const deploymentUrl = `${process.env.PREVIEW_URL}`;
const status = success ? '✅ Success' : '❌ Failed';
console.log(process.env.JOB_STATUS);
const commentBody = `**Deployment Status: ${status}**
${success ? `🔗 Preview URL: ${deploymentUrl}` : ''}`;
await github.rest.issues.createComment({
...context.repo,
issue_number: context.payload.pull_request.number,
body: commentBody
});

View File

@ -18,7 +18,9 @@ jobs:
uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

View File

@ -110,6 +110,18 @@ services:
# 等待docker-entrypoint.sh脚本执行的MongoDB服务进程
wait $$!
redis:
image: redis:7.2-alpine
container_name: redis
# ports:
# - 6379:6379
networks:
- fastgpt
restart: always
command: |
redis-server --requirepass mypassword --loglevel warning --maxclients 10000 --appendonly yes --save 60 10 --maxmemory 4gb --maxmemory-policy noeviction
volumes:
- ./redis/data:/data
# fastgpt
sandbox:
@ -157,6 +169,8 @@ services:
# zilliz 连接参数
- MILVUS_ADDRESS=http://milvusStandalone:19530
- MILVUS_TOKEN=none
# Redis 地址
- REDIS_URL=redis://default:mypassword@redis:6379
# sandbox 地址
- SANDBOX_URL=http://sandbox:3000
# 日志等级: debug, info, warn, error

View File

@ -69,6 +69,19 @@ services:
# 等待docker-entrypoint.sh脚本执行的MongoDB服务进程
wait $$!
redis:
image: redis:7.2-alpine
container_name: redis
# ports:
# - 6379:6379
networks:
- fastgpt
restart: always
command: |
redis-server --requirepass mypassword --loglevel warning --maxclients 10000 --appendonly yes --save 60 10 --maxmemory 4gb --maxmemory-policy noeviction
volumes:
- ./redis/data:/data
# fastgpt
sandbox:
container_name: sandbox
@ -114,6 +127,8 @@ services:
- MONGODB_URI=mongodb://myusername:mypassword@mongo:27017/fastgpt?authSource=admin
# pg 连接参数
- PG_URL=postgresql://username:password@pg:5432/postgres
# Redis 连接参数
- REDIS_URL=redis://default:mypassword@redis:6379
# sandbox 地址
- SANDBOX_URL=http://sandbox:3000
# 日志等级: debug, info, warn, error

View File

@ -51,6 +51,19 @@ services:
# 等待docker-entrypoint.sh脚本执行的MongoDB服务进程
wait $$!
redis:
image: redis:7.2-alpine
container_name: redis
# ports:
# - 6379:6379
networks:
- fastgpt
restart: always
command: |
redis-server --requirepass mypassword --loglevel warning --maxclients 10000 --appendonly yes --save 60 10 --maxmemory 4gb --maxmemory-policy noeviction
volumes:
- ./redis/data:/data
sandbox:
container_name: sandbox
image: ghcr.io/labring/fastgpt-sandbox:v4.9.3 # git
@ -92,6 +105,8 @@ services:
- FILE_TOKEN_KEY=filetoken
# MongoDB 连接参数. 用户名myusername,密码mypassword。
- MONGODB_URI=mongodb://myusername:mypassword@mongo:27017/fastgpt?authSource=admin
# Redis 连接参数
- REDIS_URI=redis://default:mypassword@redis:6379
# zilliz 连接参数
- MILVUS_ADDRESS=zilliz_cloud_address
- MILVUS_TOKEN=zilliz_cloud_token

Binary file not shown.

After

Width:  |  Height:  |  Size: 284 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 294 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

View File

@ -18,12 +18,14 @@ weight: 852
{{% alert icon="🤖 " context="success" %}}
* 该接口的 API Key 需使用`应用特定的 key`,否则会报错。
<!-- * 对话现在有`v1`和`v2`两个接口可以按需使用v2 自 4.9.4 版本新增v1 接口同时不再维护 -->
* 有些包调用时,`BaseUrl`需要添加`v1`路径有些不需要如果出现404情况可补充`v1`重试。
{{% /alert %}}
## 请求简易应用和工作流
对话接口兼容`GPT`的接口!如果你的项目使用的是标准的`GPT`官方接口,可以直接通过修改`BaseUrl`和 `Authorization`来访问 FastGpt 应用,不过需要注意下面几个规则:
`v1`对话接口兼容`GPT`的接口!如果你的项目使用的是标准的`GPT`官方接口,可以直接通过修改`BaseUrl`和 `Authorization`来访问 FastGpt 应用,不过需要注意下面几个规则:
{{% alert icon="🤖 " context="success" %}}
* 传入的`model``temperature`等参数字段均无效,这些字段由编排决定,不会根据 API 参数改变。
@ -32,6 +34,100 @@ weight: 852
### 请求
<!-- #### v2
v1,v2 接口请求参数一致,仅请求地址不一样。
{{< tabs tabTotal="3" >}}
{{< tab tabName="基础请求示例" >}}
{{< markdownify >}}
```bash
curl --location --request POST 'http://localhost:3000/api/v2/chat/completions' \
--header 'Authorization: fastgpt-xxxxxx' \
--header 'Content-Type: application/json' \
--data-raw '{
"chatId": "my_chatId",
"stream": false,
"detail": false,
"responseChatItemId": "my_responseChatItemId",
"variables": {
"uid": "asdfadsfasfd2323",
"name": "张三"
},
"messages": [
{
"role": "user",
"content": "你是谁"
}
]
}'
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="图片/文件请求示例" >}}
{{< markdownify >}}
* 仅`messages`有部分区别,其他参数一致。
* 目前不支持上传文件,需上传到自己的对象存储中,获取对应的文件链接。
```bash
curl --location --request POST 'http://localhost:3000/api/v2/chat/completions' \
--header 'Authorization: Bearer fastgpt-xxxxxx' \
--header 'Content-Type: application/json' \
--data-raw '{
"chatId": "abcd",
"stream": false,
"messages": [
{
"role": "user",
"content": [
{
"type": "text",
"text": "导演是谁"
},
{
"type": "image_url",
"image_url": {
"url": "图片链接"
}
},
{
"type": "file_url",
"name": "文件名",
"url": "文档链接,支持 txt md html word pdf ppt csv excel"
}
]
}
]
}'
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="参数说明" >}}
{{< markdownify >}}
{{% alert context="info" %}}
- headers.Authorization: Bearer {{apikey}}
- chatId: string | undefined 。
- 为 `undefined` 时(不传入),不使用 FastGpt 提供的上下文功能,完全通过传入的 messages 构建上下文。
- 为`非空字符串`时,意味着使用 chatId 进行对话,自动从 FastGpt 数据库取历史记录,并使用 messages 数组最后一个内容作为用户问题,其余 message 会被忽略。请自行确保 chatId 唯一长度小于250通常可以是自己系统的对话框ID。
- messages: 结构与 [GPT接口](https://platform.openai.com/docs/api-reference/chat/object) chat模式一致。
- responseChatItemId: string | undefined 。如果传入,则会将该值作为本次对话的响应消息的 IDFastGPT 会自动将该 ID 存入数据库。请确保,在当前`chatId`下,`responseChatItemId`是唯一的。
- detail: 是否返回中间值(模块状态,响应的完整结果等),`stream模式`下会通过`event`进行区分,`非stream模式`结果保存在`responseData`中。
- variables: 模块变量,一个对象,会替换模块中,输入框内容里的`{{key}}`
{{% /alert %}}
{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}
#### v1
{{< tabs tabTotal="3" >}}
{{< tab tabName="基础请求示例" >}}
{{< markdownify >}}
@ -65,7 +161,7 @@ curl --location --request POST 'http://localhost:3000/api/v1/chat/completions' \
{{< markdownify >}}
* 仅`messages`有部分区别,其他参数一致。
* 目前不支持上文件,需上传到自己的对象存储中,获取对应的文件链接。
* 目前不支持上文件,需上传到自己的对象存储中,获取对应的文件链接。
```bash
curl --location --request POST 'http://localhost:3000/api/v1/chat/completions' \
@ -116,14 +212,188 @@ curl --location --request POST 'http://localhost:3000/api/v1/chat/completions' \
- variables: 模块变量,一个对象,会替换模块中,输入框内容里的`{{key}}`
{{% /alert %}}
{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}
### 响应
#### v2
v2 接口比起 v1主要变变化在于会在每个节点运行结束后及时返回 response而不是等工作流结束后再统一返回。
{{< tabs tabTotal="5" >}}
{{< tab tabName="detail=false,stream=false 响应" >}}
{{< markdownify >}}
```json
{
"id": "",
"model": "",
"usage": {
"prompt_tokens": 1,
"completion_tokens": 1,
"total_tokens": 1
},
"choices": [
{
"message": {
"role": "assistant",
"content": "我是一个人工智能助手,旨在回答问题和提供信息。如果你有任何问题或者需要帮助,随时问我!"
},
"finish_reason": "stop",
"index": 0
}
]
}
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="detail=false,stream=true 响应" >}}
{{< markdownify >}}
```bash
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":"你好"},"index":0,"finish_reason":null}]}
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":""},"index":0,"finish_reason":null}]}
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":"今天"},"index":0,"finish_reason":null}]}
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":"过得怎么样?"},"index":0,"finish_reason":null}]}
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":null},"index":0,"finish_reason":"stop"}]}
data: [DONE]
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="detail=true,stream=false 响应" >}}
{{< markdownify >}}
```json
{
"responseData": [
{
"id": "iSol79OFrBH1I9kC",
"nodeId": "448745",
"moduleName": "common:core.module.template.work_start",
"moduleType": "workflowStart",
"runningTime": 0
},
{
"id": "t1T94WCy6Su3BK4V",
"nodeId": "fjLpE3XPegmoGtbU",
"moduleName": "AI 对话",
"moduleType": "chatNode",
"runningTime": 1.46,
"totalPoints": 0,
"model": "GPT-4o-mini",
"tokens": 64,
"inputTokens": 10,
"outputTokens": 54,
"query": "你是谁",
"reasoningText": "",
"historyPreview": [
{
"obj": "Human",
"value": "你是谁"
},
{
"obj": "AI",
"value": "我是一个人工智能助手,旨在帮助回答问题和提供信息。如果你有任何问题或需要帮助,请告诉我!"
}
],
"contextTotalLen": 2
}
],
"newVariables": {
},
"id": "",
"model": "",
"usage": {
"prompt_tokens": 1,
"completion_tokens": 1,
"total_tokens": 1
},
"choices": [
{
"message": {
"role": "assistant",
"content": "我是一个人工智能助手,旨在帮助回答问题和提供信息。如果你有任何问题或需要帮助,请告诉我!"
},
"finish_reason": "stop",
"index": 0
}
]
}
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="detail=true,stream=true 响应" >}}
{{< markdownify >}}
```bash
event: flowNodeResponse
data: {"id":"iYv2uA9rCWAtulWo","nodeId":"workflowStartNodeId","moduleName":"流程开始","moduleType":"workflowStart","runningTime":0}
event: flowNodeStatus
data: {"status":"running","name":"AI 对话"}
event: answer
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":"你好"},"index":0,"finish_reason":null}]}
event: answer
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":""},"index":0,"finish_reason":null}]}
event: answer
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":"今天"},"index":0,"finish_reason":null}]}
event: answer
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":"过得怎么样?"},"index":0,"finish_reason":null}]}
event: flowNodeResponse
data: {"id":"pVzLBF7M3Ol4n7s6","nodeId":"ixe20AHN3jy74pKf","moduleName":"AI 对话","moduleType":"chatNode","runningTime":1.48,"totalPoints":0.0042,"model":"Qwen-plus","tokens":28,"inputTokens":8,"outputTokens":20,"query":"你好","reasoningText":"","historyPreview":[{"obj":"Human","value":"你好"},{"obj":"AI","value":"你好!今天过得怎么样?"}],"contextTotalLen":2}
event: answer
data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{"role":"assistant","content":null},"index":0,"finish_reason":"stop"}]}
event: answer
data: [DONE]
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="event值" >}}
{{< markdownify >}}
event取值
- answer: 返回给客户端的文本(最终会算作回答)
- fastAnswer: 指定回复返回给客户端的文本(最终会算作回答)
- toolCall: 执行工具
- toolParams: 工具参数
- toolResponse: 工具返回
- flowNodeStatus: 运行到的节点状态
- flowNodeResponse: 单个节点详细响应
- updateVariables: 更新变量
- error: 报错
{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}
#### v1 -->
{{< tabs tabTotal="5" >}}
{{< tab tabName="detail=false,stream=false 响应" >}}
{{< markdownify >}}
@ -475,6 +745,8 @@ curl --location --request POST 'https://api.fastgpt.in/api/v1/chat/completions'
### 请求示例
#### v1
```bash
curl --location --request POST 'http://localhost:3000/api/v1/chat/completions' \
--header 'Authorization: Bearer test-xxxxx' \
@ -488,8 +760,25 @@ curl --location --request POST 'http://localhost:3000/api/v1/chat/completions' \
}'
```
#### v2
```bash
curl --location --request POST 'http://localhost:3000/api/v2/chat/completions' \
--header 'Authorization: Bearer test-xxxxx' \
--header 'Content-Type: application/json' \
--data-raw '{
"stream": false,
"chatId": "test",
"variables": {
"query":"你好"
}
}'
```
### 响应示例
#### v1
{{< tabs tabTotal="3" >}}
{{< tab tabName="detail=true,stream=false 响应" >}}
@ -649,6 +938,149 @@ event取值
{{< /tabs >}}
#### v2
{{< tabs tabTotal="3" >}}
{{< tab tabName="detail=true,stream=false 响应" >}}
{{< markdownify >}}
* 插件的输出可以通过查找`responseData`中, `moduleType=pluginOutput`的元素,其`pluginOutput`是插件的输出。
* 流输出,仍可以通过`choices`进行获取。
```json
{
"responseData": [
{
"id": "bsH1ZdbYkz9iJwYa",
"nodeId": "pluginInput",
"moduleName": "workflow:template.plugin_start",
"moduleType": "pluginInput",
"runningTime": 0
},
{
"id": "zDgfqSPhbYZFHVIn",
"nodeId": "h4Gr4lJtFVQ6qI4c",
"moduleName": "AI 对话",
"moduleType": "chatNode",
"runningTime": 1.44,
"totalPoints": 0,
"model": "GPT-4o-mini",
"tokens": 34,
"inputTokens": 8,
"outputTokens": 26,
"query": "你好",
"reasoningText": "",
"historyPreview": [
{
"obj": "Human",
"value": "你好"
},
{
"obj": "AI",
"value": "你好!有什么我可以帮助你的吗?"
}
],
"contextTotalLen": 2
},
{
"id": "uLLwKKRZvufXzgF4",
"nodeId": "pluginOutput",
"moduleName": "common:core.module.template.self_output",
"moduleType": "pluginOutput",
"runningTime": 0,
"totalPoints": 0,
"pluginOutput": {
"result": "你好!有什么我可以帮助你的吗?"
}
}
],
"newVariables": {
},
"id": "test",
"model": "",
"usage": {
"prompt_tokens": 1,
"completion_tokens": 1,
"total_tokens": 1
},
"choices": [
{
"message": {
"role": "assistant",
"content": "你好!有什么我可以帮助你的吗?"
},
"finish_reason": "stop",
"index": 0
}
]
}
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="detail=true,stream=true 响应" >}}
{{< markdownify >}}
* 插件的输出可以通过获取`event=flowResponses`中的字符串,并将其反序列化后得到一个数组。同样的,查找 `moduleType=pluginOutput`的元素,其`pluginOutput`是插件的输出。
* 流输出,仍和对话接口一样获取。
```bash
data: {"event":"flowNodeResponse","data":"{\"id\":\"q8ablUOqHGgqLIRM\",\"nodeId\":\"pluginInput\",\"moduleName\":\"workflow:template.plugin_start\",\"moduleType\":\"pluginInput\",\"runningTime\":0}"}
data: {"event":"flowNodeStatus","data":"{\"status\":\"running\",\"name\":\"AI 对话\"}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"你好\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"有什么\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"我\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"可以\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"帮助\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"你\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"的吗\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"\"},\"index\":0,\"finish_reason\":null}]}"}
data: {"event":"flowNodeResponse","data":"{\"id\":\"rqlXLUap8QeiN7Kf\",\"nodeId\":\"h4Gr4lJtFVQ6qI4c\",\"moduleName\":\"AI 对话\",\"moduleType\":\"chatNode\",\"runningTime\":1.79,\"totalPoints\":0,\"model\":\"GPT-4o-mini\",\"tokens\":137,\"inputTokens\":111,\"outputTokens\":26,\"query\":\"你好\",\"reasoningText\":\"\",\"historyPreview\":[{\"obj\":\"Human\",\"value\":\"[{\\\"renderTypeList\\\":[\\\"reference\\\"],\\\"selectedTypeInde\\n\\n...[hide 174 chars]...\\n\\ncanSelectImg\\\":true,\\\"required\\\":false,\\\"value\\\":\\\"你好\\\"}]\"},{\"obj\":\"AI\",\"value\":\"你好!有什么我可以帮助你的吗?\"},{\"obj\":\"Human\",\"value\":\"你好\"},{\"obj\":\"AI\",\"value\":\"你好!有什么我可以帮助你的吗?\"}],\"contextTotalLen\":4}"}
data: {"event":"flowNodeResponse","data":"{\"id\":\"lHCpHI0MrM00HQlX\",\"nodeId\":\"pluginOutput\",\"moduleName\":\"common:core.module.template.self_output\",\"moduleType\":\"pluginOutput\",\"runningTime\":0,\"totalPoints\":0,\"pluginOutput\":{\"result\":\"你好!有什么我可以帮助你的吗?\"}}"}
data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":null},\"index\":0,\"finish_reason\":\"stop\"}]}"}
data: {"event":"answer","data":"[DONE]"}
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="输出获取" >}}
{{< markdownify >}}
event取值
- answer: 返回给客户端的文本(最终会算作回答)
- fastAnswer: 指定回复返回给客户端的文本(最终会算作回答)
- toolCall: 执行工具
- toolParams: 工具参数
- toolResponse: 工具返回
- flowNodeStatus: 运行到的节点状态
- flowNodeResponse: 单个节点详细响应
- updateVariables: 更新变量
- error: 报错
{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}
# 对话 CRUD

View File

@ -7,10 +7,53 @@ toc: true
weight: 796
---
## 升级指南
### 1. 做好数据备份
### 1. 安装 Redis
* docker 部署的用户,参考最新的 `docker-compose.yml` 文件增加 Redis 配置。增加一个 redis 容器,并配置`fastgpt`,`fastgpt-pro`的环境变量,增加 `REDIS_URL` 环境变量。
* Sealos 部署的用户,在数据库里新建一个`redis`数据库,并复制`内网地址的 connection` 作为 `redis` 的链接串。然后配置`fastgpt`,`fastgpt-pro`的环境变量,增加 `REDIS_URL` 环境变量。
| | | |
| --- | --- | --- |
| ![](/imgs/sealos-redis1.png) | ![](/imgs/sealos-redis2.png) | ![](/imgs/sealos-redis3.png) |
### 2. 更新镜像 tag
### 3. 执行升级脚本
该脚本仅需商业版用户执行。
从任意终端,发起 1 个 HTTP 请求。其中 {{rootkey}} 替换成环境变量里的 `rootkey`{{host}} 替换成**FastGPT 域名**。
```bash
curl --location --request POST 'https://{{host}}/api/admin/initv494' \
--header 'rootkey: {{rootkey}}' \
--header 'Content-Type: application/json'
```
**脚本功能**
1. 更新站点同步定时器
## 🚀 新增内容
1. SMTP 发送邮件插件
1. 集合数据训练状态展示
2. SMTP 发送邮件插件
3. BullMQ 消息队列。
4. 利用 redis 进行部分数据缓存。
5. 站点同步支持配置训练参数。
6. AI 对话/工具调用,增加返回模型 finish_reason 字段。
## ⚙️ 优化
1. Admin 模板渲染调整。
2. 支持环境变量配置对话文件过期时间。
3. MongoDB log 库可独立部署。
## 🐛 修复
1. 搜索应用/知识库时,无法点击目录进入下一层。

View File

@ -7,7 +7,7 @@ toc: true
weight: -10
---
FastGPT 是一个基于 LLM 大语言模型的知识库问答系统,提供开箱即用的数据处理、模型调用等能力。同时可以通过 Flow 可视化进行工作流编排,从而实现复杂的问答场景!
FastGPT 是一个AI Agent 构建平台,提供开箱即用的数据处理、模型调用等能力,同时可以通过 Flow 可视化进行工作流编排从而实现复杂的应用场景t
{{% alert icon="🤖 " context="success" %}}
FastGPT 在线使用:[https://tryfastgpt.ai](https://tryfastgpt.ai)

40
env.d.ts vendored Normal file
View File

@ -0,0 +1,40 @@
declare global {
namespace NodeJS {
interface ProcessEnv {
LOG_DEPTH: string;
DEFAULT_ROOT_PSW: string;
DB_MAX_LINK: string;
TOKEN_KEY: string;
FILE_TOKEN_KEY: string;
ROOT_KEY: string;
OPENAI_BASE_URL: string;
CHAT_API_KEY: string;
AIPROXY_API_ENDPOINT: string;
AIPROXY_API_TOKEN: string;
MULTIPLE_DATA_TO_BASE64: string;
MONGODB_URI: string;
MONGODB_LOG_URI?: string;
PG_URL: string;
OCEANBASE_URL: string;
MILVUS_ADDRESS: string;
MILVUS_TOKEN: string;
SANDBOX_URL: string;
PRO_URL: string;
FE_DOMAIN: string;
FILE_DOMAIN: string;
NEXT_PUBLIC_BASE_URL: string;
LOG_LEVEL: string;
STORE_LOG_LEVEL: string;
USE_IP_LIMIT: string;
WORKFLOW_MAX_RUN_TIMES: string;
WORKFLOW_MAX_LOOP_TIMES: string;
CHECK_INTERNAL_IP: string;
CHAT_LOG_URL: string;
CHAT_LOG_INTERVAL: string;
CHAT_LOG_SOURCE_ID_PREFIX: string;
ALLOWED_ORIGINS: string;
}
}
}
export {};

View File

@ -118,11 +118,12 @@ export type SystemEnvType = {
oneapiUrl?: string;
chatApiKey?: string;
customPdfParse?: {
url?: string;
key?: string;
doc2xKey?: string;
price?: number; // n points/1 page
};
customPdfParse?: customPdfParseType;
};
export type customPdfParseType = {
url?: string;
key?: string;
doc2xKey?: string;
price?: number;
};

View File

@ -1,3 +1,5 @@
import { i18nT } from '../../../web/i18n/utils';
export enum ChatCompletionRequestMessageRoleEnum {
'System' = 'system',
'User' = 'user',
@ -28,3 +30,13 @@ export enum EmbeddingTypeEnm {
query = 'query',
db = 'db'
}
export const completionFinishReasonMap = {
close: i18nT('chat:completion_finish_close'),
stop: i18nT('chat:completion_finish_stop'),
length: i18nT('chat:completion_finish_length'),
tool_calls: i18nT('chat:completion_finish_tool_calls'),
content_filter: i18nT('chat:completion_finish_content_filter'),
function_call: i18nT('chat:completion_finish_function_call'),
null: i18nT('chat:completion_finish_null')
};

View File

@ -73,6 +73,15 @@ export type ChatCompletionMessageFunctionCall =
export type StreamChatType = Stream<openai.Chat.Completions.ChatCompletionChunk>;
export type UnStreamChatType = openai.Chat.Completions.ChatCompletion;
export type CompletionFinishReason =
| 'close'
| 'stop'
| 'length'
| 'tool_calls'
| 'content_filter'
| 'function_call'
| null;
export default openai;
export * from 'openai';

View File

@ -15,7 +15,6 @@ export type DatasetUpdateBody = {
name?: string;
avatar?: string;
intro?: string;
status?: DatasetSchemaType['status'];
agentModel?: string;
vlmModel?: string;
@ -26,6 +25,7 @@ export type DatasetUpdateBody = {
apiServer?: DatasetSchemaType['apiServer'];
yuqueServer?: DatasetSchemaType['yuqueServer'];
feishuServer?: DatasetSchemaType['feishuServer'];
chunkSettings?: DatasetSchemaType['chunkSettings'];
// sync schedule
autoSync?: boolean;
@ -141,7 +141,6 @@ export type PushDatasetDataChunkProps = {
export type PostWebsiteSyncParams = {
datasetId: string;
billId: string;
};
export type PushDatasetDataProps = {

View File

@ -50,7 +50,9 @@ export const DatasetTypeMap = {
export enum DatasetStatusEnum {
active = 'active',
syncing = 'syncing'
syncing = 'syncing',
waiting = 'waiting',
error = 'error'
}
export const DatasetStatusMap = {
[DatasetStatusEnum.active]: {
@ -58,6 +60,12 @@ export const DatasetStatusMap = {
},
[DatasetStatusEnum.syncing]: {
label: i18nT('common:core.dataset.status.syncing')
},
[DatasetStatusEnum.waiting]: {
label: i18nT('common:core.dataset.status.waiting')
},
[DatasetStatusEnum.error]: {
label: i18nT('dataset:status_error')
}
};

View File

@ -17,6 +17,20 @@ import { SourceMemberType } from 'support/user/type';
import { DatasetDataIndexTypeEnum } from './data/constants';
import { ChunkSettingModeEnum } from './constants';
export type ChunkSettingsType = {
trainingType: DatasetCollectionDataProcessModeEnum;
autoIndexes?: boolean;
imageIndex?: boolean;
chunkSettingMode?: ChunkSettingModeEnum;
chunkSplitMode?: DataChunkSplitModeEnum;
chunkSize?: number;
indexSize?: number;
chunkSplitter?: string;
qaPrompt?: string;
};
export type DatasetSchemaType = {
_id: string;
parentId?: string;
@ -29,7 +43,6 @@ export type DatasetSchemaType = {
name: string;
intro: string;
type: `${DatasetTypeEnum}`;
status: `${DatasetStatusEnum}`;
vectorModel: string;
agentModel: string;
@ -39,14 +52,16 @@ export type DatasetSchemaType = {
url: string;
selector: string;
};
chunkSettings?: ChunkSettingsType;
inheritPermission: boolean;
apiServer?: APIFileServer;
feishuServer?: FeishuServer;
yuqueServer?: YuqueServer;
autoSync?: boolean;
// abandon
autoSync?: boolean;
externalReadUrl?: string;
defaultPermission?: number;
};
@ -163,6 +178,7 @@ export type DatasetTrainingSchemaType = {
weight: number;
indexes: Omit<DatasetDataIndexItemType, 'dataId'>[];
retryCount: number;
errorMsg?: string;
};
export type CollectionWithDatasetType = DatasetCollectionSchemaType & {
@ -192,6 +208,8 @@ export type DatasetListItemType = {
};
export type DatasetItemType = Omit<DatasetSchemaType, 'vectorModel' | 'agentModel' | 'vlmModel'> & {
status: `${DatasetStatusEnum}`;
errorMsg?: string;
vectorModel: EmbeddingModelItemType;
agentModel: LLMModelItemType;
vlmModel?: LLMModelItemType;
@ -216,6 +234,7 @@ export type DatasetCollectionItemType = CollectionWithDatasetType & {
file?: DatasetFileSchema;
permission: DatasetPermission;
indexAmount: number;
errorCount?: number;
};
/* ================= data ===================== */

View File

@ -5,6 +5,7 @@ export enum SseResponseEventEnum {
answer = 'answer', // animation stream
fastAnswer = 'fastAnswer', // direct answer text, not animation
flowNodeStatus = 'flowNodeStatus', // update node status
flowNodeResponse = 'flowNodeResponse', // node response
toolCall = 'toolCall', // tool start
toolParams = 'toolParams', // tool params return

View File

@ -22,6 +22,7 @@ import { UserSelectOptionType } from '../template/system/userSelect/type';
import { WorkflowResponseType } from '../../../../service/core/workflow/dispatch/type';
import { AiChatQuoteRoleType } from '../template/system/aiChat/type';
import { LafAccountType, OpenaiAccountType } from '../../../support/user/team/type';
import { CompletionFinishReason } from '../../ai/type';
export type ExternalProviderType = {
openaiAccount?: OpenaiAccountType;
@ -59,6 +60,7 @@ export type ChatDispatchProps = {
isToolCall?: boolean;
workflowStreamResponse?: WorkflowResponseType;
workflowDispatchDeep?: number;
version?: 'v1' | 'v2';
};
export type ModuleDispatchProps<T> = ChatDispatchProps & {
@ -129,6 +131,7 @@ export type DispatchNodeResponseType = {
obj: `${ChatRoleEnum}`;
value: string;
}[]; // completion context array. history will slice
finishReason?: CompletionFinishReason;
// dataset search
similarity?: number;

View File

@ -10,7 +10,7 @@
"js-yaml": "^4.1.0",
"jschardet": "3.1.1",
"nanoid": "^5.1.3",
"next": "14.2.25",
"next": "14.2.26",
"openai": "4.61.0",
"openapi-types": "^12.1.3",
"json5": "^2.2.3",

View File

@ -1,5 +1,4 @@
import { connectionMongo, getMongoModel } from '../../mongo';
const { Schema } = connectionMongo;
import { getMongoModel, Schema } from '../../mongo';
import { RawTextBufferSchemaType } from './type';
export const collectionName = 'buffer_rawtexts';

View File

@ -1,5 +1,4 @@
import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo';
const { Schema, model, models } = connectionMongo;
import { Schema, getMongoModel } from '../../../common/mongo';
import { TTSBufferSchemaType } from './type.d';
export const collectionName = 'buffer_tts';

View File

@ -0,0 +1,79 @@
import { ConnectionOptions, Processor, Queue, QueueOptions, Worker, WorkerOptions } from 'bullmq';
import { addLog } from '../system/log';
import { newQueueRedisConnection, newWorkerRedisConnection } from '../redis';
const defaultWorkerOpts: Omit<ConnectionOptions, 'connection'> = {
removeOnComplete: {
count: 0 // Delete jobs immediately on completion
},
removeOnFail: {
count: 0 // Delete jobs immediately on failure
}
};
// Names of all known BullMQ queues; used as keys of the global
// queue/worker registries managed by getQueue/getWorker below.
export enum QueueNames {
  websiteSync = 'websiteSync'
}
// Process-wide queue registry: created once, then reused on every import
// so each queue name maps to a single Queue instance.
export const queues = (() => {
  global.queues ??= new Map<QueueNames, Queue>();
  return global.queues;
})();
// Process-wide worker registry: created once, then reused on every import
// so each queue name maps to a single Worker instance.
export const workers = (() => {
  global.workers ??= new Map<QueueNames, Worker>();
  return global.workers;
})();
export function getQueue<DataType, ReturnType = void>(
name: QueueNames,
opts?: Omit<QueueOptions, 'connection'>
): Queue<DataType, ReturnType> {
// check if global.queues has the queue
const queue = queues.get(name);
if (queue) {
return queue as Queue<DataType, ReturnType>;
}
const newQueue = new Queue<DataType, ReturnType>(name.toString(), {
connection: newQueueRedisConnection(),
...opts
});
// default error handler, to avoid unhandled exceptions
newQueue.on('error', (error) => {
addLog.error(`MQ Queue [${name}]: ${error.message}`, error);
});
queues.set(name, newQueue);
return newQueue;
}
export function getWorker<DataType, ReturnType = void>(
name: QueueNames,
processor: Processor<DataType, ReturnType>,
opts?: Omit<WorkerOptions, 'connection'>
): Worker<DataType, ReturnType> {
const worker = workers.get(name);
if (worker) {
return worker as Worker<DataType, ReturnType>;
}
const newWorker = new Worker<DataType, ReturnType>(name.toString(), processor, {
connection: newWorkerRedisConnection(),
...defaultWorkerOpts,
...opts
});
// default error handler, to avoid unhandled exceptions
newWorker.on('error', (error) => {
addLog.error(`MQ Worker [${name}]: ${error.message}`, error);
});
newWorker.on('failed', (jobId, error) => {
addLog.error(`MQ Worker [${name}]: ${error.message}`, error);
});
workers.set(name, newWorker);
return newWorker;
}
export * from 'bullmq';

View File

@ -0,0 +1,7 @@
import { Queue, Worker } from 'bullmq';
import { QueueNames } from './index';
// Process-wide registries so getQueue/getWorker return one shared instance
// per queue name instead of constructing a new one on every call.
declare global {
  var queues: Map<QueueNames, Queue> | undefined;
  var workers: Map<QueueNames, Worker> | undefined;
}

View File

@ -1,5 +1,4 @@
import { connectionMongo, getMongoModel, type Model } from '../../mongo';
const { Schema } = connectionMongo;
import { Schema, getMongoModel } from '../../mongo';
const DatasetFileSchema = new Schema({});
const ChatFileSchema = new Schema({});

View File

@ -1,7 +1,6 @@
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
import { connectionMongo, getMongoModel } from '../../mongo';
import { Schema, getMongoModel } from '../../mongo';
import { MongoImageSchemaType } from '@fastgpt/global/common/file/image/type.d';
const { Schema } = connectionMongo;
const ImageSchema = new Schema({
teamId: {

View File

@ -1,17 +1,26 @@
import { addLog } from '../../common/system/log';
import mongoose, { Model } from 'mongoose';
import mongoose, { Model, Mongoose } from 'mongoose';
export default mongoose;
export * from 'mongoose';
export const MONGO_URL = process.env.MONGODB_URI as string;
export const MONGO_LOG_URL = (process.env.MONGODB_LOG_URI ?? process.env.MONGODB_URI) as string;
export const connectionMongo = (() => {
if (!global.mongodb) {
global.mongodb = mongoose;
global.mongodb = new Mongoose();
}
return global.mongodb;
})();
export const connectionLogMongo = (() => {
if (!global.mongodbLog) {
global.mongodbLog = new Mongoose();
}
return global.mongodbLog;
})();
const addCommonMiddleware = (schema: mongoose.Schema) => {
const operations = [
/^find/,
@ -71,6 +80,19 @@ export const getMongoModel = <T>(name: string, schema: mongoose.Schema) => {
return model;
};
export const getMongoLogModel = <T>(name: string, schema: mongoose.Schema) => {
if (connectionLogMongo.models[name]) return connectionLogMongo.models[name] as Model<T>;
console.log('Load model======', name);
addCommonMiddleware(schema);
const model = connectionLogMongo.model<T>(name, schema);
// Sync index
syncMongoIndex(model);
return model;
};
const syncMongoIndex = async (model: Model<any>) => {
if (process.env.SYNC_INDEX !== '0' && process.env.NODE_ENV !== 'test') {
try {

View File

@ -1,6 +1,5 @@
import { delay } from '@fastgpt/global/common/system/utils';
import { addLog } from '../system/log';
import { connectionMongo } from './index';
import type { Mongoose } from 'mongoose';
const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20));
@ -8,41 +7,41 @@ const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20));
/**
* connect MongoDB and init data
*/
export async function connectMongo(): Promise<Mongoose> {
export async function connectMongo(db: Mongoose, url: string): Promise<Mongoose> {
/* Connecting, connected will return */
if (connectionMongo.connection.readyState !== 0) {
return connectionMongo;
if (db.connection.readyState !== 0) {
return db;
}
console.log('mongo start connect');
console.log('MongoDB start connect');
try {
// Remove existing listeners to prevent duplicates
connectionMongo.connection.removeAllListeners('error');
connectionMongo.connection.removeAllListeners('disconnected');
connectionMongo.set('strictQuery', 'throw');
db.connection.removeAllListeners('error');
db.connection.removeAllListeners('disconnected');
db.set('strictQuery', 'throw');
connectionMongo.connection.on('error', async (error) => {
db.connection.on('error', async (error) => {
console.log('mongo error', error);
try {
if (connectionMongo.connection.readyState !== 0) {
await connectionMongo.disconnect();
if (db.connection.readyState !== 0) {
await db.disconnect();
await delay(1000);
await connectMongo();
await connectMongo(db, url);
}
} catch (error) {}
});
connectionMongo.connection.on('disconnected', async () => {
db.connection.on('disconnected', async () => {
console.log('mongo disconnected');
try {
if (connectionMongo.connection.readyState !== 0) {
await connectionMongo.disconnect();
if (db.connection.readyState !== 0) {
await db.disconnect();
await delay(1000);
await connectMongo();
await connectMongo(db, url);
}
} catch (error) {}
});
await connectionMongo.connect(process.env.MONGODB_URI as string, {
const options = {
bufferCommands: true,
maxConnecting: maxConnecting,
maxPoolSize: maxConnecting,
@ -53,18 +52,18 @@ export async function connectMongo(): Promise<Mongoose> {
maxIdleTimeMS: 300000,
retryWrites: true,
retryReads: true
};
// readPreference: 'secondaryPreferred',
// readConcern: { level: 'local' },
// writeConcern: { w: 'majority', j: true }
});
db.connect(url, options);
console.log('mongo connected');
return connectionMongo;
return db;
} catch (error) {
addLog.error('mongo connect error', error);
await connectionMongo.disconnect();
addLog.error('Mongo connect error', error);
await db.disconnect();
await delay(1000);
return connectMongo();
return connectMongo(db, url);
}
}

View File

@ -3,4 +3,5 @@ import type { Logger } from 'winston';
declare global {
var mongodb: Mongoose | undefined;
var mongodbLog: Mongoose | undefined;
}

View File

@ -0,0 +1,38 @@
import { getGlobalRedisCacheConnection } from './index';
import { addLog } from '../system/log';
import { retryFn } from '@fastgpt/global/common/system/utils';
// Prefixes for Redis cache keys; callers append an id,
// e.g. `${CacheKeyEnum.team_vector_count}:${teamId}`.
export enum CacheKeyEnum {
  team_vector_count = 'team_vector_count'
}
/**
 * Write a value to the Redis cache, retrying transient failures via retryFn.
 * @param expireSeconds optional TTL; when set, uses `SET key value EX seconds`
 */
export const setRedisCache = async (
  key: string,
  data: string | Buffer | number,
  expireSeconds?: number
) => {
  return await retryFn(async () => {
    try {
      const redis = getGlobalRedisCacheConnection();
      // A TTL switches to SET ... EX; otherwise a plain SET (no expiry).
      await (expireSeconds ? redis.set(key, data, 'EX', expireSeconds) : redis.set(key, data));
    } catch (error) {
      addLog.error('Set cache error:', error);
      return Promise.reject(error);
    }
  });
};
// Read a cached value (null when the key is absent), retrying transient failures.
export const getRedisCache = async (key: string) =>
  retryFn(() => getGlobalRedisCacheConnection().get(key));
// Delete a cached key, retrying transient failures. Resolves to void.
export const delRedisCache = async (key: string) => {
  await retryFn(() => getGlobalRedisCacheConnection().del(key));
};

View File

@ -0,0 +1,43 @@
import { addLog } from '../system/log';
import Redis from 'ioredis';
const REDIS_URL = process.env.REDIS_URL ?? 'redis://localhost:6379';
export const newQueueRedisConnection = () => {
const redis = new Redis(REDIS_URL);
redis.on('connect', () => {
console.log('Redis connected');
});
redis.on('error', (error) => {
console.error('Redis connection error', error);
});
return redis;
};
export const newWorkerRedisConnection = () => {
const redis = new Redis(REDIS_URL, {
maxRetriesPerRequest: null
});
redis.on('connect', () => {
console.log('Redis connected');
});
redis.on('error', (error) => {
console.error('Redis connection error', error);
});
return redis;
};
export const getGlobalRedisCacheConnection = () => {
if (global.redisCache) return global.redisCache;
global.redisCache = new Redis(REDIS_URL, { keyPrefix: 'fastgpt:cache:' });
global.redisCache.on('connect', () => {
addLog.info('Redis connected');
});
global.redisCache.on('error', (error) => {
addLog.error('Redis connection error', error);
});
return global.redisCache;
};

View File

@ -0,0 +1,5 @@
import Redis from 'ioredis';
declare global {
  // Lazily-created cache connection singleton (see getGlobalRedisCacheConnection).
  // It starts out unset and is never assigned null, so the accurate type is
  // `Redis | undefined`, not `Redis | null`.
  var redisCache: Redis | undefined;
}

View File

@ -1,4 +1,4 @@
import { getMongoModel, Schema } from '../../../common/mongo';
import { getMongoLogModel as getMongoModel, Schema } from '../../../common/mongo';
import { SystemLogType } from './type';
import { LogLevelEnum } from './constant';

View File

@ -1,5 +1,5 @@
export enum TimerIdEnum {
checkInValidDatasetFiles = 'checkInValidDatasetFiles',
checkExpiredFiles = 'checkExpiredFiles',
checkInvalidDatasetData = 'checkInvalidDatasetData',
checkInvalidVector = 'checkInvalidVector',
clearExpiredSubPlan = 'clearExpiredSubPlan',

View File

@ -2,10 +2,12 @@
import { PgVectorCtrl } from './pg/class';
import { ObVectorCtrl } from './oceanbase/class';
import { getVectorsByText } from '../../core/ai/embedding';
import { InsertVectorProps } from './controller.d';
import { DelDatasetVectorCtrlProps, InsertVectorProps } from './controller.d';
import { EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d';
import { MILVUS_ADDRESS, PG_ADDRESS, OCEANBASE_ADDRESS } from './constants';
import { MilvusCtrl } from './milvus/class';
import { setRedisCache, getRedisCache, delRedisCache, CacheKeyEnum } from '../redis/cache';
import { throttle } from 'lodash';
const getVectorObj = () => {
if (PG_ADDRESS) return new PgVectorCtrl();
@ -15,13 +17,33 @@ const getVectorObj = () => {
return new PgVectorCtrl();
};
// Redis key holding a team's cached vector count.
// NOTE(review): "Chche" is a typo for "Cache"; renaming requires updating
// every call site in this file, so the name is kept here.
const getChcheKey = (teamId: string) => `${CacheKeyEnum.team_vector_count}:${teamId}`;

// Per-team throttled cache invalidation (at most one delete per team every
// 30s, leading + trailing). A single shared throttle() would drop
// invalidations for other teams that fire inside the window (lodash keeps
// only the latest args for the trailing call), leaving their cached counts
// stale — so each team gets its own throttled function. Entries are tiny
// and bounded by the number of active teams in this process.
const delCacheThrottleMap = new Map<string, () => void>();
const onDelCache = (teamId: string) => {
  let throttledDel = delCacheThrottleMap.get(teamId);
  if (!throttledDel) {
    throttledDel = throttle(() => delRedisCache(getChcheKey(teamId)), 30000, {
      leading: true,
      trailing: true
    });
    delCacheThrottleMap.set(teamId, throttledDel);
  }
  throttledDel();
};
const Vector = getVectorObj();
export const initVectorStore = Vector.init;
export const deleteDatasetDataVector = Vector.delete;
export const recallFromVectorStore = Vector.embRecall;
export const getVectorDataByTime = Vector.getVectorDataByTime;
export const getVectorCountByTeamId = Vector.getVectorCountByTeamId;
/**
 * Vector count for a team, served from the Redis cache when present.
 * On a miss the count is fetched from the vector store and cached for
 * 30 minutes; writes/deletes invalidate it via onDelCache.
 */
export const getVectorCountByTeamId = async (teamId: string) => {
  const cacheKey = getChcheKey(teamId);

  const cached = await getRedisCache(cacheKey);
  if (cached) {
    return Number(cached);
  }

  const count = await Vector.getVectorCountByTeamId(teamId);
  await setRedisCache(cacheKey, count, 30 * 60);
  return count;
};
export const getVectorCountByDatasetId = Vector.getVectorCountByDatasetId;
export const getVectorCountByCollectionId = Vector.getVectorCountByCollectionId;
@ -43,8 +65,16 @@ export const insertDatasetDataVector = async ({
vector: vectors[0]
});
onDelCache(props.teamId);
return {
tokens,
insertId
};
};
// Delete vectors from the store, then invalidate the team's cached vector
// count (throttled) since the count just changed.
export const deleteDatasetDataVector = async (props: DelDatasetVectorCtrlProps) => {
  const deleteResult = await Vector.delete(props);
  onDelCache(props.teamId);
  return deleteResult;
};

View File

@ -2,6 +2,7 @@ import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import {
ChatCompletionCreateParamsNonStreaming,
ChatCompletionCreateParamsStreaming,
CompletionFinishReason,
StreamChatType
} from '@fastgpt/global/core/ai/type';
import { getLLMModel } from './model';
@ -142,26 +143,40 @@ export const parseReasoningStreamContent = () => {
content?: string;
reasoning_content?: string;
};
finish_reason?: CompletionFinishReason;
}[];
},
parseThinkTag = false
): [string, string] => {
): {
reasoningContent: string;
content: string;
finishReason: CompletionFinishReason;
} => {
const content = part.choices?.[0]?.delta?.content || '';
const finishReason = part.choices?.[0]?.finish_reason || null;
// @ts-ignore
const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || '';
if (reasoningContent || !parseThinkTag) {
isInThinkTag = false;
return [reasoningContent, content];
return { reasoningContent, content, finishReason };
}
if (!content) {
return ['', ''];
return {
reasoningContent: '',
content: '',
finishReason
};
}
// 如果不在 think 标签中,或者有 reasoningContent(接口已解析),则返回 reasoningContent 和 content
if (isInThinkTag === false) {
return ['', content];
return {
reasoningContent: '',
content,
finishReason
};
}
// 检测是否为 think 标签开头的数据
@ -170,17 +185,29 @@ export const parseReasoningStreamContent = () => {
startTagBuffer += content;
// 太少内容时候,暂时不解析
if (startTagBuffer.length < startTag.length) {
return ['', ''];
return {
reasoningContent: '',
content: '',
finishReason
};
}
if (startTagBuffer.startsWith(startTag)) {
isInThinkTag = true;
return [startTagBuffer.slice(startTag.length), ''];
return {
reasoningContent: startTagBuffer.slice(startTag.length),
content: '',
finishReason
};
}
// 如果未命中 think 标签,则认为不在 think 标签中,返回 buffer 内容作为 content
isInThinkTag = false;
return ['', startTagBuffer];
return {
reasoningContent: '',
content: startTagBuffer,
finishReason
};
}
// 确认是 think 标签内容,开始返回 think 内容,并实时检测 </think>
@ -201,19 +228,35 @@ export const parseReasoningStreamContent = () => {
if (endTagBuffer.includes(endTag)) {
isInThinkTag = false;
const answer = endTagBuffer.slice(endTag.length);
return ['', answer];
return {
reasoningContent: '',
content: answer,
finishReason
};
} else if (endTagBuffer.length >= endTag.length) {
// 缓存内容超出尾标签长度,且仍未命中 </think>,则认为本次猜测 </think> 失败,仍处于 think 阶段。
const tmp = endTagBuffer;
endTagBuffer = '';
return [tmp, ''];
return {
reasoningContent: tmp,
content: '',
finishReason
};
}
return ['', ''];
return {
reasoningContent: '',
content: '',
finishReason
};
} else if (content.includes(endTag)) {
// 返回内容,完整命中</think>,直接结束
isInThinkTag = false;
const [think, answer] = content.split(endTag);
return [think, answer];
return {
reasoningContent: think,
content: answer,
finishReason
};
} else {
// 无 buffer且未命中 </think>,开始疑似 </think> 检测。
for (let i = 1; i < endTag.length; i++) {
@ -222,13 +265,21 @@ export const parseReasoningStreamContent = () => {
if (content.endsWith(partialEndTag)) {
const think = content.slice(0, -partialEndTag.length);
endTagBuffer += partialEndTag;
return [think, ''];
return {
reasoningContent: think,
content: '',
finishReason
};
}
}
}
// 完全未命中尾标签,还是 think 阶段。
return [content, ''];
return {
reasoningContent: content,
content: '',
finishReason
};
};
const getStartTagBuffer = () => startTagBuffer;

View File

@ -1,6 +1,7 @@
import {
DatasetCollectionTypeEnum,
DatasetCollectionDataProcessModeEnum
DatasetCollectionDataProcessModeEnum,
DatasetTypeEnum
} from '@fastgpt/global/core/dataset/constants';
import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { MongoDatasetCollection } from './schema';
@ -104,7 +105,8 @@ export const createCollectionAndInsertData = async ({
hashRawText: hashStr(rawText),
rawTextLength: rawText.length,
nextSyncTime: (() => {
if (!dataset.autoSync) return undefined;
// ignore auto collections sync for website datasets
if (!dataset.autoSync && dataset.type === DatasetTypeEnum.websiteDataset) return undefined;
if (
[DatasetCollectionTypeEnum.link, DatasetCollectionTypeEnum.apiFile].includes(
createCollectionParams.type

View File

@ -1,13 +1,8 @@
import { connectionMongo, getMongoModel } from '../../../common/mongo';
const { Schema, model, models } = connectionMongo;
const { Schema } = connectionMongo;
import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type.d';
import {
DatasetCollectionTypeMap,
DatasetCollectionDataProcessModeEnum,
ChunkSettingModeEnum,
DataChunkSplitModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { DatasetCollectionName } from '../schema';
import { DatasetCollectionTypeMap } from '@fastgpt/global/core/dataset/constants';
import { ChunkSettings, DatasetCollectionName } from '../schema';
import {
TeamCollectionName,
TeamMemberCollectionName
@ -90,25 +85,7 @@ const DatasetCollectionSchema = new Schema({
customPdfParse: Boolean,
// Chunk settings
imageIndex: Boolean,
autoIndexes: Boolean,
trainingType: {
type: String,
enum: Object.values(DatasetCollectionDataProcessModeEnum)
},
chunkSettingMode: {
type: String,
enum: Object.values(ChunkSettingModeEnum)
},
chunkSplitMode: {
type: String,
enum: Object.values(DataChunkSplitModeEnum)
},
chunkSize: Number,
chunkSplitter: String,
indexSize: Number,
qaPrompt: String
...ChunkSettings
});
DatasetCollectionSchema.virtual('dataset', {

View File

@ -9,6 +9,8 @@ import { deleteDatasetDataVector } from '../../common/vectorStore/controller';
import { MongoDatasetDataText } from './data/dataTextSchema';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { retryFn } from '@fastgpt/global/common/system/utils';
import { removeWebsiteSyncJobScheduler } from './websiteSync';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
/* ============= dataset ========== */
/* find all datasetId by top datasetId */

View File

@ -1,7 +1,8 @@
import { getMongoModel, Schema } from '../../common/mongo';
import {
DatasetStatusEnum,
DatasetStatusMap,
ChunkSettingModeEnum,
DataChunkSplitModeEnum,
DatasetCollectionDataProcessModeEnum,
DatasetTypeEnum,
DatasetTypeMap
} from '@fastgpt/global/core/dataset/constants';
@ -13,6 +14,28 @@ import type { DatasetSchemaType } from '@fastgpt/global/core/dataset/type.d';
export const DatasetCollectionName = 'datasets';
// Reusable Mongoose field definitions for dataset chunking configuration.
// Nested here as the dataset schema's `chunkSettings` sub-document, and also
// spread into the dataset-collection schema so both stay in sync.
export const ChunkSettings = {
  imageIndex: Boolean,
  autoIndexes: Boolean,
  // How raw text is processed into training data
  trainingType: {
    type: String,
    enum: Object.values(DatasetCollectionDataProcessModeEnum)
  },
  chunkSettingMode: {
    type: String,
    enum: Object.values(ChunkSettingModeEnum)
  },
  chunkSplitMode: {
    type: String,
    enum: Object.values(DataChunkSplitModeEnum)
  },
  chunkSize: Number,
  chunkSplitter: String,
  indexSize: Number,
  qaPrompt: String
};
const DatasetSchema = new Schema({
parentId: {
type: Schema.Types.ObjectId,
@ -40,11 +63,6 @@ const DatasetSchema = new Schema({
required: true,
default: DatasetTypeEnum.dataset
},
status: {
type: String,
enum: Object.keys(DatasetStatusMap),
default: DatasetStatusEnum.active
},
avatar: {
type: String,
default: '/icon/logo.svg'
@ -84,6 +102,9 @@ const DatasetSchema = new Schema({
}
}
},
chunkSettings: {
type: ChunkSettings
},
inheritPermission: {
type: Boolean,
default: true
@ -98,9 +119,8 @@ const DatasetSchema = new Schema({
type: Object
},
autoSync: Boolean,
// abandoned
autoSync: Boolean,
externalReadUrl: {
type: String
},

View File

@ -98,7 +98,9 @@ const TrainingDataSchema = new Schema({
}
],
default: []
}
},
errorMsg: String
});
TrainingDataSchema.virtual('dataset', {

View File

@ -0,0 +1,101 @@
import { Processor } from 'bullmq';
import { getQueue, getWorker, QueueNames } from '../../../common/bullmq';
import { DatasetStatusEnum } from '@fastgpt/global/core/dataset/constants';
// Payload carried by every website-sync job: the dataset to sync.
export type WebsiteSyncJobData = {
  datasetId: string;
};
// Singleton queue for website-sync jobs (shared via the global registry in
// getQueue). Jobs retry up to 3 times with exponential backoff.
export const websiteSyncQueue = getQueue<WebsiteSyncJobData>(QueueNames.websiteSync, {
  defaultJobOptions: {
    attempts: 3, // retry 3 times
    backoff: {
      type: 'exponential',
      delay: 1000 // delay 1 second between retries
    }
  }
});
// Create (or reuse) the worker that processes website-sync jobs.
// Overrides the default immediate-removal options so failed jobs are kept
// for inspection; concurrency 1 serializes syncs in this process.
export const getWebsiteSyncWorker = (processor: Processor<WebsiteSyncJobData>) => {
  return getWorker<WebsiteSyncJobData>(QueueNames.websiteSync, processor, {
    removeOnFail: {
      age: 15 * 24 * 60 * 60, // Keep up to 15 days
      count: 1000 // Keep up to 1000 jobs
    },
    concurrency: 1 // Set worker to process only 1 job at a time
  });
};
// Enqueue a website-sync job. BullMQ deduplication keyed by datasetId
// guarantees at most one outstanding job per dataset.
export const addWebsiteSyncJob = (data: WebsiteSyncJobData) => {
  const id = String(data.datasetId);
  return websiteSyncQueue.add(id, data, { deduplication: { id } });
};
/**
 * Map the state of a dataset's current sync job to a dataset status.
 * No job (or an unresolvable one) means the dataset is simply active.
 */
export const getWebsiteSyncDatasetStatus = async (datasetId: string) => {
  const activeStatus = () => ({
    status: DatasetStatusEnum.active,
    errorMsg: undefined
  });

  const jobId = await websiteSyncQueue.getDeduplicationJobId(datasetId);
  if (!jobId) return activeStatus();

  const job = await websiteSyncQueue.getJob(jobId);
  if (!job) return activeStatus();

  const jobState = await job.getState();
  switch (jobState) {
    case 'failed':
    case 'unknown':
      return {
        status: DatasetStatusEnum.error,
        errorMsg: job.failedReason
      };
    case 'waiting':
    case 'waiting-children':
      return {
        status: DatasetStatusEnum.waiting,
        errorMsg: undefined
      };
    case 'active':
      return {
        status: DatasetStatusEnum.syncing,
        errorMsg: undefined
      };
    default:
      // completed / delayed / any other state: nothing in flight.
      return activeStatus();
  }
};
// Scheduler setting
const repeatDuration = 24 * 60 * 60 * 1000; // every day

/**
 * Create or update the repeating (daily) sync scheduler for a dataset.
 * @param startDate epoch ms for the first run; defaults to one repeat
 *                  interval from now (i.e. tomorrow).
 */
export const upsertWebsiteSyncJobScheduler = (data: WebsiteSyncJobData, startDate?: number) => {
  const schedulerId = String(data.datasetId);
  return websiteSyncQueue.upsertJobScheduler(
    schedulerId,
    {
      every: repeatDuration,
      startDate: startDate || Date.now() + repeatDuration // First run tomorrow
    },
    {
      name: schedulerId,
      data
    }
  );
};
// Look up the repeating sync scheduler registered for a dataset.
export const getWebsiteSyncJobScheduler = (datasetId: string) => {
  return websiteSyncQueue.getJobScheduler(String(datasetId));
};
// Remove a dataset's repeating sync scheduler (e.g. when the dataset is deleted).
export const removeWebsiteSyncJobScheduler = (datasetId: string) => {
  return websiteSyncQueue.removeJobScheduler(String(datasetId));
};

View File

@ -176,7 +176,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
toolNodeOutputTokens,
completeMessages = [], // The actual message sent to AI(just save text)
assistantResponses = [], // FastGPT system store assistant.value response
runTimes
runTimes,
finish_reason
} = await (async () => {
const adaptMessages = chats2GPTMessages({
messages,
@ -276,7 +277,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
useVision
),
toolDetail: childToolResponse,
mergeSignId: nodeId
mergeSignId: nodeId,
finishReason: finish_reason
},
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
// 工具调用本身的积分消耗

View File

@ -1,6 +1,10 @@
import { createChatCompletion } from '../../../../ai/config';
import { filterGPTMessageByMaxContext, loadRequestMessages } from '../../../../chat/utils';
import { StreamChatType, ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import {
StreamChatType,
ChatCompletionMessageParam,
CompletionFinishReason
} from '@fastgpt/global/core/ai/type';
import { NextApiResponse } from 'next';
import { responseWriteController } from '../../../../../common/response';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
@ -252,9 +256,9 @@ export const runToolWithPromptCall = async (
}
});
const { answer, reasoning } = await (async () => {
const { answer, reasoning, finish_reason } = await (async () => {
if (res && isStreamResponse) {
const { answer, reasoning } = await streamResponse({
const { answer, reasoning, finish_reason } = await streamResponse({
res,
toolNodes,
stream: aiResponse,
@ -262,8 +266,9 @@ export const runToolWithPromptCall = async (
aiChatReasoning
});
return { answer, reasoning };
return { answer, reasoning, finish_reason };
} else {
const finish_reason = aiResponse.choices?.[0]?.finish_reason as CompletionFinishReason;
const content = aiResponse.choices?.[0]?.message?.content || '';
const reasoningContent: string = aiResponse.choices?.[0]?.message?.reasoning_content || '';
@ -271,14 +276,16 @@ export const runToolWithPromptCall = async (
if (reasoningContent || !aiChatReasoning) {
return {
answer: content,
reasoning: reasoningContent
reasoning: reasoningContent,
finish_reason
};
}
const [think, answer] = parseReasoningContent(content);
return {
answer,
reasoning: think
reasoning: think,
finish_reason
};
}
})();
@ -525,7 +532,8 @@ ANSWER: `;
toolNodeInputTokens,
toolNodeOutputTokens,
assistantResponses: toolNodeAssistants,
runTimes
runTimes,
finish_reason
}
);
};
@ -550,15 +558,18 @@ async function streamResponse({
let startResponseWrite = false;
let answer = '';
let reasoning = '';
let finish_reason: CompletionFinishReason = null;
const { parsePart, getStartTagBuffer } = parseReasoningStreamContent();
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();
finish_reason = 'close';
break;
}
const [reasoningContent, content] = parsePart(part, aiChatReasoning);
const { reasoningContent, content, finishReason } = parsePart(part, aiChatReasoning);
finish_reason = finish_reason || finishReason;
answer += content;
reasoning += reasoningContent;
@ -618,7 +629,7 @@ async function streamResponse({
}
}
return { answer, reasoning };
return { answer, reasoning, finish_reason };
}
const parseAnswer = (

View File

@ -7,7 +7,8 @@ import {
ChatCompletionToolMessageParam,
ChatCompletionMessageParam,
ChatCompletionTool,
ChatCompletionAssistantMessageParam
ChatCompletionAssistantMessageParam,
CompletionFinishReason
} from '@fastgpt/global/core/ai/type';
import { NextApiResponse } from 'next';
import { responseWriteController } from '../../../../../common/response';
@ -300,7 +301,7 @@ export const runToolWithToolChoice = async (
}
});
const { answer, toolCalls } = await (async () => {
const { answer, toolCalls, finish_reason } = await (async () => {
if (res && isStreamResponse) {
return streamResponse({
res,
@ -310,6 +311,7 @@ export const runToolWithToolChoice = async (
});
} else {
const result = aiResponse as ChatCompletion;
const finish_reason = result.choices?.[0]?.finish_reason as CompletionFinishReason;
const calls = result.choices?.[0]?.message?.tool_calls || [];
const answer = result.choices?.[0]?.message?.content || '';
@ -350,7 +352,8 @@ export const runToolWithToolChoice = async (
return {
answer,
toolCalls: toolCalls
toolCalls: toolCalls,
finish_reason
};
}
})();
@ -549,8 +552,9 @@ export const runToolWithToolChoice = async (
toolNodeOutputTokens,
completeMessages,
assistantResponses: toolNodeAssistants,
toolWorkflowInteractiveResponse,
runTimes,
toolWorkflowInteractiveResponse
finish_reason
};
}
@ -565,7 +569,8 @@ export const runToolWithToolChoice = async (
toolNodeInputTokens,
toolNodeOutputTokens,
assistantResponses: toolNodeAssistants,
runTimes
runTimes,
finish_reason
}
);
} else {
@ -588,7 +593,8 @@ export const runToolWithToolChoice = async (
completeMessages,
assistantResponses: [...assistantResponses, ...toolNodeAssistant.value],
runTimes: (response?.runTimes || 0) + 1
runTimes: (response?.runTimes || 0) + 1,
finish_reason
};
}
};
@ -612,14 +618,18 @@ async function streamResponse({
let textAnswer = '';
let callingTool: { name: string; arguments: string } | null = null;
let toolCalls: ChatCompletionMessageToolCall[] = [];
let finishReason: CompletionFinishReason = null;
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();
finishReason = 'close';
break;
}
const responseChoice = part.choices?.[0]?.delta;
const finish_reason = part.choices?.[0]?.finish_reason as CompletionFinishReason;
finishReason = finishReason || finish_reason;
if (responseChoice?.content) {
const content = responseChoice.content || '';
@ -705,5 +715,5 @@ async function streamResponse({
}
}
return { answer: textAnswer, toolCalls };
return { answer: textAnswer, toolCalls, finish_reason: finishReason };
}

View File

@ -1,4 +1,4 @@
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { ChatCompletionMessageParam, CompletionFinishReason } from '@fastgpt/global/core/ai/type';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type {
ModuleDispatchProps,
@ -43,6 +43,7 @@ export type RunToolResponse = {
assistantResponses?: AIChatItemValueItemType[];
toolWorkflowInteractiveResponse?: WorkflowInteractiveResponseType;
[DispatchNodeResponseKeyEnum.runTimes]: number;
finish_reason?: CompletionFinishReason;
};
export type ToolNodeItemType = RuntimeNodeItemType & {
toolParams: RuntimeNodeItemType['inputs'];

View File

@ -6,7 +6,11 @@ import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/cons
import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
import { parseReasoningContent, parseReasoningStreamContent } from '../../../ai/utils';
import { createChatCompletion } from '../../../ai/config';
import type { ChatCompletionMessageParam, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import type {
ChatCompletionMessageParam,
CompletionFinishReason,
StreamChatType
} from '@fastgpt/global/core/ai/type.d';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '../../../../common/api/requestPlusApi';
@ -101,7 +105,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
const modelConstantsData = getLLMModel(model);
if (!modelConstantsData) {
return Promise.reject('The chat model is undefined, you need to select a chat model.');
return Promise.reject(`Mode ${model} is undefined, you need to select a chat model.`);
}
aiChatVision = modelConstantsData.vision && aiChatVision;
@ -195,16 +199,17 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}
});
const { answerText, reasoningText } = await (async () => {
const { answerText, reasoningText, finish_reason } = await (async () => {
if (isStreamResponse) {
if (!res) {
return {
answerText: '',
reasoningText: ''
reasoningText: '',
finish_reason: 'close' as const
};
}
// sse response
const { answer, reasoning } = await streamResponse({
const { answer, reasoning, finish_reason } = await streamResponse({
res,
stream: response,
aiChatReasoning,
@ -215,9 +220,12 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText: answer,
reasoningText: reasoning
reasoningText: reasoning,
finish_reason
};
} else {
const finish_reason = response.choices?.[0]?.finish_reason as CompletionFinishReason;
const { content, reasoningContent } = (() => {
const content = response.choices?.[0]?.message?.content || '';
// @ts-ignore
@ -260,7 +268,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText: content,
reasoningText: reasoningContent
reasoningText: reasoningContent,
finish_reason
};
}
})();
@ -303,7 +312,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
maxToken: max_tokens,
reasoningText,
historyPreview: getHistoryPreview(chatCompleteMessages, 10000, aiChatVision),
contextTotalLen: completeMessages.length
contextTotalLen: completeMessages.length,
finishReason: finish_reason
},
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
{
@ -528,15 +538,18 @@ async function streamResponse({
});
let answer = '';
let reasoning = '';
let finish_reason: CompletionFinishReason = null;
const { parsePart, getStartTagBuffer } = parseReasoningStreamContent();
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();
finish_reason = 'close';
break;
}
const [reasoningContent, content] = parsePart(part, parseThinkTag);
const { reasoningContent, content, finishReason } = parsePart(part, parseThinkTag);
finish_reason = finish_reason || finishReason;
answer += content;
reasoning += reasoningContent;
@ -575,5 +588,5 @@ async function streamResponse({
}
}
return { answer, reasoning };
return { answer, reasoning, finish_reason };
}

View File

@ -130,6 +130,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
timezone,
externalProvider,
stream = false,
version = 'v1',
...props
} = data;
@ -626,6 +627,21 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
};
})();
// Response node response
if (
version === 'v2' &&
!props.isToolCall &&
!props.runningAppInfo.isChildApp &&
formatResponseData
) {
props.workflowStreamResponse?.({
event: SseResponseEventEnum.flowNodeResponse,
data: {
...formatResponseData
}
});
}
// Add output default value
node.outputs.forEach((item) => {
if (!item.required) return;

View File

@ -53,7 +53,8 @@ export const getWorkflowResponseWrite = ({
[SseResponseEventEnum.toolCall]: 1,
[SseResponseEventEnum.toolParams]: 1,
[SseResponseEventEnum.toolResponse]: 1,
[SseResponseEventEnum.updateVariables]: 1
[SseResponseEventEnum.updateVariables]: 1,
[SseResponseEventEnum.flowNodeResponse]: 1
};
if (!detail && detailEvent[event]) return;

View File

@ -7,6 +7,7 @@
"@xmldom/xmldom": "^0.8.10",
"@zilliz/milvus2-sdk-node": "2.4.2",
"axios": "^1.8.2",
"bullmq": "^5.44.0",
"chalk": "^5.3.0",
"cheerio": "1.0.0-rc.12",
"cookie": "^0.7.1",
@ -18,6 +19,7 @@
"file-type": "^19.0.0",
"form-data": "^4.0.0",
"iconv-lite": "^0.6.3",
"ioredis": "^5.6.0",
"joplin-turndown-plugin-gfm": "^1.0.12",
"json5": "^2.2.3",
"jsonpath-plus": "^10.3.0",
@ -27,7 +29,7 @@
"mongoose": "^8.10.1",
"multer": "1.4.5-lts.1",
"mysql2": "^3.11.3",
"next": "14.2.25",
"next": "14.2.26",
"nextjs-cors": "^2.2.0",
"node-cron": "^3.0.3",
"node-xlsx": "^0.24.0",

View File

@ -1,7 +1,7 @@
{
"extends":"../../tsconfig.json",
"extends": "../../tsconfig.json",
"compilerOptions": {
"baseUrl": "."
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../**/*.d.ts"]
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../../**/*.d.ts"]
}

View File

@ -67,6 +67,7 @@ export const iconPaths = {
'common/list': () => import('./icons/common/list.svg'),
'common/loading': () => import('./icons/common/loading.svg'),
'common/logLight': () => import('./icons/common/logLight.svg'),
'common/maximize': () => import('./icons/common/maximize.svg'),
'common/microsoft': () => import('./icons/common/microsoft.svg'),
'common/model': () => import('./icons/common/model.svg'),
'common/monitor': () => import('./icons/common/monitor.svg'),
@ -85,6 +86,7 @@ export const iconPaths = {
'common/rightArrowFill': () => import('./icons/common/rightArrowFill.svg'),
'common/rightArrowLight': () => import('./icons/common/rightArrowLight.svg'),
'common/routePushLight': () => import('./icons/common/routePushLight.svg'),
'common/running': () => import('./icons/common/running.svg'),
'common/saveFill': () => import('./icons/common/saveFill.svg'),
'common/searchLight': () => import('./icons/common/searchLight.svg'),
'common/select': () => import('./icons/common/select.svg'),
@ -181,6 +183,7 @@ export const iconPaths = {
'core/chat/feedback/goodLight': () => import('./icons/core/chat/feedback/goodLight.svg'),
'core/chat/fileSelect': () => import('./icons/core/chat/fileSelect.svg'),
'core/chat/finishSpeak': () => import('./icons/core/chat/finishSpeak.svg'),
'core/chat/backText':() => import('./icons/core/chat/backText.svg'),
'core/chat/imgSelect': () => import('./icons/core/chat/imgSelect.svg'),
'core/chat/quoteFill': () => import('./icons/core/chat/quoteFill.svg'),
'core/chat/quoteSign': () => import('./icons/core/chat/quoteSign.svg'),

View File

@ -1,9 +1,9 @@
<svg viewBox="0 0 13 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="icon/line/change">
<g id="Vector">
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.23479 4.71964C3.23479 4.4435 3.45864 4.21964 3.73479 4.21964L11.0348 4.21964C11.3109 4.21964 11.5348 4.4435 11.5348 4.71964C11.5348 4.99579 11.3109 5.21964 11.0348 5.21964L3.73479 5.21964C3.45864 5.21964 3.23479 4.99579 3.23479 4.71964Z" fill="#3370FF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.70133 2.38619C8.89659 2.19093 9.21317 2.19093 9.40843 2.38619L11.3883 4.36609C11.5836 4.56135 11.5836 4.87794 11.3883 5.0732C11.1931 5.26846 10.8765 5.26846 10.6812 5.0732L8.70133 3.0933C8.50607 2.89804 8.50607 2.58145 8.70133 2.38619Z" fill="#3370FF"/>
<path d="M1.84361 6.81774C1.78456 6.84214 1.72923 6.87834 1.68124 6.92633C1.63324 6.97433 1.59704 7.02965 1.57264 7.08871C1.54825 7.1476 1.53479 7.21217 1.53479 7.27989C1.53479 7.34768 1.54828 7.41232 1.57273 7.47128C1.59639 7.52847 1.63112 7.58215 1.67692 7.62907C1.67852 7.63071 1.68013 7.63234 1.68176 7.63396L3.66114 9.61334C3.8564 9.8086 4.17298 9.8086 4.36824 9.61334C4.5635 9.41808 4.5635 9.10149 4.36824 8.90623L3.2419 7.77989L9.33479 7.77989C9.61093 7.77989 9.83479 7.55603 9.83479 7.27989C9.83479 7.00374 9.61093 6.77989 9.33479 6.77989H2.03479C2.03325 6.77989 2.03171 6.77989 2.03017 6.77991C1.96414 6.7805 1.90117 6.7939 1.84361 6.81774Z" fill="#3370FF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.23479 4.71964C3.23479 4.4435 3.45864 4.21964 3.73479 4.21964L11.0348 4.21964C11.3109 4.21964 11.5348 4.4435 11.5348 4.71964C11.5348 4.99579 11.3109 5.21964 11.0348 5.21964L3.73479 5.21964C3.45864 5.21964 3.23479 4.99579 3.23479 4.71964Z"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.70133 2.38619C8.89659 2.19093 9.21317 2.19093 9.40843 2.38619L11.3883 4.36609C11.5836 4.56135 11.5836 4.87794 11.3883 5.0732C11.1931 5.26846 10.8765 5.26846 10.6812 5.0732L8.70133 3.0933C8.50607 2.89804 8.50607 2.58145 8.70133 2.38619Z"/>
<path d="M1.84361 6.81774C1.78456 6.84214 1.72923 6.87834 1.68124 6.92633C1.63324 6.97433 1.59704 7.02965 1.57264 7.08871C1.54825 7.1476 1.53479 7.21217 1.53479 7.27989C1.53479 7.34768 1.54828 7.41232 1.57273 7.47128C1.59639 7.52847 1.63112 7.58215 1.67692 7.62907C1.67852 7.63071 1.68013 7.63234 1.68176 7.63396L3.66114 9.61334C3.8564 9.8086 4.17298 9.8086 4.36824 9.61334C4.5635 9.41808 4.5635 9.10149 4.36824 8.90623L3.2419 7.77989L9.33479 7.77989C9.61093 7.77989 9.83479 7.55603 9.83479 7.27989C9.83479 7.00374 9.61093 6.77989 9.33479 6.77989H2.03479C2.03325 6.77989 2.03171 6.77989 2.03017 6.77991C1.96414 6.7805 1.90117 6.7939 1.84361 6.81774Z"/>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@ -1,3 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 11 10" fill="none">
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.82531 2.05806C9.06939 1.81398 9.46512 1.81398 9.70919 2.05806C9.95327 2.30214 9.95327 2.69786 9.7092 2.94194L5.12586 7.52528C4.88178 7.76935 4.48606 7.76935 4.24198 7.52528L2.15864 5.44194C1.91457 5.19786 1.91457 4.80214 2.15864 4.55806C2.40272 4.31398 2.79845 4.31398 3.04253 4.55806L4.68392 6.19945L8.82531 2.05806Z" />
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 12 12" fill="none">
<path fill-rule="evenodd" clip-rule="evenodd" d="M9.46964 2.46967C9.76253 2.17678 10.2374 2.17678 10.5303 2.46967C10.8232 2.76256 10.8232 3.23744 10.5303 3.53033L5.0303 9.03033C4.73741 9.32322 4.26253 9.32322 3.96964 9.03033L1.46964 6.53033C1.17675 6.23744 1.17675 5.76256 1.46964 5.46967C1.76253 5.17678 2.23741 5.17678 2.5303 5.46967L4.49997 7.43934L9.46964 2.46967Z" />
</svg>

Before

Width:  |  Height:  |  Size: 456 B

After

Width:  |  Height:  |  Size: 454 B

View File

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 10 11" fill="none">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.24998 2.22347C6.01986 2.22347 5.83331 2.03692 5.83331 1.8068C5.83331 1.57668 6.01986 1.39014 6.24998 1.39014H8.74998C8.9801 1.39014 9.16665 1.57668 9.16665 1.8068V4.3068C9.16665 4.53692 8.9801 4.72347 8.74998 4.72347C8.51986 4.72347 8.33331 4.53692 8.33331 4.3068V2.81273L6.12794 5.0181C5.96522 5.18082 5.7014 5.18082 5.53869 5.0181C5.37597 4.85538 5.37597 4.59156 5.53869 4.42884L7.74406 2.22347H6.24998ZM4.46127 6.09551C4.62399 6.25823 4.62399 6.52205 4.46127 6.68476L2.2559 8.89014H3.74998C3.9801 8.89014 4.16665 9.07669 4.16665 9.3068C4.16665 9.53692 3.9801 9.72347 3.74998 9.72347H1.24998C1.13947 9.72347 1.03349 9.67957 0.955352 9.60143C0.877212 9.52329 0.833313 9.41731 0.833313 9.3068L0.833313 6.8068C0.833313 6.57668 1.01986 6.39014 1.24998 6.39014C1.4801 6.39014 1.66665 6.57668 1.66665 6.8068L1.66665 8.30088L3.87202 6.09551C4.03474 5.93279 4.29856 5.93279 4.46127 6.09551Z" />
</svg>

After

Width:  |  Height:  |  Size: 1022 B

View File

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="none">
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.42335 9.70025C3.42335 6.06906 6.36701 3.1254 9.9982 3.1254C11.8153 3.1254 13.6728 3.9104 14.9391 5.32281C16.1863 6.71386 16.8964 8.75372 16.3686 11.3689C16.2731 11.8419 16.5792 12.3028 17.0522 12.3982C17.5252 12.4937 17.9861 12.1876 18.0815 11.7146C18.7147 8.57736 17.8683 5.97217 16.2402 4.15628C14.6313 2.36174 12.2949 1.37793 9.9982 1.37793C5.40191 1.37793 1.67588 5.10396 1.67588 9.70025C1.67588 11.6523 2.5328 14.2506 4.35082 15.8991L3.75846 15.8976C3.27592 15.8964 2.88375 16.2866 2.88253 16.7692C2.88132 17.2517 3.27152 17.6439 3.75407 17.6451L6.84117 17.6529C7.17714 17.6537 7.4693 17.4648 7.6162 17.1871C7.69971 17.0534 7.74807 16.8955 7.74833 16.7262L7.75314 13.6037C7.75388 13.1211 7.3633 12.7293 6.88075 12.7286C6.3982 12.7278 6.00641 13.1184 6.00567 13.601L6.00354 14.9858C4.32556 13.8215 3.42335 11.4678 3.42335 9.70025ZM16.8177 14.5519C16.8177 15.1962 16.2954 15.7185 15.651 15.7185C15.0067 15.7185 14.4844 15.1962 14.4844 14.5519C14.4844 13.9075 15.0067 13.3852 15.651 13.3852C16.2954 13.3852 16.8177 13.9075 16.8177 14.5519ZM12.6957 17.7912C13.3401 17.7912 13.8624 17.2689 13.8624 16.6246C13.8624 15.9802 13.3401 15.4579 12.6957 15.4579C12.0514 15.4579 11.5291 15.9802 11.5291 16.6246C11.5291 17.2689 12.0514 17.7912 12.6957 17.7912Z" fill="#3370FF"/>
</svg>

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -0,0 +1,4 @@
<svg
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" width="200" height="200">
<path d="M512 74.666667C270.933333 74.666667 74.666667 270.933333 74.666667 512S270.933333 949.333333 512 949.333333 949.333333 753.066667 949.333333 512 753.066667 74.666667 512 74.666667z m0 810.666666c-204.8 0-373.333333-168.533333-373.333333-373.333333S307.2 138.666667 512 138.666667 885.333333 307.2 885.333333 512 716.8 885.333333 512 885.333333z" fill="#666666"></path>
<path d="M448 437.333333c17.066667 0 32-14.933333 32-32v-42.666666c0-17.066667-14.933333-32-32-32s-32 14.933333-32 32v42.666666c0 17.066667 14.933333 32 32 32zM576 437.333333c17.066667 0 32-14.933333 32-32v-42.666666c0-17.066667-14.933333-32-32-32s-32 14.933333-32 32v42.666666c0 17.066667 14.933333 32 32 32zM320 437.333333c17.066667 0 32-14.933333 32-32v-42.666666c0-17.066667-14.933333-32-32-32s-32 14.933333-32 32v42.666666c0 17.066667 14.933333 32 32 32zM704 330.666667c-17.066667 0-32 14.933333-32 32v42.666666c0 17.066667 14.933333 32 32 32s32-14.933333 32-32v-42.666666c0-17.066667-14.933333-32-32-32zM448 586.666667c17.066667 0 32-14.933333 32-32v-42.666667c0-17.066667-14.933333-32-32-32s-32 14.933333-32 32v42.666667c0 17.066667 14.933333 32 32 32zM576 586.666667c17.066667 0 32-14.933333 32-32v-42.666667c0-17.066667-14.933333-32-32-32s-32 14.933333-32 32v42.666667c0 17.066667 14.933333 32 32 32zM352 554.666667v-42.666667c0-17.066667-14.933333-32-32-32s-32 14.933333-32 32v42.666667c0 17.066667 14.933333 32 32 32s32-14.933333 32-32zM704 480c-17.066667 0-32 14.933333-32 32v42.666667c0 17.066667 14.933333 32 32 32s32-14.933333 32-32v-42.666667c0-17.066667-14.933333-32-32-32zM682.666667 650.666667H341.333333c-17.066667 0-32 14.933333-32 32s14.933333 32 32 32h341.333334c17.066667 0 32-14.933333 32-32s-14.933333-32-32-32z" fill="#666666" ></path></svg>

After

Width:  |  Height:  |  Size: 1.8 KiB

View File

@ -26,23 +26,43 @@ const MyNumberInput = (props: Props) => {
<NumberInput
{...restProps}
onBlur={(e) => {
if (!onBlur) return;
const numE = Number(e.target.value);
if (isNaN(numE)) {
// @ts-ignore
onBlur('');
} else {
onBlur(numE);
if (onBlur) {
if (isNaN(numE)) {
// @ts-ignore
onBlur('');
} else {
onBlur(numE);
}
}
if (register && name) {
const event = {
target: {
name,
value: numE
}
};
register(name).onBlur(event);
}
}}
onChange={(e) => {
if (!onChange) return;
const numE = Number(e);
if (isNaN(numE)) {
// @ts-ignore
onChange('');
} else {
onChange(numE);
if (onChange) {
if (isNaN(numE)) {
// @ts-ignore
onChange('');
} else {
onChange(numE);
}
}
if (register && name) {
const event = {
target: {
name,
value: numE
}
};
register(name).onChange(event);
}
}}
>

View File

@ -1,5 +1,5 @@
import React, { useMemo } from 'react';
import { Box, Flex, type FlexProps } from '@chakra-ui/react';
import { Box, BoxProps, Flex, type FlexProps } from '@chakra-ui/react';
type ColorSchemaType = 'white' | 'blue' | 'green' | 'red' | 'yellow' | 'gray' | 'purple' | 'adora';
@ -8,6 +8,7 @@ export type TagProps = FlexProps & {
colorSchema?: ColorSchemaType;
type?: 'fill' | 'borderFill' | 'borderSolid';
showDot?: boolean;
DotStyles?: BoxProps;
};
const colorMap: Record<
@ -60,7 +61,14 @@ const colorMap: Record<
}
};
const MyTag = ({ children, colorSchema = 'blue', type = 'fill', showDot, ...props }: TagProps) => {
const MyTag = ({
children,
colorSchema = 'blue',
type = 'fill',
showDot,
DotStyles,
...props
}: TagProps) => {
const theme = useMemo(() => {
return colorMap[colorSchema];
}, [colorSchema]);
@ -81,7 +89,9 @@ const MyTag = ({ children, colorSchema = 'blue', type = 'fill', showDot, ...prop
bg={type !== 'borderSolid' ? theme.bg : 'transparent'}
{...props}
>
{showDot && <Box w={1.5} h={1.5} borderRadius={'md'} bg={theme.color} mr={1.5}></Box>}
{showDot && (
<Box w={1.5} h={1.5} borderRadius={'md'} bg={theme.color} mr={1.5} {...DotStyles}></Box>
)}
{children}
</Flex>
);

View File

@ -308,7 +308,13 @@ export function useScrollPagination<
);
return (
<MyBox ref={ref} h={'100%'} overflow={'auto'} isLoading={isLoading} {...props}>
<MyBox
ref={ref}
h={'100%'}
overflow={'auto'}
isLoading={isLoading || isLoadingProp}
{...props}
>
{scrollLoadType === 'top' && total > 0 && isLoading && (
<Box mt={2} fontSize={'xs'} color={'blackAlpha.500'} textAlign={'center'}>
{t('common:common.is_requesting')}

View File

@ -3,6 +3,7 @@
"Delete_all": "Clear All Lexicon",
"LLM_model_response_empty": "The model flow response is empty, please check whether the model flow output is normal.",
"ai_reasoning": "Thinking process",
"back_to_text": "Text input",
"chat.quote.No Data": "The file cannot be found",
"chat.quote.deleted": "This data has been deleted ~",
"chat_history": "Conversation History",
@ -10,12 +11,22 @@
"chat_test_app": "Debug-{{name}}",
"citations": "{{num}} References",
"click_contextual_preview": "Click to see contextual preview",
"completion_finish_close": "Disconnection",
"completion_finish_content_filter": "Trigger safe wind control",
"completion_finish_function_call": "Function Calls",
"completion_finish_length": "Reply limit exceeded",
"completion_finish_null": "unknown",
"completion_finish_reason": "Reason for completion",
"completion_finish_stop": "Completed normally",
"completion_finish_tool_calls": "Tool calls",
"config_input_guide": "Set Up Input Guide",
"config_input_guide_lexicon": "Set Up Lexicon",
"config_input_guide_lexicon_title": "Set Up Lexicon",
"content_empty": "No Content",
"contextual": "{{num}} Contexts",
"contextual_preview": "Contextual Preview {{num}} Items",
"core.chat.moveCancel": "Swipe to Cancel",
"core.chat.shortSpeak": "Speaking Time is Too Short",
"csv_input_lexicon_tip": "Only CSV batch import is supported, click to download the template",
"custom_input_guide_url": "Custom Lexicon URL",
"data_source": "Source Dataset: {{name}}",
@ -41,11 +52,14 @@
"not_query": "Missing query content",
"not_select_file": "No file selected",
"plugins_output": "Plugin Output",
"press_to_speak": "Hold down to speak",
"query_extension_IO_tokens": "Problem Optimization Input/Output Tokens",
"query_extension_result": "Problem optimization results",
"question_tip": "From top to bottom, the response order of each module",
"read_raw_source": "Open the original text",
"reasoning_text": "Thinking process",
"release_cancel": "Release Cancel",
"release_send": "Release send, slide up to cancel",
"response.child total points": "Sub-workflow point consumption",
"response.dataset_concat_length": "Combined total",
"response.node_inputs": "Node Inputs",

View File

@ -1,5 +1,6 @@
{
"App": "Application",
"Click_to_expand": "Click to expand",
"Download": "Download",
"Export": "Export",
"FAQ.ai_point_a": "Each time you use the AI model, a certain amount of AI points will be deducted. For detailed calculation standards, please refer to the 'AI Points Calculation Standards' above.\nToken calculation uses the same formula as GPT-3.5, where 1 Token ≈ 0.7 Chinese characters ≈ 0.9 English words. Consecutive characters may be considered as 1 Token.",
@ -511,7 +512,7 @@
"core.dataset.Query extension intro": "Enabling the question optimization function can improve the accuracy of Dataset searches during continuous conversations. After enabling this function, when performing Dataset searches, the AI will complete the missing information of the question based on the conversation history.",
"core.dataset.Quote Length": "Quote Content Length",
"core.dataset.Read Dataset": "View Dataset Details",
"core.dataset.Set Website Config": "Start Configuring Website Information",
"core.dataset.Set Website Config": "Start Configuring",
"core.dataset.Start export": "Export Started",
"core.dataset.Table collection": "Table Dataset",
"core.dataset.Text collection": "Text Dataset",
@ -527,7 +528,6 @@
"core.dataset.collection.Website Empty Tip": "No Website Associated Yet",
"core.dataset.collection.Website Link": "Website Address",
"core.dataset.collection.id": "Collection ID",
"core.dataset.collection.metadata.Chunk Size": "Chunk Size",
"core.dataset.collection.metadata.Createtime": "Creation Time",
"core.dataset.collection.metadata.Raw text length": "Raw Text Length",
"core.dataset.collection.metadata.Updatetime": "Update Time",
@ -538,6 +538,7 @@
"core.dataset.collection.metadata.source name": "Source Name",
"core.dataset.collection.metadata.source size": "Source Size",
"core.dataset.collection.status.active": "Ready",
"core.dataset.collection.status.error": "Error",
"core.dataset.collection.sync.result.sameRaw": "Content Unchanged, No Update Needed",
"core.dataset.collection.sync.result.success": "Sync Started",
"core.dataset.data.Data Content": "Related Data Content",
@ -628,6 +629,7 @@
"core.dataset.search.search mode": "Search Method",
"core.dataset.status.active": "Ready",
"core.dataset.status.syncing": "Syncing",
"core.dataset.status.waiting": "Waiting",
"core.dataset.test.Batch test": "Batch Test",
"core.dataset.test.Batch test Placeholder": "Select a CSV File",
"core.dataset.test.Search Test": "Search Test",

View File

@ -7,6 +7,7 @@
"auto_indexes_tips": "Additional index generation is performed through large models to improve semantic richness and improve retrieval accuracy.",
"auto_training_queue": "Enhanced index queueing",
"chunk_max_tokens": "max_tokens",
"chunk_size": "Block size",
"close_auto_sync": "Are you sure you want to turn off automatic sync?",
"collection.Create update time": "Creation/Update Time",
"collection.Training type": "Training",
@ -28,9 +29,24 @@
"custom_data_process_params_desc": "Customize data processing rules",
"custom_split_sign_tip": "Allows you to chunk according to custom delimiters. \nUsually used for processed data, using specific separators for precise chunking. \nYou can use the | symbol to represent multiple splitters, such as: \".|.\" to represent a period in Chinese and English.\n\nTry to avoid using special symbols related to regular, such as: * () [] {}, etc.",
"data_amount": "{{dataAmount}} Datas, {{indexAmount}} Indexes",
"data_error_amount": "{{errorAmount}} Group training exception",
"data_index_num": "Index {{index}}",
"data_process_params": "Params",
"data_process_setting": "Processing config",
"dataset.Chunk_Number": "Block number",
"dataset.Completed": "Finish",
"dataset.Delete_Chunk": "delete",
"dataset.Edit_Chunk": "edit",
"dataset.Error_Message": "Report an error message",
"dataset.No_Error": "No exception information yet",
"dataset.Operation": "operate",
"dataset.ReTrain": "Retrain",
"dataset.Training Process": "Training status",
"dataset.Training_Count": "{{count}} Group training",
"dataset.Training_Errors": "Errors",
"dataset.Training_QA": "{{count}} Group Q&A pair training",
"dataset.Training_Status": "Training status",
"dataset.Training_Waiting": "Need to wait for {{count}} group data",
"dataset.Unsupported operation": "dataset.Unsupported operation",
"dataset.no_collections": "No datasets available",
"dataset.no_tags": "No tags available",
@ -55,6 +71,7 @@
"image_auto_parse": "Automatic image indexing",
"image_auto_parse_tips": "Call VLM to automatically label the pictures in the document and generate additional search indexes",
"image_training_queue": "Queue of image processing",
"immediate_sync": "Immediate Synchronization",
"import.Auto mode Estimated Price Tips": "The text understanding model needs to be called, which requires more points: {{price}} points/1K tokens",
"import.Embedding Estimated Price Tips": "Only use the index model and consume a small amount of AI points: {{price}} points/1K tokens",
"import_confirm": "Confirm upload",
@ -71,6 +88,7 @@
"keep_image": "Keep the picture",
"move.hint": "After moving, the selected knowledge base/folder will inherit the permission settings of the new folder, and the original permission settings will become invalid.",
"open_auto_sync": "After scheduled synchronization is turned on, the system will try to synchronize the collection from time to time every day. During the collection synchronization period, the collection data will not be searched.",
"params_config": "Config",
"params_setting": "Parameter settings",
"pdf_enhance_parse": "PDF enhancement analysis",
"pdf_enhance_parse_price": "{{price}} points/page",
@ -82,6 +100,13 @@
"preview_chunk_empty": "Unable to read the contents of the file",
"preview_chunk_intro": "A total of {{total}} blocks, up to 10",
"preview_chunk_not_selected": "Click on the file on the left to preview",
"process.Auto_Index": "Automatic index generation",
"process.Get QA": "Q&A extraction",
"process.Image_Index": "Image index generation",
"process.Is_Ready": "Ready",
"process.Parsing": "Parsing",
"process.Vectorizing": "Index vectorization",
"process.Waiting": "Queue",
"rebuild_embedding_start_tip": "Index model switching task has started",
"rebuilding_index_count": "Number of indexes being rebuilt: {{count}}",
"request_headers": "Request headers, will automatically append 'Bearer '",
@ -99,6 +124,7 @@
"split_sign_question": "question mark",
"split_sign_semicolon": "semicolon",
"start_sync_website_tip": "Confirm to start synchronizing data? \nThe old data will be deleted and retrieved again, please confirm!",
"status_error": "Running exception",
"sync_collection_failed": "Synchronization collection error, please check whether the source file can be accessed normally",
"sync_schedule": "Timing synchronization",
"sync_schedule_tip": "Only existing collections will be synchronized. \nIncludes linked collections and all collections in the API knowledge base. \nThe system will poll for updates every day, and the specific update time cannot be determined.",
@ -114,11 +140,15 @@
"tag.total_tags": "Total {{total}} tags",
"the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "The Dataset has indexes that are being trained or rebuilt",
"total_num_files": "Total {{total}} files",
"training.Error": "{{count}} Group exception",
"training.Normal": "Normal",
"training_mode": "Chunk mode",
"training_ready": "{{count}} Group",
"vector_model_max_tokens_tip": "Each chunk of data has a maximum length of 3000 tokens",
"vllm_model": "Image understanding model",
"website_dataset": "Website Sync",
"website_dataset_desc": "Website sync allows you to build a Dataset directly using a web link.",
"website_info": "Website Information",
"yuque_dataset": "Yuque Dataset",
"yuque_dataset_config": "Yuque Dataset Config",
"yuque_dataset_desc": "Can build a dataset using Yuque documents by configuring permissions, without secondary storage"

View File

@ -3,6 +3,7 @@
"Delete_all": "清空词库",
"LLM_model_response_empty": "模型流响应为空,请检查模型流输出是否正常",
"ai_reasoning": "思考过程",
"back_to_text": "返回输入",
"chat.quote.No Data": "找不到该文件",
"chat.quote.deleted": "该数据已被删除~",
"chat_history": "聊天记录",
@ -10,12 +11,22 @@
"chat_test_app": "调试-{{name}}",
"citations": "{{num}}条引用",
"click_contextual_preview": "点击查看上下文预览",
"completion_finish_close": "连接断开",
"completion_finish_content_filter": "触发安全风控",
"completion_finish_function_call": "函数调用",
"completion_finish_length": "超出回复限制",
"completion_finish_null": "未知",
"completion_finish_reason": "完成原因",
"completion_finish_stop": "正常完成",
"completion_finish_tool_calls": "工具调用",
"config_input_guide": "配置输入引导",
"config_input_guide_lexicon": "配置词库",
"config_input_guide_lexicon_title": "配置词库",
"content_empty": "内容为空",
"contextual": "{{num}}条上下文",
"contextual_preview": "上下文预览 {{num}} 条",
"core.chat.moveCancel": "上滑取消",
"core.chat.shortSpeak": "说话时间太短",
"csv_input_lexicon_tip": "仅支持 CSV 批量导入,点击下载模板",
"custom_input_guide_url": "自定义词库地址",
"data_source": "来源知识库: {{name}}",
@ -41,11 +52,14 @@
"not_query": "缺少查询内容",
"not_select_file": "未选择文件",
"plugins_output": "插件输出",
"press_to_speak": "按住说话",
"query_extension_IO_tokens": "问题优化输入/输出 Tokens",
"query_extension_result": "问题优化结果",
"question_tip": "从上到下,为各个模块的响应顺序",
"read_raw_source": "打开原文",
"reasoning_text": "思考过程",
"release_cancel": "松开取消",
"release_send": "松开发送,上滑取消",
"response.child total points": "子工作流积分消耗",
"response.dataset_concat_length": "合并后总数",
"response.node_inputs": "节点输入",

View File

@ -1,5 +1,6 @@
{
"App": "应用",
"Click_to_expand": "点击查看详情",
"Download": "下载",
"Export": "导出",
"FAQ.ai_point_a": "每次调用AI模型时都会消耗一定的AI积分。具体的计算标准可参考上方的“AI 积分计算标准”。\nToken计算采用GPT3.5相同公式1Token≈0.7中文字符≈0.9英文单词连续出现的字符可能被认为是1个Tokens。",
@ -514,7 +515,7 @@
"core.dataset.Query extension intro": "开启问题优化功能,可以提高提高连续对话时,知识库搜索的精度。开启该功能后,在进行知识库搜索时,会根据对话记录,利用 AI 补全问题缺失的信息。",
"core.dataset.Quote Length": "引用内容长度",
"core.dataset.Read Dataset": "查看知识库详情",
"core.dataset.Set Website Config": "开始配置网站信息",
"core.dataset.Set Website Config": "开始配置",
"core.dataset.Start export": "已开始导出",
"core.dataset.Table collection": "表格数据集",
"core.dataset.Text collection": "文本数据集",
@ -530,7 +531,6 @@
"core.dataset.collection.Website Empty Tip": "还没有关联网站",
"core.dataset.collection.Website Link": "Web 站点地址",
"core.dataset.collection.id": "集合 ID",
"core.dataset.collection.metadata.Chunk Size": "分割大小",
"core.dataset.collection.metadata.Createtime": "创建时间",
"core.dataset.collection.metadata.Raw text length": "原文长度",
"core.dataset.collection.metadata.Updatetime": "更新时间",
@ -541,6 +541,7 @@
"core.dataset.collection.metadata.source name": "来源名",
"core.dataset.collection.metadata.source size": "来源大小",
"core.dataset.collection.status.active": "已就绪",
"core.dataset.collection.status.error": "训练异常",
"core.dataset.collection.sync.result.sameRaw": "内容未变动,无需更新",
"core.dataset.collection.sync.result.success": "开始同步",
"core.dataset.data.Data Content": "相关数据内容",
@ -631,6 +632,7 @@
"core.dataset.search.search mode": "搜索方式",
"core.dataset.status.active": "已就绪",
"core.dataset.status.syncing": "同步中",
"core.dataset.status.waiting": "排队中",
"core.dataset.test.Batch test": "批量测试",
"core.dataset.test.Batch test Placeholder": "选择一个 CSV 文件",
"core.dataset.test.Search Test": "搜索测试",
@ -1289,4 +1291,4 @@
"yes": "是",
"yesterday": "昨天",
"yesterday_detail_time": "昨天 {{time}}"
}
}

View File

@ -7,6 +7,7 @@
"auto_indexes_tips": "通过大模型进行额外索引生成,提高语义丰富度,提高检索的精度。",
"auto_training_queue": "增强索引排队",
"chunk_max_tokens": "分块上限",
"chunk_size": "分块大小",
"close_auto_sync": "确认关闭自动同步功能?",
"collection.Create update time": "创建/更新时间",
"collection.Training type": "训练模式",
@ -28,9 +29,24 @@
"custom_data_process_params_desc": "自定义设置数据处理规则",
"custom_split_sign_tip": "允许你根据自定义的分隔符进行分块。通常用于已处理好的数据,使用特定的分隔符来精确分块。可以使用 | 符号表示多个分割符,例如:“。|.” 表示中英文句号。\n尽量避免使用正则相关特殊符号例如: * () [] {} 等。",
"data_amount": "{{dataAmount}} 组数据, {{indexAmount}} 组索引",
"data_error_amount": "{{errorAmount}} 组训练异常",
"data_index_num": "索引 {{index}}",
"data_process_params": "处理参数",
"data_process_setting": "数据处理配置",
"dataset.Chunk_Number": "分块号",
"dataset.Completed": "完成",
"dataset.Delete_Chunk": "删除",
"dataset.Edit_Chunk": "编辑",
"dataset.Error_Message": "报错信息",
"dataset.No_Error": "暂无异常信息",
"dataset.Operation": "操作",
"dataset.ReTrain": "重试",
"dataset.Training Process": "训练状态",
"dataset.Training_Count": "{{count}} 组训练中",
"dataset.Training_Errors": "异常 ({{count}})",
"dataset.Training_QA": "{{count}} 组问答对训练中",
"dataset.Training_Status": "训练状态",
"dataset.Training_Waiting": "需等待 {{count}} 组数据",
"dataset.Unsupported operation": "操作不支持",
"dataset.no_collections": "暂无数据集",
"dataset.no_tags": "暂无标签",
@ -55,6 +71,7 @@
"image_auto_parse": "图片自动索引",
"image_auto_parse_tips": "调用 VLM 自动标注文档里的图片,并生成额外的检索索引",
"image_training_queue": "图片处理排队",
"immediate_sync": "立即同步",
"import.Auto mode Estimated Price Tips": "需调用文本理解模型需要消耗较多AI 积分:{{price}} 积分/1K tokens",
"import.Embedding Estimated Price Tips": "仅使用索引模型,消耗少量 AI 积分:{{price}} 积分/1K tokens",
"import_confirm": "确认上传",
@ -71,6 +88,7 @@
"keep_image": "保留图片",
"move.hint": "移动后,所选知识库/文件夹将继承新文件夹的权限设置,原先的权限设置失效。",
"open_auto_sync": "开启定时同步后,系统将会每天不定时尝试同步集合,集合同步期间,会出现无法搜索到该集合数据现象。",
"params_config": "配置",
"params_setting": "参数设置",
"pdf_enhance_parse": "PDF增强解析",
"pdf_enhance_parse_price": "{{price}}积分/页",
@ -82,6 +100,14 @@
"preview_chunk_empty": "无法读取该文件内容",
"preview_chunk_intro": "共 {{total}} 个分块,最多展示 10 个",
"preview_chunk_not_selected": "点击左侧文件后进行预览",
"process.Auto_Index": "自动索引生成",
"process.Get QA": "问答对提取",
"process.Image_Index": "图片索引生成",
"process.Is_Ready": "已就绪",
"process.Is_Ready_Count": "{{count}} 组已就绪",
"process.Parsing": "内容解析中",
"process.Vectorizing": "索引向量化",
"process.Waiting": "排队中",
"rebuild_embedding_start_tip": "切换索引模型任务已开始",
"rebuilding_index_count": "重建中索引数量:{{count}}",
"request_headers": "请求头参数,会自动补充 Bearer",
@ -99,6 +125,7 @@
"split_sign_question": "问号",
"split_sign_semicolon": "分号",
"start_sync_website_tip": "确认开始同步数据?将会删除旧数据后重新获取,请确认!",
"status_error": "运行异常",
"sync_collection_failed": "同步集合错误,请检查是否能正常访问源文件",
"sync_schedule": "定时同步",
"sync_schedule_tip": "仅会同步已存在的集合。包括链接集合以及 API 知识库里所有集合。系统会每天进行轮询更新,无法确定具体的更新时间。",
@ -114,11 +141,15 @@
"tag.total_tags": "共{{total}}个标签",
"the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "知识库有训练中或正在重建的索引",
"total_num_files": "共 {{total}} 个文件",
"training.Error": "{{count}} 组异常",
"training.Normal": "正常",
"training_mode": "处理方式",
"training_ready": "{{count}} 组",
"vector_model_max_tokens_tip": "每个分块数据,最大长度为 3000 tokens",
"vllm_model": "图片理解模型",
"website_dataset": "Web 站点同步",
"website_dataset_desc": "Web 站点同步允许你直接使用一个网页链接构建知识库",
"website_info": "网站信息",
"yuque_dataset": "语雀知识库",
"yuque_dataset_config": "配置语雀知识库",
"yuque_dataset_desc": "可通过配置语雀文档权限,使用语雀文档构建知识库,文档不会进行二次存储"

View File

@ -3,6 +3,7 @@
"Delete_all": "清除所有詞彙",
"LLM_model_response_empty": "模型流程回應為空,請檢查模型流程輸出是否正常",
"ai_reasoning": "思考過程",
"back_to_text": "返回輸入",
"chat.quote.No Data": "找不到該文件",
"chat.quote.deleted": "該數據已被刪除~",
"chat_history": "對話紀錄",
@ -10,6 +11,14 @@
"chat_test_app": "調試-{{name}}",
"citations": "{{num}} 筆引用",
"click_contextual_preview": "點選檢視上下文預覽",
"completion_finish_close": "連接斷開",
"completion_finish_content_filter": "觸發安全風控",
"completion_finish_function_call": "函數調用",
"completion_finish_length": "超出回复限制",
"completion_finish_null": "未知",
"completion_finish_reason": "完成原因",
"completion_finish_stop": "正常完成",
"completion_finish_tool_calls": "工具調用",
"config_input_guide": "設定輸入導引",
"config_input_guide_lexicon": "設定詞彙庫",
"config_input_guide_lexicon_title": "設定詞彙庫",
@ -35,16 +44,20 @@
"is_chatting": "對話進行中...請稍候",
"items": "筆",
"module_runtime_and": "模組執行總時間",
"moveCancel": "上滑取消",
"multiple_AI_conversations": "多組 AI 對話",
"new_input_guide_lexicon": "新增詞彙庫",
"no_workflow_response": "無工作流程資料",
"not_query": "缺少查詢內容",
"not_select_file": "尚未選取檔案",
"plugins_output": "外掛程式輸出",
"press_to_speak": "按住說話",
"query_extension_IO_tokens": "問題優化輸入/輸出 Tokens",
"question_tip": "由上至下,各個模組的回應順序",
"read_raw_source": "打開原文",
"reasoning_text": "思考過程",
"release_cancel": "鬆開取消",
"release_send": "鬆開發送,上滑取消",
"response.child total points": "子工作流程點數消耗",
"response.dataset_concat_length": "合併總數",
"response.node_inputs": "節點輸入",
@ -53,6 +66,7 @@
"select_file": "上傳檔案",
"select_file_img": "上傳檔案 / 圖片",
"select_img": "上傳圖片",
"shortSpeak ": "說話時間太短",
"source_cronJob": "定時執行",
"stream_output": "串流輸出",
"to_dataset": "前往知識庫",

View File

@ -1,5 +1,6 @@
{
"App": "應用程式",
"Click_to_expand": "點擊查看詳情",
"Download": "下載",
"Export": "匯出",
"FAQ.ai_point_a": "每次呼叫 AI 模型時,都會消耗一定數量的 AI 點數。詳細的計算標準請參考上方的「AI 點數計算標準」。\nToken 計算採用與 GPT3.5 相同的公式1 Token ≈ 0.7 個中文字 ≈ 0.9 個英文單字,連續出現的字元可能會被視為 1 個 Token。",
@ -510,7 +511,7 @@
"core.dataset.Query extension intro": "開啟問題最佳化功能,可以提高連續對話時知識庫搜尋的準確度。開啟此功能後,在進行知識庫搜尋時,系統會根據對話記錄,利用 AI 補充問題中缺少的資訊。",
"core.dataset.Quote Length": "引用內容長度",
"core.dataset.Read Dataset": "檢視知識庫詳細資料",
"core.dataset.Set Website Config": "開始設定網站資訊",
"core.dataset.Set Website Config": "開始設定",
"core.dataset.Start export": "已開始匯出",
"core.dataset.Table collection": "表格資料集",
"core.dataset.Text collection": "文字資料集",
@ -526,7 +527,6 @@
"core.dataset.collection.Website Empty Tip": "還沒有關聯網站",
"core.dataset.collection.Website Link": "網站網址",
"core.dataset.collection.id": "集合 ID",
"core.dataset.collection.metadata.Chunk Size": "分割大小",
"core.dataset.collection.metadata.Createtime": "建立時間",
"core.dataset.collection.metadata.Raw text length": "原始文字長度",
"core.dataset.collection.metadata.Updatetime": "更新時間",
@ -537,6 +537,7 @@
"core.dataset.collection.metadata.source name": "來源名稱",
"core.dataset.collection.metadata.source size": "來源大小",
"core.dataset.collection.status.active": "已就緒",
"core.dataset.collection.status.error": "訓練異常",
"core.dataset.collection.sync.result.sameRaw": "內容未變更,無需更新",
"core.dataset.collection.sync.result.success": "開始同步",
"core.dataset.data.Data Content": "相關資料內容",
@ -627,6 +628,7 @@
"core.dataset.search.search mode": "搜索方式",
"core.dataset.status.active": "已就緒",
"core.dataset.status.syncing": "同步中",
"core.dataset.status.waiting": "排隊中",
"core.dataset.test.Batch test": "批次測試",
"core.dataset.test.Batch test Placeholder": "選擇一個 CSV 檔案",
"core.dataset.test.Search Test": "搜尋測試",

View File

@ -7,6 +7,7 @@
"auto_indexes_tips": "通過大模型進行額外索引生成,提高語義豐富度,提高檢索的精度。",
"auto_training_queue": "增強索引排隊",
"chunk_max_tokens": "分塊上限",
"chunk_size": "分塊大小",
"close_auto_sync": "確認關閉自動同步功能?",
"collection.Create update time": "建立/更新時間",
"collection.Training type": "分段模式",
@ -28,9 +29,24 @@
"custom_data_process_params_desc": "自訂資料處理規則",
"custom_split_sign_tip": "允許你根據自定義的分隔符進行分塊。\n通常用於已處理好的數據使用特定的分隔符來精確分塊。\n可以使用 | 符號表示多個分割符,例如:“。|.” 表示中英文句號。\n\n盡量避免使用正則相關特殊符號例如: * () [] {} 等。",
"data_amount": "{{dataAmount}} 組數據, {{indexAmount}} 組索引",
"data_error_amount": "{{errorAmount}} 組訓練異常",
"data_index_num": "索引 {{index}}",
"data_process_params": "處理參數",
"data_process_setting": "資料處理設定",
"dataset.Chunk_Number": "分塊號",
"dataset.Completed": "完成",
"dataset.Delete_Chunk": "刪除",
"dataset.Edit_Chunk": "編輯",
"dataset.Error_Message": "報錯信息",
"dataset.No_Error": "暫無異常信息",
"dataset.Operation": "操作",
"dataset.ReTrain": "重試",
"dataset.Training Process": "訓練狀態",
"dataset.Training_Count": "{{count}} 組訓練中",
"dataset.Training_Errors": "異常",
"dataset.Training_QA": "{{count}} 組問答對訓練中",
"dataset.Training_Status": "訓練狀態",
"dataset.Training_Waiting": "需等待 {{count}} 組數據",
"dataset.Unsupported operation": "操作不支持",
"dataset.no_collections": "尚無資料集",
"dataset.no_tags": "尚無標籤",
@ -55,6 +71,7 @@
"image_auto_parse": "圖片自動索引",
"image_auto_parse_tips": "調用 VLM 自動標註文檔裡的圖片,並生成額外的檢索索引",
"image_training_queue": "圖片處理排隊",
"immediate_sync": "立即同步",
"import.Auto mode Estimated Price Tips": "需呼叫文字理解模型,將消耗較多 AI 點數:{{price}} 點數 / 1K tokens",
"import.Embedding Estimated Price Tips": "僅使用索引模型,消耗少量 AI 點數:{{price}} 點數 / 1K tokens",
"import_confirm": "確認上傳",
@ -71,6 +88,7 @@
"keep_image": "保留圖片",
"move.hint": "移動後,所選資料集/資料夾將繼承新資料夾的權限設定,原先的權限設定將失效。",
"open_auto_sync": "開啟定時同步後,系統將每天不定時嘗試同步集合,集合同步期間,會出現無法搜尋到該集合資料現象。",
"params_config": "配置",
"params_setting": "參數設置",
"pdf_enhance_parse": "PDF增強解析",
"pdf_enhance_parse_price": "{{price}}積分/頁",
@ -82,6 +100,13 @@
"preview_chunk_empty": "無法讀取該文件內容",
"preview_chunk_intro": "共 {{total}} 個分塊,最多展示 10 個",
"preview_chunk_not_selected": "點擊左側文件後進行預覽",
"process.Auto_Index": "自動索引生成",
"process.Get QA": "問答對提取",
"process.Image_Index": "圖片索引生成",
"process.Is_Ready": "已就緒",
"process.Parsing": "內容解析中",
"process.Vectorizing": "索引向量化",
"process.Waiting": "排隊中",
"rebuild_embedding_start_tip": "切換索引模型任務已開始",
"rebuilding_index_count": "重建中索引數量:{{count}}",
"request_headers": "請求頭",
@ -99,6 +124,7 @@
"split_sign_question": "問號",
"split_sign_semicolon": "分號",
"start_sync_website_tip": "確認開始同步資料?\n將會刪除舊資料後重新獲取,請確認!",
"status_error": "運行異常",
"sync_collection_failed": "同步集合錯誤,請檢查是否能正常存取來源文件",
"sync_schedule": "定時同步",
"sync_schedule_tip": "只會同步已存在的集合。\n包括連結集合以及 API 知識庫裡所有集合。\n系統會每天進行輪詢更新,無法確定特定的更新時間。",
@ -114,11 +140,15 @@
"tag.total_tags": "共 {{total}} 個標籤",
"the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "資料集有索引正在訓練或重建中",
"total_num_files": "共 {{total}} 個文件",
"training.Error": "{{count}} 組異常",
"training.Normal": "正常",
"training_mode": "分段模式",
"training_ready": "{{count}} 組",
"vector_model_max_tokens_tip": "每個分塊數據,最大長度為 3000 tokens",
"vllm_model": "圖片理解模型",
"website_dataset": "網站同步",
"website_dataset_desc": "網站同步功能讓您可以直接使用網頁連結建立資料集",
"website_info": "網站資訊",
"yuque_dataset": "語雀知識庫",
"yuque_dataset_config": "配置語雀知識庫",
"yuque_dataset_desc": "可通過配置語雀文檔權限,使用語雀文檔構建知識庫,文檔不會進行二次存儲"

View File

@ -25,7 +25,7 @@ importers:
version: 13.3.0
next-i18next:
specifier: 15.4.2
version: 15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
version: 15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
prettier:
specifier: 3.2.4
version: 3.2.4
@ -75,8 +75,8 @@ importers:
specifier: ^5.1.3
version: 5.1.3
next:
specifier: 14.2.25
version: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
specifier: 14.2.26
version: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
openai:
specifier: 4.61.0
version: 4.61.0(encoding@0.1.13)(zod@3.24.2)
@ -169,6 +169,9 @@ importers:
axios:
specifier: ^1.8.2
version: 1.8.3
bullmq:
specifier: ^5.44.0
version: 5.44.0
chalk:
specifier: ^5.3.0
version: 5.4.1
@ -202,6 +205,9 @@ importers:
iconv-lite:
specifier: ^0.6.3
version: 0.6.3
ioredis:
specifier: ^5.6.0
version: 5.6.0
joplin-turndown-plugin-gfm:
specifier: ^1.0.12
version: 1.0.12
@ -230,11 +236,11 @@ importers:
specifier: ^3.11.3
version: 3.13.0
next:
specifier: 14.2.25
version: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
specifier: 14.2.26
version: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
nextjs-cors:
specifier: ^2.2.0
version: 2.2.0(next@14.2.25(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))
version: 2.2.0(next@14.2.26(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))
node-cron:
specifier: ^3.0.3
version: 3.0.3
@ -316,7 +322,7 @@ importers:
version: 2.1.1(@chakra-ui/system@2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1))(react@18.3.1)
'@chakra-ui/next-js':
specifier: 2.4.2
version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)
version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)
'@chakra-ui/react':
specifier: 2.10.7
version: 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@ -379,7 +385,7 @@ importers:
version: 4.17.21
next-i18next:
specifier: 15.4.2
version: 15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
version: 15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
papaparse:
specifier: ^5.4.1
version: 5.4.1
@ -440,7 +446,7 @@ importers:
version: 2.1.1(@chakra-ui/system@2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1))(react@18.3.1)
'@chakra-ui/next-js':
specifier: 2.4.2
version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)
version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)
'@chakra-ui/react':
specifier: 2.10.7
version: 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@ -535,11 +541,11 @@ importers:
specifier: ^5.1.3
version: 5.1.3
next:
specifier: 14.2.25
version: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
specifier: 14.2.26
version: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
next-i18next:
specifier: 15.4.2
version: 15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
version: 15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
nprogress:
specifier: ^0.2.0
version: 0.2.0
@ -2044,6 +2050,9 @@ packages:
resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==}
deprecated: Use @eslint/object-schema instead
'@ioredis/commands@1.2.0':
resolution: {integrity: sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==}
'@isaacs/cliui@8.0.2':
resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==}
engines: {node: '>=12'}
@ -2314,6 +2323,36 @@ packages:
'@mongodb-js/saslprep@1.2.0':
resolution: {integrity: sha512-+ywrb0AqkfaYuhHs6LxKWgqbh3I72EpEgESCw37o+9qPx9WTCkgDm2B+eMrwehGtHBWHFU4GXvnSCNiFhhausg==}
'@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3':
resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==}
cpu: [arm64]
os: [darwin]
'@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3':
resolution: {integrity: sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==}
cpu: [x64]
os: [darwin]
'@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3':
resolution: {integrity: sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==}
cpu: [arm64]
os: [linux]
'@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3':
resolution: {integrity: sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==}
cpu: [arm]
os: [linux]
'@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3':
resolution: {integrity: sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==}
cpu: [x64]
os: [linux]
'@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3':
resolution: {integrity: sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==}
cpu: [x64]
os: [win32]
'@napi-rs/wasm-runtime@0.2.7':
resolution: {integrity: sha512-5yximcFK5FNompXfJFoWanu5l8v1hNGqNHh9du1xETp9HWk/B/PzvchX55WYOPaIeNglG8++68AAiauBAtbnzw==}
@ -2421,62 +2460,62 @@ packages:
'@nestjs/platform-express':
optional: true
'@next/env@14.2.25':
resolution: {integrity: sha512-JnzQ2cExDeG7FxJwqAksZ3aqVJrHjFwZQAEJ9gQZSoEhIow7SNoKZzju/AwQ+PLIR4NY8V0rhcVozx/2izDO0w==}
'@next/env@14.2.26':
resolution: {integrity: sha512-vO//GJ/YBco+H7xdQhzJxF7ub3SUwft76jwaeOyVVQFHCi5DCnkP16WHB+JBylo4vOKPoZBlR94Z8xBxNBdNJA==}
'@next/eslint-plugin-next@14.2.24':
resolution: {integrity: sha512-FDL3qs+5DML0AJz56DCVr+KnFYivxeAX73En8QbPw9GjJZ6zbfvqDy+HrarHFzbsIASn7y8y5ySJ/lllSruNVQ==}
'@next/swc-darwin-arm64@14.2.25':
resolution: {integrity: sha512-09clWInF1YRd6le00vt750s3m7SEYNehz9C4PUcSu3bAdCTpjIV4aTYQZ25Ehrr83VR1rZeqtKUPWSI7GfuKZQ==}
'@next/swc-darwin-arm64@14.2.26':
resolution: {integrity: sha512-zDJY8gsKEseGAxG+C2hTMT0w9Nk9N1Sk1qV7vXYz9MEiyRoF5ogQX2+vplyUMIfygnjn9/A04I6yrUTRTuRiyQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
'@next/swc-darwin-x64@14.2.25':
resolution: {integrity: sha512-V+iYM/QR+aYeJl3/FWWU/7Ix4b07ovsQ5IbkwgUK29pTHmq+5UxeDr7/dphvtXEq5pLB/PucfcBNh9KZ8vWbug==}
'@next/swc-darwin-x64@14.2.26':
resolution: {integrity: sha512-U0adH5ryLfmTDkahLwG9sUQG2L0a9rYux8crQeC92rPhi3jGQEY47nByQHrVrt3prZigadwj/2HZ1LUUimuSbg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
'@next/swc-linux-arm64-gnu@14.2.25':
resolution: {integrity: sha512-LFnV2899PJZAIEHQ4IMmZIgL0FBieh5keMnriMY1cK7ompR+JUd24xeTtKkcaw8QmxmEdhoE5Mu9dPSuDBgtTg==}
'@next/swc-linux-arm64-gnu@14.2.26':
resolution: {integrity: sha512-SINMl1I7UhfHGM7SoRiw0AbwnLEMUnJ/3XXVmhyptzriHbWvPPbbm0OEVG24uUKhuS1t0nvN/DBvm5kz6ZIqpg==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
'@next/swc-linux-arm64-musl@14.2.25':
resolution: {integrity: sha512-QC5y5PPTmtqFExcKWKYgUNkHeHE/z3lUsu83di488nyP0ZzQ3Yse2G6TCxz6nNsQwgAx1BehAJTZez+UQxzLfw==}
'@next/swc-linux-arm64-musl@14.2.26':
resolution: {integrity: sha512-s6JaezoyJK2DxrwHWxLWtJKlqKqTdi/zaYigDXUJ/gmx/72CrzdVZfMvUc6VqnZ7YEvRijvYo+0o4Z9DencduA==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
'@next/swc-linux-x64-gnu@14.2.25':
resolution: {integrity: sha512-y6/ML4b9eQ2D/56wqatTJN5/JR8/xdObU2Fb1RBidnrr450HLCKr6IJZbPqbv7NXmje61UyxjF5kvSajvjye5w==}
'@next/swc-linux-x64-gnu@14.2.26':
resolution: {integrity: sha512-FEXeUQi8/pLr/XI0hKbe0tgbLmHFRhgXOUiPScz2hk0hSmbGiU8aUqVslj/6C6KA38RzXnWoJXo4FMo6aBxjzg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
'@next/swc-linux-x64-musl@14.2.25':
resolution: {integrity: sha512-sPX0TSXHGUOZFvv96GoBXpB3w4emMqKeMgemrSxI7A6l55VBJp/RKYLwZIB9JxSqYPApqiREaIIap+wWq0RU8w==}
'@next/swc-linux-x64-musl@14.2.26':
resolution: {integrity: sha512-BUsomaO4d2DuXhXhgQCVt2jjX4B4/Thts8nDoIruEJkhE5ifeQFtvW5c9JkdOtYvE5p2G0hcwQ0UbRaQmQwaVg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
'@next/swc-win32-arm64-msvc@14.2.25':
resolution: {integrity: sha512-ReO9S5hkA1DU2cFCsGoOEp7WJkhFzNbU/3VUF6XxNGUCQChyug6hZdYL/istQgfT/GWE6PNIg9cm784OI4ddxQ==}
'@next/swc-win32-arm64-msvc@14.2.26':
resolution: {integrity: sha512-5auwsMVzT7wbB2CZXQxDctpWbdEnEW/e66DyXO1DcgHxIyhP06awu+rHKshZE+lPLIGiwtjo7bsyeuubewwxMw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]
'@next/swc-win32-ia32-msvc@14.2.25':
resolution: {integrity: sha512-DZ/gc0o9neuCDyD5IumyTGHVun2dCox5TfPQI/BJTYwpSNYM3CZDI4i6TOdjeq1JMo+Ug4kPSMuZdwsycwFbAw==}
'@next/swc-win32-ia32-msvc@14.2.26':
resolution: {integrity: sha512-GQWg/Vbz9zUGi9X80lOeGsz1rMH/MtFO/XqigDznhhhTfDlDoynCM6982mPCbSlxJ/aveZcKtTlwfAjwhyxDpg==}
engines: {node: '>= 10'}
cpu: [ia32]
os: [win32]
'@next/swc-win32-x64-msvc@14.2.25':
resolution: {integrity: sha512-KSznmS6eFjQ9RJ1nEc66kJvtGIL1iZMYmGEXsZPh2YtnLtqrgdVvKXJY2ScjjoFnG6nGLyPFR0UiEvDwVah4Tw==}
'@next/swc-win32-x64-msvc@14.2.26':
resolution: {integrity: sha512-2rdB3T1/Gp7bv1eQTTm9d1Y1sv9UuJ2LAwOE0Pe2prHKe32UNscj7YS13fRB37d0GAiGNR+Y7ZcW8YjDI8Ns0w==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
@ -4014,6 +4053,9 @@ packages:
buffer@6.0.3:
resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==}
bullmq@5.44.0:
resolution: {integrity: sha512-OnEtkuXyrUx2Jm5BpH92+ttrobblBdCbkhOe3OoR0hxZuAilI3mPWlwELslhfImRpDv8rK+C/0/VK7I8f3xIig==}
bundle-n-require@1.1.2:
resolution: {integrity: sha512-bEk2jakVK1ytnZ9R2AAiZEeK/GxPUM8jvcRxHZXifZDMcjkI4EG/GlsJ2YGSVYT9y/p/gA9/0yDY8rCGsSU6Tg==}
@ -4248,6 +4290,10 @@ packages:
resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==}
engines: {node: '>=6'}
cluster-key-slot@1.1.2:
resolution: {integrity: sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==}
engines: {node: '>=0.10.0'}
co@4.6.0:
resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==}
engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'}
@ -5860,6 +5906,10 @@ packages:
intersection-observer@0.12.2:
resolution: {integrity: sha512-7m1vEcPCxXYI8HqnL8CKI6siDyD+eIWSwgB3DZA+ZTogxk9I4CDnj4wilt9x/+/QbHI4YG5YZNmC6458/e9Ktg==}
ioredis@5.6.0:
resolution: {integrity: sha512-tBZlIIWbndeWBWCXWZiqtOF/yxf6yZX3tAlTJ7nfo5jhd6dctNxF7QnYlZLZ1a0o0pDoen7CgZqO+zjNaFbJAg==}
engines: {node: '>=12.22.0'}
ip-address@9.0.5:
resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==}
engines: {node: '>= 12'}
@ -6554,9 +6604,15 @@ packages:
lodash.debounce@4.0.8:
resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==}
lodash.defaults@4.2.0:
resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==}
lodash.includes@4.3.0:
resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==}
lodash.isarguments@3.1.0:
resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==}
lodash.isboolean@3.0.3:
resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==}
@ -7128,6 +7184,13 @@ packages:
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
msgpackr-extract@3.0.3:
resolution: {integrity: sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==}
hasBin: true
msgpackr@1.11.2:
resolution: {integrity: sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==}
mssql@11.0.1:
resolution: {integrity: sha512-KlGNsugoT90enKlR8/G36H0kTxPthDhmtNUCwEHvgRza5Cjpjoj+P2X6eMpFUDN7pFrJZsKadL4x990G8RBE1w==}
engines: {node: '>=18'}
@ -7203,8 +7266,8 @@ packages:
react: '>= 17.0.2'
react-i18next: '>= 13.5.0'
next@14.2.25:
resolution: {integrity: sha512-N5M7xMc4wSb4IkPvEV5X2BRRXUmhVHNyaXwEM86+voXthSZz8ZiRyQW4p9mwAoAPIm6OzuVZtn7idgEJeAJN3Q==}
next@14.2.26:
resolution: {integrity: sha512-b81XSLihMwCfwiUVRRja3LphLo4uBBMZEzBBWMaISbKTwOmq3wPknIETy/8000tr7Gq4WmbuFYPS7jOYIf+ZJw==}
engines: {node: '>=18.17.0'}
hasBin: true
peerDependencies:
@ -7260,6 +7323,10 @@ packages:
encoding:
optional: true
node-gyp-build-optional-packages@5.2.2:
resolution: {integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==}
hasBin: true
node-gyp@10.3.1:
resolution: {integrity: sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==}
engines: {node: ^16.14.0 || >=18.0.0}
@ -8041,6 +8108,14 @@ packages:
react: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
redis-errors@1.2.0:
resolution: {integrity: sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==}
engines: {node: '>=4'}
redis-parser@3.0.0:
resolution: {integrity: sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==}
engines: {node: '>=4'}
redux@4.2.1:
resolution: {integrity: sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==}
@ -8490,6 +8565,9 @@ packages:
stackback@0.0.2:
resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==}
standard-as-callback@2.1.0:
resolution: {integrity: sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==}
state-local@1.0.7:
resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==}
@ -10668,12 +10746,12 @@ snapshots:
'@chakra-ui/system': 2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1)
react: 18.3.1
'@chakra-ui/next-js@2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)':
'@chakra-ui/next-js@2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)':
dependencies:
'@chakra-ui/react': 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@emotion/cache': 11.14.0
'@emotion/react': 11.11.1(@types/react@18.3.1)(react@18.3.1)
next: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
next: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
react: 18.3.1
'@chakra-ui/object-utils@2.1.0': {}
@ -11160,6 +11238,8 @@ snapshots:
'@humanwhocodes/object-schema@2.0.3': {}
'@ioredis/commands@1.2.0': {}
'@isaacs/cliui@8.0.2':
dependencies:
string-width: 5.1.2
@ -11565,6 +11645,24 @@ snapshots:
dependencies:
sparse-bitfield: 3.0.3
'@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3':
optional: true
'@napi-rs/wasm-runtime@0.2.7':
dependencies:
'@emnapi/core': 1.3.1
@ -11681,37 +11779,37 @@ snapshots:
'@nestjs/core': 10.4.15(@nestjs/common@10.4.15(reflect-metadata@0.2.2)(rxjs@7.8.2))(encoding@0.1.13)(reflect-metadata@0.2.2)(rxjs@7.8.2)
tslib: 2.8.1
'@next/env@14.2.25': {}
'@next/env@14.2.26': {}
'@next/eslint-plugin-next@14.2.24':
dependencies:
glob: 10.3.10
'@next/swc-darwin-arm64@14.2.25':
'@next/swc-darwin-arm64@14.2.26':
optional: true
'@next/swc-darwin-x64@14.2.25':
'@next/swc-darwin-x64@14.2.26':
optional: true
'@next/swc-linux-arm64-gnu@14.2.25':
'@next/swc-linux-arm64-gnu@14.2.26':
optional: true
'@next/swc-linux-arm64-musl@14.2.25':
'@next/swc-linux-arm64-musl@14.2.26':
optional: true
'@next/swc-linux-x64-gnu@14.2.25':
'@next/swc-linux-x64-gnu@14.2.26':
optional: true
'@next/swc-linux-x64-musl@14.2.25':
'@next/swc-linux-x64-musl@14.2.26':
optional: true
'@next/swc-win32-arm64-msvc@14.2.25':
'@next/swc-win32-arm64-msvc@14.2.26':
optional: true
'@next/swc-win32-ia32-msvc@14.2.25':
'@next/swc-win32-ia32-msvc@14.2.26':
optional: true
'@next/swc-win32-x64-msvc@14.2.25':
'@next/swc-win32-x64-msvc@14.2.26':
optional: true
'@node-rs/jieba-android-arm-eabi@2.0.1':
@ -13456,6 +13554,18 @@ snapshots:
base64-js: 1.5.1
ieee754: 1.2.1
bullmq@5.44.0:
dependencies:
cron-parser: 4.9.0
ioredis: 5.6.0
msgpackr: 1.11.2
node-abort-controller: 3.1.1
semver: 7.7.1
tslib: 2.8.1
uuid: 9.0.1
transitivePeerDependencies:
- supports-color
bundle-n-require@1.1.2:
dependencies:
esbuild: 0.25.1
@ -13713,6 +13823,8 @@ snapshots:
clsx@2.1.1: {}
cluster-key-slot@1.1.2: {}
co@4.6.0: {}
collapse-white-space@1.0.6: {}
@ -14627,7 +14739,7 @@ snapshots:
eslint: 8.56.0
eslint-import-resolver-node: 0.3.9
eslint-import-resolver-typescript: 3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0)
eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.56.0)
eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0)
eslint-plugin-jsx-a11y: 6.10.2(eslint@8.56.0)
eslint-plugin-react: 7.37.4(eslint@8.56.0)
eslint-plugin-react-hooks: 5.0.0-canary-7118f5dd7-20230705(eslint@8.56.0)
@ -14657,7 +14769,7 @@ snapshots:
stable-hash: 0.0.5
tinyglobby: 0.2.12
optionalDependencies:
eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.56.0)
eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0)
transitivePeerDependencies:
- supports-color
@ -14672,7 +14784,7 @@ snapshots:
transitivePeerDependencies:
- supports-color
eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.56.0):
eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0):
dependencies:
'@rtsao/scc': 1.1.0
array-includes: 3.1.8
@ -15692,6 +15804,20 @@ snapshots:
intersection-observer@0.12.2: {}
ioredis@5.6.0:
dependencies:
'@ioredis/commands': 1.2.0
cluster-key-slot: 1.1.2
debug: 4.4.0
denque: 2.1.0
lodash.defaults: 4.2.0
lodash.isarguments: 3.1.0
redis-errors: 1.2.0
redis-parser: 3.0.0
standard-as-callback: 2.1.0
transitivePeerDependencies:
- supports-color
ip-address@9.0.5:
dependencies:
jsbn: 1.1.0
@ -16558,8 +16684,12 @@ snapshots:
lodash.debounce@4.0.8: {}
lodash.defaults@4.2.0: {}
lodash.includes@4.3.0: {}
lodash.isarguments@3.1.0: {}
lodash.isboolean@3.0.3: {}
lodash.isinteger@4.0.4: {}
@ -17481,6 +17611,22 @@ snapshots:
ms@2.1.3: {}
msgpackr-extract@3.0.3:
dependencies:
node-gyp-build-optional-packages: 5.2.2
optionalDependencies:
'@msgpackr-extract/msgpackr-extract-darwin-arm64': 3.0.3
'@msgpackr-extract/msgpackr-extract-darwin-x64': 3.0.3
'@msgpackr-extract/msgpackr-extract-linux-arm': 3.0.3
'@msgpackr-extract/msgpackr-extract-linux-arm64': 3.0.3
'@msgpackr-extract/msgpackr-extract-linux-x64': 3.0.3
'@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.3
optional: true
msgpackr@1.11.2:
optionalDependencies:
msgpackr-extract: 3.0.3
mssql@11.0.1:
dependencies:
'@tediousjs/connection-string': 0.5.0
@ -17552,7 +17698,7 @@ snapshots:
transitivePeerDependencies:
- supports-color
next-i18next@15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1):
next-i18next@15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1):
dependencies:
'@babel/runtime': 7.26.10
'@types/hoist-non-react-statics': 3.3.6
@ -17560,13 +17706,13 @@ snapshots:
hoist-non-react-statics: 3.3.2
i18next: 23.16.8
i18next-fs-backend: 2.6.0
next: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
next: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
react: 18.3.1
react-i18next: 14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1):
next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1):
dependencies:
'@next/env': 14.2.25
'@next/env': 14.2.26
'@swc/helpers': 0.5.5
busboy: 1.6.0
caniuse-lite: 1.0.30001704
@ -17576,24 +17722,24 @@ snapshots:
react-dom: 18.3.1(react@18.3.1)
styled-jsx: 5.1.1(@babel/core@7.26.10)(react@18.3.1)
optionalDependencies:
'@next/swc-darwin-arm64': 14.2.25
'@next/swc-darwin-x64': 14.2.25
'@next/swc-linux-arm64-gnu': 14.2.25
'@next/swc-linux-arm64-musl': 14.2.25
'@next/swc-linux-x64-gnu': 14.2.25
'@next/swc-linux-x64-musl': 14.2.25
'@next/swc-win32-arm64-msvc': 14.2.25
'@next/swc-win32-ia32-msvc': 14.2.25
'@next/swc-win32-x64-msvc': 14.2.25
'@next/swc-darwin-arm64': 14.2.26
'@next/swc-darwin-x64': 14.2.26
'@next/swc-linux-arm64-gnu': 14.2.26
'@next/swc-linux-arm64-musl': 14.2.26
'@next/swc-linux-x64-gnu': 14.2.26
'@next/swc-linux-x64-musl': 14.2.26
'@next/swc-win32-arm64-msvc': 14.2.26
'@next/swc-win32-ia32-msvc': 14.2.26
'@next/swc-win32-x64-msvc': 14.2.26
sass: 1.85.1
transitivePeerDependencies:
- '@babel/core'
- babel-plugin-macros
nextjs-cors@2.2.0(next@14.2.25(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)):
nextjs-cors@2.2.0(next@14.2.26(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)):
dependencies:
cors: 2.8.5
next: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
next: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)
node-abi@3.74.0:
dependencies:
@ -17624,6 +17770,11 @@ snapshots:
optionalDependencies:
encoding: 0.1.13
node-gyp-build-optional-packages@5.2.2:
dependencies:
detect-libc: 2.0.3
optional: true
node-gyp@10.3.1:
dependencies:
env-paths: 2.2.1
@ -18499,6 +18650,12 @@ snapshots:
tiny-invariant: 1.3.3
victory-vendor: 36.9.2
redis-errors@1.2.0: {}
redis-parser@3.0.0:
dependencies:
redis-errors: 1.2.0
redux@4.2.1:
dependencies:
'@babel/runtime': 7.26.10
@ -19048,6 +19205,8 @@ snapshots:
stackback@0.0.2: {}
standard-as-callback@2.1.0: {}
state-local@1.0.7: {}
state-toggle@1.0.3: {}

View File

@ -20,9 +20,12 @@ AIPROXY_API_TOKEN=xxxxx
# 强制将图片转成 base64 传递给模型
MULTIPLE_DATA_TO_BASE64=true
# Redis URL
REDIS_URL=redis://default:password@127.0.0.1:6379
# mongo 数据库连接参数,本地开发连接远程数据库时,可能需要增加 directConnection=true 参数,才能连接上。
MONGODB_URI=mongodb://username:password@0.0.0.0:27017/fastgpt?authSource=admin
# 日志库
MONGODB_LOG_URI=mongodb://username:password@0.0.0.0:27017/fastgpt?authSource=admin
# 向量库优先级: pg > oceanbase > milvus
# PG 向量库连接参数
PG_URL=postgresql://username:password@host:port/postgres
@ -48,6 +51,8 @@ LOG_LEVEL=debug
STORE_LOG_LEVEL=warn
# 安全配置
# 对话文件 n 天过期
CHAT_FILE_EXPIRE_TIME=7
# 启动 IP 限流(true),部分接口增加了 ip 限流策略,防止非正常请求操作。
USE_IP_LIMIT=false
# 工作流最大运行次数,避免极端的死循环情况
@ -65,4 +70,4 @@ CHECK_INTERNAL_IP=false
# # 日志来源ID前缀
# CHAT_LOG_SOURCE_ID_PREFIX=fastgpt-
# 自定义跨域,不配置时,默认都允许跨域(逗号分割)
ALLOWED_ORIGINS=
ALLOWED_ORIGINS=

View File

@ -83,6 +83,7 @@ const nextConfig = {
serverComponentsExternalPackages: [
'mongoose',
'pg',
'bullmq',
'@zilliz/milvus2-sdk-node',
"tiktoken",
],

View File

@ -1,6 +1,6 @@
{
"name": "app",
"version": "4.9.3",
"version": "4.9.4",
"private": false,
"scripts": {
"dev": "next dev",
@ -42,7 +42,7 @@
"lodash": "^4.17.21",
"mermaid": "^10.2.3",
"nanoid": "^5.1.3",
"next": "14.2.25",
"next": "14.2.26",
"next-i18next": "15.4.2",
"nprogress": "^0.2.0",
"qrcode": "^1.5.4",

View File

@ -99,7 +99,6 @@ const SettingLLMModel = ({
<AISettingModal
onClose={onCloseAIChatSetting}
onSuccess={(e) => {
console.log(e);
onChange(e);
onCloseAIChatSetting();
}}

View File

@ -1,7 +1,6 @@
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback, useMemo } from 'react';
import React, { useRef, useEffect, useCallback, useMemo, useState } from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
@ -18,6 +17,7 @@ import FilePreview from '../../components/FilePreview';
import { useFileUpload } from '../hooks/useFileUpload';
import ComplianceTip from '@/components/common/ComplianceTip/index';
import { useToast } from '@fastgpt/web/hooks/useToast';
import VoiceInput, { type VoiceInputComponentRef } from './VoiceInput';
const InputGuideBox = dynamic(() => import('./InputGuideBox'));
@ -44,6 +44,7 @@ const ChatInput = ({
const { t } = useTranslation();
const { toast } = useToast();
const { isPc } = useSystem();
const VoiceInputRef = useRef<VoiceInputComponentRef>(null);
const { setValue, watch, control } = chatForm;
const inputValue = watch('input');
@ -53,7 +54,6 @@ const ChatInput = ({
const chatId = useContextSelector(ChatBoxContext, (v) => v.chatId);
const isChatting = useContextSelector(ChatBoxContext, (v) => v.isChatting);
const whisperConfig = useContextSelector(ChatBoxContext, (v) => v.whisperConfig);
const autoTTSResponse = useContextSelector(ChatBoxContext, (v) => v.autoTTSResponse);
const chatInputGuide = useContextSelector(ChatBoxContext, (v) => v.chatInputGuide);
const fileSelectConfig = useContextSelector(ChatBoxContext, (v) => v.fileSelectConfig);
@ -106,86 +106,6 @@ const ChatInput = ({
[TextareaDom, canSendMessage, fileList, onSendMessage, replaceFiles]
);
/* whisper init */
const canvasRef = useRef<HTMLCanvasElement>(null);
const {
isSpeaking,
isTransCription,
stopSpeak,
startSpeak,
speakingTimeString,
renderAudioGraph,
stream
} = useSpeech({ appId, ...outLinkAuthData });
const onWhisperRecord = useCallback(() => {
const finishWhisperTranscription = (text: string) => {
if (!text) return;
if (whisperConfig?.autoSend) {
onSendMessage({
text,
files: fileList,
autoTTSResponse
});
replaceFiles([]);
} else {
resetInputVal({ text });
}
};
if (isSpeaking) {
return stopSpeak();
}
startSpeak(finishWhisperTranscription);
}, [
autoTTSResponse,
fileList,
isSpeaking,
onSendMessage,
replaceFiles,
resetInputVal,
startSpeak,
stopSpeak,
whisperConfig?.autoSend
]);
useEffect(() => {
if (!stream) {
return;
}
const audioContext = new AudioContext();
const analyser = audioContext.createAnalyser();
analyser.fftSize = 4096;
analyser.smoothingTimeConstant = 1;
const source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
const renderCurve = () => {
if (!canvasRef.current) return;
renderAudioGraph(analyser, canvasRef.current);
window.requestAnimationFrame(renderCurve);
};
renderCurve();
}, [renderAudioGraph, stream]);
const RenderTranslateLoading = useMemo(
() => (
<Flex
position={'absolute'}
top={0}
bottom={0}
left={0}
right={0}
zIndex={10}
pl={5}
alignItems={'center'}
bg={'white'}
color={'primary.500'}
visibility={isSpeaking && isTransCription ? 'visible' : 'hidden'}
>
<Spinner size={'sm'} mr={4} />
{t('common:core.chat.Converting to text')}
</Flex>
),
[isSpeaking, isTransCription, t]
);
const RenderTextarea = useMemo(
() => (
<Flex alignItems={'flex-end'} mt={fileList.length > 0 ? 1 : 0} pl={[2, 4]}>
@ -198,7 +118,6 @@ const ChatInput = ({
cursor={'pointer'}
transform={'translateY(1px)'}
onClick={() => {
if (isSpeaking) return;
onOpenSelectFile();
}}
>
@ -208,7 +127,6 @@ const ChatInput = ({
<File onSelect={(files) => onSelectFile({ files })} />
</Flex>
)}
{/* input area */}
<Textarea
ref={TextareaDom}
@ -220,11 +138,7 @@ const ChatInput = ({
border: 'none'
}}
placeholder={
isSpeaking
? t('common:core.chat.Speaking')
: isPc
? t('common:core.chat.Type a message')
: t('chat:input_placeholder_phone')
isPc ? t('common:core.chat.Type a message') : t('chat:input_placeholder_phone')
}
resize={'none'}
rows={1}
@ -237,9 +151,8 @@ const ChatInput = ({
wordBreak={'break-all'}
boxShadow={'none !important'}
color={'myGray.900'}
isDisabled={isSpeaking}
value={inputValue}
fontSize={['md', 'sm']}
value={inputValue}
onChange={(e) => {
const textarea = e.target;
textarea.style.height = textareaMinH;
@ -290,118 +203,78 @@ const ChatInput = ({
}
}}
/>
<Flex alignItems={'center'} position={'absolute'} right={[2, 4]} bottom={['10px', '12px']}>
{/* voice-input */}
{whisperConfig?.open && !inputValue && !isChatting && (
<>
<canvas
ref={canvasRef}
style={{
height: '30px',
width: isSpeaking && !isTransCription ? '100px' : 0,
background: 'white',
zIndex: 0
<Flex
alignItems={'center'}
position={'absolute'}
right={[2, 4]}
bottom={['10px', '12px']}
zIndex={3}
>
{/* Voice input icon */}
{whisperConfig?.open && !inputValue && (
<MyTooltip label={t('common:core.chat.Record')}>
<Flex
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['28px', '32px']}
w={['28px', '32px']}
mr={2}
borderRadius={'md'}
cursor={'pointer'}
_hover={{ bg: '#F5F5F8' }}
onClick={() => {
VoiceInputRef.current?.onSpeak?.();
}}
/>
{isSpeaking && (
<MyTooltip label={t('common:core.chat.Cancel Speak')}>
<Flex
mr={2}
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['26px', '32px']}
w={['26px', '32px']}
borderRadius={'md'}
cursor={'pointer'}
_hover={{ bg: '#F5F5F8' }}
onClick={() => stopSpeak(true)}
>
<MyIcon
name={'core/chat/cancelSpeak'}
width={['20px', '22px']}
height={['20px', '22px']}
/>
</Flex>
</MyTooltip>
)}
<MyTooltip
label={
isSpeaking ? t('common:core.chat.Finish Speak') : t('common:core.chat.Record')
}
>
<Flex
mr={2}
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['26px', '32px']}
w={['26px', '32px']}
borderRadius={'md'}
cursor={'pointer'}
_hover={{ bg: '#F5F5F8' }}
onClick={onWhisperRecord}
>
<MyIcon
name={isSpeaking ? 'core/chat/finishSpeak' : 'core/chat/recordFill'}
width={['20px', '22px']}
height={['20px', '22px']}
color={isSpeaking ? 'primary.500' : 'myGray.600'}
/>
</Flex>
</MyTooltip>
</>
)}
{/* send and stop icon */}
{isSpeaking ? (
<Box color={'#5A646E'} w={'36px'} textAlign={'right'} whiteSpace={'nowrap'}>
{speakingTimeString}
</Box>
) : (
<Flex
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['28px', '32px']}
w={['28px', '32px']}
borderRadius={'md'}
bg={
isSpeaking || isChatting
? ''
: !havInput || hasFileUploading
? '#E5E5E5'
: 'primary.500'
}
cursor={havInput ? 'pointer' : 'not-allowed'}
lineHeight={1}
onClick={() => {
if (isChatting) {
return onStop();
}
return handleSend();
}}
>
{isChatting ? (
<MyIcon
animation={'zoomStopIcon 0.4s infinite alternate'}
name={'core/chat/recordFill'}
width={['22px', '25px']}
height={['22px', '25px']}
cursor={'pointer'}
name={'stop'}
color={'gray.500'}
color={'myGray.600'}
/>
) : (
<MyTooltip label={t('common:core.chat.Send Message')}>
<MyIcon
name={'core/chat/sendFill'}
width={['18px', '20px']}
height={['18px', '20px']}
color={'white'}
/>
</MyTooltip>
)}
</Flex>
</Flex>
</MyTooltip>
)}
{/* send and stop icon */}
<Flex
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['28px', '32px']}
w={['28px', '32px']}
borderRadius={'md'}
bg={isChatting ? '' : !havInput || hasFileUploading ? '#E5E5E5' : 'primary.500'}
cursor={havInput ? 'pointer' : 'not-allowed'}
lineHeight={1}
onClick={() => {
if (isChatting) {
return onStop();
}
return handleSend();
}}
>
{isChatting ? (
<MyIcon
animation={'zoomStopIcon 0.4s infinite alternate'}
width={['22px', '25px']}
height={['22px', '25px']}
cursor={'pointer'}
name={'stop'}
color={'gray.500'}
/>
) : (
<MyTooltip label={t('common:core.chat.Send Message')}>
<MyIcon
name={'core/chat/sendFill'}
width={['18px', '20px']}
height={['18px', '20px']}
color={'white'}
/>
</MyTooltip>
)}
</Flex>
</Flex>
</Flex>
),
@ -415,21 +288,15 @@ const ChatInput = ({
inputValue,
isChatting,
isPc,
isSpeaking,
isTransCription,
onOpenSelectFile,
onSelectFile,
onStop,
onWhisperRecord,
selectFileIcon,
selectFileLabel,
setValue,
showSelectFile,
showSelectImg,
speakingTimeString,
stopSpeak,
t,
whisperConfig?.open
t
]
);
@ -468,7 +335,7 @@ const ChatInput = ({
pt={fileList.length > 0 ? '0' : ['14px', '18px']}
pb={['14px', '18px']}
position={'relative'}
boxShadow={isSpeaking ? `0 0 10px rgba(54,111,255,0.4)` : `0 0 10px rgba(0,0,0,0.2)`}
boxShadow={`0 0 10px rgba(0,0,0,0.2)`}
borderRadius={['none', 'md']}
bg={'white'}
overflow={'display'}
@ -495,15 +362,20 @@ const ChatInput = ({
}}
/>
)}
{/* translate loading */}
{RenderTranslateLoading}
{/* file preview */}
<Box px={[1, 3]}>
<FilePreview fileList={fileList} removeFiles={removeFiles} />
</Box>
{/* voice input and loading container */}
{!inputValue && (
<VoiceInput
ref={VoiceInputRef}
onSendMessage={onSendMessage}
resetInputVal={resetInputVal}
/>
)}
{RenderTextarea}
</Box>
<ComplianceTip type={'chat'} />

View File

@ -0,0 +1,369 @@
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { Box, Flex, HStack, Spinner } from '@chakra-ui/react';
import React, {
useRef,
useEffect,
useCallback,
useState,
forwardRef,
useImperativeHandle,
useMemo
} from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { useContextSelector } from 'use-context-selector';
import { ChatBoxContext } from '../Provider';
import MyIconButton from '@/pageComponents/account/team/OrgManage/IconButton';
// Imperative handle exposed by <VoiceInput> so the parent (ChatInput) can
// trigger recording from its own mic button.
export interface VoiceInputComponentRef {
  // Starts voice input: records immediately on PC, shows the press-to-speak
  // panel on mobile.
  onSpeak: () => void;
}

type VoiceInputProps = {
  // Sends the transcribed text as a chat message (used when whisper autoSend is on).
  onSendMessage: (params: { text: string; files?: any[]; autoTTSResponse?: boolean }) => void;
  // Puts the transcribed text into the input box instead of sending it.
  resetInputVal: (val: { text: string }) => void;
};
// PC voice input bar: shows a "speaking" label, the live waveform canvas,
// the elapsed-time string, and cancel / finish buttons.
// stopSpeak(true) discards the recording; stopSpeak(false) finishes and transcribes.
const PCVoiceInput = ({
  speakingTimeString,
  stopSpeak,
  canvasRef
}: {
  speakingTimeString: string;
  stopSpeak: (param: boolean) => void;
  canvasRef: React.RefObject<HTMLCanvasElement>;
}) => {
  const { t } = useTranslation();

  return (
    <HStack h={'100%'} px={4}>
      <Box fontSize="sm" color="myGray.500" flex={'1 0 0'}>
        {t('common:core.chat.Speaking')}
      </Box>
      {/* Waveform canvas — drawn by the parent's render effect while the mic stream is active */}
      <canvas
        ref={canvasRef}
        style={{
          height: '10px',
          width: '100px',
          background: 'white'
        }}
      />
      <Box fontSize="sm" color="myGray.500" whiteSpace={'nowrap'}>
        {speakingTimeString}
      </Box>
      <MyTooltip label={t('common:core.chat.Cancel Speak')}>
        <MyIconButton
          name={'core/chat/cancelSpeak'}
          h={'22px'}
          w={'22px'}
          onClick={() => stopSpeak(true)}
        />
      </MyTooltip>
      <MyTooltip label={t('common:core.chat.Finish Speak')}>
        <MyIconButton
          name={'core/chat/finishSpeak'}
          h={'22px'}
          w={'22px'}
          onClick={() => stopSpeak(false)}
        />
      </MyTooltip>
    </HStack>
  );
};
// Mobile voice input: press-and-hold to record, slide up (>90px) to arm cancel,
// release to send or cancel. stopSpeak(true) discards; stopSpeak(false) transcribes.
const MobileVoiceInput = ({
  isSpeaking,
  onStartSpeak,
  onCloseSpeak,
  stopSpeak,
  canvasRef
}: {
  isSpeaking: boolean;
  onStartSpeak: () => void;
  onCloseSpeak: () => any;
  stopSpeak: (param: boolean) => void;
  canvasRef: React.RefObject<HTMLCanvasElement>;
}) => {
  const { t } = useTranslation();
  // Guards handleTouchEnd against a touchend that has no matching touchstart.
  const isPressing = useRef(false);
  // Press start timestamp; used to debounce accidental short taps.
  const startTimeRef = useRef(0);
  // Initial touch Y; used to detect the slide-up-to-cancel gesture.
  const startYRef = useRef(0);
  const [isCancel, setIsCancel] = useState(false);

  // Position the gradient mask directly above the recording area.
  const canvasPosition = canvasRef.current?.getBoundingClientRect();
  const maskBottom = canvasPosition ? `${window.innerHeight - canvasPosition.top}px` : '50px';

  const handleTouchStart = useCallback(
    (e: React.TouchEvent<HTMLDivElement>) => {
      isPressing.current = true;
      setIsCancel(false);
      startTimeRef.current = Date.now();

      const touch = e.touches[0];
      startYRef.current = touch.pageY;
      onStartSpeak();
    },
    [onStartSpeak]
  );

  const handleTouchMove = useCallback((e: React.TouchEvent<HTMLDivElement>) => {
    const touch = e.touches[0] as Touch;
    const currentY = touch.pageY;
    const deltaY = startYRef.current - currentY;

    // Sliding up more than 90px arms cancel; sliding back down disarms it.
    setIsCancel(deltaY > 90);
  }, []);

  const handleTouchEnd = useCallback(
    (e: React.TouchEvent<HTMLDivElement>) => {
      if (!isPressing.current) return;
      // Fix: re-arm the guard so a later stray touchend is ignored
      // (previously isPressing was never reset after a gesture ended).
      isPressing.current = false;

      const timeDifference = Date.now() - startTimeRef.current;
      // Presses shorter than 200ms are treated as accidental taps and discarded.
      if (isCancel || timeDifference < 200) {
        stopSpeak(true);
      } else {
        stopSpeak(false);
      }
    },
    [isCancel, stopSpeak]
  );

  return (
    <Flex position="relative" h="100%">
      {/* Back Icon — returns to the text input (only shown while not recording) */}
      {!isSpeaking && (
        <MyTooltip label={t('chat:back_to_text')}>
          <MyIconButton
            position="absolute"
            right={2}
            top={'50%'}
            transform={'translateY(-50%)'}
            zIndex={5}
            name={'core/chat/backText'}
            h={'22px'}
            w={'22px'}
            onClick={onCloseSpeak}
          />
        </MyTooltip>
      )}

      <Flex
        alignItems={'center'}
        justifyContent={'center'}
        h="100%"
        flex="1 0 0"
        bg={isSpeaking ? (isCancel ? 'red.500' : 'primary.500') : 'white'}
        onTouchMove={handleTouchMove}
        onTouchEnd={handleTouchEnd}
        onTouchStart={handleTouchStart}
        onTouchCancel={() => {
          // System-interrupted touch (e.g. incoming call): discard the recording.
          stopSpeak(true);
        }}
        zIndex={4}
      >
        <Box visibility={isSpeaking ? 'hidden' : 'visible'}>{t('chat:press_to_speak')}</Box>
        <Box
          position="absolute"
          h={'100%'}
          w={'100%'}
          as="canvas"
          ref={canvasRef}
          flex="0 0 80%"
          visibility={isSpeaking ? 'visible' : 'hidden'}
        />
      </Flex>

      {/* Mask — gradient overlay with release-to-send / release-to-cancel hint */}
      {isSpeaking && (
        <Flex
          justifyContent="center"
          alignItems="center"
          height="100%"
          position="fixed"
          left={0}
          right={0}
          bottom={maskBottom}
          h={'200px'}
          bg="linear-gradient(to top, white, rgba(255, 255, 255, 0.7), rgba(255, 255, 255, 0))"
        >
          <Box fontSize="sm" color="myGray.500" position="absolute" bottom={'10px'}>
            {isCancel ? t('chat:release_cancel') : t('chat:release_send')}
          </Box>
        </Flex>
      )}
    </Flex>
  );
};
/**
 * Voice input overlay covering the chat input box.
 * Renders nothing unless whisper is enabled AND the user is (pre-)recording
 * or a transcription is in flight. Exposes `onSpeak` via ref so the parent's
 * mic button can start recording.
 */
const VoiceInput = forwardRef<VoiceInputComponentRef, VoiceInputProps>(
  ({ onSendMessage, resetInputVal }, ref) => {
    const { t } = useTranslation();
    const { isPc } = useSystem();

    const outLinkAuthData = useContextSelector(ChatBoxContext, (v) => v.outLinkAuthData);
    const appId = useContextSelector(ChatBoxContext, (v) => v.appId);
    const whisperConfig = useContextSelector(ChatBoxContext, (v) => v.whisperConfig);
    const autoTTSResponse = useContextSelector(ChatBoxContext, (v) => v.autoTTSResponse);

    const canvasRef = useRef<HTMLCanvasElement>(null);
    const {
      isSpeaking,
      isTransCription,
      stopSpeak,
      startSpeak,
      speakingTimeString,
      renderAudioGraphPc,
      renderAudioGraphMobile,
      stream
    } = useSpeech({ appId, ...outLinkAuthData });

    // Mobile shows the press-to-speak panel before recording actually starts.
    const [mobilePreSpeak, setMobilePreSpeak] = useState(false);

    // Canvas render: draw the live waveform while the mic stream is active.
    useEffect(() => {
      if (!stream) {
        return;
      }
      const audioContext = new AudioContext();
      const analyser = audioContext.createAnalyser();
      analyser.fftSize = 4096;
      analyser.smoothingTimeConstant = 1;
      const source = audioContext.createMediaStreamSource(stream);
      source.connect(analyser);

      let animationFrameId: number | null = null;
      const renderCurve = () => {
        const canvas = canvasRef.current;
        if (!canvas) return;
        const ctx = canvas.getContext('2d');
        if (!ctx) return;

        // Stream ended (recording stopped): clear the canvas and stop animating.
        if (!stream.active) {
          ctx.clearRect(0, 0, canvas.width, canvas.height);
          if (animationFrameId) {
            window.cancelAnimationFrame(animationFrameId);
            animationFrameId = null;
          }
          return;
        }

        if (isPc) {
          renderAudioGraphPc(analyser, canvas);
        } else {
          renderAudioGraphMobile(analyser, canvas);
        }
        animationFrameId = window.requestAnimationFrame(renderCurve);
      };

      renderCurve();

      return () => {
        if (animationFrameId) {
          window.cancelAnimationFrame(animationFrameId);
        }
        audioContext.close();
        source.disconnect();
        analyser.disconnect();
      };
    }, [stream, canvasRef, renderAudioGraphPc, renderAudioGraphMobile, isPc]);

    // Begin recording; when transcription finishes, either auto-send the text
    // or drop it into the input box, depending on whisperConfig.autoSend.
    const onStartSpeak = useCallback(() => {
      const finishWhisperTranscription = (text: string) => {
        if (!text) return;
        if (whisperConfig?.autoSend) {
          onSendMessage({
            text,
            autoTTSResponse
          });
        } else {
          resetInputVal({ text });
        }
      };
      startSpeak(finishWhisperTranscription);
    }, [autoTTSResponse, onSendMessage, resetInputVal, startSpeak, whisperConfig?.autoSend]);

    // Fix: identifier was misspelled `onSpeach`.
    // PC starts recording immediately; mobile only opens the press-to-speak panel.
    const onSpeak = useCallback(() => {
      if (isPc) {
        onStartSpeak();
      } else {
        setMobilePreSpeak(true);
      }
    }, [isPc, onStartSpeak]);

    useImperativeHandle(ref, () => ({
      onSpeak
    }));

    if (!whisperConfig?.open) return null;
    if (!mobilePreSpeak && !isSpeaking && !isTransCription) return null;

    return (
      <Box
        position="absolute"
        overflow={'hidden'}
        userSelect={'none'}
        top={0}
        left={0}
        right={0}
        bottom={0}
        bg="white"
        zIndex={5}
        borderRadius={isPc ? 'md' : ''}
        onContextMenu={(e) => e.preventDefault()}
      >
        {isPc ? (
          <PCVoiceInput
            speakingTimeString={speakingTimeString}
            stopSpeak={stopSpeak}
            canvasRef={canvasRef}
          />
        ) : (
          <MobileVoiceInput
            isSpeaking={isSpeaking}
            onStartSpeak={onStartSpeak}
            onCloseSpeak={() => setMobilePreSpeak(false)}
            stopSpeak={stopSpeak}
            canvasRef={canvasRef}
          />
        )}

        {/* Transcription-in-progress spinner, overlaid above both layouts */}
        {isTransCription && (
          <Flex
            position={'absolute'}
            top={0}
            bottom={0}
            left={0}
            right={0}
            pl={5}
            alignItems={'center'}
            bg={'white'}
            color={'primary.500'}
            zIndex={6}
          >
            <Spinner size={'sm'} mr={4} />
            {t('common:core.chat.Converting to text')}
          </Flex>
        )}
      </Box>
    );
  }
);
VoiceInput.displayName = 'VoiceInput';
export default VoiceInput;

View File

@ -219,7 +219,8 @@ const ChatBox = ({
tool,
interactive,
autoTTSResponse,
variables
variables,
nodeResponse
}: generatingMessageProps & { autoTTSResponse?: boolean }) => {
setChatRecords((state) =>
state.map((item, index) => {
@ -232,7 +233,14 @@ const ChatBox = ({
JSON.stringify(item.value[item.value.length - 1])
);
if (event === SseResponseEventEnum.flowNodeStatus && status) {
if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
return {
...item,
responseData: item.responseData
? [...item.responseData, nodeResponse]
: [nodeResponse]
};
} else if (event === SseResponseEventEnum.flowNodeStatus && status) {
return {
...item,
status,
@ -518,36 +526,34 @@ const ChatBox = ({
reserveTool: true
});
const {
responseData,
responseText,
isNewChat = false
} = await onStartChat({
const { responseText } = await onStartChat({
messages, // 保证最后一条是 Human 的消息
responseChatItemId: responseChatId,
controller: abortSignal,
generatingMessage: (e) => generatingMessage({ ...e, autoTTSResponse }),
variables: requestVariables
});
if (responseData?.[responseData.length - 1]?.error) {
toast({
title: t(responseData[responseData.length - 1].error?.message),
status: 'error'
});
}
// Set last chat finish status
let newChatHistories: ChatSiteItemType[] = [];
setChatRecords((state) => {
newChatHistories = state.map((item, index) => {
if (index !== state.length - 1) return item;
// Check node response error
const responseData = mergeChatResponseData(item.responseData || []);
if (responseData[responseData.length - 1]?.error) {
toast({
title: t(responseData[responseData.length - 1].error?.message),
status: 'error'
});
}
return {
...item,
status: ChatStatusEnum.finish,
time: new Date(),
responseData: item.responseData
? mergeChatResponseData([...item.responseData, ...responseData])
: responseData
responseData
};
});
return newChatHistories;
@ -567,7 +573,7 @@ const ChatBox = ({
} catch (err: any) {
console.log(err);
toast({
title: t(getErrText(err, 'core.chat.error.Chat error') as any),
title: t(getErrText(err, t('common:core.chat.error.Chat error') as any)),
status: 'error',
duration: 5000,
isClosable: true
@ -807,12 +813,14 @@ const ChatBox = ({
showEmptyIntro &&
chatRecords.length === 0 &&
!variableList?.length &&
!externalVariableList?.length &&
!welcomeText,
[
chatRecords.length,
feConfigs?.show_emptyChat,
showEmptyIntro,
variableList?.length,
externalVariableList?.length,
welcomeText
]
);

View File

@ -18,6 +18,7 @@ import { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
import { ChatRecordContext } from '@/web/core/chat/context/chatRecordContext';
import { AppFileSelectConfigType } from '@fastgpt/global/core/app/type';
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';
import { mergeChatResponseData } from '@fastgpt/global/core/chat/utils';
type PluginRunContextType = PluginRunBoxProps & {
isChatting: boolean;
@ -46,11 +47,12 @@ const PluginRunContextProvider = ({
const pluginInputs = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.pluginInputs);
const setTab = useContextSelector(ChatItemContext, (v) => v.setPluginRunTab);
const variablesForm = useContextSelector(ChatItemContext, (v) => v.variablesForm);
const chatConfig = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.chatConfig);
const setChatRecords = useContextSelector(ChatRecordContext, (v) => v.setChatRecords);
const chatRecords = useContextSelector(ChatRecordContext, (v) => v.chatRecords);
const chatConfig = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.chatConfig);
const { instruction = '', fileSelectConfig = defaultAppSelectFileConfig } = useMemo(
() => chatConfig || {},
[chatConfig]
@ -65,7 +67,7 @@ const PluginRunContextProvider = ({
}, []);
const generatingMessage = useCallback(
({ event, text = '', status, name, tool }: generatingMessageProps) => {
({ event, text = '', status, name, tool, nodeResponse, variables }: generatingMessageProps) => {
setChatRecords((state) =>
state.map((item, index) => {
if (index !== state.length - 1 || item.obj !== ChatRoleEnum.AI) return item;
@ -74,7 +76,14 @@ const PluginRunContextProvider = ({
JSON.stringify(item.value[item.value.length - 1])
);
if (event === SseResponseEventEnum.flowNodeStatus && status) {
if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
return {
...item,
responseData: item.responseData
? [...item.responseData, nodeResponse]
: [nodeResponse]
};
} else if (event === SseResponseEventEnum.flowNodeStatus && status) {
return {
...item,
status,
@ -144,13 +153,15 @@ const PluginRunContextProvider = ({
return val;
})
};
} else if (event === SseResponseEventEnum.updateVariables && variables) {
variablesForm.setValue('variables', variables);
}
return item;
})
);
},
[setChatRecords]
[setChatRecords, variablesForm]
);
const isChatting = useMemo(
@ -226,7 +237,7 @@ const PluginRunContextProvider = ({
}
}
const { responseData } = await onStartChat({
await onStartChat({
messages,
controller: chatController.current,
generatingMessage,
@ -235,16 +246,20 @@ const PluginRunContextProvider = ({
...formatVariables
}
});
if (responseData?.[responseData.length - 1]?.error) {
toast({
title: responseData[responseData.length - 1].error?.message,
status: 'error'
});
}
setChatRecords((state) =>
state.map((item, index) => {
if (index !== state.length - 1) return item;
// Check node response error
const responseData = mergeChatResponseData(item.responseData || []);
if (responseData[responseData.length - 1]?.error) {
toast({
title: t(responseData[responseData.length - 1].error?.message),
status: 'error'
});
}
return {
...item,
status: 'finish',

View File

@ -1,6 +1,10 @@
import { StreamResponseType } from '@/web/common/api/fetch';
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { ChatSiteItemType, ToolModuleResponseItemType } from '@fastgpt/global/core/chat/type';
import {
ChatHistoryItemResType,
ChatSiteItemType,
ToolModuleResponseItemType
} from '@fastgpt/global/core/chat/type';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
export type generatingMessageProps = {
@ -12,6 +16,7 @@ export type generatingMessageProps = {
tool?: ToolModuleResponseItemType;
interactive?: WorkflowInteractiveResponseType;
variables?: Record<string, any>;
nodeResponse?: ChatHistoryItemResType;
};
export type StartChatFnProps = {

View File

@ -17,6 +17,7 @@ import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { completionFinishReasonMap } from '@fastgpt/global/core/ai/constants';
type sideTabItemType = {
moduleLogo?: string;
@ -196,6 +197,13 @@ export const WholeResponseContent = ({
label={t('common:core.chat.response.module maxToken')}
value={activeModule?.maxToken}
/>
{activeModule?.finishReason && (
<Row
label={t('chat:completion_finish_reason')}
value={t(completionFinishReasonMap[activeModule?.finishReason])}
/>
)}
<Row label={t('chat:reasoning_text')} value={activeModule?.reasoningText} />
<Row
label={t('common:core.chat.response.module historyPreview')}

View File

@ -29,6 +29,7 @@ export type DatasetCollectionsListItemType = {
dataAmount: number;
trainingAmount: number;
hasError?: boolean;
};
/* ================= data ===================== */

View File

@ -1,6 +1,6 @@
import { exit } from 'process';
/*
/*
Init system
*/
export async function register() {
@ -9,6 +9,7 @@ export async function register() {
// 基础系统初始化
const [
{ connectMongo },
{ connectionMongo, connectionLogMongo, MONGO_URL, MONGO_LOG_URL },
{ systemStartCb },
{ initGlobalVariables, getInitConfig, initSystemPluginGroups, initAppTemplateTypes },
{ initVectorStore },
@ -19,6 +20,7 @@ export async function register() {
{ startTrainingQueue }
] = await Promise.all([
import('@fastgpt/service/common/mongo/init'),
import('@fastgpt/service/common/mongo/index'),
import('@fastgpt/service/common/system/tools'),
import('@/service/common/system'),
import('@fastgpt/service/common/vectorStore/controller'),
@ -34,7 +36,8 @@ export async function register() {
initGlobalVariables();
// Connect to MongoDB
await connectMongo();
await connectMongo(connectionMongo, MONGO_URL);
connectMongo(connectionLogMongo, MONGO_LOG_URL);
  // Init system config, init vector database, init root user
await Promise.all([getInitConfig(), initVectorStore(), initRootUser()]);

View File

@ -297,7 +297,7 @@ const InputTypeConfig = ({
<FormLabel flex={'0 0 132px'} fontWeight={'medium'}>
{t('common:core.module.Default Value')}
</FormLabel>
<Flex alignItems={'center'} flex={1} h={10}>
<Flex flex={1} h={10}>
{(inputType === FlowNodeInputTypeEnum.numberInput ||
(inputType === VariableInputEnum.custom &&
valueType === WorkflowIOValueTypeEnum.number)) && (

View File

@ -48,7 +48,7 @@ export const useChatTest = ({
const histories = messages.slice(-1);
// 流请求,获取数据
const { responseText, responseData } = await streamFetch({
const { responseText } = await streamFetch({
url: '/api/core/chat/chatTest',
data: {
// Send histories and user messages
@ -66,7 +66,7 @@ export const useChatTest = ({
abortCtrl: controller
});
return { responseText, responseData };
return { responseText };
}
);

View File

@ -84,7 +84,6 @@ const ChatHistorySlider = ({ confirmClearText }: { confirmClearText: string }) =
return (
<MyBox
isLoading={isLoading}
display={'flex'}
flexDirection={'column'}
w={'100%'}

View File

@ -1,19 +1,18 @@
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { Dispatch, ReactNode, SetStateAction, useEffect, useState } from 'react';
import { Dispatch, ReactNode, SetStateAction, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { createContext, useContextSelector } from 'use-context-selector';
import { DatasetStatusEnum, DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { useRequest, useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useDisclosure } from '@chakra-ui/react';
import { checkTeamWebSyncLimit } from '@/web/support/user/team/api';
import { postCreateTrainingUsage } from '@/web/support/wallet/usage/api';
import { getDatasetCollections, postWebsiteSync } from '@/web/core/dataset/api';
import dynamic from 'next/dynamic';
import { usePagination } from '@fastgpt/web/hooks/usePagination';
import { DatasetCollectionsListItemType } from '@/global/core/dataset/type';
import { useRouter } from 'next/router';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { WebsiteConfigFormType } from './WebsiteConfig';
const WebSiteConfigModal = dynamic(() => import('./WebsiteConfig'));
@ -66,7 +65,7 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) =>
const router = useRouter();
const { parentId = '' } = router.query as { parentId: string };
const { datasetDetail, datasetId, updateDataset } = useContextSelector(
const { datasetDetail, datasetId, updateDataset, loadDatasetDetail } = useContextSelector(
DatasetPageContext,
(v) => v
);
@ -75,30 +74,32 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) =>
const { openConfirm: openWebSyncConfirm, ConfirmModal: ConfirmWebSyncModal } = useConfirm({
content: t('dataset:start_sync_website_tip')
});
const syncWebsite = async () => {
await checkTeamWebSyncLimit();
postWebsiteSync({ datasetId: datasetId }).then(() => {
loadDatasetDetail(datasetId);
});
};
const {
isOpen: isOpenWebsiteModal,
onOpen: onOpenWebsiteModal,
onClose: onCloseWebsiteModal
} = useDisclosure();
const { mutate: onUpdateDatasetWebsiteConfig } = useRequest({
mutationFn: async (websiteConfig: DatasetSchemaType['websiteConfig']) => {
onCloseWebsiteModal();
await checkTeamWebSyncLimit();
const { runAsync: onUpdateDatasetWebsiteConfig } = useRequest2(
async (websiteConfig: WebsiteConfigFormType) => {
await updateDataset({
id: datasetId,
websiteConfig,
status: DatasetStatusEnum.syncing
websiteConfig: websiteConfig.websiteConfig,
chunkSettings: websiteConfig.chunkSettings
});
const billId = await postCreateTrainingUsage({
name: t('common:core.dataset.training.Website Sync'),
datasetId: datasetId
});
await postWebsiteSync({ datasetId: datasetId, billId });
return;
await syncWebsite();
},
errorToast: t('common:common.Update Failed')
});
{
onSuccess() {
onCloseWebsiteModal();
}
}
);
// collection list
const [searchText, setSearchText] = useState('');
@ -124,7 +125,7 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) =>
});
const contextValue: CollectionPageContextType = {
openWebSyncConfirm: openWebSyncConfirm(onUpdateDatasetWebsiteConfig),
openWebSyncConfirm: openWebSyncConfirm(syncWebsite),
onOpenWebsiteModal,
searchText,
@ -149,10 +150,6 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) =>
<WebSiteConfigModal
onClose={onCloseWebsiteModal}
onSuccess={onUpdateDatasetWebsiteConfig}
defaultValue={{
url: datasetDetail?.websiteConfig?.url,
selector: datasetDetail?.websiteConfig?.selector
}}
/>
)}
<ConfirmWebSyncModal />

View File

@ -25,6 +25,9 @@ const EmptyCollectionTip = () => {
{datasetDetail.status === DatasetStatusEnum.syncing && (
<>{t('common:core.dataset.status.syncing')}</>
)}
{datasetDetail.status === DatasetStatusEnum.waiting && (
<>{t('common:core.dataset.status.waiting')}</>
)}
{datasetDetail.status === DatasetStatusEnum.active && (
<>
{!datasetDetail?.websiteConfig?.url ? (

View File

@ -1,35 +1,23 @@
import React from 'react';
import {
Box,
Flex,
MenuButton,
Button,
Link,
useTheme,
useDisclosure,
HStack
} from '@chakra-ui/react';
import { Box, Flex, MenuButton, Button, Link, useDisclosure, HStack } from '@chakra-ui/react';
import {
getDatasetCollectionPathById,
postDatasetCollection,
putDatasetCollectionById
} from '@/web/core/dataset/api';
import { useQuery } from '@tanstack/react-query';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyInput from '@/components/MyInput';
import { useRequest, useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useRouter } from 'next/router';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
import {
DatasetCollectionTypeEnum,
TrainingModeEnum,
DatasetTypeEnum,
DatasetTypeMap,
DatasetStatusEnum,
DatasetCollectionDataProcessModeEnum
DatasetStatusEnum
} from '@fastgpt/global/core/dataset/constants';
import EditFolderModal, { useEditFolder } from '../../EditFolderModal';
import { TabEnum } from '../../../../pages/dataset/detail/index';
@ -43,26 +31,36 @@ import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContex
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import HeaderTagPopOver from './HeaderTagPopOver';
import MyBox from '@fastgpt/web/components/common/MyBox';
import Icon from '@fastgpt/web/components/common/Icon';
import MyTag from '@fastgpt/web/components/common/Tag/index';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
const FileSourceSelector = dynamic(() => import('../Import/components/FileSourceSelector'));
const Header = ({}: {}) => {
const Header = ({ hasTrainingData }: { hasTrainingData: boolean }) => {
const { t } = useTranslation();
const theme = useTheme();
const { feConfigs } = useSystemStore();
const { isPc } = useSystem();
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const router = useRouter();
const { parentId = '' } = router.query as { parentId: string };
const { isPc } = useSystem();
const { searchText, setSearchText, total, getData, pageNum, onOpenWebsiteModal } =
useContextSelector(CollectionPageContext, (v) => v);
const {
searchText,
setSearchText,
total,
getData,
pageNum,
onOpenWebsiteModal,
openWebSyncConfirm
} = useContextSelector(CollectionPageContext, (v) => v);
const { data: paths = [] } = useQuery(['getDatasetCollectionPathById', parentId], () =>
getDatasetCollectionPathById(parentId)
);
const { data: paths = [] } = useRequest2(() => getDatasetCollectionPathById(parentId), {
refreshDeps: [parentId],
manual: false
});
const { editFolderData, setEditFolderData } = useEditFolder();
const { onOpenModal: onOpenCreateVirtualFileModal, EditModal: EditCreateVirtualFileModal } =
@ -72,13 +70,14 @@ const Header = ({}: {}) => {
canEmpty: false
});
// Import collection
const {
isOpen: isOpenFileSourceSelector,
onOpen: onOpenFileSourceSelector,
onClose: onCloseFileSourceSelector
} = useDisclosure();
const { runAsync: onCreateCollection, loading: onCreating } = useRequest2(
const { runAsync: onCreateCollection } = useRequest2(
async ({ name, type }: { name: string; type: DatasetCollectionTypeEnum }) => {
const id = await postDatasetCollection({
parentId,
@ -100,7 +99,7 @@ const Header = ({}: {}) => {
const isWebSite = datasetDetail?.type === DatasetTypeEnum.websiteDataset;
return (
<MyBox isLoading={onCreating} display={['block', 'flex']} alignItems={'center'} gap={2}>
<MyBox display={['block', 'flex']} alignItems={'center'} gap={2}>
<HStack flex={1}>
<Box flex={1} fontWeight={'500'} color={'myGray.900'} whiteSpace={'nowrap'}>
<ParentPath
@ -121,13 +120,15 @@ const Header = ({}: {}) => {
{!isWebSite && <MyIcon name="common/list" mr={2} w={'20px'} color={'black'} />}
{t(DatasetTypeMap[datasetDetail?.type]?.collectionLabel as any)}({total})
</Flex>
{/* Website sync */}
{datasetDetail?.websiteConfig?.url && (
<Flex fontSize={'mini'}>
{t('common:core.dataset.website.Base Url')}:
<Box>{t('common:core.dataset.website.Base Url')}:</Box>
<Link
className="textEllipsis"
maxW={'300px'}
href={datasetDetail.websiteConfig.url}
target="_blank"
mr={2}
color={'blue.700'}
>
{datasetDetail.websiteConfig.url}
@ -171,12 +172,14 @@ const Header = ({}: {}) => {
)}
{/* Tag */}
{datasetDetail.permission.hasWritePer && feConfigs?.isPlus && <HeaderTagPopOver />}
{datasetDetail.type !== DatasetTypeEnum.websiteDataset &&
datasetDetail.permission.hasWritePer &&
feConfigs?.isPlus && <HeaderTagPopOver />}
</HStack>
{/* diff collection button */}
{datasetDetail.permission.hasWritePer && (
<Box textAlign={'end'} mt={[3, 0]}>
<Box mt={[3, 0]}>
{datasetDetail?.type === DatasetTypeEnum.dataset && (
<MyMenu
offset={[0, 5]}
@ -233,9 +236,8 @@ const Header = ({}: {}) => {
onClick: () => {
onOpenCreateVirtualFileModal({
defaultVal: '',
onSuccess: (name) => {
onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual });
}
onSuccess: (name) =>
onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual })
});
}
},
@ -272,35 +274,70 @@ const Header = ({}: {}) => {
{datasetDetail?.type === DatasetTypeEnum.websiteDataset && (
<>
{datasetDetail?.websiteConfig?.url ? (
<Flex alignItems={'center'}>
<>
{datasetDetail.status === DatasetStatusEnum.active && (
<Button onClick={onOpenWebsiteModal}>{t('common:common.Config')}</Button>
<HStack gap={2}>
<Button
onClick={onOpenWebsiteModal}
leftIcon={<Icon name="change" w={'1rem'} />}
>
{t('dataset:params_config')}
</Button>
{!hasTrainingData && (
<Button
variant={'whitePrimary'}
onClick={openWebSyncConfirm}
leftIcon={<Icon name="common/confirm/restoreTip" w={'1rem'} />}
>
{t('dataset:immediate_sync')}
</Button>
)}
</HStack>
)}
{datasetDetail.status === DatasetStatusEnum.syncing && (
<Flex
ml={3}
alignItems={'center'}
<MyTag
colorSchema="purple"
showDot
px={3}
py={1}
borderRadius="md"
border={theme.borders.base}
h={'36px'}
DotStyles={{
w: '8px',
h: '8px',
animation: 'zoomStopIcon 0.5s infinite alternate'
}}
>
<Box
animation={'zoomStopIcon 0.5s infinite alternate'}
bg={'myGray.700'}
w="8px"
h="8px"
borderRadius={'50%'}
mt={'1px'}
></Box>
<Box ml={2} color={'myGray.600'}>
{t('common:core.dataset.status.syncing')}
</Box>
</Flex>
{t('common:core.dataset.status.syncing')}
</MyTag>
)}
</Flex>
{datasetDetail.status === DatasetStatusEnum.waiting && (
<MyTag
colorSchema="gray"
showDot
px={3}
h={'36px'}
DotStyles={{
w: '8px',
h: '8px',
animation: 'zoomStopIcon 0.5s infinite alternate'
}}
>
{t('common:core.dataset.status.waiting')}
</MyTag>
)}
{datasetDetail.status === DatasetStatusEnum.error && (
<MyTag colorSchema="red" showDot px={3} h={'36px'}>
<HStack spacing={1}>
<Box>{t('dataset:status_error')}</Box>
<QuestionTip color={'red.500'} label={datasetDetail.errorMsg} />
</HStack>
</MyTag>
)}
</>
) : (
<Button onClick={onOpenWebsiteModal}>
<Button
onClick={onOpenWebsiteModal}
leftIcon={<Icon name="common/setting" w={'18px'} />}
>
{t('common:core.dataset.Set Website Config')}
</Button>
)}

View File

@ -0,0 +1,502 @@
import {
Box,
Button,
Flex,
ModalBody,
Table,
TableContainer,
Tbody,
Td,
Th,
Thead,
Tr
} from '@chakra-ui/react';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { useTranslation } from 'next-i18next';
import MyTag from '@fastgpt/web/components/common/Tag/index';
import FillRowTabs from '@fastgpt/web/components/common/Tabs/FillRowTabs';
import { useMemo, useState } from 'react';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import {
deleteTrainingData,
getDatasetCollectionTrainingDetail,
getTrainingDataDetail,
getTrainingError,
updateTrainingData
} from '@/web/core/dataset/api';
import { DatasetCollectionDataProcessModeEnum } from '@fastgpt/global/core/dataset/constants';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { getTrainingDataDetailResponse } from '@/pages/api/core/dataset/training/getTrainingDataDetail';
import MyTextarea from '@/components/common/Textarea/MyTextarea';
import { TrainingProcess } from '@/web/core/dataset/constants';
import { useForm } from 'react-hook-form';
import type { getTrainingDetailResponse } from '@/pages/api/core/dataset/collection/trainingDetail';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
// Aggregate state of one training-pipeline stage, derived from the
// queued/training/error counters in the collection's training detail.
enum TrainingStatus {
  NotStart = 'NotStart', // stage has not started yet
  Queued = 'Queued', // wait count>0
  Running = 'Running', // wait count=0; training count>0.
  Ready = 'Ready', // all counters are zero — stage finished
  Error = 'Error' // error count>0
}
// Vertical stepper that visualises the training pipeline of a collection.
// Steps are derived from the queued/training/error counters returned by the
// training-detail API; which steps appear depends on the training type and
// the advanced-training flags.
const ProgressView = ({ trainingDetail }: { trainingDetail: getTrainingDetailResponse }) => {
  const { t } = useTranslation();
  // QA mode shows a "get QA" step instead of the image/auto index steps.
  const isQA = trainingDetail?.trainingType === DatasetCollectionDataProcessModeEnum.qa;

  /*
    Build the ordered list of steps to render:
    1. parsing (always shown, rendered as already done);
    2. getQA (QA mode) or imageIndex/autoIndexes (when those flags are set and not QA);
    3. vectorizing, then a final "ready" step.
  */
  const statesArray = useMemo(() => {
    // The whole pipeline is ready only when every queued, training and error
    // counter is zero.
    const isReady =
      Object.values(trainingDetail.queuedCounts).every((count) => count === 0) &&
      Object.values(trainingDetail.trainingCounts).every((count) => count === 0) &&
      Object.values(trainingDetail.errorCounts).every((count) => count === 0);

    // Per-step status: ready wins, then error, otherwise running.
    const getTrainingStatus = ({ errorCount }: { errorCount: number }) => {
      if (isReady) return TrainingStatus.Ready;
      if (errorCount > 0) {
        return TrainingStatus.Error;
      }
      return TrainingStatus.Running;
    };

    // Only show the queued and in-progress counts; no text once everything is ready.
    const getStatusText = (mode: TrainingModeEnum) => {
      if (isReady) return;
      if (trainingDetail.queuedCounts[mode] > 0) {
        return t('dataset:dataset.Training_Waiting', {
          count: trainingDetail.queuedCounts[mode]
        });
      }
      if (trainingDetail.trainingCounts[mode] > 0) {
        return t('dataset:dataset.Training_Count', {
          count: trainingDetail.trainingCounts[mode]
        });
      }
      return;
    };

    const states: {
      label: string;
      statusText?: string;
      status: TrainingStatus;
      errorCount: number;
    }[] = [
      // {
      //   label: TrainingProcess.waiting.label,
      //   status: TrainingStatus.Queued,
      //   statusText: t('dataset:dataset.Completed')
      // },
      {
        label: t(TrainingProcess.parsing.label),
        status: TrainingStatus.Ready,
        errorCount: 0
      },
      ...(isQA
        ? [
            {
              errorCount: trainingDetail.errorCounts.qa,
              label: t(TrainingProcess.getQA.label),
              statusText: getStatusText(TrainingModeEnum.qa),
              status: getTrainingStatus({
                errorCount: trainingDetail.errorCounts.qa
              })
            }
          ]
        : []),
      ...(trainingDetail?.advancedTraining.imageIndex && !isQA
        ? [
            {
              errorCount: trainingDetail.errorCounts.image,
              label: t(TrainingProcess.imageIndex.label),
              statusText: getStatusText(TrainingModeEnum.image),
              status: getTrainingStatus({
                errorCount: trainingDetail.errorCounts.image
              })
            }
          ]
        : []),
      ...(trainingDetail?.advancedTraining.autoIndexes && !isQA
        ? [
            {
              errorCount: trainingDetail.errorCounts.auto,
              label: t(TrainingProcess.autoIndex.label),
              statusText: getStatusText(TrainingModeEnum.auto),
              status: getTrainingStatus({
                errorCount: trainingDetail.errorCounts.auto
              })
            }
          ]
        : []),
      {
        errorCount: trainingDetail.errorCounts.chunk,
        label: t(TrainingProcess.vectorizing.label),
        statusText: getStatusText(TrainingModeEnum.chunk),
        status: getTrainingStatus({
          errorCount: trainingDetail.errorCounts.chunk
        })
      },
      {
        errorCount: 0,
        label: t('dataset:process.Is_Ready'),
        status: isReady ? TrainingStatus.Ready : TrainingStatus.NotStart,
        statusText: isReady
          ? undefined
          : t('dataset:training_ready', {
              count: trainingDetail.trainedCount
            })
      }
    ];
    return states;
  }, [trainingDetail, t, isQA]);

  return (
    <Flex flexDirection={'column'} gap={6}>
      {statesArray.map((item, index) => (
        <Flex alignItems={'center'} pl={4} key={index}>
          {/* Status round */}
          <Box
            w={'14px'}
            h={'14px'}
            borderWidth={'2px'}
            borderRadius={'50%'}
            position={'relative'}
            display={'flex'}
            alignItems={'center'}
            justifyContent={'center'}
            {...((item.status === TrainingStatus.Running ||
              item.status === TrainingStatus.Error) && {
              bg: 'primary.600',
              borderColor: 'primary.600',
              boxShadow: '0 0 0 4px var(--Royal-Blue-100, #E1EAFF)'
            })}
            {...(item.status === TrainingStatus.Ready && {
              bg: 'primary.600',
              borderColor: 'primary.600'
            })}
            // Line
            {...(index !== statesArray.length - 1 && {
              _after: {
                content: '""',
                height: '59px',
                width: '2px',
                bgColor: 'myGray.250',
                position: 'absolute',
                top: '14px',
                left: '4px'
              }
            })}
          >
            {item.status === TrainingStatus.Ready && (
              <MyIcon name="common/check" w={3} color={'white'} />
            )}
          </Box>
          {/* Card */}
          <Flex
            alignItems={'center'}
            w={'full'}
            bg={
              item.status === TrainingStatus.Running
                ? 'primary.50'
                : item.status === TrainingStatus.Error
                  ? 'red.50'
                  : 'myGray.50'
            }
            py={2.5}
            px={3}
            ml={5}
            borderRadius={'8px'}
            flex={1}
            h={'53px'}
          >
            <Box
              fontSize={'14px'}
              fontWeight={'medium'}
              color={item.status === TrainingStatus.NotStart ? 'myGray.400' : 'myGray.900'}
              mr={2}
            >
              {t(item.label as any)}
            </Box>
            {item.status === TrainingStatus.Error && (
              <MyTag
                showDot
                type={'borderSolid'}
                px={1}
                fontSize={'mini'}
                borderRadius={'md'}
                h={5}
                colorSchema={'red'}
              >
                {t('dataset:training.Error', { count: item.errorCount })}
              </MyTag>
            )}
            <Box flex={1} />
            {!!item.statusText && (
              <Flex fontSize={'sm'} alignItems={'center'}>
                {item.statusText}
              </Flex>
            )}
          </Flex>
        </Flex>
      ))}
    </Flex>
  );
};
// Scrollable, paginated table of chunks that failed during training, with
// retry / edit / delete actions per row. Choosing "edit" fetches the chunk
// detail and switches this view to <EditView />.
const ErrorView = ({ datasetId, collectionId }: { datasetId: string; collectionId: string }) => {
  const { t } = useTranslation();
  // Human-readable label for the pipeline stage a failed chunk was in.
  const TrainingText = {
    [TrainingModeEnum.chunk]: t('dataset:process.Vectorizing'),
    [TrainingModeEnum.qa]: t('dataset:process.Get QA'),
    [TrainingModeEnum.image]: t('dataset:process.Image_Index'),
    [TrainingModeEnum.auto]: t('dataset:process.Auto_Index')
  };
  // When set, the component renders the edit form instead of the table.
  const [editChunk, setEditChunk] = useState<getTrainingDataDetailResponse>();

  const {
    data: errorList,
    ScrollData,
    isLoading,
    refreshList
  } = useScrollPagination(getTrainingError, {
    pageSize: 15,
    params: {
      collectionId
    },
    EmptyTip: <EmptyTip />
  });

  // Load the full detail of one failed chunk, then open the edit view.
  const { runAsync: getData, loading: getDataLoading } = useRequest2(
    (data: { datasetId: string; collectionId: string; dataId: string }) => {
      return getTrainingDataDetail(data);
    },
    {
      manual: true,
      onSuccess: (data) => {
        setEditChunk(data);
      }
    }
  );

  // Drop a failed chunk entirely, then reload the list.
  const { runAsync: deleteData, loading: deleteLoading } = useRequest2(
    (data: { datasetId: string; collectionId: string; dataId: string }) => {
      return deleteTrainingData(data);
    },
    {
      manual: true,
      onSuccess: () => {
        refreshList();
      }
    }
  );

  // Update a chunk (also used as "retrain" when called without q/a),
  // then reload the list and close the edit view.
  const { runAsync: updateData, loading: updateLoading } = useRequest2(
    (data: { datasetId: string; collectionId: string; dataId: string; q?: string; a?: string }) => {
      return updateTrainingData(data);
    },
    {
      manual: true,
      onSuccess: () => {
        refreshList();
        setEditChunk(undefined);
      }
    }
  );

  if (editChunk) {
    return (
      <EditView
        editChunk={editChunk}
        onCancel={() => setEditChunk(undefined)}
        onSave={(data) => {
          updateData({
            datasetId,
            collectionId,
            dataId: editChunk._id,
            ...data
          });
        }}
      />
    );
  }

  return (
    <ScrollData
      h={'400px'}
      isLoading={isLoading || updateLoading || getDataLoading || deleteLoading}
    >
      <TableContainer overflowY={'auto'} fontSize={'12px'}>
        <Table variant={'simple'}>
          <Thead>
            <Tr>
              <Th pr={0}>{t('dataset:dataset.Chunk_Number')}</Th>
              <Th pr={0}>{t('dataset:dataset.Training_Status')}</Th>
              <Th>{t('dataset:dataset.Error_Message')}</Th>
              <Th>{t('dataset:dataset.Operation')}</Th>
            </Tr>
          </Thead>
          <Tbody>
            {errorList.map((item, index) => (
              <Tr key={index}>
                {/* chunkIndex is 0-based; display as 1-based */}
                <Td>{item.chunkIndex + 1}</Td>
                <Td>{TrainingText[item.mode]}</Td>
                <Td maxW={50}>
                  <MyTooltip label={item.errorMsg}>{item.errorMsg}</MyTooltip>
                </Td>
                <Td>
                  <Flex alignItems={'center'}>
                    <Button
                      variant={'ghost'}
                      size={'sm'}
                      color={'myGray.600'}
                      leftIcon={<MyIcon name={'common/confirm/restoreTip'} w={4} />}
                      fontSize={'mini'}
                      onClick={() => updateData({ datasetId, collectionId, dataId: item._id })}
                    >
                      {t('dataset:dataset.ReTrain')}
                    </Button>
                    <Box w={'1px'} height={'16px'} bg={'myGray.200'} />
                    <Button
                      variant={'ghost'}
                      size={'sm'}
                      color={'myGray.600'}
                      leftIcon={<MyIcon name={'edit'} w={4} />}
                      fontSize={'mini'}
                      onClick={() => getData({ datasetId, collectionId, dataId: item._id })}
                    >
                      {t('dataset:dataset.Edit_Chunk')}
                    </Button>
                    <Box w={'1px'} height={'16px'} bg={'myGray.200'} />
                    <Button
                      variant={'ghost'}
                      size={'sm'}
                      color={'myGray.600'}
                      leftIcon={<MyIcon name={'delete'} w={4} />}
                      fontSize={'mini'}
                      onClick={() => {
                        deleteData({ datasetId, collectionId, dataId: item._id });
                      }}
                    >
                      {t('dataset:dataset.Delete_Chunk')}
                    </Button>
                  </Flex>
                </Td>
              </Tr>
            ))}
          </Tbody>
        </Table>
      </TableContainer>
    </ScrollData>
  );
};
const EditView = ({
editChunk,
onCancel,
onSave
}: {
editChunk: getTrainingDataDetailResponse;
onCancel: () => void;
onSave: (data: { q: string; a?: string }) => void;
}) => {
const { t } = useTranslation();
const { register, handleSubmit } = useForm({
defaultValues: {
q: editChunk?.q || '',
a: editChunk?.a || ''
}
});
return (
<Flex flexDirection={'column'} gap={4}>
{editChunk?.a && <Box>q</Box>}
<MyTextarea {...register('q')} minH={editChunk?.a ? 200 : 400} />
{editChunk?.a && (
<>
<Box>a</Box>
<MyTextarea {...register('a')} minH={200} />
</>
)}
<Flex justifyContent={'flex-end'} gap={4}>
<Button variant={'outline'} onClick={onCancel}>
{t('common:common.Cancel')}
</Button>
<Button variant={'primary'} onClick={handleSubmit(onSave)}>
{t('dataset:dataset.ReTrain')}
</Button>
</Flex>
</Flex>
);
};
// Modal showing a collection's training progress ("states" tab) and its
// failed chunks ("errors" tab). Polls the training detail every 5 seconds
// while the page is visible.
const TrainingStates = ({
  datasetId,
  collectionId,
  defaultTab = 'states',
  onClose
}: {
  datasetId: string;
  collectionId: string;
  defaultTab?: 'states' | 'errors';
  onClose: () => void;
}) => {
  const { t } = useTranslation();
  const [tab, setTab] = useState<typeof defaultTab>(defaultTab);

  // Poll training detail; pollingWhenHidden=false pauses polling in background tabs.
  const { data: trainingDetail, loading } = useRequest2(
    () => getDatasetCollectionTrainingDetail(collectionId),
    {
      pollingInterval: 5000,
      pollingWhenHidden: false,
      manual: false
    }
  );

  // Total failed-chunk count across all training modes, shown on the errors tab.
  const errorCounts = (Object.values(trainingDetail?.errorCounts || {}) as number[]).reduce(
    (acc, count) => acc + count,
    0
  );

  return (
    <MyModal
      isOpen
      onClose={onClose}
      iconSrc="common/running"
      title={t('dataset:dataset.Training Process')}
      minW={['90vw', '712px']}
      isLoading={!trainingDetail && loading && tab === 'states'}
    >
      <ModalBody px={9} minH={['90vh', '500px']}>
        <FillRowTabs
          py={1}
          mb={6}
          value={tab}
          onChange={(e) => setTab(e as 'states' | 'errors')}
          list={[
            { label: t('dataset:dataset.Training Process'), value: 'states' },
            {
              label: t('dataset:dataset.Training_Errors', {
                count: errorCounts
              }),
              value: 'errors'
            }
          ]}
        />
        {tab === 'states' && trainingDetail && <ProgressView trainingDetail={trainingDetail} />}
        {tab === 'errors' && <ErrorView datasetId={datasetId} collectionId={collectionId} />}
      </ModalBody>
    </MyModal>
  );
};
export default TrainingStates;

View File

@ -1,110 +1,215 @@
import React from 'react';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { useTranslation } from 'next-i18next';
import { Box, Button, Input, Link, ModalBody, ModalFooter } from '@chakra-ui/react';
import { strIsLink } from '@fastgpt/global/common/string/tools';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useForm } from 'react-hook-form';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { getDocPath } from '@/web/common/system/doc';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useMyStep } from '@fastgpt/web/hooks/useStep';
import MyDivider from '@fastgpt/web/components/common/MyDivider';
import React, { useRef } from 'react';
import {
Box,
Link,
Input,
Button,
ModalBody,
ModalFooter,
Textarea,
Stack
} from '@chakra-ui/react';
import {
DataChunkSplitModeEnum,
DatasetCollectionDataProcessModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { ChunkSettingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import CollectionChunkForm, {
collectionChunkForm2StoreChunkData,
type CollectionChunkFormType
} from '../Form/CollectionChunkForm';
import { getLLMDefaultChunkSize } from '@fastgpt/global/core/dataset/training/utils';
import { ChunkSettingsType } from '@fastgpt/global/core/dataset/type';
type FormType = {
url?: string | undefined;
selector?: string | undefined;
export type WebsiteConfigFormType = {
websiteConfig: {
url: string;
selector: string;
};
chunkSettings: ChunkSettingsType;
};
const WebsiteConfigModal = ({
onClose,
onSuccess,
defaultValue = {
url: '',
selector: ''
}
onSuccess
}: {
onClose: () => void;
onSuccess: (data: FormType) => void;
defaultValue?: FormType;
onSuccess: (data: WebsiteConfigFormType) => void;
}) => {
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
const { toast } = useToast();
const { register, handleSubmit } = useForm({
defaultValues: defaultValue
const steps = [
{
title: t('dataset:website_info')
},
{
title: t('dataset:params_config')
}
];
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const websiteConfig = datasetDetail.websiteConfig;
const chunkSettings = datasetDetail.chunkSettings;
const {
register: websiteInfoForm,
handleSubmit: websiteInfoHandleSubmit,
getValues: websiteInfoGetValues
} = useForm({
defaultValues: {
url: websiteConfig?.url || '',
selector: websiteConfig?.selector || ''
}
});
const isEdit = !!defaultValue.url;
const confirmTip = isEdit
? t('common:core.dataset.website.Confirm Update Tips')
: t('common:core.dataset.website.Confirm Create Tips');
const isEdit = !!websiteConfig?.url;
const { ConfirmModal, openConfirm } = useConfirm({
type: 'common'
});
const { activeStep, goToPrevious, goToNext, MyStep } = useMyStep({
defaultStep: 0,
steps
});
const form = useForm<CollectionChunkFormType>({
defaultValues: {
trainingType: chunkSettings?.trainingType || DatasetCollectionDataProcessModeEnum.chunk,
imageIndex: chunkSettings?.imageIndex || false,
autoIndexes: chunkSettings?.autoIndexes || false,
chunkSettingMode: chunkSettings?.chunkSettingMode || ChunkSettingModeEnum.auto,
chunkSplitMode: chunkSettings?.chunkSplitMode || DataChunkSplitModeEnum.size,
embeddingChunkSize: chunkSettings?.chunkSize || 2000,
qaChunkSize: chunkSettings?.chunkSize || getLLMDefaultChunkSize(datasetDetail.agentModel),
indexSize: chunkSettings?.indexSize || datasetDetail.vectorModel?.defaultToken || 512,
chunkSplitter: chunkSettings?.chunkSplitter || '',
qaPrompt: chunkSettings?.qaPrompt || Prompt_AgentQA.description
}
});
return (
<MyModal
isOpen
iconSrc="core/dataset/websiteDataset"
title={t('common:core.dataset.website.Config')}
onClose={onClose}
maxW={'500px'}
w={'550px'}
>
<ModalBody>
<Box fontSize={'sm'} color={'myGray.600'}>
{t('common:core.dataset.website.Config Description')}
{feConfigs?.docUrl && (
<Link
href={getDocPath('/docs/guide/knowledge_base/websync/')}
target="_blank"
textDecoration={'underline'}
fontWeight={'bold'}
<ModalBody w={'full'}>
<Stack w={'75%'} marginX={'auto'}>
<MyStep />
</Stack>
<MyDivider />
{activeStep == 0 && (
<>
<Box
fontSize={'xs'}
color={'myGray.900'}
bgColor={'blue.50'}
padding={'4'}
borderRadius={'8px'}
>
{t('common:common.course.Read Course')}
</Link>
)}
</Box>
<Box mt={2}>
<Box>{t('common:core.dataset.website.Base Url')}</Box>
<Input
placeholder={t('common:core.dataset.collection.Website Link')}
{...register('url', {
required: true
})}
/>
</Box>
<Box mt={3}>
<Box>
{t('common:core.dataset.website.Selector')}({t('common:common.choosable')})
</Box>
<Input {...register('selector')} placeholder="body .content #document" />
</Box>
{t('common:core.dataset.website.Config Description')}
{feConfigs?.docUrl && (
<Link
href={getDocPath('/docs/guide/knowledge_base/websync/')}
target="_blank"
textDecoration={'underline'}
color={'blue.700'}
>
{t('common:common.course.Read Course')}
</Link>
)}
</Box>
<Box mt={2}>
<Box>{t('common:core.dataset.website.Base Url')}</Box>
<Input
placeholder={t('common:core.dataset.collection.Website Link')}
{...websiteInfoForm('url', {
required: true
})}
/>
</Box>
<Box mt={3}>
<Box>
{t('common:core.dataset.website.Selector')}({t('common:common.choosable')})
</Box>
<Input {...websiteInfoForm('selector')} placeholder="body .content #document" />
</Box>
</>
)}
{activeStep == 1 && <CollectionChunkForm form={form} />}
</ModalBody>
<ModalFooter>
<Button variant={'whiteBase'} onClick={onClose}>
{t('common:common.Close')}
</Button>
<Button
ml={2}
onClick={handleSubmit((data) => {
if (!data.url) return;
// check is link
if (!strIsLink(data.url)) {
return toast({
status: 'warning',
title: t('common:common.link.UnValid')
});
}
openConfirm(
() => {
onSuccess(data);
},
undefined,
confirmTip
)();
})}
>
{t('common:core.dataset.website.Start Sync')}
</Button>
{activeStep == 0 && (
<>
<Button variant={'whiteBase'} onClick={onClose}>
{t('common:common.Close')}
</Button>
<Button
ml={2}
onClick={websiteInfoHandleSubmit((data) => {
if (!data.url) return;
// check is link
if (!strIsLink(data.url)) {
return toast({
status: 'warning',
title: t('common:common.link.UnValid')
});
}
goToNext();
})}
>
{t('common:common.Next Step')}
</Button>
</>
)}
{activeStep == 1 && (
<>
<Button variant={'whiteBase'} onClick={goToPrevious}>
{t('common:common.Last Step')}
</Button>
<Button
ml={2}
onClick={form.handleSubmit((data) => {
openConfirm(
() =>
onSuccess({
websiteConfig: websiteInfoGetValues(),
chunkSettings: collectionChunkForm2StoreChunkData({
...data,
agentModel: datasetDetail.agentModel,
vectorModel: datasetDetail.vectorModel
})
}),
undefined,
isEdit
? t('common:core.dataset.website.Confirm Update Tips')
: t('common:core.dataset.website.Confirm Create Tips')
)();
})}
>
{t('common:core.dataset.website.Start Sync')}
</Button>
</>
)}
</ModalFooter>
<ConfirmModal />
</MyModal>
@ -112,3 +217,42 @@ const WebsiteConfigModal = ({
};
export default WebsiteConfigModal;
// Modal for editing the custom QA prompt. Uses an uncontrolled textarea
// (read via ref) and only reports the value when the user confirms; an empty
// textarea falls back to the default agent-QA prompt.
const PromptTextarea = ({
  defaultValue,
  onChange,
  onClose
}: {
  defaultValue: string;
  onChange: (e: string) => void;
  onClose: () => void;
}) => {
  const ref = useRef<HTMLTextAreaElement>(null);
  const { t } = useTranslation();

  return (
    <MyModal
      isOpen
      title={t('common:core.dataset.import.Custom prompt')}
      iconSrc="modal/edit"
      w={'600px'}
      onClose={onClose}
    >
      <ModalBody whiteSpace={'pre-wrap'} fontSize={'sm'} px={[3, 6]} pt={[3, 6]}>
        <Textarea ref={ref} rows={8} fontSize={'sm'} defaultValue={defaultValue} />
        {/* Fixed trailing text that is always appended to the prompt */}
        <Box>{Prompt_AgentQA.fixedText}</Box>
      </ModalBody>
      <ModalFooter>
        <Button
          onClick={() => {
            // Empty input falls back to the default prompt description.
            const val = ref.current?.value || Prompt_AgentQA.description;
            onChange(val);
            onClose();
          }}
        >
          {t('common:common.Confirm')}
        </Button>
      </ModalFooter>
    </MyModal>
  );
};

View File

@ -51,6 +51,7 @@ import {
import { useFolderDrag } from '@/components/common/folder/useFolderDrag';
import TagsPopOver from './TagsPopOver';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import TrainingStates from './TrainingStates';
const Header = dynamic(() => import('./Header'));
const EmptyCollectionTip = dynamic(() => import('./EmptyCollectionTip'));
@ -63,26 +64,25 @@ const CollectionCard = () => {
const { datasetDetail, loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);
const { feConfigs } = useSystemStore();
const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
content: t('common:dataset.Confirm to delete the file'),
type: 'delete'
});
const { onOpenModal: onOpenEditTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('common:Rename')
});
const [moveCollectionData, setMoveCollectionData] = useState<{ collectionId: string }>();
const [trainingStatesCollection, setTrainingStatesCollection] = useState<{
collectionId: string;
}>();
const { collections, Pagination, total, getData, isGetting, pageNum, pageSize } =
useContextSelector(CollectionPageContext, (v) => v);
// Ad file status icon
// Add file status icon
const formatCollections = useMemo(
() =>
collections.map((collection) => {
const icon = getCollectionIcon(collection.type, collection.name);
const status = (() => {
if (collection.hasError) {
return {
statusText: t('common:core.dataset.collection.status.error'),
colorSchema: 'red'
};
}
if (collection.trainingAmount > 0) {
return {
statusText: t('common:dataset.collections.Collection Embedding', {
@ -106,6 +106,11 @@ const CollectionCard = () => {
[collections, t]
);
const [moveCollectionData, setMoveCollectionData] = useState<{ collectionId: string }>();
const { onOpenModal: onOpenEditTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('common:Rename')
});
const { runAsync: onUpdateCollection, loading: isUpdating } = useRequest2(
putDatasetCollectionById,
{
@ -115,7 +120,12 @@ const CollectionCard = () => {
successToast: t('common:common.Update Success')
}
);
const { runAsync: onDelCollection, loading: isDeleting } = useRequest2(
const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
content: t('common:dataset.Confirm to delete the file'),
type: 'delete'
});
const { runAsync: onDelCollection } = useRequest2(
(collectionId: string) => {
return delDatasetCollectionById({
id: collectionId
@ -153,14 +163,14 @@ const CollectionCard = () => {
['refreshCollection'],
() => {
getData(pageNum);
if (datasetDetail.status === DatasetStatusEnum.syncing) {
if (datasetDetail.status !== DatasetStatusEnum.active) {
loadDatasetDetail(datasetDetail._id);
}
return null;
},
{
refetchInterval: 6000,
enabled: hasTrainingData || datasetDetail.status === DatasetStatusEnum.syncing
enabled: hasTrainingData || datasetDetail.status !== DatasetStatusEnum.active
}
);
@ -180,13 +190,13 @@ const CollectionCard = () => {
});
const isLoading =
isUpdating || isDeleting || isSyncing || (isGetting && collections.length === 0) || isDropping;
isUpdating || isSyncing || (isGetting && collections.length === 0) || isDropping;
return (
<MyBox isLoading={isLoading} h={'100%'} py={[2, 4]}>
<Flex ref={BoxRef} flexDirection={'column'} py={[1, 0]} h={'100%'} px={[2, 6]}>
{/* header */}
<Header />
<Header hasTrainingData={hasTrainingData} />
{/* collection table */}
<TableContainer mt={3} overflowY={'auto'} fontSize={'sm'}>
@ -269,9 +279,22 @@ const CollectionCard = () => {
<Box>{formatTime2YMDHM(collection.updateTime)}</Box>
</Td>
<Td py={2}>
<MyTag showDot colorSchema={collection.colorSchema as any} type={'borderFill'}>
{t(collection.statusText as any)}
</MyTag>
<MyTooltip label={t('common:Click_to_expand')}>
<MyTag
showDot
colorSchema={collection.colorSchema as any}
type={'fill'}
onClick={(e) => {
e.stopPropagation();
setTrainingStatesCollection({ collectionId: collection._id });
}}
>
<Flex fontWeight={'medium'} alignItems={'center'} gap={1}>
{t(collection.statusText as any)}
<MyIcon name={'common/maximize'} w={'11px'} />
</Flex>
</MyTag>
</MyTooltip>
</Td>
<Td py={2} onClick={(e) => e.stopPropagation()}>
<Switch
@ -383,9 +406,7 @@ const CollectionCard = () => {
type: 'danger',
onClick: () =>
openDeleteConfirm(
() => {
onDelCollection(collection._id);
},
() => onDelCollection(collection._id),
undefined,
collection.type === DatasetCollectionTypeEnum.folder
? t('common:dataset.collections.Confirm to delete the folder')
@ -414,6 +435,14 @@ const CollectionCard = () => {
<ConfirmSyncModal />
<EditTitleModal />
{!!trainingStatesCollection && (
<TrainingStates
datasetId={datasetDetail._id}
collectionId={trainingStatesCollection.collectionId}
onClose={() => setTrainingStatesCollection(undefined)}
/>
)}
{!!moveCollectionData && (
<SelectCollections
datasetId={datasetDetail._id}

View File

@ -30,6 +30,7 @@ import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { TabEnum } from './NavBar';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import TrainingStates from './CollectionCard/TrainingStates';
const DataCard = () => {
const theme = useTheme();
@ -44,6 +45,7 @@ const DataCard = () => {
const { t } = useTranslation();
const [searchText, setSearchText] = useState('');
const [errorModalId, setErrorModalId] = useState('');
const { toast } = useToast();
const scrollParams = useMemo(
@ -174,7 +176,7 @@ const DataCard = () => {
<MyDivider my={'17px'} w={'100%'} />
</Box>
<Flex alignItems={'center'} px={6} pb={4}>
<Flex align={'center'} color={'myGray.500'}>
<Flex alignItems={'center'} color={'myGray.500'}>
<MyIcon name="common/list" mr={2} w={'18px'} />
<Box as={'span'} fontSize={['sm', '14px']} fontWeight={'500'}>
{t('dataset:data_amount', {
@ -182,6 +184,25 @@ const DataCard = () => {
indexAmount: collection?.indexAmount ?? '-'
})}
</Box>
{!!collection?.errorCount && (
<MyTag
colorSchema={'red'}
type={'fill'}
cursor={'pointer'}
rounded={'full'}
ml={2}
onClick={() => {
setErrorModalId(collection._id);
}}
>
<Flex fontWeight={'medium'} alignItems={'center'} gap={1}>
{t('dataset:data_error_amount', {
errorAmount: collection?.errorCount
})}
<MyIcon name={'common/maximize'} w={'11px'} />
</Flex>
</MyTag>
)}
</Flex>
<Box flex={1} mr={1} />
<MyInput
@ -354,6 +375,14 @@ const DataCard = () => {
}}
/>
)}
{errorModalId && (
<TrainingStates
datasetId={datasetId}
defaultTab={'errors'}
collectionId={errorModalId}
onClose={() => setErrorModalId('')}
/>
)}
<ConfirmModal />
</MyBox>
);

View File

@ -0,0 +1,524 @@
import MyModal from '@fastgpt/web/components/common/MyModal';
import { useTranslation } from 'next-i18next';
import { UseFormReturn } from 'react-hook-form';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import {
Box,
Flex,
Input,
Button,
ModalBody,
ModalFooter,
Textarea,
useDisclosure,
Checkbox,
HStack
} from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import LeftRadio from '@fastgpt/web/components/common/Radio/LeftRadio';
import {
DataChunkSplitModeEnum,
DatasetCollectionDataProcessModeEnum,
DatasetCollectionDataProcessModeMap
} from '@fastgpt/global/core/dataset/constants';
import { ChunkSettingModeEnum } from '@fastgpt/global/core/dataset/constants';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
import { useContextSelector } from 'use-context-selector';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import MyNumberInput from '@fastgpt/web/components/common/Input/NumberInput';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import MySelect from '@fastgpt/web/components/common/MySelect';
import {
chunkAutoChunkSize,
getAutoIndexSize,
getIndexSizeSelectList,
getLLMDefaultChunkSize,
getLLMMaxChunkSize,
getMaxChunkSize,
getMaxIndexSize,
minChunkSize
} from '@fastgpt/global/core/dataset/training/utils';
import RadioGroup from '@fastgpt/web/components/common/Radio/RadioGroup';
import { ChunkSettingsType } from '@fastgpt/global/core/dataset/type';
import type { LLMModelItemType, EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d';
// Modal editor for the custom QA-generation prompt. Uncontrolled textarea:
// the value is only read (and committed via onChange) when the user confirms.
const PromptTextarea = ({
  defaultValue = '',
  onChange,
  onClose
}: {
  defaultValue?: string;
  onChange: (e: string) => void;
  onClose: () => void;
}) => {
  const { t } = useTranslation();
  const textareaRef = useRef<HTMLTextAreaElement>(null);

  // Commit the edited prompt; an empty textarea falls back to the default prompt.
  const handleConfirm = () => {
    const value = textareaRef.current?.value || Prompt_AgentQA.description;
    onChange(value);
    onClose();
  };

  return (
    <MyModal
      isOpen
      iconSrc="modal/edit"
      title={t('common:core.dataset.import.Custom prompt')}
      w={'600px'}
      onClose={onClose}
    >
      <ModalBody whiteSpace={'pre-wrap'} fontSize={'sm'} px={[3, 6]} pt={[3, 6]}>
        <Textarea ref={textareaRef} rows={8} fontSize={'sm'} defaultValue={defaultValue} />
        <Box>{Prompt_AgentQA.fixedText}</Box>
      </ModalBody>
      <ModalFooter>
        <Button onClick={handleConfirm}>{t('common:common.Confirm')}</Button>
      </ModalFooter>
    </MyModal>
  );
};
// Form values for the chunk-settings UI (shared by the import wizard and the
// collection config modal). Persisted via collectionChunkForm2StoreChunkData.
export type CollectionChunkFormType = {
  trainingType: DatasetCollectionDataProcessModeEnum;
  imageIndex: boolean; // parse/index images with the VLM (plus feature, needs vlmModel)
  autoIndexes: boolean; // auto-generate extra indexes (plus feature)
  chunkSettingMode: ChunkSettingModeEnum; // auto defaults vs custom params
  chunkSplitMode: DataChunkSplitModeEnum; // split by size or by custom character
  embeddingChunkSize: number; // chunk size used in chunk (embedding) mode
  qaChunkSize: number; // chunk size used in QA mode
  chunkSplitter?: string; // custom split sign (only used when splitting by char)
  indexSize: number; // vector index size for chunk mode
  qaPrompt?: string; // custom QA generation prompt (QA mode only)
};
// Chunk-parameter form for a dataset collection: training mode, enhanced
// indexes (plus-only toggles), and auto/custom chunking params. The parent
// owns the react-hook-form instance and handles submission/validation.
const CollectionChunkForm = ({ form }: { form: UseFormReturn<CollectionChunkFormType> }) => {
  const { t } = useTranslation();
  const { feConfigs } = useSystemStore();
  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
  const vectorModel = datasetDetail.vectorModel;
  const agentModel = datasetDetail.agentModel;
  const { setValue, register, watch, getValues } = form;
  const trainingType = watch('trainingType');
  const chunkSettingMode = watch('chunkSettingMode');
  const chunkSplitMode = watch('chunkSplitMode');
  const autoIndexes = watch('autoIndexes');
  const indexSize = watch('indexSize');
  // Training-mode radio options; the legacy `auto` mode is filtered out
  // (it is remapped to chunk + autoIndexes by the adapt effect below).
  const trainingModeList = useMemo(() => {
    const list = Object.entries(DatasetCollectionDataProcessModeMap);
    return list
      .filter(([key]) => key !== DatasetCollectionDataProcessModeEnum.auto)
      .map(([key, value]) => ({
        title: t(value.label as any),
        value: key as DatasetCollectionDataProcessModeEnum,
        tooltip: t(value.tooltip as any)
      }));
  }, [t]);
  // Which form field holds the chunk size, plus its allowed bounds, derived
  // from the training mode and the selected models.
  // NOTE(review): the `autoIndexes` branch is identical to the final else —
  // confirm whether autoIndexes was meant to use different bounds.
  const {
    chunkSizeField,
    maxChunkSize,
    minChunkSize: minChunkSizeValue,
    maxIndexSize
  } = useMemo(() => {
    if (trainingType === DatasetCollectionDataProcessModeEnum.qa) {
      return {
        chunkSizeField: 'qaChunkSize',
        maxChunkSize: getLLMMaxChunkSize(agentModel),
        minChunkSize: 1000,
        maxIndexSize: 1000
      };
    } else if (autoIndexes) {
      return {
        chunkSizeField: 'embeddingChunkSize',
        maxChunkSize: getMaxChunkSize(agentModel),
        minChunkSize: minChunkSize,
        maxIndexSize: getMaxIndexSize(vectorModel)
      };
    } else {
      return {
        chunkSizeField: 'embeddingChunkSize',
        maxChunkSize: getMaxChunkSize(agentModel),
        minChunkSize: minChunkSize,
        maxIndexSize: getMaxIndexSize(vectorModel)
      };
    }
  }, [trainingType, autoIndexes, agentModel, vectorModel]);
  // Custom split list: preset separators plus an 'Other' free-text option.
  const customSplitList = [
    { label: t('dataset:split_sign_null'), value: '' },
    { label: t('dataset:split_sign_break'), value: '\\n' },
    { label: t('dataset:split_sign_break2'), value: '\\n\\n' },
    { label: t('dataset:split_sign_period'), value: '.|。' },
    { label: t('dataset:split_sign_exclamatiob'), value: '!|' },
    { label: t('dataset:split_sign_question'), value: '?|' },
    { label: t('dataset:split_sign_semicolon'), value: ';|' },
    { label: '=====', value: '=====' },
    { label: t('dataset:split_sign_custom'), value: 'Other' }
  ];
  const [customListSelectValue, setCustomListSelectValue] = useState(getValues('chunkSplitter'));
  // Keep the form's chunkSplitter in sync with the dropdown; selecting
  // 'Other' clears it so the registered free-text input takes over.
  useEffect(() => {
    if (customListSelectValue === 'Other') {
      setValue('chunkSplitter', '');
    } else {
      setValue('chunkSplitter', customListSelectValue);
    }
  }, [customListSelectValue, setValue]);
  // Index size options, capped by the embedding model's max index size.
  const indexSizeSeletorList = useMemo(() => getIndexSizeSelectList(maxIndexSize), [maxIndexSize]);
  // QA prompt state + modal disclosure for the custom-prompt editor.
  const qaPrompt = watch('qaPrompt');
  const {
    isOpen: isOpenCustomPrompt,
    onOpen: onOpenCustomPrompt,
    onClose: onCloseCustomPrompt
  } = useDisclosure();
  const showQAPromptInput = trainingType === DatasetCollectionDataProcessModeEnum.qa;
  // Adapt 4.9.0- auto training: legacy `auto` becomes chunk + autoIndexes.
  useEffect(() => {
    if (trainingType === DatasetCollectionDataProcessModeEnum.auto) {
      setValue('autoIndexes', true);
      setValue('trainingType', DatasetCollectionDataProcessModeEnum.chunk);
    }
  }, [trainingType, setValue]);
  return (
    <>
      {/* Training mode selector */}
      <Box>
        <Box fontSize={'sm'} mb={2} color={'myGray.600'}>
          {t('dataset:training_mode')}
        </Box>
        <LeftRadio<DatasetCollectionDataProcessModeEnum>
          list={trainingModeList}
          px={3}
          py={2.5}
          value={trainingType}
          onChange={(e) => {
            setValue('trainingType', e);
          }}
          defaultBg="white"
          activeBg="white"
          gridTemplateColumns={'repeat(2, 1fr)'}
        />
      </Box>
      {/* Enhanced indexes: plus-only toggles, only shown in chunk mode */}
      {trainingType === DatasetCollectionDataProcessModeEnum.chunk && (
        <Box mt={6}>
          <Box fontSize={'sm'} mb={2} color={'myGray.600'}>
            {t('dataset:enhanced_indexes')}
          </Box>
          <HStack gap={[3, 7]}>
            <HStack flex={'1'} spacing={1}>
              <MyTooltip label={!feConfigs?.isPlus ? t('common:commercial_function_tip') : ''}>
                <Checkbox isDisabled={!feConfigs?.isPlus} {...register('autoIndexes')}>
                  <FormLabel>{t('dataset:auto_indexes')}</FormLabel>
                </Checkbox>
              </MyTooltip>
              <QuestionTip label={t('dataset:auto_indexes_tips')} />
            </HStack>
            <HStack flex={'1'} spacing={1}>
              <MyTooltip
                label={
                  !feConfigs?.isPlus
                    ? t('common:commercial_function_tip')
                    : !datasetDetail?.vlmModel
                      ? t('common:error_vlm_not_config')
                      : ''
                }
              >
                <Checkbox
                  isDisabled={!feConfigs?.isPlus || !datasetDetail?.vlmModel}
                  {...register('imageIndex')}
                >
                  <FormLabel>{t('dataset:image_auto_parse')}</FormLabel>
                </Checkbox>
              </MyTooltip>
              <QuestionTip label={t('dataset:image_auto_parse_tips')} />
            </HStack>
          </HStack>
        </Box>
      )}
      {/* Params setting: auto defaults vs custom chunking parameters */}
      <Box mt={6}>
        <Box fontSize={'sm'} mb={2} color={'myGray.600'}>
          {t('dataset:params_setting')}
        </Box>
        <LeftRadio<ChunkSettingModeEnum>
          list={[
            {
              title: t('dataset:default_params'),
              desc: t('dataset:default_params_desc'),
              value: ChunkSettingModeEnum.auto
            },
            {
              title: t('dataset:custom_data_process_params'),
              desc: t('dataset:custom_data_process_params_desc'),
              value: ChunkSettingModeEnum.custom,
              children: chunkSettingMode === ChunkSettingModeEnum.custom && (
                <Box mt={5}>
                  <Box>
                    {/* Split-by-size vs split-by-character */}
                    <RadioGroup<DataChunkSplitModeEnum>
                      list={[
                        {
                          title: t('dataset:split_chunk_size'),
                          value: DataChunkSplitModeEnum.size
                        },
                        {
                          title: t('dataset:split_chunk_char'),
                          value: DataChunkSplitModeEnum.char,
                          tooltip: t('dataset:custom_split_sign_tip')
                        }
                      ]}
                      value={chunkSplitMode}
                      onChange={(e) => {
                        setValue('chunkSplitMode', e);
                      }}
                    />
                    {chunkSplitMode === DataChunkSplitModeEnum.size && (
                      <Box
                        mt={1.5}
                        css={{
                          '& > span': {
                            display: 'block'
                          }
                        }}
                      >
                        <MyTooltip
                          label={t('common:core.dataset.import.Chunk Range', {
                            min: minChunkSizeValue,
                            max: maxChunkSize
                          })}
                        >
                          <MyNumberInput
                            register={register}
                            name={chunkSizeField}
                            min={minChunkSizeValue}
                            max={maxChunkSize}
                            size={'sm'}
                            step={100}
                          />
                        </MyTooltip>
                      </Box>
                    )}
                    {chunkSplitMode === DataChunkSplitModeEnum.char && (
                      <HStack mt={1.5}>
                        <Box flex={'1 0 0'}>
                          <MySelect<string>
                            list={customSplitList}
                            size={'sm'}
                            bg={'myGray.50'}
                            value={customListSelectValue}
                            h={'32px'}
                            onChange={(val) => {
                              setCustomListSelectValue(val);
                            }}
                          />
                        </Box>
                        {customListSelectValue === 'Other' && (
                          <Input
                            flex={'1 0 0'}
                            h={'32px'}
                            size={'sm'}
                            bg={'myGray.50'}
                            placeholder="\n;======;==SPLIT=="
                            {...register('chunkSplitter')}
                          />
                        )}
                      </HStack>
                    )}
                  </Box>
                  {/* Index size selector (chunk mode only) */}
                  {trainingType === DatasetCollectionDataProcessModeEnum.chunk && (
                    <Box>
                      <Flex alignItems={'center'} mt={3}>
                        <Box>{t('dataset:index_size')}</Box>
                        <QuestionTip label={t('dataset:index_size_tips')} />
                      </Flex>
                      <Box mt={1}>
                        <MySelect<number>
                          bg={'myGray.50'}
                          list={indexSizeSeletorList}
                          value={indexSize}
                          onChange={(val) => {
                            setValue('indexSize', val);
                          }}
                        />
                      </Box>
                    </Box>
                  )}
                  {/* QA prompt preview with hover mask revealing the edit button */}
                  {showQAPromptInput && (
                    <Box mt={3}>
                      <Box>{t('common:core.dataset.collection.QA Prompt')}</Box>
                      <Box
                        position={'relative'}
                        py={2}
                        px={3}
                        bg={'myGray.50'}
                        fontSize={'xs'}
                        whiteSpace={'pre-wrap'}
                        border={'1px'}
                        borderColor={'borderColor.base'}
                        borderRadius={'md'}
                        maxH={'140px'}
                        overflow={'auto'}
                        _hover={{
                          '& .mask': {
                            display: 'block'
                          }
                        }}
                      >
                        {qaPrompt}
                        <Box
                          display={'none'}
                          className="mask"
                          position={'absolute'}
                          top={0}
                          right={0}
                          bottom={0}
                          left={0}
                          background={
                            'linear-gradient(182deg, rgba(255, 255, 255, 0.00) 1.76%, #FFF 84.07%)'
                          }
                        >
                          <Button
                            size="xs"
                            variant={'whiteBase'}
                            leftIcon={<MyIcon name={'edit'} w={'13px'} />}
                            color={'black'}
                            position={'absolute'}
                            right={2}
                            bottom={2}
                            onClick={onOpenCustomPrompt}
                          >
                            {t('common:core.dataset.import.Custom prompt')}
                          </Button>
                        </Box>
                      </Box>
                    </Box>
                  )}
                </Box>
              )
            }
          ]}
          gridGap={3}
          px={3}
          py={3}
          defaultBg="white"
          activeBg="white"
          value={chunkSettingMode}
          w={'100%'}
          onChange={(e) => {
            setValue('chunkSettingMode', e);
          }}
        />
      </Box>
      {isOpenCustomPrompt && (
        <PromptTextarea
          defaultValue={qaPrompt}
          onChange={(e) => {
            setValue('qaPrompt', e);
          }}
          onClose={onCloseCustomPrompt}
        />
      )}
    </>
  );
};
export const collectionChunkForm2StoreChunkData = ({
trainingType,
imageIndex,
autoIndexes,
chunkSettingMode,
chunkSplitMode,
embeddingChunkSize,
qaChunkSize,
chunkSplitter,
indexSize,
qaPrompt,
agentModel,
vectorModel
}: CollectionChunkFormType & {
agentModel: LLMModelItemType;
vectorModel: EmbeddingModelItemType;
}): ChunkSettingsType => {
const trainingModeSize: {
autoChunkSize: number;
autoIndexSize: number;
chunkSize: number;
indexSize: number;
} = (() => {
if (trainingType === DatasetCollectionDataProcessModeEnum.qa) {
return {
autoChunkSize: getLLMDefaultChunkSize(agentModel),
autoIndexSize: 512,
chunkSize: qaChunkSize,
indexSize: 512
};
} else if (autoIndexes) {
return {
autoChunkSize: chunkAutoChunkSize,
autoIndexSize: getAutoIndexSize(vectorModel),
chunkSize: embeddingChunkSize,
indexSize
};
} else {
return {
autoChunkSize: chunkAutoChunkSize,
autoIndexSize: getAutoIndexSize(vectorModel),
chunkSize: embeddingChunkSize,
indexSize
};
}
})();
const { chunkSize: formatChunkIndex, indexSize: formatIndexSize } = (() => {
if (chunkSettingMode === ChunkSettingModeEnum.auto) {
return {
chunkSize: trainingModeSize.autoChunkSize,
indexSize: trainingModeSize.autoIndexSize
};
} else {
return {
chunkSize: trainingModeSize.chunkSize,
indexSize: trainingModeSize.indexSize
};
}
})();
return {
trainingType,
imageIndex,
autoIndexes,
chunkSettingMode,
chunkSplitMode,
chunkSize: formatChunkIndex,
indexSize: formatIndexSize,
chunkSplitter,
qaPrompt: trainingType === DatasetCollectionDataProcessModeEnum.qa ? qaPrompt : undefined
};
};

View File

@ -25,6 +25,14 @@ import {
getAutoIndexSize,
getMaxIndexSize
} from '@fastgpt/global/core/dataset/training/utils';
import { CollectionChunkFormType } from '../Form/CollectionChunkForm';
type ChunkSizeFieldType = 'embeddingChunkSize' | 'qaChunkSize';
export type ImportFormType = {
customPdfParse: boolean;
webSelector: string;
} & CollectionChunkFormType;
type TrainingFiledType = {
chunkOverlapRatio: number;
@ -51,26 +59,6 @@ type DatasetImportContextType = {
setSources: React.Dispatch<React.SetStateAction<ImportSourceItemType[]>>;
} & TrainingFiledType;
type ChunkSizeFieldType = 'embeddingChunkSize' | 'qaChunkSize';
export type ImportFormType = {
customPdfParse: boolean;
trainingType: DatasetCollectionDataProcessModeEnum;
imageIndex: boolean;
autoIndexes: boolean;
chunkSettingMode: ChunkSettingModeEnum;
chunkSplitMode: DataChunkSplitModeEnum;
embeddingChunkSize: number;
qaChunkSize: number;
chunkSplitter: string;
indexSize: number;
qaPrompt: string;
webSelector: string;
};
export const DatasetImportContext = createContext<DatasetImportContextType>({
importSource: ImportDataSourceEnum.fileLocal,
goToNext: function (): void {
@ -314,14 +302,7 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
chunkSplitter
};
}
}, [
chunkSettingMode,
TrainingModeMap.autoChunkSize,
TrainingModeMap.autoIndexSize,
TrainingModeMap.chunkSize,
TrainingModeMap.indexSize,
chunkSplitter
]);
}, [chunkSettingMode, TrainingModeMap, chunkSplitter]);
const contextValue = {
...TrainingModeMap,

View File

@ -1,13 +1,8 @@
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import React, { useCallback } from 'react';
import {
Box,
Flex,
Input,
Button,
ModalBody,
ModalFooter,
Textarea,
useDisclosure,
Checkbox,
Accordion,
AccordionItem,
@ -16,93 +11,26 @@ import {
AccordionIcon,
HStack
} from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import LeftRadio from '@fastgpt/web/components/common/Radio/LeftRadio';
import {
DataChunkSplitModeEnum,
DatasetCollectionDataProcessModeEnum,
DatasetCollectionDataProcessModeMap
} from '@fastgpt/global/core/dataset/constants';
import { ChunkSettingModeEnum } from '@fastgpt/global/core/dataset/constants';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
import MyTag from '@fastgpt/web/components/common/Tag/index';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import MyNumberInput from '@fastgpt/web/components/common/Input/NumberInput';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import { shadowLight } from '@fastgpt/web/styles/theme';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import MySelect from '@fastgpt/web/components/common/MySelect';
import { getIndexSizeSelectList } from '@fastgpt/global/core/dataset/training/utils';
import RadioGroup from '@fastgpt/web/components/common/Radio/RadioGroup';
import CollectionChunkForm from '../../Form/CollectionChunkForm';
import { DatasetCollectionDataProcessModeEnum } from '@fastgpt/global/core/dataset/constants';
function DataProcess() {
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
const {
goToNext,
processParamsForm,
chunkSizeField,
minChunkSize,
maxChunkSize,
maxIndexSize,
indexSize
} = useContextSelector(DatasetImportContext, (v) => v);
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const { setValue, register, watch, getValues } = processParamsForm;
const trainingType = watch('trainingType');
const trainingModeList = useMemo(() => {
const list = Object.entries(DatasetCollectionDataProcessModeMap);
return list
.filter(([key]) => key !== DatasetCollectionDataProcessModeEnum.auto)
.map(([key, value]) => ({
title: t(value.label as any),
value: key as DatasetCollectionDataProcessModeEnum,
tooltip: t(value.tooltip as any)
}));
}, [t]);
const chunkSettingMode = watch('chunkSettingMode');
const chunkSplitMode = watch('chunkSplitMode');
const customSplitList = [
{ label: t('dataset:split_sign_null'), value: '' },
{ label: t('dataset:split_sign_break'), value: '\\n' },
{ label: t('dataset:split_sign_break2'), value: '\\n\\n' },
{ label: t('dataset:split_sign_period'), value: '.|。' },
{ label: t('dataset:split_sign_exclamatiob'), value: '!|' },
{ label: t('dataset:split_sign_question'), value: '?|' },
{ label: t('dataset:split_sign_semicolon'), value: ';|' },
{ label: '=====', value: '=====' },
{ label: t('dataset:split_sign_custom'), value: 'Other' }
];
const [customListSelectValue, setCustomListSelectValue] = useState(getValues('chunkSplitter'));
useEffect(() => {
if (customListSelectValue === 'Other') {
setValue('chunkSplitter', '');
} else {
setValue('chunkSplitter', customListSelectValue);
}
}, [customListSelectValue, setValue]);
// Index size
const indexSizeSeletorList = useMemo(() => getIndexSizeSelectList(maxIndexSize), [maxIndexSize]);
// QA
const qaPrompt = watch('qaPrompt');
const {
isOpen: isOpenCustomPrompt,
onOpen: onOpenCustomPrompt,
onClose: onCloseCustomPrompt
} = useDisclosure();
const { goToNext, processParamsForm, chunkSize } = useContextSelector(
DatasetImportContext,
(v) => v
);
const { register } = processParamsForm;
const Title = useCallback(({ title }: { title: string }) => {
return (
@ -116,16 +44,7 @@ function DataProcess() {
);
}, []);
// Adapt auto training
useEffect(() => {
if (trainingType === DatasetCollectionDataProcessModeEnum.auto) {
setValue('autoIndexes', true);
setValue('trainingType', DatasetCollectionDataProcessModeEnum.chunk);
}
}, [trainingType, setValue]);
const showFileParseSetting = feConfigs?.showCustomPdfParse;
const showQAPromptInput = trainingType === DatasetCollectionDataProcessModeEnum.qa;
return (
<>
@ -179,238 +98,8 @@ function DataProcess() {
<Title title={t('dataset:import_data_process_setting')} />
<AccordionPanel p={2}>
<Box mt={2}>
<Box fontSize={'sm'} mb={2} color={'myGray.600'}>
{t('dataset:training_mode')}
</Box>
<LeftRadio<DatasetCollectionDataProcessModeEnum>
list={trainingModeList}
px={3}
py={2.5}
value={trainingType}
onChange={(e) => {
setValue('trainingType', e);
}}
defaultBg="white"
activeBg="white"
gridTemplateColumns={'repeat(2, 1fr)'}
/>
</Box>
{trainingType === DatasetCollectionDataProcessModeEnum.chunk && (
<Box mt={6}>
<Box fontSize={'sm'} mb={2} color={'myGray.600'}>
{t('dataset:enhanced_indexes')}
</Box>
<HStack gap={[3, 7]}>
<HStack flex={'1'} spacing={1}>
<MyTooltip
label={!feConfigs?.isPlus ? t('common:commercial_function_tip') : ''}
>
<Checkbox isDisabled={!feConfigs?.isPlus} {...register('autoIndexes')}>
<FormLabel>{t('dataset:auto_indexes')}</FormLabel>
</Checkbox>
</MyTooltip>
<QuestionTip label={t('dataset:auto_indexes_tips')} />
</HStack>
<HStack flex={'1'} spacing={1}>
<MyTooltip
label={
!feConfigs?.isPlus
? t('common:commercial_function_tip')
: !datasetDetail?.vlmModel
? t('common:error_vlm_not_config')
: ''
}
>
<Checkbox
isDisabled={!feConfigs?.isPlus || !datasetDetail?.vlmModel}
{...register('imageIndex')}
>
<FormLabel>{t('dataset:image_auto_parse')}</FormLabel>
</Checkbox>
</MyTooltip>
<QuestionTip label={t('dataset:image_auto_parse_tips')} />
</HStack>
</HStack>
</Box>
)}
<Box mt={6}>
<Box fontSize={'sm'} mb={2} color={'myGray.600'}>
{t('dataset:params_setting')}
</Box>
<LeftRadio<ChunkSettingModeEnum>
list={[
{
title: t('dataset:default_params'),
desc: t('dataset:default_params_desc'),
value: ChunkSettingModeEnum.auto
},
{
title: t('dataset:custom_data_process_params'),
desc: t('dataset:custom_data_process_params_desc'),
value: ChunkSettingModeEnum.custom,
children: chunkSettingMode === ChunkSettingModeEnum.custom && (
<Box mt={5}>
<Box>
<RadioGroup<DataChunkSplitModeEnum>
list={[
{
title: t('dataset:split_chunk_size'),
value: DataChunkSplitModeEnum.size
},
{
title: t('dataset:split_chunk_char'),
value: DataChunkSplitModeEnum.char,
tooltip: t('dataset:custom_split_sign_tip')
}
]}
value={chunkSplitMode}
onChange={(e) => {
setValue('chunkSplitMode', e);
}}
/>
{chunkSplitMode === DataChunkSplitModeEnum.size && (
<Box
mt={1.5}
css={{
'& > span': {
display: 'block'
}
}}
>
<MyTooltip
label={t('common:core.dataset.import.Chunk Range', {
min: minChunkSize,
max: maxChunkSize
})}
>
<MyNumberInput
register={register}
name={chunkSizeField}
min={minChunkSize}
max={maxChunkSize}
size={'sm'}
step={100}
/>
</MyTooltip>
</Box>
)}
{chunkSplitMode === DataChunkSplitModeEnum.char && (
<HStack mt={1.5}>
<Box flex={'1 0 0'}>
<MySelect<string>
list={customSplitList}
size={'sm'}
bg={'myGray.50'}
value={customListSelectValue}
h={'32px'}
onChange={(val) => {
setCustomListSelectValue(val);
}}
/>
</Box>
{customListSelectValue === 'Other' && (
<Input
flex={'1 0 0'}
h={'32px'}
size={'sm'}
bg={'myGray.50'}
placeholder="\n;======;==SPLIT=="
{...register('chunkSplitter')}
/>
)}
</HStack>
)}
</Box>
{trainingType === DatasetCollectionDataProcessModeEnum.chunk && (
<Box>
<Flex alignItems={'center'} mt={3}>
<Box>{t('dataset:index_size')}</Box>
<QuestionTip label={t('dataset:index_size_tips')} />
</Flex>
<Box mt={1}>
<MySelect<number>
bg={'myGray.50'}
list={indexSizeSeletorList}
value={indexSize}
onChange={(val) => {
setValue('indexSize', val);
}}
/>
</Box>
</Box>
)}
{showQAPromptInput && (
<Box mt={3}>
<Box>{t('common:core.dataset.collection.QA Prompt')}</Box>
<Box
position={'relative'}
py={2}
px={3}
bg={'myGray.50'}
fontSize={'xs'}
whiteSpace={'pre-wrap'}
border={'1px'}
borderColor={'borderColor.base'}
borderRadius={'md'}
maxH={'140px'}
overflow={'auto'}
_hover={{
'& .mask': {
display: 'block'
}
}}
>
{qaPrompt}
<Box
display={'none'}
className="mask"
position={'absolute'}
top={0}
right={0}
bottom={0}
left={0}
background={
'linear-gradient(182deg, rgba(255, 255, 255, 0.00) 1.76%, #FFF 84.07%)'
}
>
<Button
size="xs"
variant={'whiteBase'}
leftIcon={<MyIcon name={'edit'} w={'13px'} />}
color={'black'}
position={'absolute'}
right={2}
bottom={2}
onClick={onOpenCustomPrompt}
>
{t('common:core.dataset.import.Custom prompt')}
</Button>
</Box>
</Box>
</Box>
)}
</Box>
)
}
]}
gridGap={3}
px={3}
py={3}
defaultBg="white"
activeBg="white"
value={chunkSettingMode}
w={'100%'}
onChange={(e) => {
setValue('chunkSettingMode', e);
}}
/>
</Box>
{/* @ts-ignore */}
<CollectionChunkForm form={processParamsForm} />
</AccordionPanel>
</AccordionItem>
@ -425,57 +114,8 @@ function DataProcess() {
</Flex>
</Accordion>
</Box>
{isOpenCustomPrompt && (
<PromptTextarea
defaultValue={qaPrompt}
onChange={(e) => {
setValue('qaPrompt', e);
}}
onClose={onCloseCustomPrompt}
/>
)}
</>
);
}
export default React.memo(DataProcess);
const PromptTextarea = ({
defaultValue,
onChange,
onClose
}: {
defaultValue: string;
onChange: (e: string) => void;
onClose: () => void;
}) => {
const ref = useRef<HTMLTextAreaElement>(null);
const { t } = useTranslation();
return (
<MyModal
isOpen
title={t('common:core.dataset.import.Custom prompt')}
iconSrc="modal/edit"
w={'600px'}
onClose={onClose}
>
<ModalBody whiteSpace={'pre-wrap'} fontSize={'sm'} px={[3, 6]} pt={[3, 6]}>
<Textarea ref={ref} rows={8} fontSize={'sm'} defaultValue={defaultValue} />
<Box>{Prompt_AgentQA.fixedText}</Box>
</ModalBody>
<ModalFooter>
<Button
onClick={() => {
const val = ref.current?.value || Prompt_AgentQA.description;
onChange(val);
onClose();
}}
>
{t('common:common.Confirm')}
</Button>
</ModalFooter>
</MyModal>
);
};

View File

@ -85,9 +85,13 @@ const MetaDataCard = ({ datasetId }: { datasetId: string }) => {
value: t(DatasetCollectionDataProcessModeMap[collection.trainingType]?.label as any)
},
{
label: t('common:core.dataset.collection.metadata.Chunk Size'),
label: t('dataset:chunk_size'),
value: collection.chunkSize || '-'
},
{
label: t('dataset:index_size'),
value: collection.indexSize || '-'
},
...(webSelector
? [
{

View File

@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { addHours } from 'date-fns';
import { MongoImage } from '@fastgpt/service/common/file/image/schema';
@ -56,7 +55,6 @@ async function checkInvalidImg(start: Date, end: Date, limit = 50) {
/* pg 中的数据搬到 mongo dataset.datas 中,并做映射 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
const { start = -2, end = -360 * 24 } = req.body as { start: number; end: number };

View File

@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoPlugin } from '@fastgpt/service/core/plugin/schema';
import { PluginTypeEnum } from '@fastgpt/global/core/plugin/constants';
@ -8,7 +7,6 @@ import { PluginTypeEnum } from '@fastgpt/global/core/plugin/constants';
/* pg 中的数据搬到 mongo dataset.datas 中,并做映射 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
await MongoPlugin.updateMany(

View File

@ -1,13 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { PgClient } from '@fastgpt/service/common/vectorStore/pg';
/* pg 中的数据搬到 mongo dataset.datas 中,并做映射 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
// 删除索引

Some files were not shown because too many files have changed in this diff Show More