V4.7-alpha (#985)
Some checks failed
deploy-docs / deploy-production (push) Has been cancelled
Build docs images and copy image to docker hub / build-fastgpt-docs-images (push) Has been cancelled
Build FastGPT images in Personal warehouse / build-fastgpt-images (push) Has been cancelled
Build docs images and copy image to docker hub / update-docs-image (push) Has been cancelled

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer 2024-03-13 10:50:02 +08:00 committed by GitHub
parent 5bca15f12f
commit 9501c3f3a1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
170 changed files with 5786 additions and 2342 deletions

View File

@ -20,14 +20,13 @@ llm模型全部合并
```json
{
"systemEnv": {
"openapiPrefix": "fastgpt",
"vectorMaxProcess": 15,
"qaMaxProcess": 15,
"pgHNSWEfSearch": 100
"pgHNSWEfSearch": 100 // 向量搜索参数。越大搜索越精确但是速度越慢。设置为100有99%+精度。
},
"llmModels": [
{
"model": "gpt-3.5-turbo-1106", // 模型名
"model": "gpt-3.5-turbo", // 模型名
"name": "gpt-3.5-turbo", // 别名
"maxContext": 16000, // 最大上下文
"maxResponse": 4000, // 最大回复
@ -37,12 +36,16 @@ llm模型全部合并
"censor": false,
"vision": false, // 是否支持图片输入
"datasetProcess": false, // 是否设置为知识库处理模型QA务必保证至少有一个为true否则知识库会报错
"toolChoice": true, // 是否支持工具选择
"functionCall": false, // 是否支持函数调用
"usedInClassify": true, // 是否用于问题分类务必保证至少有一个为true
"usedInExtractFields": true, // 是否用于内容提取务必保证至少有一个为true
"useInToolCall": true, // 是否用于工具调用务必保证至少有一个为true
"usedInQueryExtension": true, // 是否用于问题优化务必保证至少有一个为true
"toolChoice": true, // 是否支持工具选择务必保证至少有一个为true
"functionCall": false, // 是否支持函数调用(特殊功能,会优先使用 toolChoice如果为false则使用 functionCall如果仍为 false则使用提示词模式
"customCQPrompt": "", // 自定义文本分类提示词(不支持工具和函数调用的模型
"customExtractPrompt": "", // 自定义内容提取提示词
"defaultSystemChatPrompt": "", // 对话默认携带的系统提示词
"defaultConfig":{} // 对话默认配置(比如 GLM4 的 top_p
"defaultConfig":{} // LLM默认配置可以针对不同模型设置特殊值(比如 GLM4 的 top_p
},
{
"model": "gpt-3.5-turbo-16k",
@ -55,6 +58,10 @@ llm模型全部合并
"censor": false,
"vision": false,
"datasetProcess": true,
"usedInClassify": true,
"usedInExtractFields": true,
"useInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
@ -73,6 +80,10 @@ llm模型全部合并
"censor": false,
"vision": false,
"datasetProcess": false,
"usedInClassify": true,
"usedInExtractFields": true,
"useInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
@ -91,6 +102,10 @@ llm模型全部合并
"censor": false,
"vision": true,
"datasetProcess": false,
"usedInClassify": false,
"usedInExtractFields": false,
"useInToolCall": false,
"usedInQueryExtension": false,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",

View File

@ -120,6 +120,10 @@ CHAT_API_KEY=sk-xxxxxx
"censor": false,
"vision": false, // 是否支持图片输入
"datasetProcess": false, // 是否设置为知识库处理模型
"usedInClassify": true, // 是否用于问题分类
"usedInExtractFields": true, // 是否用于字段提取
"useInToolCall": true, // 是否用于工具调用
"usedInQueryExtension": true, // 是否用于问题优化
"toolChoice": true, // 是否支持工具选择
"functionCall": false, // 是否支持函数调用
"customCQPrompt": "", // 自定义文本分类提示词(不支持工具和函数调用的模型

View File

@ -0,0 +1,19 @@
---
title: 'V4.7(进行中)'
description: 'FastGPT V4.7更新说明'
icon: 'upgrade'
draft: false
toc: true
weight: 826
---
## 修改配置文件
增加一些 Boolean 值,用于决定不同功能块可以使用哪些模型:[点击查看最新的配置文件](/docs/development/configuration/)
## V4.7 更新说明
1. 新增 - 工具调用模块,可以让 LLM 模型根据用户意图,动态地选择其他模型或插件执行。
2. 新增 - 分类和内容提取支持 functionCall 模式。部分模型支持 functionCall 而不支持 ToolCall,也可以使用了。需要把 LLM 模型配置文件里的 `functionCall` 设置为 `true`,`toolChoice` 设置为 `false`。如果 `toolChoice` 为 true,会走 tool 模式。
3. 优化 - 高级编排性能

View File

@ -6,7 +6,7 @@
"prepare": "husky install",
"format-code": "prettier --config \"./.prettierrc.js\" --write \"./**/src/**/*.{ts,tsx,scss}\"",
"format-doc": "zhlint --dir ./docSite *.md --fix",
"gen:theme-typings": "chakra-cli tokens projects/app/src/web/styles/theme.ts --out node_modules/.pnpm/node_modules/@chakra-ui/styled-system/dist/theming.types.d.ts",
"gen:theme-typings": "chakra-cli tokens packages/web/styles/theme.ts --out node_modules/.pnpm/node_modules/@chakra-ui/styled-system/dist/theming.types.d.ts",
"postinstall": "sh ./scripts/postinstall.sh",
"initIcon": "node ./scripts/icon/init.js",
"previewIcon": "node ./scripts/icon/index.js"

View File

@ -1,10 +1,15 @@
/* Only the token of gpt-3.5-turbo is used */
import type { ChatItemType } from '../../../core/chat/type';
import { Tiktoken } from 'js-tiktoken/lite';
import { adaptChat2GptMessages } from '../../../core/chat/adapt';
import { ChatCompletionRequestMessageRoleEnum } from '../../../core/ai/constant';
import { chats2GPTMessages } from '../../../core/chat/adapt';
import encodingJson from './cl100k_base.json';
import { ChatMessageItemType } from '../../../core/ai/type';
import {
ChatCompletionMessageParam,
ChatCompletionContentPart,
ChatCompletionCreateParams,
ChatCompletionTool
} from '../../../core/ai/type';
import { ChatCompletionRequestMessageRoleEnum } from '../../../core/ai/constants';
/* init tikToken obj */
export function getTikTokenEnc() {
@ -29,18 +34,25 @@ export function getTikTokenEnc() {
/* count one prompt tokens */
export function countPromptTokens(
prompt = '',
role: '' | `${ChatCompletionRequestMessageRoleEnum}` = '',
tools?: any
prompt: string | ChatCompletionContentPart[] | null | undefined = '',
role: '' | `${ChatCompletionRequestMessageRoleEnum}` = ''
) {
const enc = getTikTokenEnc();
const toolText = tools
? JSON.stringify(tools)
.replace('"', '')
.replace('\n', '')
.replace(/( ){2,}/g, ' ')
: '';
const text = `${role}\n${prompt}\n${toolText}`.trim();
const promptText = (() => {
if (!prompt) return '';
if (typeof prompt === 'string') return prompt;
let promptText = '';
prompt.forEach((item) => {
if (item.type === 'text') {
promptText += item.text;
} else if (item.type === 'image_url') {
promptText += item.image_url.url;
}
});
return promptText;
})();
const text = `${role}\n${promptText}`.trim();
try {
const encodeText = enc.encode(text);
@ -50,15 +62,66 @@ export function countPromptTokens(
return text.length;
}
}
/**
 * Estimate the token cost of tool / function schemas sent with a request.
 *
 * @param tools - ToolCall or legacy FunctionCall definitions (optional).
 * @returns 0 when no tools are given, otherwise the encoded token count of the
 *          compacted JSON serialization of the schemas.
 */
export const countToolsTokens = (
  tools?: ChatCompletionTool[] | ChatCompletionCreateParams.Function[]
) => {
  // No tools -> no token cost; also avoids loading the encoder needlessly.
  if (!tools || tools.length === 0) return 0;

  const enc = getTikTokenEnc();

  // Strip JSON noise (all quotes, newlines, runs of spaces) so the estimate
  // tracks the compact text the model actually sees. The previous
  // `.replace('"', '')` form only removed the FIRST occurrence.
  const toolText = JSON.stringify(tools)
    .replace(/"/g, '')
    .replace(/\n/g, '')
    .replace(/( ){2,}/g, ' ');

  return enc.encode(toolText).length;
};
/* count messages tokens */
export const countMessagesTokens = (messages: ChatItemType[], tools?: any) => {
const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true });
export const countMessagesTokens = (messages: ChatItemType[]) => {
const adaptMessages = chats2GPTMessages({ messages, reserveId: true });
return countGptMessagesTokens(adaptMessages, tools);
return countGptMessagesTokens(adaptMessages);
};
export const countGptMessagesTokens = (messages: ChatMessageItemType[], tools?: any) =>
messages.reduce((sum, item) => sum + countPromptTokens(item.content, item.role, tools), 0);
/**
 * Estimate the total token usage of a GPT message list, including any
 * tool / function schemas passed alongside the messages.
 *
 * @param messages - OpenAI-format chat messages.
 * @param tools - ToolCall schemas to include in the estimate (optional).
 * @param functionCall - legacy FunctionCall schemas to include (optional).
 */
export const countGptMessagesTokens = (
  messages: ChatCompletionMessageParam[],
  tools?: ChatCompletionTool[],
  functionCall?: ChatCompletionCreateParams.Function[]
) =>
  messages.reduce((sum, item) => {
    // Text contributed by assistant tool_calls / function_call entries.
    const functionCallPrompt = (() => {
      let prompt = '';
      if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant) {
        prompt +=
          item.tool_calls
            ?.map((call) => `${call?.function?.name} ${call?.function?.arguments}`.trim())
            ?.join('') || '';
        // Only count function_call when present; otherwise the template
        // literal would stringify to "undefined undefined" and inflate
        // the token estimate.
        if (item.function_call) {
          prompt += `${item.function_call.name} ${item.function_call.arguments}`.trim();
        }
      }
      return prompt;
    })();

    // Plain text content; multi-part (vision) content counts only text parts.
    const contentPrompt = (() => {
      if (!item.content) return '';
      if (typeof item.content === 'string') return item.content;
      return item.content.map((part) => (part.type === 'text' ? part.text : '')).join('');
    })();

    return sum + countPromptTokens(`${contentPrompt}${functionCallPrompt}`, item.role);
  }, 0) +
  countToolsTokens(tools) +
  countToolsTokens(functionCall);
/* slice messages from top to bottom by maxTokens */
export function sliceMessagesTB({
@ -68,7 +131,7 @@ export function sliceMessagesTB({
messages: ChatItemType[];
maxTokens: number;
}) {
const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true });
const adaptMessages = chats2GPTMessages({ messages, reserveId: true });
let reduceTokens = maxTokens;
let result: ChatItemType[] = [];

View File

@ -1,7 +0,0 @@
export enum ChatCompletionRequestMessageRoleEnum {
'System' = 'system',
'User' = 'user',
'Assistant' = 'assistant',
'Function' = 'function',
'Tool' = 'tool'
}

View File

@ -0,0 +1,27 @@
/* Roles accepted by the OpenAI chat-completion API. */
export enum ChatCompletionRequestMessageRoleEnum {
  'System' = 'system',
  'User' = 'user',
  'Assistant' = 'assistant',
  'Function' = 'function',
  'Tool' = 'tool'
}

/* Content-part kinds for multi-modal chat messages. */
export enum ChatMessageTypeEnum {
  text = 'text',
  image_url = 'image_url'
}

/* Functional roles an LLM model can be enabled for (used to filter model selectors). */
export enum LLMModelTypeEnum {
  all = 'all',
  classify = 'classify',
  extractFields = 'extractFields',
  toolCall = 'toolCall',
  queryExtension = 'queryExtension'
}

/* Maps each usage type to the model-config flag that enables a model for it
   (e.g. "usedInClassify" in the llmModels config entries). */
export const llmModelTypeFilterMap = {
  [LLMModelTypeEnum.all]: 'model',
  [LLMModelTypeEnum.classify]: 'usedInClassify',
  [LLMModelTypeEnum.extractFields]: 'usedInExtractFields',
  [LLMModelTypeEnum.toolCall]: 'usedInToolCall',
  [LLMModelTypeEnum.queryExtension]: 'usedInQueryExtension'
};

View File

@ -10,7 +10,13 @@ export type LLMModelItemType = {
censor?: boolean;
vision?: boolean;
datasetProcess?: boolean;
// diff function model
datasetProcess?: boolean; // dataset
usedInClassify?: boolean; // classify
usedInExtractFields?: boolean; // extract fields
usedInToolCall?: boolean; // tool call
usedInQueryExtension?: boolean; // query extension
functionCall: boolean;
toolChoice: boolean;

View File

@ -1,20 +1,33 @@
import openai from 'openai';
import type {
ChatCompletion,
ChatCompletionCreateParams,
ChatCompletionMessageToolCall,
ChatCompletionChunk,
ChatCompletionMessageParam,
ChatCompletionContentPart
ChatCompletionToolMessageParam,
ChatCompletionAssistantMessageParam
} from 'openai/resources';
import { ChatMessageTypeEnum } from './constants';
export type ChatCompletionContentPart = ChatCompletionContentPart;
export type ChatCompletionCreateParams = ChatCompletionCreateParams;
export type ChatMessageItemType = Omit<ChatCompletionMessageParam, 'name'> & {
name?: any;
export * from 'openai/resources';
export type ChatCompletionMessageParam = ChatCompletionMessageParam & {
dataId?: string;
content: any;
} & any;
};
export type ChatCompletionToolMessageParam = ChatCompletionToolMessageParam & { name: string };
export type ChatCompletionAssistantToolParam = {
role: 'assistant';
tool_calls: ChatCompletionMessageToolCall[];
};
export type ChatCompletion = ChatCompletion;
export type ChatCompletionMessageToolCall = ChatCompletionMessageToolCall & {
toolName?: string;
toolAvatar?: string;
};
export type ChatCompletionMessageFunctionCall = ChatCompletionAssistantMessageParam.FunctionCall & {
id?: string;
toolName?: string;
toolAvatar?: string;
};
export type StreamChatType = Stream<ChatCompletionChunk>;
export type PromptTemplateItem = {
@ -22,3 +35,6 @@ export type PromptTemplateItem = {
desc: string;
value: string;
};
export default openai;
export * from 'openai';

View File

@ -1,40 +1,298 @@
import type { ChatItemType } from '../../core/chat/type.d';
import { ChatRoleEnum } from '../../core/chat/constants';
import { ChatCompletionRequestMessageRoleEnum } from '../../core/ai/constant';
import type { ChatMessageItemType } from '../../core/ai/type.d';
import type {
ChatItemType,
ChatItemValueItemType,
RuntimeUserPromptType,
UserChatItemType
} from '../../core/chat/type.d';
import { ChatFileTypeEnum, ChatItemValueTypeEnum, ChatRoleEnum } from '../../core/chat/constants';
import type {
ChatCompletionContentPart,
ChatCompletionFunctionMessageParam,
ChatCompletionMessageFunctionCall,
ChatCompletionMessageParam,
ChatCompletionMessageToolCall,
ChatCompletionToolMessageParam
} from '../../core/ai/type.d';
import { ChatCompletionRequestMessageRoleEnum } from '../../core/ai/constants';
const chat2Message = {
[ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
[ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
[ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System,
[ChatRoleEnum.Function]: ChatCompletionRequestMessageRoleEnum.Function,
[ChatRoleEnum.Tool]: ChatCompletionRequestMessageRoleEnum.Tool
};
const message2Chat = {
const GPT2Chat = {
[ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System,
[ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
[ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
[ChatCompletionRequestMessageRoleEnum.Function]: ChatRoleEnum.Function,
[ChatCompletionRequestMessageRoleEnum.Tool]: ChatRoleEnum.Tool
[ChatCompletionRequestMessageRoleEnum.Function]: ChatRoleEnum.AI,
[ChatCompletionRequestMessageRoleEnum.Tool]: ChatRoleEnum.AI
};
export function adaptRole_Chat2Message(role: `${ChatRoleEnum}`) {
return chat2Message[role];
}
export function adaptRole_Message2Chat(role: `${ChatCompletionRequestMessageRoleEnum}`) {
return message2Chat[role];
return GPT2Chat[role];
}
export const adaptChat2GptMessages = ({
/**
 * Collapse a single-element text content array into a plain string;
 * any other content (multi-part or non-text) is returned unchanged.
 */
export const simpleUserContentPart = (content: ChatCompletionContentPart[]) => {
  const [onlyPart] = content;
  if (content.length === 1 && onlyPart.type === 'text') {
    return onlyPart.text;
  }
  return content;
};
export const chats2GPTMessages = ({
messages,
reserveId
reserveId,
reserveTool = false
}: {
messages: ChatItemType[];
reserveId: boolean;
}): ChatMessageItemType[] => {
return messages.map((item) => ({
...(reserveId && { dataId: item.dataId }),
role: chat2Message[item.obj],
content: item.value || ''
}));
reserveTool?: boolean;
}): ChatCompletionMessageParam[] => {
let results: ChatCompletionMessageParam[] = [];
messages.forEach((item) => {
const dataId = reserveId ? item.dataId : undefined;
if (item.obj === ChatRoleEnum.Human) {
const value = item.value
.map((item) => {
if (item.type === ChatItemValueTypeEnum.text) {
return {
type: 'text',
text: item.text?.content || ''
};
}
if (item.type === 'file' && item.file?.type === ChatFileTypeEnum.image) {
return {
type: 'image_url',
image_url: {
url: item.file?.url || ''
}
};
}
return;
})
.filter(Boolean) as ChatCompletionContentPart[];
results.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.User,
content: simpleUserContentPart(value)
});
} else if (item.obj === ChatRoleEnum.System) {
const content = item.value?.[0]?.text?.content;
if (content) {
results.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.System,
content
});
}
} else {
item.value.forEach((value) => {
if (value.type === ChatItemValueTypeEnum.tool && value.tools && reserveTool) {
const tool_calls: ChatCompletionMessageToolCall[] = [];
const toolResponse: ChatCompletionToolMessageParam[] = [];
value.tools.forEach((tool) => {
tool_calls.push({
id: tool.id,
type: 'function',
function: {
name: tool.functionName,
arguments: tool.params
}
});
toolResponse.push({
tool_call_id: tool.id,
role: ChatCompletionRequestMessageRoleEnum.Tool,
name: tool.functionName,
content: tool.response
});
});
results = results
.concat({
dataId,
role: ChatCompletionRequestMessageRoleEnum.Assistant,
tool_calls
})
.concat(toolResponse);
} else if (value.text) {
results.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: value.text.content
});
}
});
}
});
return results;
};
/**
 * Convert OpenAI chat-completion messages back into FastGPT chat items.
 *
 * - System / plain-string messages become text value parts.
 * - Multi-part user content keeps text parts and turns image parts into file parts.
 * - Assistant tool_calls / function_call entries are (when `reserveTool` is true)
 *   rebuilt into tool value parts, each paired with its matching tool/function
 *   response message found elsewhere in `messages`.
 * - Tool/Function role messages themselves yield empty items and are dropped
 *   by the final filter.
 */
export const GPTMessages2Chats = (
  messages: ChatCompletionMessageParam[],
  reserveTool = true
): ChatItemType[] => {
  return messages
    .map((item) => {
      const value: ChatItemType['value'] = [];
      // Map the GPT role to a FastGPT chat role (Tool/Function collapse to AI).
      const obj = GPT2Chat[item.role];

      if (
        obj === ChatRoleEnum.System &&
        item.role === ChatCompletionRequestMessageRoleEnum.System
      ) {
        value.push({
          type: ChatItemValueTypeEnum.text,
          text: {
            content: item.content
          }
        });
      } else if (
        obj === ChatRoleEnum.Human &&
        item.role === ChatCompletionRequestMessageRoleEnum.User
      ) {
        if (typeof item.content === 'string') {
          value.push({
            type: ChatItemValueTypeEnum.text,
            text: {
              content: item.content
            }
          });
        } else if (Array.isArray(item.content)) {
          // Multi-modal user content: keep text parts, convert images to file parts.
          item.content.forEach((item) => {
            if (item.type === 'text') {
              value.push({
                type: ChatItemValueTypeEnum.text,
                text: {
                  content: item.text
                }
              });
            } else if (item.type === 'image_url') {
              value.push({
                //@ts-ignore
                type: 'file',
                file: {
                  type: ChatFileTypeEnum.image,
                  name: '',
                  url: item.image_url.url
                }
              });
            }
          });
          // @ts-ignore
        }
      } else if (
        obj === ChatRoleEnum.AI &&
        item.role === ChatCompletionRequestMessageRoleEnum.Assistant
      ) {
        if (item.content && typeof item.content === 'string') {
          value.push({
            type: ChatItemValueTypeEnum.text,
            text: {
              content: item.content
            }
          });
        } else if (item.tool_calls && reserveTool) {
          // save tool calls
          const toolCalls = item.tool_calls as ChatCompletionMessageToolCall[];
          value.push({
            //@ts-ignore
            type: ChatItemValueTypeEnum.tool,
            tools: toolCalls.map((tool) => {
              // Locate the tool-role message answering this call by id.
              let toolResponse =
                messages.find(
                  (msg) =>
                    msg.role === ChatCompletionRequestMessageRoleEnum.Tool &&
                    msg.tool_call_id === tool.id
                )?.content || '';
              // Non-string responses are serialized so the UI always gets text.
              toolResponse =
                typeof toolResponse === 'string' ? toolResponse : JSON.stringify(toolResponse);

              return {
                id: tool.id,
                toolName: tool.toolName || '',
                toolAvatar: tool.toolAvatar || '',
                functionName: tool.function.name,
                params: tool.function.arguments,
                response: toolResponse as string
              };
            })
          });
        } else if (item.function_call && reserveTool) {
          const functionCall = item.function_call as ChatCompletionMessageFunctionCall;
          // Legacy function-call mode: responses are matched by function name.
          const functionResponse = messages.find(
            (msg) =>
              msg.role === ChatCompletionRequestMessageRoleEnum.Function &&
              msg.name === item.function_call?.name
          ) as ChatCompletionFunctionMessageParam;

          if (functionResponse) {
            value.push({
              //@ts-ignore
              type: ChatItemValueTypeEnum.tool,
              tools: [
                {
                  id: functionCall.id || '',
                  toolName: functionCall.toolName || '',
                  toolAvatar: functionCall.toolAvatar || '',
                  functionName: functionCall.name,
                  params: functionCall.arguments,
                  response: functionResponse.content || ''
                }
              ]
            });
          }
        }
      }

      return {
        dataId: item.dataId,
        obj,
        value
      } as ChatItemType;
    })
    .filter((item) => item.value.length > 0);
};
/**
 * Flatten chat value parts into a runtime prompt: the collected file parts
 * plus the concatenation of all text-part contents.
 */
export const chatValue2RuntimePrompt = (value: ChatItemValueItemType[]): RuntimeUserPromptType => {
  const runtimePrompt: RuntimeUserPromptType = {
    files: [],
    text: ''
  };

  for (const part of value) {
    if (part.type === 'file' && part.file) {
      runtimePrompt.files?.push(part.file);
    } else if (part.text) {
      runtimePrompt.text += part.text.content;
    }
  }

  return runtimePrompt;
};
/**
 * Build user chat value parts from a runtime prompt.
 * File parts are appended first, then a single text part (if any text).
 */
export const runtimePrompt2ChatsValue = (
  prompt: RuntimeUserPromptType
): UserChatItemType['value'] => {
  const value: UserChatItemType['value'] = [];

  for (const file of prompt.files ?? []) {
    value.push({
      type: ChatItemValueTypeEnum.file,
      file
    });
  }

  if (prompt.text) {
    value.push({
      type: ChatItemValueTypeEnum.text,
      text: { content: prompt.text }
    });
  }

  return value;
};
/**
 * Wrap an optional system prompt string into chat-item form.
 * An empty/undefined prompt produces no system message at all.
 */
export const getSystemPrompt = (prompt?: string): ChatItemType[] => {
  if (!prompt) {
    return [];
  }

  return [
    {
      obj: ChatRoleEnum.System,
      value: [
        {
          type: ChatItemValueTypeEnum.text,
          text: { content: prompt }
        }
      ]
    }
  ];
};

View File

@ -1,28 +1,30 @@
export enum ChatRoleEnum {
System = 'System',
Human = 'Human',
AI = 'AI',
Function = 'Function',
Tool = 'Tool'
AI = 'AI'
}
export const ChatRoleMap = {
[ChatRoleEnum.System]: {
name: '系统提示词'
name: '系统'
},
[ChatRoleEnum.Human]: {
name: '用户'
},
[ChatRoleEnum.AI]: {
name: 'AI'
},
[ChatRoleEnum.Function]: {
name: 'Function'
},
[ChatRoleEnum.Tool]: {
name: 'Tool'
}
};
/* Kinds of files that can be attached to a chat message. */
export enum ChatFileTypeEnum {
  image = 'image',
  file = 'file'
}
/* Kinds of value parts inside a chat item: plain text, a file attachment,
   or a tool-call record. */
export enum ChatItemValueTypeEnum {
  text = 'text',
  file = 'file',
  tool = 'tool'
}
export enum ChatSourceEnum {
test = 'test',
online = 'online',

View File

@ -1,11 +1,20 @@
import { ClassifyQuestionAgentItemType } from '../module/type';
import { SearchDataResponseItemType } from '../dataset/type';
import { ChatRoleEnum, ChatSourceEnum, ChatStatusEnum } from './constants';
import {
ChatFileTypeEnum,
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatSourceEnum,
ChatStatusEnum
} from './constants';
import { FlowNodeTypeEnum } from '../module/node/constant';
import { ModuleOutputKeyEnum } from '../module/constants';
import { DispatchNodeResponseKeyEnum } from '../module/runtime/constants';
import { AppSchema } from '../app/type';
import type { AppSchema as AppType } from '@fastgpt/global/core/app/type.d';
import { DatasetSearchModeEnum } from '../dataset/constants';
import { ChatBoxInputType } from '../../../../projects/app/src/components/ChatBox/type';
import { DispatchNodeResponseType } from '../module/runtime/type.d';
export type ChatSchema = {
_id: string;
@ -30,7 +39,53 @@ export type ChatWithAppSchema = Omit<ChatSchema, 'appId'> & {
appId: AppSchema;
};
export type ChatItemSchema = {
export type UserChatItemValueItemType = {
type: ChatItemValueTypeEnum.text | ChatItemValueTypeEnum.file;
text?: {
content: string;
};
file?: {
type: `${ChatFileTypeEnum}`;
name?: string;
url: string;
};
};
export type UserChatItemType = {
obj: ChatRoleEnum.Human;
value: UserChatItemValueItemType[];
};
export type SystemChatItemValueItemType = {
type: ChatItemValueTypeEnum.text;
text?: {
content: string;
};
};
export type SystemChatItemType = {
obj: ChatRoleEnum.System;
value: SystemChatItemValueItemType[];
};
export type AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.text | ChatItemValueTypeEnum.tool;
text?: {
content: string;
};
tools?: ToolModuleResponseItemType[];
};
export type AIChatItemType = {
obj: ChatRoleEnum.AI;
value: AIChatItemValueItemType[];
userGoodFeedback?: string;
userBadFeedback?: string;
customFeedbacks?: string[];
adminFeedback?: AdminFbkType;
[DispatchNodeResponseKeyEnum.nodeResponse]?: ChatHistoryItemResType[];
};
export type ChatItemValueItemType =
| UserChatItemValueItemType
| SystemChatItemValueItemType
| AIChatItemValueItemType;
export type ChatItemSchema = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
dataId: string;
chatId: string;
userId: string;
@ -38,13 +93,6 @@ export type ChatItemSchema = {
tmbId: string;
appId: string;
time: Date;
obj: `${ChatRoleEnum}`;
value: string;
userGoodFeedback?: string;
userBadFeedback?: string;
customFeedbacks?: string[];
adminFeedback?: AdminFbkType;
[ModuleOutputKeyEnum.responseData]?: ChatHistoryItemResType[];
};
export type AdminFbkType = {
@ -56,22 +104,16 @@ export type AdminFbkType = {
};
/* --------- chat item ---------- */
export type ChatItemType = {
export type ChatItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
dataId?: string;
obj: ChatItemSchema['obj'];
value: any;
userGoodFeedback?: string;
userBadFeedback?: string;
customFeedbacks?: ChatItemSchema['customFeedbacks'];
adminFeedback?: ChatItemSchema['feedback'];
[ModuleOutputKeyEnum.responseData]?: ChatHistoryItemResType[];
};
export type ChatSiteItemType = ChatItemType & {
export type ChatSiteItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
dataId?: string;
status: `${ChatStatusEnum}`;
moduleName?: string;
ttsBuffer?: Uint8Array;
};
} & ChatBoxInputType;
/* --------- team chat --------- */
export type ChatAppListSchema = {
@ -93,60 +135,25 @@ export type ChatHistoryItemType = HistoryItemType & {
};
/* ------- response data ------------ */
export type moduleDispatchResType = {
// common
moduleLogo?: string;
runningTime?: number;
query?: string;
textOutput?: string;
// bill
tokens?: number;
model?: string;
contextTotalLen?: number;
totalPoints?: number;
// chat
temperature?: number;
maxToken?: number;
quoteList?: SearchDataResponseItemType[];
historyPreview?: ChatItemType[]; // completion context array. history will slice
// dataset search
similarity?: number;
limit?: number;
searchMode?: `${DatasetSearchModeEnum}`;
searchUsingReRank?: boolean;
extensionModel?: string;
extensionResult?: string;
extensionTokens?: number;
// cq
cqList?: ClassifyQuestionAgentItemType[];
cqResult?: string;
// content extract
extractDescription?: string;
extractResult?: Record<string, any>;
// http
params?: Record<string, any>;
body?: Record<string, any>;
headers?: Record<string, any>;
httpResult?: Record<string, any>;
// plugin output
pluginOutput?: Record<string, any>;
pluginDetail?: ChatHistoryItemResType[];
// tf switch
tfSwitchResult?: boolean;
// abandon
tokens?: number;
};
export type ChatHistoryItemResType = moduleDispatchResType & {
export type ChatHistoryItemResType = DispatchNodeResponseType & {
moduleType: `${FlowNodeTypeEnum}`;
moduleName: string;
};
/* One tool run response */
export type ToolRunResponseItemType = Record<string, any> | Array;
/* tool module response */
export type ToolModuleResponseItemType = {
id: string;
toolName: string; // tool name
toolAvatar: string;
params: string; // tool params
response: string;
functionName: string;
};
/* dispatch run time */
export type RuntimeUserPromptType = {
files?: UserChatItemValueItemType['file'][];
text: string;
};

View File

@ -1,6 +1,79 @@
import { IMG_BLOCK_KEY, FILE_BLOCK_KEY } from './constants';
import { DispatchNodeResponseType } from '../module/runtime/type';
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '../module/node/constant';
import { ChatItemValueTypeEnum, ChatRoleEnum } from './constants';
import { ChatHistoryItemResType, ChatItemType } from './type.d';
/**
 * Strip fenced image blocks (```IMG_BLOCK_KEY ... ```) out of chat content
 * and trim surrounding whitespace.
 */
export function chatContentReplaceBlock(content: string = '') {
  const imgBlockPattern = new RegExp(`\`\`\`(${IMG_BLOCK_KEY})\\n([\\s\\S]*?)\`\`\``, 'g');
  const stripped = content.replace(imgBlockPattern, '');
  return stripped.trim();
}
/**
 * Derive a short chat title: the first 20 characters of the message's first
 * text part, falling back to `defaultValue` when there is no text content.
 */
export const getChatTitleFromChatMessage = (message?: ChatItemType, defaultValue = '新对话') => {
  // @ts-ignore
  const firstTextPart = message?.value.find((part) => part.type === ChatItemValueTypeEnum.text);
  const content = firstTextPart?.text?.content;

  return content ? content.slice(0, 20) : defaultValue;
};
/**
 * Produce a compact preview of a completed message history.
 * System messages and the final two messages keep their full first text part;
 * older turns are abbreviated, each text part truncated to 20 characters.
 */
export const getHistoryPreview = (
  completeMessages: ChatItemType[]
): {
  obj: `${ChatRoleEnum}`;
  value: string;
}[] => {
  const lastTwoStart = completeMessages.length - 2;

  return completeMessages.map((message, index) => {
    // System prompts and the latest Q/A pair are kept in full.
    if (message.obj === ChatRoleEnum.System || index >= lastTwoStart) {
      return {
        obj: message.obj,
        value: message.value?.[0]?.text?.content || ''
      };
    }

    // Older turns: abbreviate each text part and join with newlines.
    const preview = message.value
      .map((part) => {
        const content = part.text?.content;
        if (!content) return '';
        return content.length > 20 ? `${content.slice(0, 20)}...` : content;
      })
      .filter(Boolean)
      .join('\n');

    return {
      obj: message.obj,
      value: preview
    };
  });
};
/**
 * Keep only the response fields that are safe to show to public viewers,
 * and only for a whitelist of module types. Nested plugin/tool details are
 * filtered recursively with the same rules.
 */
export const filterPublicNodeResponseData = ({
  flowResponses = []
}: {
  flowResponses?: ChatHistoryItemResType[];
}) => {
  // Fields exposed to unauthenticated viewers.
  const publicFields = ['quoteList', 'moduleType'];
  // Module types whose responses are shown at all.
  const publicModuleTypes: any[] = [
    FlowNodeTypeEnum.pluginModule,
    FlowNodeTypeEnum.datasetSearchNode,
    FlowNodeTypeEnum.tools
  ];

  return flowResponses
    .filter((response) => publicModuleTypes.includes(response.moduleType))
    .map((response) => {
      const picked: DispatchNodeResponseType = {};
      for (const key in response) {
        if (key === 'toolDetail' || key === 'pluginDetail') {
          // Recurse into nested module responses and filter them the same way.
          // @ts-ignore
          picked[key] = filterPublicNodeResponseData({ flowResponses: response[key] });
        } else if (publicFields.includes(key)) {
          // @ts-ignore
          picked[key] = response[key];
        }
      }
      return picked as ChatHistoryItemResType;
    });
};

View File

@ -83,17 +83,17 @@ export const TrainingTypeMap = {
[TrainingModeEnum.chunk]: {
label: 'core.dataset.training.Chunk mode',
tooltip: 'core.dataset.import.Chunk Split Tip',
isPlus: true
openSource: true
},
[TrainingModeEnum.auto]: {
label: 'core.dataset.training.Auto mode',
tooltip: 'core.dataset.training.Auto mode Tip',
isPlus: true
openSource: false
},
[TrainingModeEnum.qa]: {
label: 'core.dataset.training.QA mode',
tooltip: 'core.dataset.import.QA Import Tip',
isPlus: true
openSource: true
}
};

View File

@ -21,7 +21,10 @@ export enum ModuleIOValueTypeEnum {
// plugin special type
selectApp = 'selectApp',
selectDataset = 'selectDataset'
selectDataset = 'selectDataset',
// tool
tools = 'tools'
}
/* reg: modulename key */
@ -89,12 +92,10 @@ export enum ModuleInputKeyEnum {
export enum ModuleOutputKeyEnum {
// common
responseData = 'responseData',
moduleDispatchBills = 'moduleDispatchBills',
userChatInput = 'userChatInput',
finish = 'finish',
history = 'history',
answerText = 'answerText', // answer module text key
answerText = 'answerText', // module answer. the value will be show and save to history
success = 'success',
failed = 'failed',
text = 'system_text',
@ -110,7 +111,13 @@ export enum ModuleOutputKeyEnum {
// tf switch
resultTrue = 'system_resultTrue',
resultFalse = 'system_resultFalse'
resultFalse = 'system_resultFalse',
// tools
selectedTools = 'selectedTools',
// http
httpRawResponse = 'httpRawResponse'
}
export enum VariableInputEnum {

View File

@ -56,7 +56,8 @@ export enum FlowNodeTypeEnum {
pluginModule = 'pluginModule',
pluginInput = 'pluginInput',
pluginOutput = 'pluginOutput',
queryExtension = 'cfr'
queryExtension = 'cfr',
tools = 'tools'
// abandon
}

View File

@ -2,6 +2,7 @@ import { FlowNodeInputTypeEnum, FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from
import { ModuleIOValueTypeEnum, ModuleInputKeyEnum, ModuleOutputKeyEnum } from '../constants';
import { SelectedDatasetType } from '../api';
import { EditInputFieldMap, EditOutputFieldMap } from './type';
import { LLMModelTypeEnum } from '../../ai/constants';
export type FlowNodeChangeProps = {
moduleId: string;
@ -28,6 +29,7 @@ export type FlowNodeInputItemType = {
label: string;
description?: string;
required?: boolean;
toolDescription?: string; // If this field is not empty, it is entered as a tool
edit?: boolean; // Whether to allow editing
editField?: EditInputFieldMap;
@ -49,6 +51,8 @@ export type FlowNodeInputItemType = {
step?: number; // slider
max?: number; // slider, number input
min?: number; // slider, number input
llmModelType?: `${LLMModelTypeEnum}`;
};
export type FlowNodeOutputTargetItemType = {
@ -62,6 +66,8 @@ export type FlowNodeOutputItemType = {
label?: string;
description?: string;
required?: boolean;
defaultValue?: any;
edit?: boolean;
editField?: EditOutputFieldMap;
@ -74,12 +80,14 @@ export type FlowNodeOutputItemType = {
export type EditInputFieldMap = EditOutputFieldMap & {
inputType?: boolean;
required?: boolean;
isToolInput?: boolean;
};
export type EditOutputFieldMap = {
name?: boolean;
key?: boolean;
description?: boolean;
dataType?: boolean;
defaultValue?: boolean;
};
export type EditNodeFieldType = {
inputType?: `${FlowNodeInputTypeEnum}`; // input type
@ -89,6 +97,8 @@ export type EditNodeFieldType = {
label?: string;
description?: string;
valueType?: `${ModuleIOValueTypeEnum}`;
isToolInput?: boolean;
defaultValue?: string;
};
/* ------------- item type --------------- */

View File

@ -0,0 +1,19 @@
/* Server-sent event types streamed to the client while a chat flow runs. */
export enum SseResponseEventEnum {
  error = 'error',
  answer = 'answer', // animation stream
  fastAnswer = 'fastAnswer', // direct answer text, not animation
  flowNodeStatus = 'flowNodeStatus', // update node status
  toolCall = 'toolCall', // tool start
  toolParams = 'toolParams', // tool params return
  toolResponse = 'toolResponse', // tool response return
  flowResponses = 'flowResponses' // sse response request
}

/* Keys of the object a dispatched node returns to the flow runner. */
export enum DispatchNodeResponseKeyEnum {
  nodeResponse = 'responseData', // run node response
  nodeDispatchUsages = 'nodeDispatchUsages', // the node bill.
  childrenResponses = 'childrenResponses', // Some nodes make recursive calls that need to be returned
  toolResponses = 'toolResponses', // The result is passed back to the tool node for use
  assistantResponses = 'assistantResponses' // assistant response
}

View File

@ -0,0 +1,101 @@
import { ChatNodeUsageType } from '../../../support/wallet/bill/type';
import { ChatItemValueItemType, ToolRunResponseItemType } from '../../chat/type';
import { FlowNodeInputItemType, FlowNodeOutputItemType } from '../node/type';
import { ModuleItemType } from '../type';
import { DispatchNodeResponseKeyEnum } from './constants';
/**
 * Runtime representation of a flow module while it is being dispatched.
 * Derived from ModuleItemType, with inputs/outputs flattened to the
 * fields the dispatcher actually reads.
 */
export type RunningModuleItemType = {
  name: ModuleItemType['name'];
  avatar: ModuleItemType['avatar'];
  intro?: ModuleItemType['intro'];
  moduleId: ModuleItemType['moduleId'];
  flowType: ModuleItemType['flowType'];
  showStatus?: ModuleItemType['showStatus'];
  isEntry?: ModuleItemType['isEntry']; // entry module of the current run
  inputs: {
    key: string;
    value?: any;
    valueType?: FlowNodeInputItemType['valueType'];
    required?: boolean;
    toolDescription?: string; // presumably shown to the LLM when the input is a tool parameter — TODO confirm
  }[];
  outputs: {
    key: string;
    required?: boolean;
    defaultValue?: any;
    answer?: boolean;
    response?: boolean;
    value?: any;
    valueType?: FlowNodeOutputItemType['valueType'];
    // downstream connections receiving this output
    targets: {
      moduleId: string;
      key: string;
    }[];
  }[];
};
/**
 * Per-node response detail collected during a flow dispatch, grouped by
 * feature. All fields are optional: each node type only fills the fields
 * it produces.
 * NOTE(review): SearchDataResponseItemType, ChatRoleEnum, DatasetSearchModeEnum,
 * ClassifyQuestionAgentItemType and ChatHistoryItemResType are referenced below
 * but do not appear in this file's visible import list — confirm they resolve.
 */
export type DispatchNodeResponseType = {
  // common
  moduleLogo?: string;
  runningTime?: number;
  query?: string;
  textOutput?: string;
  // bill
  tokens?: number;
  model?: string;
  contextTotalLen?: number;
  totalPoints?: number;
  // chat
  temperature?: number;
  maxToken?: number;
  quoteList?: SearchDataResponseItemType[];
  historyPreview?: {
    obj: `${ChatRoleEnum}`;
    value: string;
  }[]; // completion context array. history will slice
  // dataset search
  similarity?: number;
  limit?: number;
  searchMode?: `${DatasetSearchModeEnum}`;
  searchUsingReRank?: boolean;
  extensionModel?: string;
  extensionResult?: string;
  extensionTokens?: number;
  // cq
  cqList?: ClassifyQuestionAgentItemType[];
  cqResult?: string;
  // content extract
  extractDescription?: string;
  extractResult?: Record<string, any>;
  // http
  params?: Record<string, any>;
  body?: Record<string, any>;
  headers?: Record<string, any>;
  httpResult?: Record<string, any>;
  // plugin output
  pluginOutput?: Record<string, any>;
  pluginDetail?: ChatHistoryItemResType[];
  // tf switch
  tfSwitchResult?: boolean;
  // tool
  toolCallTokens?: number;
  toolDetail?: ChatHistoryItemResType[];
};
/**
 * Result object returned by a node's dispatch handler: the node's own
 * output params (T) intersected with enum-keyed run metadata consumed
 * by the flow dispatcher.
 *
 * T defaults to {} so the type can be referenced without an argument
 * (the original `DispatchNodeResultType[]` below was a TS2314 error:
 * generic type requires a type argument).
 */
export type DispatchNodeResultType<T = {}> = {
  [DispatchNodeResponseKeyEnum.nodeResponse]?: DispatchNodeResponseType; // The node response detail
  [DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[]; // usage/points generated by this node
  [DispatchNodeResponseKeyEnum.childrenResponses]?: DispatchNodeResultType[]; // results of recursive child runs
  [DispatchNodeResponseKeyEnum.toolResponses]?: ToolRunResponseItemType;
  [DispatchNodeResponseKeyEnum.assistantResponses]?: ChatItemValueItemType[];
} & T;

View File

@ -0,0 +1,31 @@
import { ChatCompletionRequestMessageRoleEnum } from '../../ai/constants';
/**
 * Wrap a text delta in an OpenAI ChatCompletion-chunk-shaped JSON string
 * so SSE consumers can parse FastGPT output like a GPT stream.
 *
 * @param text - delta content; null yields an empty delta (used with finish_reason to close the stream)
 * @param model - model name echoed in the payload
 * @param finish_reason - 'stop' on the final chunk, otherwise null
 * @param extraData - extra top-level fields merged in; fixed fields below win on key conflict
 * @returns JSON string of the chunk
 */
export const textAdaptGptResponse = ({
  text,
  model = '',
  finish_reason = null,
  extraData = {}
}: {
  model?: string;
  text: string | null;
  finish_reason?: null | 'stop';
  extraData?: Record<string, any>; // was `Object` (boxed wrapper type) — use a precise record type
}) => {
  return JSON.stringify({
    ...extraData,
    id: '',
    object: '',
    created: 0,
    model,
    choices: [
      {
        delta:
          text === null
            ? {}
            : { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: text },
        index: 0,
        finish_reason
      }
    ]
  });
};

View File

@ -1,26 +1,25 @@
import { UserGuideModule } from '@fastgpt/global/core/module/template/system/userGuide';
import { UserInputModule } from '@fastgpt/global/core/module/template/system/userInput';
import { AiChatModule } from '@fastgpt/global/core/module/template/system/aiChat';
import { DatasetSearchModule } from '@fastgpt/global/core/module/template/system/datasetSearch';
import { DatasetConcatModule } from '@fastgpt/global/core/module/template/system/datasetConcat';
import { AssignedAnswerModule } from '@fastgpt/global/core/module/template/system/assignedAnswer';
import { ClassifyQuestionModule } from '@fastgpt/global/core/module/template/system/classifyQuestion';
import { ContextExtractModule } from '@fastgpt/global/core/module/template/system/contextExtract';
import { HttpModule468 } from '@fastgpt/global/core/module/template/system/http468';
import { HttpModule } from '@fastgpt/global/core/module/template/system/abandon/http';
import { UserGuideModule } from './system/userGuide';
import { UserInputModule } from './system/userInput';
import { AiChatModule } from './system/aiChat';
import { DatasetSearchModule } from './system/datasetSearch';
import { DatasetConcatModule } from './system/datasetConcat';
import { AssignedAnswerModule } from './system/assignedAnswer';
import { ClassifyQuestionModule } from './system/classifyQuestion';
import { ContextExtractModule } from './system/contextExtract';
import { HttpModule468 } from './system/http468';
import { HttpModule } from './system/abandon/http';
import { ToolModule } from './system/tools';
import { RunAppModule } from '@fastgpt/global/core/module/template/system/runApp';
import { PluginInputModule } from '@fastgpt/global/core/module/template/system/pluginInput';
import { PluginOutputModule } from '@fastgpt/global/core/module/template/system/pluginOutput';
import { RunPluginModule } from '@fastgpt/global/core/module/template/system/runPlugin';
import { AiQueryExtension } from '@fastgpt/global/core/module/template/system/queryExtension';
import { RunAppModule } from './system/runApp';
import { PluginInputModule } from './system/pluginInput';
import { PluginOutputModule } from './system/pluginOutput';
import { RunPluginModule } from './system/runPlugin';
import { AiQueryExtension } from './system/queryExtension';
import type {
FlowModuleTemplateType,
moduleTemplateListType
} from '@fastgpt/global/core/module/type.d';
import { ModuleTemplateTypeEnum } from '@fastgpt/global/core/module/constants';
import type { FlowModuleTemplateType, moduleTemplateListType } from '../../module/type.d';
import { ModuleTemplateTypeEnum } from '../../module/constants';
/* app flow module templates */
export const appSystemModuleTemplates: FlowModuleTemplateType[] = [
UserGuideModule,
UserInputModule,
@ -29,11 +28,13 @@ export const appSystemModuleTemplates: FlowModuleTemplateType[] = [
DatasetSearchModule,
DatasetConcatModule,
RunAppModule,
ToolModule,
ClassifyQuestionModule,
ContextExtractModule,
HttpModule468,
AiQueryExtension
];
/* plugin flow module templates */
export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [
PluginInputModule,
PluginOutputModule,
@ -42,12 +43,14 @@ export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [
DatasetSearchModule,
DatasetConcatModule,
RunAppModule,
ToolModule,
ClassifyQuestionModule,
ContextExtractModule,
HttpModule468,
AiQueryExtension
];
/* all module */
export const moduleTemplatesFlat: FlowModuleTemplateType[] = [
UserGuideModule,
UserInputModule,
@ -59,6 +62,8 @@ export const moduleTemplatesFlat: FlowModuleTemplateType[] = [
ContextExtractModule,
HttpModule468,
HttpModule,
ToolModule,
AiChatModule,
RunAppModule,
PluginInputModule,
PluginOutputModule,

View File

@ -2,6 +2,7 @@ import type { FlowNodeInputItemType } from '../node/type.d';
import { DYNAMIC_INPUT_KEY, ModuleInputKeyEnum } from '../constants';
import { FlowNodeInputTypeEnum } from '../node/constant';
import { ModuleIOValueTypeEnum } from '../constants';
import { chatNodeSystemPromptTip } from './tip';
export const Input_Template_Switch: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.switch,
@ -58,6 +59,28 @@ export const Input_Template_DynamicInput: FlowNodeInputItemType = {
hideInApp: true
};
/* Shared input template: LLM model selector (spread into node templates, which may override llmModelType). */
export const Input_Template_AiModel: FlowNodeInputItemType = {
  key: ModuleInputKeyEnum.aiModel,
  type: FlowNodeInputTypeEnum.selectLLMModel,
  label: 'core.module.input.label.aiModel', // i18n key
  required: true,
  valueType: ModuleIOValueTypeEnum.string,
  showTargetInApp: false,
  showTargetInPlugin: false
};
/* Shared input template: system prompt textarea (max 3000 chars). */
export const Input_Template_System_Prompt: FlowNodeInputItemType = {
  key: ModuleInputKeyEnum.aiSystemPrompt,
  type: FlowNodeInputTypeEnum.textarea,
  max: 3000,
  valueType: ModuleIOValueTypeEnum.string,
  label: 'core.ai.Prompt', // i18n key
  description: chatNodeSystemPromptTip,
  placeholder: chatNodeSystemPromptTip,
  showTargetInApp: true,
  showTargetInPlugin: true
};
export const Input_Template_Dataset_Quote: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiChatDatasetQuote,
type: FlowNodeInputTypeEnum.target,

View File

@ -11,9 +11,11 @@ import {
ModuleTemplateTypeEnum
} from '../../constants';
import {
Input_Template_AiModel,
Input_Template_Dataset_Quote,
Input_Template_History,
Input_Template_Switch,
Input_Template_System_Prompt,
Input_Template_UserChatInput
} from '../input';
import { chatNodeSystemPromptTip } from '../tip';
@ -24,20 +26,13 @@ export const AiChatModule: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.textAnswer,
flowType: FlowNodeTypeEnum.chatNode,
avatar: '/imgs/module/AI.png',
name: 'core.module.template.Ai chat',
intro: 'core.module.template.Ai chat intro',
name: 'AI 对话',
intro: 'AI 大模型对话',
showStatus: true,
// isTool: true,
inputs: [
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
label: 'core.module.input.label.aiModel',
required: true,
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: false,
showTargetInPlugin: false
},
Input_Template_AiModel,
// --- settings modal
{
key: ModuleInputKeyEnum.aiChatTemperature,
@ -98,18 +93,13 @@ export const AiChatModule: FlowModuleTemplateType = {
},
// settings modal ---
{
key: ModuleInputKeyEnum.aiSystemPrompt,
type: FlowNodeInputTypeEnum.textarea,
...Input_Template_System_Prompt,
label: 'core.ai.Prompt',
max: 300,
valueType: ModuleIOValueTypeEnum.string,
description: chatNodeSystemPromptTip,
placeholder: chatNodeSystemPromptTip,
showTargetInApp: true,
showTargetInPlugin: true
placeholder: chatNodeSystemPromptTip
},
Input_Template_History,
Input_Template_UserChatInput,
{ ...Input_Template_UserChatInput, toolDescription: '用户问题' },
Input_Template_Dataset_Quote
],
outputs: [

View File

@ -9,8 +9,9 @@ export const AssignedAnswerModule: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.textAnswer,
flowType: FlowNodeTypeEnum.answerNode,
avatar: '/imgs/module/reply.png',
name: 'core.module.template.Assigned reply',
intro: 'core.module.template.Assigned reply intro',
name: '指定回复',
intro:
'该模块可以直接回复一段指定的内容。常用于引导、提示。非字符串内容传入时,会转成字符串进行输出。',
inputs: [
Input_Template_Switch,
{

View File

@ -6,40 +6,34 @@ import {
import { FlowModuleTemplateType } from '../../type.d';
import { ModuleIOValueTypeEnum, ModuleInputKeyEnum, ModuleTemplateTypeEnum } from '../../constants';
import {
Input_Template_AiModel,
Input_Template_History,
Input_Template_Switch,
Input_Template_UserChatInput
} from '../input';
import { Output_Template_UserChatInput } from '../output';
import { Input_Template_System_Prompt } from '../input';
import { LLMModelTypeEnum } from '../../../ai/constants';
export const ClassifyQuestionModule: FlowModuleTemplateType = {
id: FlowNodeTypeEnum.classifyQuestion,
templateType: ModuleTemplateTypeEnum.functionCall,
flowType: FlowNodeTypeEnum.classifyQuestion,
avatar: '/imgs/module/cq.png',
name: 'core.module.template.Classify question',
intro: `core.module.template.Classify question intro`,
name: '问题分类',
intro: `根据用户的历史记录和当前问题判断该次提问的类型。可以添加多组问题类型,下面是一个模板例子:\n类型1: 打招呼\n类型2: 关于商品“使用”问题\n类型3: 关于商品“购买”问题\n类型4: 其他问题`,
showStatus: true,
inputs: [
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
valueType: ModuleIOValueTypeEnum.string,
label: 'core.module.input.label.Classify model',
required: true,
showTargetInApp: false,
showTargetInPlugin: false
...Input_Template_AiModel,
llmModelType: LLMModelTypeEnum.classify
},
{
key: ModuleInputKeyEnum.aiSystemPrompt,
type: FlowNodeInputTypeEnum.textarea,
valueType: ModuleIOValueTypeEnum.string,
...Input_Template_System_Prompt,
label: 'core.module.input.label.Background',
description: 'core.module.input.description.Background',
placeholder: 'core.module.input.placeholder.Classify background',
showTargetInApp: true,
showTargetInPlugin: true
placeholder: 'core.module.input.placeholder.Classify background'
},
Input_Template_History,
Input_Template_UserChatInput,

View File

@ -10,26 +10,23 @@ import {
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
} from '../../constants';
import { Input_Template_History, Input_Template_Switch } from '../input';
import { Input_Template_AiModel, Input_Template_History, Input_Template_Switch } from '../input';
import { LLMModelTypeEnum } from '../../../ai/constants';
export const ContextExtractModule: FlowModuleTemplateType = {
id: FlowNodeTypeEnum.contentExtract,
templateType: ModuleTemplateTypeEnum.functionCall,
flowType: FlowNodeTypeEnum.contentExtract,
avatar: '/imgs/module/extract.png',
name: 'core.module.template.Extract field',
intro: 'core.module.template.Extract field intro',
name: '文本内容提取',
intro: '可从文本中提取指定的数据例如sql语句、搜索关键词、代码等',
showStatus: true,
isTool: true,
inputs: [
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
valueType: ModuleIOValueTypeEnum.string,
label: 'core.module.input.label.LLM',
required: true,
showTargetInApp: false,
showTargetInPlugin: false
...Input_Template_AiModel,
llmModelType: LLMModelTypeEnum.extractFields
},
{
key: ModuleInputKeyEnum.description,
@ -52,7 +49,8 @@ export const ContextExtractModule: FlowModuleTemplateType = {
required: true,
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: true,
showTargetInPlugin: true
showTargetInPlugin: true,
toolDescription: '需要检索的内容'
},
{
key: ModuleInputKeyEnum.extractKeys,

View File

@ -26,7 +26,7 @@ export const DatasetConcatModule: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.tools,
avatar: '/imgs/module/concat.svg',
name: '知识库搜索引用合并',
intro: 'core.module.template.Dataset search result concat intro',
intro: '可以将多个知识库搜索结果进行合并输出。使用 RRF 的合并方式进行最终排序输出。',
showStatus: false,
inputs: [
Input_Template_Switch,

View File

@ -19,9 +19,10 @@ export const DatasetSearchModule: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.functionCall,
flowType: FlowNodeTypeEnum.datasetSearchNode,
avatar: '/imgs/module/db.png',
name: 'core.module.template.Dataset search',
intro: 'core.module.template.Dataset search intro',
name: '知识库搜索',
intro: '调用知识库搜索能力,查找“有可能”与问题相关的内容',
showStatus: true,
isTool: true,
inputs: [
Input_Template_Switch,
{
@ -97,7 +98,10 @@ export const DatasetSearchModule: FlowModuleTemplateType = {
showTargetInPlugin: false,
value: ''
},
Input_Template_UserChatInput
{
...Input_Template_UserChatInput,
toolDescription: '需要检索的内容'
}
],
outputs: [
Output_Template_UserChatInput,

View File

@ -5,9 +5,9 @@ import {
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type';
import {
DYNAMIC_INPUT_KEY,
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
} from '../../constants';
import {
@ -22,9 +22,10 @@ export const HttpModule468: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.externalCall,
flowType: FlowNodeTypeEnum.httpRequest468,
avatar: '/imgs/module/http.png',
name: 'core.module.template.Http request',
intro: 'core.module.template.Http request intro',
name: 'HTTP 请求',
intro: '可以发出一个 HTTP 请求,实现更为复杂的操作(联网搜索、数据库查询等)',
showStatus: true,
isTool: true,
inputs: [
Input_Template_Switch,
{
@ -86,7 +87,6 @@ export const HttpModule468: FlowModuleTemplateType = {
editField: {
key: true,
description: true,
required: true,
dataType: true
},
defaultEditField: {
@ -94,19 +94,27 @@ export const HttpModule468: FlowModuleTemplateType = {
key: '',
description: '',
inputType: FlowNodeInputTypeEnum.target,
valueType: ModuleIOValueTypeEnum.string,
required: true
valueType: ModuleIOValueTypeEnum.string
}
}
],
outputs: [
Output_Template_Finish,
{
key: ModuleOutputKeyEnum.httpRawResponse,
label: '原始响应',
description: 'HTTP请求的原始响应。只能接受字符串或JSON类型响应数据。',
valueType: ModuleIOValueTypeEnum.any,
type: FlowNodeOutputTypeEnum.source,
targets: []
},
{
...Output_Template_AddOutput,
editField: {
key: true,
description: true,
dataType: true
dataType: true,
defaultValue: true
},
defaultEditField: {
label: '',

View File

@ -13,28 +13,26 @@ import {
import {
Input_Template_History,
Input_Template_Switch,
Input_Template_UserChatInput
Input_Template_UserChatInput,
Input_Template_AiModel
} from '../input';
import { Output_Template_UserChatInput } from '../output';
import { LLMModelTypeEnum } from '../../../ai/constants';
export const AiQueryExtension: FlowModuleTemplateType = {
id: FlowNodeTypeEnum.chatNode,
templateType: ModuleTemplateTypeEnum.other,
flowType: FlowNodeTypeEnum.queryExtension,
avatar: '/imgs/module/cfr.svg',
name: 'core.module.template.Query extension',
intro: 'core.module.template.Query extension intro',
name: '问题优化',
intro:
'使用问题优化功能,可以提高知识库连续对话时搜索的精度。使用该功能后,会先利用 AI 根据上下文构建一个或多个新的检索词,这些检索词更利于进行知识库搜索。该模块已内置在知识库搜索模块中,如果您仅进行一次知识库搜索,可直接使用知识库内置的补全功能。',
showStatus: true,
inputs: [
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
label: 'core.module.input.label.aiModel',
required: true,
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: false,
showTargetInPlugin: false
...Input_Template_AiModel,
llmModelType: LLMModelTypeEnum.queryExtension
},
{
key: ModuleInputKeyEnum.aiSystemPrompt,

View File

@ -22,8 +22,8 @@ export const RunAppModule: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.externalCall,
flowType: FlowNodeTypeEnum.runApp,
avatar: '/imgs/module/app.png',
name: 'core.module.template.Running app',
intro: 'core.module.template.Running app intro',
name: '应用调用',
intro: '可以选择一个其他应用进行调用',
showStatus: true,
inputs: [
Input_Template_Switch,
@ -52,7 +52,7 @@ export const RunAppModule: FlowModuleTemplateType = {
},
{
key: ModuleOutputKeyEnum.answerText,
label: 'AI回复',
label: '回复的文本',
description: '将在应用完全结束后触发',
valueType: ModuleIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.source,

View File

@ -9,6 +9,7 @@ export const RunPluginModule: FlowModuleTemplateType = {
intro: '',
name: '',
showStatus: false,
isTool: true,
inputs: [], // [{key:'pluginId'},...]
outputs: []
};

View File

@ -0,0 +1,52 @@
import { FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
} from '../../constants';
import {
Input_Template_AiModel,
Input_Template_History,
Input_Template_Switch,
Input_Template_System_Prompt,
Input_Template_UserChatInput
} from '../input';
import { chatNodeSystemPromptTip } from '../tip';
import { Output_Template_Finish, Output_Template_UserChatInput } from '../output';
import { LLMModelTypeEnum } from '../../../ai/constants';
/**
 * "Tool call (experimental)" flow node template: the selected LLM picks one
 * or more connected tool nodes to invoke.
 */
export const ToolModule: FlowModuleTemplateType = {
  id: FlowNodeTypeEnum.tools,
  flowType: FlowNodeTypeEnum.tools,
  templateType: ModuleTemplateTypeEnum.functionCall,
  avatar: '/imgs/module/tool.svg',
  name: '工具调用(实验)',
  intro: '通过AI模型自动选择一个或多个工具进行调用。工具可以是其他功能块或插件。',
  showStatus: true,
  inputs: [
    Input_Template_Switch,
    {
      ...Input_Template_AiModel,
      llmModelType: LLMModelTypeEnum.toolCall // restrict the model selector to tool-call models
    },
    {
      ...Input_Template_System_Prompt,
      label: 'core.ai.Prompt',
      description: chatNodeSystemPromptTip,
      placeholder: chatNodeSystemPromptTip
    },
    Input_Template_History,
    Input_Template_UserChatInput
  ],
  outputs: [
    Output_Template_UserChatInput,
    {
      // hidden output carrying the tool nodes chosen by the model
      key: ModuleOutputKeyEnum.selectedTools,
      valueType: ModuleIOValueTypeEnum.tools,
      type: FlowNodeOutputTypeEnum.hidden,
      targets: []
    },
    Output_Template_Finish
  ]
};

View File

@ -8,7 +8,7 @@ export const UserGuideModule: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.userGuide,
flowType: FlowNodeTypeEnum.userGuide,
avatar: '/imgs/module/userGuide.png',
name: 'core.module.template.User guide',
name: '全局配置',
intro: userGuideTip,
inputs: [
{

View File

@ -16,8 +16,8 @@ export const UserInputModule: FlowModuleTemplateType = {
templateType: ModuleTemplateTypeEnum.systemInput,
flowType: FlowNodeTypeEnum.questionInput,
avatar: '/imgs/module/userChatInput.svg',
name: 'core.module.template.Chat entrance',
intro: 'core.module.template.Chat entrance intro',
name: '对话入口',
intro: '当用户发送一个内容后,流程将会从这个模块开始执行。',
inputs: [
{
key: ModuleInputKeyEnum.userChatInput,

View File

@ -5,10 +5,16 @@ import {
ModuleTemplateTypeEnum,
VariableInputEnum
} from './constants';
import { DispatchNodeResponseKeyEnum } from './runtime/constants';
import { FlowNodeInputItemType, FlowNodeOutputItemType } from './node/type';
import { UserModelSchema } from 'support/user/type';
import { moduleDispatchResType } from '..//chat/type';
import { ChatModuleUsageType } from '../../support/wallet/bill/type';
import {
ChatItemValueItemType,
ToolRunResponseItemType,
UserChatItemValueItemType
} from '../chat/type';
import { ChatNodeUsageType } from '../../support/wallet/bill/type';
import { RunningModuleItemType } from './runtime/type';
export type FlowModuleTemplateType = {
id: string; // module id, unique
@ -17,6 +23,7 @@ export type FlowModuleTemplateType = {
avatar?: string;
name: string;
intro: string; // template list intro
isTool?: boolean; // can be connected by tool
showStatus?: boolean; // chatting response step status
inputs: FlowNodeInputItemType[];
outputs: FlowNodeOutputItemType[];
@ -44,6 +51,9 @@ export type ModuleItemType = {
showStatus?: boolean;
inputs: FlowNodeInputItemType[];
outputs: FlowNodeOutputItemType[];
// runTime field
isEntry?: boolean;
};
/* --------------- function type -------------------- */
@ -85,30 +95,6 @@ export type ContextExtractAgentItemType = {
};
/* -------------- running module -------------- */
export type RunningModuleItemType = {
name: ModuleItemType['name'];
moduleId: ModuleItemType['moduleId'];
flowType: ModuleItemType['flowType'];
showStatus?: ModuleItemType['showStatus'];
} & {
inputs: {
key: string;
value?: any;
valueType?: `${ModuleIOValueTypeEnum}`;
}[];
outputs: {
key: string;
answer?: boolean;
response?: boolean;
value?: any;
valueType?: `${ModuleIOValueTypeEnum}`;
targets: {
moduleId: string;
key: string;
}[];
}[];
};
export type ChatDispatchProps = {
res: NextApiResponse;
mode: 'test' | 'chat';
@ -120,15 +106,13 @@ export type ChatDispatchProps = {
responseChatItemId?: string;
histories: ChatItemType[];
variables: Record<string, any>;
inputFiles?: UserChatItemValueItemType['file'][];
stream: boolean;
detail: boolean; // response detail
};
export type ModuleDispatchProps<T> = ChatDispatchProps & {
module: RunningModuleItemType;
runtimeModules: RunningModuleItemType[];
params: T;
};
export type ModuleDispatchResponse<T> = T & {
[ModuleOutputKeyEnum.responseData]?: moduleDispatchResType;
[ModuleOutputKeyEnum.moduleDispatchBills]?: ChatModuleUsageType[];
};

View File

@ -10,6 +10,7 @@ import { AppTTSConfigType, ModuleItemType, VariableItemType } from './type';
import { Input_Template_Switch } from './template/input';
import { EditorVariablePickerType } from '../../../web/components/common/Textarea/PromptEditor/type';
/* module */
export const getGuideModule = (modules: ModuleItemType[]) =>
modules.find((item) => item.flowType === FlowNodeTypeEnum.userGuide);
@ -57,13 +58,13 @@ export const getModuleInputUiField = (input: FlowNodeInputItemType) => {
return {};
};
export function plugin2ModuleIO(
export const plugin2ModuleIO = (
pluginId: string,
modules: ModuleItemType[]
): {
inputs: FlowNodeInputItemType[];
outputs: FlowNodeOutputItemType[];
} {
} => {
const pluginInput = modules.find((module) => module.flowType === FlowNodeTypeEnum.pluginInput);
const pluginOutput = modules.find((module) => module.flowType === FlowNodeTypeEnum.pluginOutput);
@ -99,7 +100,7 @@ export function plugin2ModuleIO(
}))
: []
};
}
};
export const formatEditorVariablePickerIcon = (
variables: { key: string; label: string; type?: `${VariableInputEnum}` }[]

View File

@ -6,7 +6,7 @@
"dayjs": "^1.11.7",
"encoding": "^0.1.13",
"js-tiktoken": "^1.0.7",
"openai": "4.23.0",
"openai": "4.28.0",
"nanoid": "^4.0.1",
"timezones-list": "^3.0.2"
},

View File

@ -1,4 +1,4 @@
import type { HistoryItemType, ChatSiteItemType } from '../../core/chat/type.d';
import type { HistoryItemType } from '../../core/chat/type.d';
import { OutLinkSchema } from './type.d';
export type AuthOutLinkInitProps = {

View File

@ -22,7 +22,7 @@ export type BillSchemaType = {
username: string;
};
export type ChatModuleUsageType = {
export type ChatNodeUsageType = {
tokens?: number;
totalPoints: number;
moduleName: string;

View File

@ -1,7 +0,0 @@
export enum sseResponseEventEnum {
error = 'error',
answer = 'answer', // animation stream
response = 'response', // direct response, not animation
moduleStatus = 'moduleStatus',
appStreamResponse = 'appStreamResponse' // sse response request
}

View File

@ -1,5 +1,5 @@
import type { NextApiResponse } from 'next';
import { sseResponseEventEnum } from './constant';
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
import { proxyError, ERROR_RESPONSE, ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
import { addLog } from '../system/log';
import { clearCookie } from '../../support/permission/controller';
@ -70,7 +70,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
return responseWrite({
res,
event: sseResponseEventEnum.error,
event: SseResponseEventEnum.error,
data: JSON.stringify(ERROR_RESPONSE[errResponseKey])
});
}
@ -90,7 +90,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
responseWrite({
res,
event: sseResponseEventEnum.error,
event: SseResponseEventEnum.error,
data: JSON.stringify({ message: replaceSensitiveText(msg) })
});
};
@ -132,3 +132,22 @@ export function responseWrite({
event && Write(`event: ${event}\n`);
Write(`data: ${data}\n\n`);
}
/**
 * Push a flowNodeStatus SSE event so the client can show which node is
 * currently running. No-op target handling is delegated to responseWrite
 * (res may be undefined).
 */
export const responseWriteNodeStatus = ({
  res,
  status = 'running',
  name
}: {
  res?: NextApiResponse;
  status?: 'running';
  name: string;
}) => {
  const payload = JSON.stringify({ status, name });

  responseWrite({
    res,
    event: SseResponseEventEnum.flowNodeStatus,
    data: payload
  });
};

View File

@ -1,4 +1,4 @@
import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
import { getAIApi } from '../config';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
@ -8,10 +8,10 @@ export async function createQuestionGuide({
messages,
model
}: {
messages: ChatMessageItemType[];
messages: ChatCompletionMessageParam[];
model: string;
}) {
const concatMessages: ChatMessageItemType[] = [
const concatMessages: ChatCompletionMessageParam[] = [
...messages,
{
role: 'user',

View File

@ -2,6 +2,7 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { getAIApi } from '../config';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
/*
query extension -
@ -133,7 +134,7 @@ A: ${chatBg}
histories: concatFewShot
})
}
];
] as ChatCompletionMessageParam[];
const result = await ai.chat.completions.create({
model: model,
temperature: 0.01,

View File

@ -10,6 +10,7 @@ import {
import { appCollectionName } from '../app/schema';
import { userCollectionName } from '../../support/user/schema';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
export const ChatItemCollectionName = 'chatitems';
@ -54,8 +55,8 @@ const ChatItemSchema = new Schema({
},
value: {
// chat content
type: String,
default: ''
type: Array,
default: []
},
userGoodFeedback: {
type: String
@ -75,7 +76,7 @@ const ChatItemSchema = new Schema({
a: String
}
},
[ModuleOutputKeyEnum.responseData]: {
[DispatchNodeResponseKeyEnum.nodeResponse]: {
type: Array,
default: []
}

View File

@ -1,6 +1,7 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import type { ChatItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { MongoChatItem } from './chatItemSchema';
import { addLog } from '../../common/system/log';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
export async function getChatItems({
appId,
@ -24,8 +25,27 @@ export async function getChatItems({
history.reverse();
history.forEach((item) => {
// @ts-ignore
item.value = adaptStringValue(item.value);
});
return { history };
}
/* Temporary adapter for legacy chat records whose value was stored as a plain
   string; remove once the data cleanup is done (scheduled for v4.30). */
export const adaptStringValue = (value: any): ChatItemValueItemType[] => {
  // Already in the new array format (or nullish) — pass through untouched.
  if (typeof value !== 'string') {
    return value;
  }

  // Legacy string value: wrap it as a single text item.
  const textItem = {
    type: ChatItemValueTypeEnum.text,
    text: {
      content: value
    }
  };
  return [textItem];
};
export const addCustomFeedbacks = async ({
appId,

View File

@ -1,21 +1,40 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum, IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import type { ChatCompletionContentPart } from '@fastgpt/global/core/ai/type.d';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import type {
ChatCompletionContentPart,
ChatCompletionMessageParam
} from '@fastgpt/global/core/ai/type.d';
import axios from 'axios';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
/* slice chat context by tokens */
export function ChatContextFilter({
export function filterGPTMessageByMaxTokens({
messages = [],
maxTokens
}: {
messages: ChatItemType[];
messages: ChatCompletionMessageParam[];
maxTokens: number;
}) {
if (!Array.isArray(messages)) {
return [];
}
const rawTextLen = messages.reduce((sum, item) => sum + item.value.length, 0);
const rawTextLen = messages.reduce((sum, item) => {
if (typeof item.content === 'string') {
return sum + item.content.length;
}
if (Array.isArray(item.content)) {
return (
sum +
item.content.reduce((sum, item) => {
if (item.type === 'text') {
return sum + item.text.length;
}
return sum;
}, 0)
);
}
return sum;
}, 0);
// If the text length is less than half of the maximum token, no calculation is required
if (rawTextLen < maxTokens * 0.5) {
@ -23,19 +42,21 @@ export function ChatContextFilter({
}
// filter startWith system prompt
const chatStartIndex = messages.findIndex((item) => item.obj !== ChatRoleEnum.System);
const systemPrompts: ChatItemType[] = messages.slice(0, chatStartIndex);
const chatPrompts: ChatItemType[] = messages.slice(chatStartIndex);
const chatStartIndex = messages.findIndex(
(item) => item.role !== ChatCompletionRequestMessageRoleEnum.System
);
const systemPrompts: ChatCompletionMessageParam[] = messages.slice(0, chatStartIndex);
const chatPrompts: ChatCompletionMessageParam[] = messages.slice(chatStartIndex);
// reduce token of systemPrompt
maxTokens -= countMessagesTokens(systemPrompts);
maxTokens -= countGptMessagesTokens(systemPrompts);
// Save the last chat prompt(question)
const question = chatPrompts.pop();
if (!question) {
return systemPrompts;
}
const chats: ChatItemType[] = [question];
const chats: ChatCompletionMessageParam[] = [question];
// 从后往前截取对话内容, 每次需要截取2个
while (1) {
@ -45,7 +66,7 @@ export function ChatContextFilter({
break;
}
const tokens = countMessagesTokens([assistant, user]);
const tokens = countGptMessagesTokens([assistant, user]);
maxTokens -= tokens;
/* 整体 tokens 超出范围,截断 */
if (maxTokens < 0) {
@ -62,6 +83,30 @@ export function ChatContextFilter({
return [...systemPrompts, ...chats];
}
/**
 * Normalize GPT messages just before sending the request:
 * - drop messages with empty content (undefined, null, '' or [])
 * - trim plain-string content
 * - collapse a single-text-part array into a plain string (text kept verbatim)
 * Multi-part array content is passed through unchanged.
 */
export const formatGPTMessagesInRequestBefore = (messages: ChatCompletionMessageParam[]) => {
  const formatted: ChatCompletionMessageParam[] = [];

  for (const message of messages) {
    const { content } = message;
    if (!content) continue; // undefined / null / ''

    if (typeof content === 'string') {
      formatted.push({ ...message, content: content.trim() });
      continue;
    }

    // array content
    if (content.length === 0) continue;
    if (content.length === 1 && content[0].type === 'text') {
      formatted.push({ ...message, content: content[0].text });
      continue;
    }
    formatted.push(message);
  }

  return formatted;
};
/**
string to vision model. Follow the markdown code block rule for interception:
@ -175,3 +220,21 @@ export async function formatStr2ChatContent(str: string) {
return content ? content : null;
}
/**
 * Inline remote images referenced in vision-style chat content as base64
 * data URLs. Plain string content is returned unchanged; for array content,
 * each image_url part is fetched over HTTP and its url replaced in place.
 * NOTE(review): mutates the input items' image_url objects, and any failed
 * fetch rejects the whole Promise.all — confirm callers expect both.
 */
export const loadChatImgToBase64 = async (content: string | ChatCompletionContentPart[]) => {
  if (typeof content === 'string') {
    return content;
  }
  return Promise.all(
    content.map(async (item) => {
      if (item.type === 'text') return item;
      // load image
      const response = await axios.get(item.image_url.url, {
        responseType: 'arraybuffer'
      });
      const base64 = Buffer.from(response.data).toString('base64');
      // rewrite the remote URL to a self-contained data: URL
      item.image_url.url = `data:${response.headers['content-type']};base64,${base64}`;
      return item;
    })
  );
};

View File

@ -25,12 +25,12 @@ export const pushResult2Remote = async ({
outLinkUid,
shareId,
appName,
responseData
flowResponses
}: {
outLinkUid?: string; // raw id, not parse
shareId?: string;
appName: string;
responseData?: ChatHistoryItemResType[];
flowResponses?: ChatHistoryItemResType[];
}) => {
if (!shareId || !outLinkUid || !FastGPTProUrl) return;
try {
@ -46,7 +46,7 @@ export const pushResult2Remote = async ({
data: {
token: outLinkUid,
appName,
responseData
responseData: flowResponses
}
});
} catch (error) {}

View File

@ -7,7 +7,8 @@ import {
ModalCloseButton,
ModalContentProps,
Box,
Image
Image,
useMediaQuery
} from '@chakra-ui/react';
import MyIcon from '../Icon';
@ -31,12 +32,14 @@ const CustomModal = ({
maxW = ['90vw', '600px'],
...props
}: MyModalProps) => {
const [isPc] = useMediaQuery('(min-width: 900px)');
return (
<Modal
isOpen={isOpen}
onClose={() => onClose && onClose()}
autoFocus={false}
isCentered={isCentered}
isCentered={isPc ? isCentered : true}
>
<ModalOverlay />
<ModalContent

View File

@ -6,6 +6,7 @@ export const iconPaths = {
collectionLight: () => import('./icons/collectionLight.svg'),
collectionSolid: () => import('./icons/collectionSolid.svg'),
'common/addCircleLight': () => import('./icons/common/addCircleLight.svg'),
'common/addLight': () => import('./icons/common/addLight.svg'),
'common/backFill': () => import('./icons/common/backFill.svg'),
'common/backLight': () => import('./icons/common/backLight.svg'),
'common/clearLight': () => import('./icons/common/clearLight.svg'),

View File

@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 13 12" fill="none">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M6.5 2C6.77614 2 7 2.22386 7 2.5V5.5H10C10.2761 5.5 10.5 5.72386 10.5 6C10.5 6.27614 10.2761 6.5 10 6.5H7V9.5C7 9.77614 6.77614 10 6.5 10C6.22386 10 6 9.77614 6 9.5V6.5H3C2.72386 6.5 2.5 6.27614 2.5 6C2.5 5.72386 2.72386 5.5 3 5.5H6V2.5C6 2.22386 6.22386 2 6.5 2Z" />
</svg>

After

Width:  |  Height:  |  Size: 408 B

View File

@ -1,8 +1,4 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1694067364830"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="5118"
xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 12 12">
<path
d="M727.950222 274.773333l-55.296-9.329777a38.741333 38.741333 0 0 0-12.856889 76.344888l193.308445 32.597334c1.991111 0.113778 1.991111 0.113778 2.844444 0 2.275556 0.227556 4.266667 0.113778 7.850667-0.284445l0.682667-0.056889a28.216889 28.216889 0 0 0 5.632-0.967111c1.080889 0 1.080889 0 3.185777-0.568889a15.530667 15.530667 0 0 0 4.039111-2.332444l1.137778-0.796444 0.796445-0.398223a28.444444 28.444444 0 0 0 4.152889-2.730666 37.091556 37.091556 0 0 0 6.542222-6.826667l0.796444-0.967111c1.080889-1.422222 1.080889-1.422222 2.161778-3.128889a37.432889 37.432889 0 0 0 3.697778-9.557333c0.568889-1.194667 0.568889-1.194667 1.137778-3.128889 0.113778-1.763556 0.113778-1.763556 0-2.503111v0.910222a36.579556 36.579556 0 0 0-0.341334-10.24l-0.113778-0.967111a22.755556 22.755556 0 0 0-0.682666-3.982222c0-1.080889 0-1.080889-0.568889-3.128889l-68.494222-183.751111a38.798222 38.798222 0 0 0-49.777778-22.755556 38.798222 38.798222 0 0 0-22.755556 49.777778l16.270223 43.804444A397.880889 397.880889 0 0 0 512 113.777778C292.408889 113.777778 113.777778 292.408889 113.777778 512s178.631111 398.222222 398.222222 398.222222 398.222222-178.631111 398.222222-398.222222a38.684444 38.684444 0 1 0-77.368889 0c0 176.924444-143.928889 320.853333-320.853333 320.853333S191.146667 688.924444 191.146667 512 335.075556 191.146667 512 191.146667c80.099556 0 157.070222 29.980444 215.950222 83.626666z"
p-id="5119"></path>
d="M4.82661 10.9785C3.86099 10.7071 3.07349 10.1763 2.46411 9.38585C1.85474 8.59543 1.55005 7.68544 1.55005 6.65587C1.55005 6.12256 1.63911 5.61489 1.81724 5.13286C1.99536 4.65082 2.24849 4.20883 2.57661 3.80688C2.67974 3.69461 2.8063 3.63604 2.9563 3.63117C3.1063 3.62631 3.24224 3.68488 3.36411 3.80688C3.46724 3.9098 3.52124 4.03611 3.52611 4.18581C3.53099 4.33551 3.48167 4.47586 3.37817 4.60685C3.15317 4.89689 2.97974 5.215 2.85786 5.56119C2.73599 5.90737 2.67505 6.27226 2.67505 6.65587C2.67505 7.41373 2.8978 8.08981 3.3433 8.68413C3.7888 9.27844 4.36292 9.683 5.06567 9.89782C5.18755 9.93525 5.28824 10.0054 5.36774 10.1083C5.44724 10.2113 5.48717 10.3235 5.48755 10.4452C5.48755 10.6323 5.42192 10.7797 5.29067 10.8875C5.15942 10.9953 5.00474 11.0256 4.82661 10.9785ZM7.27349 10.9785C7.09536 11.0253 6.94067 10.9925 6.80942 10.8802C6.67817 10.768 6.61255 10.6183 6.61255 10.4311C6.61255 10.3189 6.65249 10.2113 6.73236 10.1083C6.81224 10.0054 6.91292 9.93525 7.03442 9.89782C7.73755 9.67327 8.31186 9.26627 8.75736 8.67683C9.20286 8.08738 9.42542 7.41373 9.42505 6.65587C9.42505 5.72024 9.09692 4.92496 8.44067 4.27002C7.78442 3.61508 6.98755 3.28761 6.05005 3.28761H6.00786L6.23286 3.51216C6.33599 3.61508 6.38755 3.74607 6.38755 3.90512C6.38755 4.06418 6.33599 4.19517 6.23286 4.29809C6.12974 4.40101 5.99849 4.45247 5.83911 4.45247C5.67974 4.45247 5.54849 4.40101 5.44536 4.29809L4.26411 3.1192C4.20786 3.06306 4.16811 3.00224 4.14486 2.93675C4.12161 2.87126 4.1098 2.80108 4.10942 2.72623C4.10942 2.65138 4.12124 2.58121 4.14486 2.51572C4.16849 2.45022 4.20824 2.38941 4.26411 2.33327L5.44536 1.15438C5.54849 1.05146 5.67974 1 5.83911 1C5.99849 1 6.12974 1.05146 6.23286 1.15438C6.33599 1.2573 6.38755 1.38829 6.38755 1.54734C6.38755 1.7064 6.33599 1.83739 6.23286 1.94031L6.00786 2.16486H6.05005C7.3063 2.16486 8.37036 2.59992 9.24224 3.47006C10.1141 4.34019 10.55 5.40213 10.55 6.65587C10.55 7.67571 10.2454 8.58326 9.63599 9.37855C9.02661 10.1738 8.23911 10.7071 7.27349 10.9785Z" />
</svg>

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -1 +1,8 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1689057990782" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1770" xmlns:xlink="http://www.w3.org/1999/xlink"><path d="M878.5 255.1H770V146.7c0-43.6-35.5-79.1-79.1-79.1H145.2c-43.6 0-79.1 35.5-79.1 79.1v545.8c0 43.6 35.5 79.1 79.1 79.1h108.4V880c0 43.6 35.5 79.1 79.1 79.1h545.8c43.6 0 79.1-35.5 79.1-79.1V334.2c-0.1-43.6-35.6-79.1-79.1-79.1zM145.2 707.5c-8.3 0-15.1-6.8-15.1-15.1V146.7c0-8.3 6.8-15.1 15.1-15.1H691c8.3 0 15.1 6.8 15.1 15.1v545.8c0 8.3-6.8 15.1-15.1 15.1H145.2zM893.5 880c0 8.3-6.8 15.1-15.1 15.1H332.7c-8.3 0-15.1-6.8-15.1-15.1V771.5H691c43.6 0 79.1-35.5 79.1-79.1V319.1h108.4c8.3 0 15.1 6.8 15.1 15.1V880z" p-id="1771"></path></svg>
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1689057990782"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1770"
xmlns:xlink="http://www.w3.org/1999/xlink">
<path
d="M878.5 255.1H770V146.7c0-43.6-35.5-79.1-79.1-79.1H145.2c-43.6 0-79.1 35.5-79.1 79.1v545.8c0 43.6 35.5 79.1 79.1 79.1h108.4V880c0 43.6 35.5 79.1 79.1 79.1h545.8c43.6 0 79.1-35.5 79.1-79.1V334.2c-0.1-43.6-35.6-79.1-79.1-79.1zM145.2 707.5c-8.3 0-15.1-6.8-15.1-15.1V146.7c0-8.3 6.8-15.1 15.1-15.1H691c8.3 0 15.1 6.8 15.1 15.1v545.8c0 8.3-6.8 15.1-15.1 15.1H145.2zM893.5 880c0 8.3-6.8 15.1-15.1 15.1H332.7c-8.3 0-15.1-6.8-15.1-15.1V771.5H691c43.6 0 79.1-35.5 79.1-79.1V319.1h108.4c8.3 0 15.1 6.8 15.1 15.1V880z"
p-id="1771"></path>
</svg>

Before

Width:  |  Height:  |  Size: 840 B

After

Width:  |  Height:  |  Size: 871 B

View File

@ -1,8 +1,4 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1702264166621"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="3126"
xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128">
<path
d="M743.328 31.04c71.232 0 127.008 32.96 157.568 93.632 2.784 5.504 3.872 10.944 4.928 16.256 2.24 11.04 5.6 27.776 30.432 57.536 36.32 43.52 29.984 90.08 24.896 127.488-1.312 9.76-2.656 19.456-3.232 29.024-1.856 31.648 3.968 41.184 19.488 66.656 3.136 5.216 6.592 10.848 10.272 17.184 23.68 40.896 22.752 90.912-2.464 133.792-28.96 49.248-82.624 78.624-143.488 78.624-51.552 0-108.256-1.12-141.408-1.952 8.64 50.048 25.44 154.528 25.44 210.656 0 92.64-67.04 125.504-124.448 125.504-63.68 0-113.536-58.688-113.536-133.6 0-57.92 0-65.344-22.976-97.088-60.608-83.776-122.464-142.272-150.304-142.272L117.76 612.48c-49.056 0-88.992-39.392-88.992-87.776L28.768 118.848c0-48.416 39.904-87.808 88.992-87.808l133.888 0c12.896 0 23.392 10.368 23.392 23.104l0 512.224 39.424 0c62.176 0 144.704 101.28 188.416 161.632 31.68 43.744 31.68 63.968 31.68 123.904 0 48.192 29.92 87.424 66.784 87.424 23.296 0 77.632-7.712 77.632-79.328 0-69.824-29.344-228.704-29.664-230.304-1.28-6.848 0.64-13.92 5.28-19.2 4.608-5.28 10.688-8.288 18.464-8 0.864 0 90.72 2.656 168.672 2.656 44.032 0 82.528-20.832 103.008-55.648 16.768-28.48 17.632-61.28 2.304-87.712-3.456-6.016-6.72-11.392-9.792-16.32-16.704-27.456-28.736-47.264-26.048-93.12 0.608-10.752 2.08-21.6 3.552-32.512 4.672-34.368 8.704-64.064-14.624-92.032-32.352-38.784-37.28-63.296-40.192-77.92-0.416-1.984-0.672-3.712-1.408-5.568-15.392-30.624-47.872-67.136-115.168-67.136l-316.704 0c-12.896 0-23.392-10.336-23.392-23.072 0-12.768 10.496-23.104 23.392-23.104L743.328 31.008 743.328 31.04zM117.76 566.368l110.496 0L228.256 77.248 117.76 77.248c-23.232 0-42.176 18.656-42.176 41.632L75.584 524.8C75.552 547.712 94.496 566.368 117.76 566.368L117.76 566.368zM117.76 566.368"
p-id="3127"></path>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 12 12">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M6.89916 10.544C6.75871 10.8601 6.44531 11.0637 6.09948 11.0637C5.20183 11.0637 4.47414 10.336 4.47414 9.43838V7.86912H2.91903C1.77347 7.86912 0.897458 6.84803 1.07165 5.7158L1.56312 2.5212C1.7034 1.60939 2.48796 0.936302 3.4105 0.936302H9.53785C10.3181 0.936302 10.9506 1.56881 10.9506 2.34904V5.54364C10.9506 6.32388 10.3181 6.95638 9.53785 6.95638H8.49367L6.89916 10.544ZM7.66874 1.9363H3.4105C2.98153 1.9363 2.61672 2.24928 2.55149 2.67326L2.06002 5.86786C1.97902 6.39433 2.38636 6.86912 2.91903 6.86912H4.51777C5.04596 6.86912 5.47414 7.29731 5.47414 7.8255V9.43838C5.47414 9.757 5.71243 10.02 6.02051 10.0588L7.59138 6.52433C7.61342 6.47474 7.63936 6.42768 7.66874 6.38345V1.9363ZM8.66874 5.95638V1.9363H9.53785C9.7658 1.9363 9.95059 2.12109 9.95059 2.34904V5.54364C9.95059 5.77159 9.7658 5.95638 9.53785 5.95638H8.66874Z" />
</svg>

Before

Width:  |  Height:  |  Size: 2.0 KiB

After

Width:  |  Height:  |  Size: 960 B

View File

@ -1,9 +1,4 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1702264142648"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2973"
data-spm-anchor-id="a313x.manage_type_myprojects.0.i0.65e73a81QqWlcB" xmlns:xlink="http://www.w3.org/1999/xlink"
width="128" height="128">
<path
d="M743.328 985.536l-316.704 0c-12.896 0-23.392-10.368-23.392-23.104s10.496-23.072 23.392-23.072l316.704 0c67.296 0 99.808-36.512 115.168-67.136 0.768-1.856 1.024-3.552 1.408-5.568 2.912-14.624 7.84-39.168 40.192-77.92 23.328-27.968 19.328-57.664 14.624-92.032-1.472-10.912-2.944-21.76-3.552-32.512-2.688-45.856 9.344-65.664 26.048-93.12 3.04-4.928 6.304-10.304 9.792-16.32 15.328-26.432 14.464-59.232-2.304-87.712-20.48-34.816-59.008-55.648-103.008-55.648-77.952 0-167.808 2.656-168.672 2.656-7.776 0.288-13.888-2.72-18.464-8-4.64-5.28-6.528-12.352-5.28-19.2 0.32-1.6 29.664-160.48 29.664-230.304 0-71.616-54.336-79.328-77.632-79.328-36.832 0-66.784 39.2-66.784 87.424 0 59.936 0 80.16-31.68 123.904-43.712 60.352-126.24 161.632-188.416 161.632L275.04 450.176l0 512.224c0 12.768-10.496 23.104-23.392 23.104L117.76 985.504c-49.056 0-88.992-39.392-88.992-87.808L28.768 491.808c0-48.384 39.904-87.776 88.992-87.776l196.704 0c27.84 0 89.696-58.496 150.304-142.272 22.976-31.712 22.976-39.168 22.976-97.088 0-74.944 49.856-133.6 113.536-133.6 57.408 0 124.448 32.896 124.448 125.504 0 56.128-16.8 160.576-25.44 210.656 33.184-0.8 89.856-1.952 141.408-1.952 60.864 0 114.56 29.376 143.488 78.624 25.216 42.88 26.144 92.928 2.464 133.792-3.68 6.336-7.104 11.968-10.272 17.184-15.52 25.472-21.344 35.008-19.488 66.656 0.576 9.568 1.952 19.296 3.232 29.024 5.088 37.408 11.424 83.968-24.896 127.488-24.832 29.792-28.192 46.496-30.432 57.536-1.056 5.28-2.176 10.752-4.928 16.256C870.336 952.544 814.56 985.536 743.328 985.536L743.328 985.536zM117.76 450.208c-23.232 0-42.176 18.656-42.176 41.6l0 405.952c0 22.976 18.944 41.632 42.176 41.632l110.496 0L228.256 450.208 117.76 450.208 117.76 450.208zM117.76 450.208"
p-id="2974"></path>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 12 12">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M5.10094 1.45598C5.24139 1.13995 5.55479 0.936279 5.90063 0.936279C6.79827 0.936279 7.52596 1.66397 7.52596 2.56162V4.13088H9.08108C10.2266 4.13088 11.1026 5.15197 10.9285 6.2842L10.437 9.4788C10.2967 10.3906 9.51214 11.0637 8.5896 11.0637H2.46225C1.68202 11.0637 1.04951 10.4312 1.04951 9.65096V6.45636C1.04951 5.67612 1.68202 5.04362 2.46225 5.04362H3.50643L5.10094 1.45598ZM4.33137 10.0637H8.5896C9.01857 10.0637 9.38338 9.75072 9.44861 9.32674L9.94008 6.13214C10.0211 5.60567 9.61374 5.13088 9.08108 5.13088H7.48233C6.95414 5.13088 6.52596 4.70269 6.52596 4.1745V2.56162C6.52596 2.243 6.28768 1.98004 5.97959 1.94122L4.40873 5.47567C4.38668 5.52526 4.36075 5.57232 4.33137 5.61655V10.0637ZM3.33137 6.04362V10.0637H2.46225C2.2343 10.0637 2.04951 9.87891 2.04951 9.65096V6.45636C2.04951 6.22841 2.2343 6.04362 2.46225 6.04362H3.33137Z" />
</svg>

Before

Width:  |  Height:  |  Size: 2.1 KiB

After

Width:  |  Height:  |  Size: 969 B

View File

@ -1 +1,4 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1681997838051" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="4520" xmlns:xlink="http://www.w3.org/1999/xlink" width="48" height="48"><path d="M898 178.7H665.3c4.3-9.8 6.7-20.6 6.7-32 0-44-36-80-80-80H432c-44 0-80 36-80 80 0 11.4 2.4 22.2 6.7 32H126c-13.2 0-24 10.8-24 24s10.8 24 24 24h772c13.2 0 24-10.8 24-24s-10.8-24-24-24z m-466 0c-8.5 0-16.5-3.4-22.6-9.4-6.1-6.1-9.4-14.1-9.4-22.6s3.4-16.5 9.4-22.6c6.1-6.1 14.1-9.4 22.6-9.4h160c8.5 0 16.5 3.4 22.6 9.4 6.1 6.1 9.4 14.1 9.4 22.6 0 8.5-3.4 16.5-9.4 22.6-6.1 6.1-14.1 9.4-22.6 9.4H432zM513 774.7c18.1 0 33-14.8 33-33v-334c0-18.1-14.9-33-33-33h-2c-18.1 0-33 14.8-33 33v334c0 18.2 14.8 33 33 33h2zM363 774.7c18.1 0 33-14.8 33-33v-334c0-18.1-14.9-33-33-33h-2c-18.1 0-33 14.8-33 33v334c0 18.2 14.8 33 33 33h2zM663 774.7c18.1 0 33-14.8 33-33v-334c0-18.1-14.9-33-33-33h-2c-18.1 0-33 14.8-33 33v334c0 18.2 14.8 33 33 33h2z" p-id="4521"></path><path d="M812 280.7c-13.3 0-24 10.7-24 24v530c0 41.9-34.1 76-76 76H312c-41.9 0-76-34.1-76-76v-530c0-13.3-10.7-24-24-24s-24 10.7-24 24v530c0 68.4 55.6 124 124 124h400c68.4 0 124-55.6 124-124v-530c0-13.2-10.7-24-24-24z" p-id="4522"></path></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 12 12">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M5.61187 0.885132H6.38814C6.6304 0.885123 6.84165 0.885114 7.0161 0.899368C7.20093 0.914469 7.38803 0.948028 7.56898 1.04023C7.83672 1.17665 8.05441 1.39434 8.19083 1.66208C8.28303 1.84303 8.31659 2.03013 8.33169 2.21496C8.34371 2.362 8.34559 2.53519 8.34588 2.73106H10.1533C10.4295 2.73106 10.6533 2.95491 10.6533 3.23106C10.6533 3.5072 10.4295 3.73106 10.1533 3.73106H9.73037V8.42004C9.73038 8.79037 9.73038 9.09832 9.70985 9.34966C9.68846 9.61144 9.64232 9.85552 9.52498 10.0858C9.34431 10.4404 9.05603 10.7287 8.70145 10.9094C8.47114 11.0267 8.22706 11.0728 7.96528 11.0942C7.71394 11.1148 7.40599 11.1148 7.03568 11.1148H4.96432C4.59401 11.1148 4.28606 11.1148 4.03473 11.0942C3.77295 11.0728 3.52887 11.0267 3.29856 10.9094C2.94398 10.7287 2.65569 10.4404 2.47503 10.0858C2.35768 9.85552 2.31154 9.61144 2.29016 9.34966C2.26962 9.09832 2.26963 8.79037 2.26963 8.42005L2.26963 3.73106H1.84667C1.57053 3.73106 1.34667 3.5072 1.34667 3.23106C1.34667 2.95491 1.57053 2.73106 1.84667 2.73106H3.65413C3.65442 2.53519 3.6563 2.362 3.66831 2.21496C3.68341 2.03013 3.71697 1.84303 3.80917 1.66208C3.9456 1.39434 4.16328 1.17665 4.43103 1.04023C4.61198 0.948028 4.79907 0.914469 4.9839 0.899368C5.15836 0.885114 5.36961 0.885123 5.61187 0.885132ZM3.26963 3.73106V8.39965C3.26963 8.79558 3.27002 9.06245 3.28684 9.26823C3.30317 9.46817 3.33249 9.566 3.36603 9.63184C3.45083 9.79826 3.58613 9.93356 3.75255 10.0184C3.81839 10.0519 3.91622 10.0812 4.11616 10.0976C4.32194 10.1144 4.58881 10.1148 4.98474 10.1148H7.01526C7.41119 10.1148 7.67807 10.1144 7.88384 10.0976C8.08378 10.0812 8.18161 10.0519 8.24746 10.0184C8.41387 9.93356 8.54918 9.79826 8.63397 9.63184C8.66752 9.566 8.69683 9.46817 8.71317 9.26823C8.72998 9.06245 8.73037 8.79558 8.73037 8.39965V3.73106H3.26963ZM7.34584 2.73106H4.65416C4.65452 2.53723 4.6563 2.40278 4.66499 2.29639C4.67504 2.1734 4.69178 2.13256 4.70018 2.11607C4.74073 2.03649 4.80543 1.97178 4.88502 1.93123C4.9015 1.92283 4.94235 1.9061 5.06533 1.89605C5.19416 1.88552 
5.36411 1.88513 5.63082 1.88513H6.36919C6.63589 1.88513 6.80585 1.88552 6.93467 1.89605C7.05766 1.9061 7.0985 1.92283 7.11498 1.93123C7.19457 1.97178 7.25928 2.03649 7.29983 2.11607C7.30822 2.13256 7.32496 2.1734 7.33501 2.29639C7.3437 2.40278 7.34548 2.53723 7.34584 2.73106ZM5.07704 5.2692C5.35318 5.2692 5.57704 5.49306 5.57704 5.7692V8.07661C5.57704 8.35275 5.35318 8.57661 5.07704 8.57661C4.8009 8.57661 4.57704 8.35275 4.57704 8.07661V5.7692C4.57704 5.49306 4.8009 5.2692 5.07704 5.2692ZM6.92296 5.2692C7.19911 5.2692 7.42296 5.49306 7.42296 5.7692V8.07661C7.42296 8.35275 7.19911 8.57661 6.92296 8.57661C6.64682 8.57661 6.42296 8.35275 6.42296 8.07661V5.7692C6.42296 5.49306 6.64682 5.2692 6.92296 5.2692Z" />
</svg>

Before

Width:  |  Height:  |  Size: 1.3 KiB

After

Width:  |  Height:  |  Size: 2.8 KiB

View File

@ -1 +1,14 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1683436563791" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1062" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M162.42900021 628.38449281c-64.29851769 0-116.48887344-52.19035577-116.48887345-116.48887345s52.19035577-116.48887344 116.48887345-116.48887345 116.48887344 52.19035577 116.48887344 116.48887345S226.72751789 628.38449281 162.42900021 628.38449281z" fill="#575B66" p-id="1063"></path><path d="M511.89561936 628.38449281c-64.29851769 0-116.48887344-52.19035577-116.48887345-116.48887345s52.19035577-116.48887344 116.48887345-116.48887345 116.48887344 52.19035577 116.48887345 116.48887345S576.19413706 628.38449281 511.89561936 628.38449281z" fill="#575B66" p-id="1064"></path><path d="M861.57099979 628.38449281c-64.29851769 0-116.48887344-52.19035577-116.48887344-116.48887345s52.19035577-116.48887344 116.48887344-116.48887345 116.48887344 52.19035577 116.48887345 116.48887345S925.66075619 628.38449281 861.57099979 628.38449281z" fill="#575B66" p-id="1065"></path></svg>
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1683436563791"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1062"
xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64">
<path
d="M162.42900021 628.38449281c-64.29851769 0-116.48887344-52.19035577-116.48887345-116.48887345s52.19035577-116.48887344 116.48887345-116.48887345 116.48887344 52.19035577 116.48887344 116.48887345S226.72751789 628.38449281 162.42900021 628.38449281z"
p-id="1063"></path>
<path
d="M511.89561936 628.38449281c-64.29851769 0-116.48887344-52.19035577-116.48887345-116.48887345s52.19035577-116.48887344 116.48887345-116.48887345 116.48887344 52.19035577 116.48887345 116.48887345S576.19413706 628.38449281 511.89561936 628.38449281z"
p-id="1064"></path>
<path
d="M861.57099979 628.38449281c-64.29851769 0-116.48887344-52.19035577-116.48887344-116.48887345s52.19035577-116.48887344 116.48887344-116.48887345 116.48887344 52.19035577 116.48887345 116.48887345S925.66075619 628.38449281 861.57099979 628.38449281z"
p-id="1065"></path>
</svg>

Before

Width:  |  Height:  |  Size: 1.2 KiB

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@ -1,12 +1,14 @@
import React from 'react';
import { Tooltip, TooltipProps } from '@chakra-ui/react';
import { Tooltip, TooltipProps, useMediaQuery } from '@chakra-ui/react';
interface Props extends TooltipProps {
forceShow?: boolean;
}
const MyTooltip = ({ children, shouldWrapChildren = true, ...props }: Props) => {
return (
const MyTooltip = ({ children, forceShow = false, shouldWrapChildren = true, ...props }: Props) => {
const [isPc] = useMediaQuery('(min-width: 900px)');
return isPc || forceShow ? (
<Tooltip
className="tooltip"
bg={'white'}
@ -25,6 +27,8 @@ const MyTooltip = ({ children, shouldWrapChildren = true, ...props }: Props) =>
>
{children}
</Tooltip>
) : (
<>{children}</>
);
};

View File

@ -0,0 +1,48 @@
import React, { useMemo } from 'react';
import { Flex, type FlexProps } from '@chakra-ui/react';
interface Props extends FlexProps {
children: React.ReactNode | React.ReactNode[];
colorSchema?: 'blue' | 'green' | 'gray' | 'purple';
}
const FillTag = ({ children, colorSchema = 'blue', ...props }: Props) => {
const theme = useMemo(() => {
const map = {
blue: {
bg: 'primary.50',
color: 'primary.600'
},
green: {
bg: 'green.50',
color: 'green.600'
},
purple: {
bg: '#F6EEFA',
color: '#A558C9'
},
gray: {
bg: 'myGray.50',
color: 'myGray.700'
}
};
return map[colorSchema];
}, [colorSchema]);
return (
<Flex
{...theme}
px={2}
lineHeight={1}
py={1}
borderRadius={'sm'}
fontSize={'xs'}
alignItems={'center'}
{...props}
>
{children}
</Flex>
);
};
export default FillTag;

View File

@ -0,0 +1,129 @@
import React, { useCallback, useRef } from 'react';
import {
ModalFooter,
ModalBody,
Input,
useDisclosure,
Button,
Box,
Textarea
} from '@chakra-ui/react';
import MyModal from '../components/common/CustomModal';
import { useToast } from './useToast';
import { useTranslation } from 'next-i18next';
export const useEditTextarea = ({
title,
tip,
placeholder = '',
canEmpty = true,
valueRule
}: {
title: string;
tip?: string;
placeholder?: string;
canEmpty?: boolean;
valueRule?: (val: string) => string | void;
}) => {
const { t } = useTranslation();
const { isOpen, onOpen, onClose } = useDisclosure();
const textareaRef = useRef<HTMLTextAreaElement | null>(null);
const onSuccessCb = useRef<(content: string) => void | Promise<void>>();
const onErrorCb = useRef<(err: any) => void>();
const { toast } = useToast();
const defaultValue = useRef('');
const onOpenModal = useCallback(
({
defaultVal,
onSuccess,
onError
}: {
defaultVal: string;
onSuccess: (content: string) => any;
onError?: (err: any) => void;
}) => {
onOpen();
onSuccessCb.current = onSuccess;
onErrorCb.current = onError;
defaultValue.current = defaultVal;
},
[onOpen]
);
const onclickConfirm = useCallback(async () => {
if (!textareaRef.current || !onSuccessCb.current) return;
const val = textareaRef.current.value;
if (!canEmpty && !val) {
textareaRef.current.focus();
return;
}
if (valueRule) {
const result = valueRule(val);
if (result) {
return toast({
status: 'warning',
title: result
});
}
}
try {
await onSuccessCb.current(val);
onClose();
} catch (err) {
onErrorCb.current?.(err);
}
}, [canEmpty, onClose]);
// eslint-disable-next-line react/display-name
const EditModal = useCallback(
({
maxLength = 30,
iconSrc = 'modal/edit',
closeBtnText = t('common.Close')
}: {
maxLength?: number;
iconSrc?: string;
closeBtnText?: string;
}) => (
<MyModal isOpen={isOpen} onClose={onClose} iconSrc={iconSrc} title={title} maxW={'500px'}>
<ModalBody>
{!!tip && (
<Box mb={2} color={'myGray.500'} fontSize={'sm'}>
{tip}
</Box>
)}
<Textarea
ref={textareaRef}
defaultValue={defaultValue.current}
placeholder={placeholder}
autoFocus
maxLength={maxLength}
rows={10}
bg={'myGray.50'}
/>
</ModalBody>
<ModalFooter>
{!!closeBtnText && (
<Button mr={3} variant={'whiteBase'} onClick={onClose}>
{closeBtnText}
</Button>
)}
<Button onClick={onclickConfirm}>{t('common.Confirm')}</Button>
</ModalFooter>
</MyModal>
),
[isOpen, onClose, onclickConfirm, placeholder, tip, title]
);
return {
onOpenModal,
EditModal
};
};

View File

@ -5,6 +5,7 @@ import { useMutation } from '@tanstack/react-query';
import { throttle } from 'lodash';
import { useToast } from './useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
const thresholdVal = 100;
@ -62,7 +63,7 @@ export function usePagination<T = any>({
onChange && onChange(num);
} catch (error: any) {
toast({
title: error?.message || '获取数据异常',
title: getErrText(error, '获取数据异常'),
status: 'error'
});
console.log(error);

View File

@ -1,13 +1,21 @@
import { useToast as uToast, UseToastOptions } from '@chakra-ui/react';
import { useCallback, useMemo } from 'react';
export const useToast = (props?: UseToastOptions) => {
const toast = uToast({
position: 'top',
duration: 2000,
...(props && props)
...props
});
const myToast = useCallback(
(options?: UseToastOptions) => {
toast(options);
},
[props]
);
return {
toast
toast: myToast
};
};

View File

@ -51,8 +51,8 @@ importers:
specifier: ^4.0.1
version: 4.0.1
openai:
specifier: 4.23.0
version: 4.23.0(encoding@0.1.13)
specifier: 4.28.0
version: 4.28.0(encoding@0.1.13)
timezones-list:
specifier: ^3.0.2
version: 3.0.2
@ -9406,8 +9406,8 @@ packages:
mimic-fn: 4.0.0
dev: true
/openai@4.23.0(encoding@0.1.13):
resolution: {integrity: sha512-ey2CXh1OTcTUa0AWZWuTpgA9t5GuAG3DVU1MofCRUI7fQJij8XJ3Sr0VtgxoAE69C9wbHBMCux8Z/IQZfSwHiA==}
/openai@4.28.0(encoding@0.1.13):
resolution: {integrity: sha512-JM8fhcpmpGN0vrUwGquYIzdcEQHtFuom6sRCbbCM6CfzZXNuRk33G7KfeRAIfnaCxSpzrP5iHtwJzIm6biUZ2Q==}
hasBin: true
dependencies:
'@types/node': 18.19.21

View File

@ -7,7 +7,7 @@
},
"llmModels": [
{
"model": "gpt-3.5-turbo-1106",
"model": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo",
"maxContext": 16000,
"maxResponse": 4000,
@ -17,6 +17,10 @@
"censor": false,
"vision": false,
"datasetProcess": false,
"usedInClassify": true,
"usedInExtractFields": true,
"useInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
@ -35,6 +39,10 @@
"censor": false,
"vision": false,
"datasetProcess": true,
"usedInClassify": true,
"usedInExtractFields": true,
"useInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
@ -53,6 +61,10 @@
"censor": false,
"vision": false,
"datasetProcess": false,
"usedInClassify": true,
"usedInExtractFields": true,
"useInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
@ -71,6 +83,10 @@
"censor": false,
"vision": true,
"datasetProcess": false,
"usedInClassify": false,
"usedInExtractFields": false,
"useInToolCall": false,
"usedInQueryExtension": false,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",

View File

@ -1,6 +1,6 @@
{
"name": "app",
"version": "4.6.9",
"version": "4.7",
"private": false,
"scripts": {
"dev": "next dev",

View File

@ -1,11 +1,7 @@
### Fast GPT V4.6.9
### FastGPT V4.7
1. 新增 - 知识库新增“增强处理”训练模式,可生成更多类型索引。
2. 新增 - 完善了HTTP模块的变量提示。
3. 新增 - HTTP模块支持OpenAI单接口导入。
4. 新增 - 全局变量支持增加外部变量。可通过分享链接的Query或 API 的 variables 参数传入。
5. 新增 - 内容提取模块增加默认值。
6. 优化 - 问题补全。增加英文类型。同时可以设置为单独模块,方便复用。
7. [点击查看高级编排介绍文档](https://doc.fastgpt.in/docs/workflow/intro)
8. [使用文档](https://doc.fastgpt.in/docs/intro/)
9. [点击查看商业版](https://doc.fastgpt.in/docs/commercial/)
1. 新增 - 工具调用模块可以让LLM模型根据用户意图动态的选择其他模型或插件执行。
2. 优化 - 高级编排性能
3. [点击查看高级编排介绍文档](https://doc.fastgpt.in/docs/workflow/intro)
4. [使用文档](https://doc.fastgpt.in/docs/intro/)
5. [点击查看商业版](https://doc.fastgpt.in/docs/commercial/)

View File

@ -0,0 +1,12 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16" fill="none">
<path
d="M7.16966 2.66659H8.98677C9.57588 2.66659 10.0534 3.14415 10.0534 3.73325V3.9589H11.3332C11.3512 3.9589 11.3691 3.9589 11.3868 3.9589V3.73325C11.3868 2.40777 10.3123 1.33325 8.98677 1.33325H7.16966C5.84418 1.33325 4.76966 2.40777 4.76966 3.73325V3.9589H6.10299V3.73325C6.10299 3.14415 6.58056 2.66659 7.16966 2.66659Z"
fill="#6F5DD7" />
<path
d="M14.9573 6.83844H1.04263C1.05186 6.04556 1.09409 5.57531 1.28709 5.19652C1.5044 4.77002 1.85116 4.42327 2.27766 4.20596C2.76253 3.9589 3.39725 3.9589 4.66671 3.9589H11.3332C12.6027 3.9589 13.2374 3.9589 13.7223 4.20596C14.1488 4.42327 14.4955 4.77002 14.7129 5.19652C14.9059 5.57531 14.9481 6.04556 14.9573 6.83844Z"
fill="#6F5DD7" />
<path
d="M9.75703 8.02511H14.9599V11.04C14.9599 12.3094 14.9599 12.9442 14.7129 13.429C14.4955 13.8555 14.1488 14.2023 13.7223 14.4196C13.2374 14.6666 12.6027 14.6666 11.3332 14.6666H4.6667C3.39725 14.6666 2.76253 14.6666 2.27766 14.4196C1.85116 14.2023 1.5044 13.8555 1.28709 13.429C1.04004 12.9442 1.04004 12.3094 1.04004 11.04V8.02511H6.24291V10.2151C6.24291 10.436 6.422 10.6151 6.64291 10.6151H9.35703C9.57794 10.6151 9.75703 10.436 9.75703 10.2151V8.02511Z"
fill="#6F5DD7" />
<path d="M7.30958 8.02511H8.69036V9.54843H7.30958V8.02511Z" fill="#6F5DD7" />
</svg>

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -89,6 +89,7 @@
"Edit": "Edit",
"Exit": "Exit",
"Expired Time": "Expired",
"Field": "Field",
"File": "File",
"Filed is repeat": "Filed is repeated",
"Filed is repeated": "",
@ -409,7 +410,7 @@
"Speaking": "I'm listening...",
"Start Chat": "Start Chat",
"Stop Speak": "Stop Speak",
"Type a message": "Input problem",
"Type a message": "Enter your question here",
"Unpin": "Unpin",
"You need to a chat app": "You don't have apps available",
"error": {
@ -444,9 +445,11 @@
"response": {
"Complete Response": "Complete Response",
"Extension model": "Extension model",
"Plugin Resonse Detail": "Plugin Detail",
"Plugin response detail": "Plugin Detail",
"Read complete response": "Read Detail",
"Read complete response tips": "Click to see the detailed process",
"Tool call response detail": "Tool call detail",
"Tool call tokens": "Tool call tokens",
"context total length": "Context Length",
"module cq": "Question classification list",
"module cq result": "Classification Result",
@ -776,6 +779,7 @@
},
"Default value": "Default ",
"Default value placeholder": "Null characters are returned by default",
"Edit intro": "Edit",
"Field Description": "Description",
"Field Name": "Name",
"Field Type": "Type",
@ -933,6 +937,9 @@
"textEditor": {
"Text Edit": "Text Edit"
},
"tool": {
"Tool input": "Tool input"
},
"valueType": {
"any": "Any",
"boolean": "Boolean",
@ -942,7 +949,8 @@
"number": "Number",
"selectApp": "Select App",
"selectDataset": "Select Dataset",
"string": "String"
"string": "String",
"tools": "tools"
},
"variable": {
"External type": "External",

View File

@ -89,6 +89,7 @@
"Edit": "编辑",
"Exit": "退出",
"Expired Time": "过期时间",
"Field": "字段",
"File": "文件",
"Filed is repeat": "",
"Filed is repeated": "字段重复了",
@ -444,9 +445,11 @@
"response": {
"Complete Response": "完整响应",
"Extension model": "问题优化模型",
"Plugin Resonse Detail": "插件详情",
"Plugin response detail": "插件详情",
"Read complete response": "查看详情",
"Read complete response tips": "点击查看详细流程",
"Tool call response detail": "工具运行详情",
"Tool call tokens": "工具调用Tokens消耗",
"context total length": "上下文总长度",
"module cq": "问题分类列表",
"module cq result": "分类结果",
@ -778,10 +781,11 @@
},
"Default value": "默认值",
"Default value placeholder": "不填则默认返回空字符",
"Edit intro": "编辑描述",
"Field Description": "字段描述",
"Field Name": "字段名",
"Field Type": "字段类型",
"Field key": "字段 Key",
"Field key": "字段Key",
"Http request props": "请求参数",
"Http request settings": "请求配置",
"Input Type": "输入类型",
@ -935,6 +939,9 @@
"textEditor": {
"Text Edit": "文本加工"
},
"tool": {
"Tool input": "工具输入"
},
"valueType": {
"any": "任意",
"boolean": "布尔",
@ -944,7 +951,8 @@
"number": "数字",
"selectApp": "应用选择",
"selectDataset": "知识库选择",
"string": "字符串"
"string": "字符串",
"tools": "工具调用"
},
"variable": {
"External type": "外部传入",

View File

@ -1,13 +1,13 @@
import React from 'react';
import { ModalBody, Box, useTheme, Flex, Image } from '@chakra-ui/react';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
import { ModalBody, Box, useTheme } from '@chakra-ui/react';
import MyModal from '../MyModal';
import { DispatchNodeResponseType } from '@fastgpt/global/core/module/runtime/type.d';
const ContextModal = ({
context = [],
onClose
}: {
context: ChatItemType[];
context: DispatchNodeResponseType['historyPreview'];
onClose: () => void;
}) => {
const theme = useTheme();
@ -17,7 +17,7 @@ const ContextModal = ({
isOpen={true}
onClose={onClose}
iconSrc="/imgs/modal/chatHistory.svg"
title={`完整对话记录(${context.length}条)`}
title={`上下文预览(${context.length}条)`}
h={['90vh', '80vh']}
minW={['90vw', '600px']}
isCentered

View File

@ -1,36 +1,24 @@
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback, useState, useTransition } from 'react';
import React, { useRef, useEffect, useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '../MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useRouter } from 'next/router';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { customAlphabet } from 'nanoid';
import { IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
import { useRequest } from '@/web/common/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from './type';
import { textareaMinH } from './constants';
import { UseFormReturn, useFieldArray } from 'react-hook-form';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
enum FileTypeEnum {
image = 'image',
file = 'file'
}
type FileItemType = {
id: string;
rawFile: File;
type: `${FileTypeEnum}`;
name: string;
icon: string; // img is base64
src?: string;
};
const MessageInput = ({
onChange,
onSendMessage,
onStop,
isChatting,
@ -40,17 +28,29 @@ const MessageInput = ({
shareId,
outLinkUid,
teamId,
teamToken
teamToken,
chatForm
}: OutLinkChatAuthProps & {
onChange?: (e: string) => void;
onSendMessage: (e: string) => void;
onSendMessage: (val: ChatBoxInputType) => void;
onStop: () => void;
isChatting: boolean;
showFileSelector?: boolean;
TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
resetInputVal: (val: string) => void;
resetInputVal: (val: ChatBoxInputType) => void;
chatForm: UseFormReturn<ChatBoxInputFormType>;
}) => {
const [, startSts] = useTransition();
const { setValue, watch, control } = chatForm;
const inputValue = watch('input');
const {
update: updateFile,
remove: removeFile,
fields: fileList,
append: appendFile,
replace: replaceFile
} = useFieldArray({
control,
name: 'files'
});
const {
isSpeaking,
@ -64,45 +64,38 @@ const MessageInput = ({
const { isPc } = useSystemStore();
const canvasRef = useRef<HTMLCanvasElement>(null);
const { t } = useTranslation();
const textareaMinH = '22px';
const [fileList, setFileList] = useState<FileItemType[]>([]);
const havInput = !!TextareaDom.current?.value || fileList.length > 0;
const havInput = !!inputValue || fileList.length > 0;
/* file selector and upload */
const { File, onOpen: onOpenSelectFile } = useSelectFile({
fileType: 'image/*',
multiple: true,
maxCount: 10
});
const { mutate: uploadFile } = useRequest({
mutationFn: async (file: FileItemType) => {
if (file.type === FileTypeEnum.image) {
mutationFn: async ({ file, fileIndex }: { file: UserInputFileItemType; fileIndex: number }) => {
if (file.type === ChatFileTypeEnum.image && file.rawFile) {
try {
const src = await compressImgFileAndUpload({
const url = await compressImgFileAndUpload({
type: MongoImageTypeEnum.chatImage,
file: file.rawFile,
maxW: 4329,
maxH: 4329,
maxSize: 1024 * 1024 * 5,
// 30 day expired.
// 7 day expired.
expiredTime: addDays(new Date(), 7),
shareId,
outLinkUid,
teamId,
teamToken
});
setFileList((state) =>
state.map((item) =>
item.id === file.id
? {
...item,
src: `${location.origin}${src}`
}
: item
)
);
updateFile(fileIndex, {
...file,
url: `${location.origin}${url}`
});
} catch (error) {
setFileList((state) => state.filter((item) => item.id !== file.id));
removeFile(fileIndex);
console.log(error);
return Promise.reject(error);
}
@ -110,7 +103,6 @@ const MessageInput = ({
},
errorToast: t('common.Upload File Failed')
});
const onSelectFile = useCallback(
async (files: File[]) => {
if (!files || files.length === 0) {
@ -119,7 +111,7 @@ const MessageInput = ({
const loadFiles = await Promise.all(
files.map(
(file) =>
new Promise<FileItemType>((resolve, reject) => {
new Promise<UserInputFileItemType>((resolve, reject) => {
if (file.type.includes('image')) {
const reader = new FileReader();
reader.readAsDataURL(file);
@ -127,11 +119,10 @@ const MessageInput = ({
const item = {
id: nanoid(),
rawFile: file,
type: FileTypeEnum.image,
type: ChatFileTypeEnum.image,
name: file.name,
icon: reader.result as string
};
uploadFile(item);
resolve(item);
};
reader.onerror = () => {
@ -141,7 +132,7 @@ const MessageInput = ({
resolve({
id: nanoid(),
rawFile: file,
type: FileTypeEnum.file,
type: ChatFileTypeEnum.file,
name: file.name,
icon: 'file/pdf'
});
@ -149,29 +140,28 @@ const MessageInput = ({
})
)
);
appendFile(loadFiles);
setFileList((state) => [...state, ...loadFiles]);
loadFiles.forEach((file, i) =>
uploadFile({
file,
fileIndex: i + fileList.length
})
);
},
[uploadFile]
[appendFile, fileList.length, uploadFile]
);
/* on send */
const handleSend = useCallback(async () => {
const textareaValue = TextareaDom.current?.value || '';
const images = fileList.filter((item) => item.type === FileTypeEnum.image);
const imagesText =
images.length === 0
? ''
: `\`\`\`${IMG_BLOCK_KEY}
${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
\`\`\`
`;
const inputMessage = `${imagesText}${textareaValue}`;
onSendMessage(inputMessage);
setFileList([]);
}, [TextareaDom, fileList, onSendMessage]);
onSendMessage({
text: textareaValue.trim(),
files: fileList
});
replaceFile([]);
}, [TextareaDom, fileList, onSendMessage, replaceFile]);
useEffect(() => {
if (!stream) {
@ -231,7 +221,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
{/* file preview */}
<Flex wrap={'wrap'} px={[2, 4]} userSelect={'none'}>
{fileList.map((item) => (
{fileList.map((item, index) => (
<Box
key={item.id}
border={'1px solid rgba(0,0,0,0.12)'}
@ -240,11 +230,11 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
rounded={'md'}
position={'relative'}
_hover={{
'.close-icon': { display: item.src ? 'block' : 'none' }
'.close-icon': { display: item.url ? 'block' : 'none' }
}}
>
{/* uploading */}
{!item.src && (
{!item.url && (
<Flex
position={'absolute'}
alignItems={'center'}
@ -272,12 +262,12 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
right={'-8px'}
top={'-8px'}
onClick={() => {
setFileList((state) => state.filter((file) => file.id !== item.id));
removeFile(index);
}}
className="close-icon"
display={['', 'none']}
/>
{item.type === FileTypeEnum.image && (
{item.type === ChatFileTypeEnum.image && (
<Image
alt={'img'}
src={item.icon}
@ -335,14 +325,12 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
boxShadow={'none !important'}
color={'myGray.900'}
isDisabled={isSpeaking}
value={inputValue}
onChange={(e) => {
const textarea = e.target;
textarea.style.height = textareaMinH;
textarea.style.height = `${textarea.scrollHeight}px`;
startSts(() => {
onChange?.(textarea.value);
});
setValue('input', textarea.value);
}}
onKeyDown={(e) => {
// enter send.(pc or iframe && enter and unPress shift)
@ -406,7 +394,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
if (isSpeaking) {
return stopSpeak();
}
startSpeak(resetInputVal);
startSpeak((text) => resetInputVal({ text }));
}}
>
<MyTooltip label={isSpeaking ? t('core.chat.Stop Speak') : t('core.chat.Record')}>

View File

@ -1,5 +1,6 @@
import React, { useMemo, useState } from 'react';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { type ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { DispatchNodeResponseType } from '@fastgpt/global/core/module/runtime/type.d';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { Flex, BoxProps, useDisclosure, useTheme, Box } from '@chakra-ui/react';
import { useTranslation } from 'next-i18next';
@ -14,15 +15,18 @@ import ChatBoxDivider from '@/components/core/chat/Divider';
import { strIsLink } from '@fastgpt/global/common/string/tools';
import MyIcon from '@fastgpt/web/components/common/Icon';
const QuoteModal = dynamic(() => import('./QuoteModal'), { ssr: false });
const ContextModal = dynamic(() => import('./ContextModal'), { ssr: false });
const WholeResponseModal = dynamic(() => import('./WholeResponseModal'), { ssr: false });
const QuoteModal = dynamic(() => import('./QuoteModal'));
const ContextModal = dynamic(() => import('./ContextModal'));
const WholeResponseModal = dynamic(() => import('./WholeResponseModal'));
const isLLMNode = (item: ChatHistoryItemResType) =>
item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;
const ResponseTags = ({
responseData = [],
flowResponses = [],
showDetail
}: {
responseData?: ChatHistoryItemResType[];
flowResponses?: ChatHistoryItemResType[];
showDetail: boolean;
}) => {
const theme = useTheme();
@ -36,7 +40,8 @@ const ResponseTags = ({
sourceName: string;
};
}>();
const [contextModalData, setContextModalData] = useState<ChatItemType[]>();
const [contextModalData, setContextModalData] =
useState<DispatchNodeResponseType['historyPreview']>();
const {
isOpen: isOpenWholeModal,
onOpen: onOpenWholeModal,
@ -44,18 +49,29 @@ const ResponseTags = ({
} = useDisclosure();
const {
chatAccount,
llmModuleAccount,
quoteList = [],
sourceList = [],
historyPreview = [],
runningTime = 0
} = useMemo(() => {
const chatData = responseData.find((item) => item.moduleType === FlowNodeTypeEnum.chatNode);
const quoteList = responseData
.filter((item) => item.moduleType === FlowNodeTypeEnum.chatNode)
const flatResponse = flowResponses
.map((item) => {
if (item.pluginDetail || item.toolDetail) {
return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
}
return item;
})
.flat();
const chatData = flatResponse.find(isLLMNode);
const quoteList = flatResponse
.filter((item) => item.moduleType === FlowNodeTypeEnum.datasetSearchNode)
.map((item) => item.quoteList)
.flat()
.filter(Boolean) as SearchDataResponseItemType[];
const sourceList = quoteList.reduce(
(acc: Record<string, SearchDataResponseItemType[]>, cur) => {
if (!acc[cur.collectionId]) {
@ -67,8 +83,7 @@ const ResponseTags = ({
);
return {
chatAccount: responseData.filter((item) => item.moduleType === FlowNodeTypeEnum.chatNode)
.length,
llmModuleAccount: flatResponse.filter(isLLMNode).length,
quoteList,
sourceList: Object.values(sourceList)
.flat()
@ -80,16 +95,16 @@ const ResponseTags = ({
collectionId: item.collectionId
})),
historyPreview: chatData?.historyPreview,
runningTime: +responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
runningTime: +flowResponses.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
};
}, [showDetail, responseData]);
}, [showDetail, flowResponses]);
const TagStyles: BoxProps = {
mr: 2,
bg: 'transparent'
};
return responseData.length === 0 ? null : (
return flowResponses.length === 0 ? null : (
<>
{sourceList.length > 0 && (
<>
@ -148,10 +163,10 @@ const ResponseTags = ({
</Tag>
</MyTooltip>
)}
{chatAccount === 1 && (
{llmModuleAccount === 1 && (
<>
{historyPreview.length > 0 && (
<MyTooltip label={'点击查看完整对话记录'}>
<MyTooltip label={'点击查看上下文预览'}>
<Tag
colorSchema="green"
cursor={'pointer'}
@ -164,7 +179,7 @@ const ResponseTags = ({
)}
</>
)}
{chatAccount > 1 && (
{llmModuleAccount > 1 && (
<Tag colorSchema="blue" {...TagStyles}>
AI
</Tag>
@ -196,7 +211,7 @@ const ResponseTags = ({
)}
{isOpenWholeModal && (
<WholeResponseModal
response={responseData}
response={flowResponses}
showDetail={showDetail}
onClose={onCloseWholeModal}
/>

View File

@ -2,7 +2,7 @@ import React, { useMemo, useState } from 'react';
import { Box, useTheme, Flex, Image } from '@chakra-ui/react';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { useTranslation } from 'next-i18next';
import { moduleTemplatesFlat } from '@/web/core/modules/template/system';
import { moduleTemplatesFlat } from '@fastgpt/global/core/module/template/constants';
import Tabs from '../Tabs';
import MyModal from '../MyModal';
@ -143,6 +143,11 @@ const ResponseBox = React.memo(function ResponseBox({
/>
<Row label={t('core.chat.response.module model')} value={activeModule?.model} />
<Row label={t('core.chat.response.module tokens')} value={`${activeModule?.tokens}`} />
<Row
label={t('core.chat.response.Tool call tokens')}
value={`${activeModule?.toolCallTokens}`}
/>
<Row label={t('core.chat.response.module query')} value={activeModule?.query} />
<Row
label={t('core.chat.response.context total length')}
@ -182,12 +187,6 @@ const ResponseBox = React.memo(function ResponseBox({
)
}
/>
{activeModule.quoteList && activeModule.quoteList.length > 0 && (
<Row
label={t('core.chat.response.module quoteList')}
rawDom={<QuoteList showDetail={showDetail} rawSearch={activeModule.quoteList} />}
/>
)}
</>
{/* dataset search */}
@ -213,6 +212,12 @@ const ResponseBox = React.memo(function ResponseBox({
label={t('support.wallet.usage.Extension result')}
value={`${activeModule?.extensionResult}`}
/>
{activeModule.quoteList && activeModule.quoteList.length > 0 && (
<Row
label={t('core.chat.response.module quoteList')}
rawDom={<QuoteList showDetail={showDetail} rawSearch={activeModule.quoteList} />}
/>
)}
</>
{/* classify question */}
@ -276,7 +281,7 @@ const ResponseBox = React.memo(function ResponseBox({
)}
{activeModule?.pluginDetail && activeModule?.pluginDetail.length > 0 && (
<Row
label={t('core.chat.response.Plugin Resonse Detail')}
label={t('core.chat.response.Plugin response detail')}
rawDom={<ResponseBox response={activeModule.pluginDetail} showDetail={showDetail} />}
/>
)}
@ -284,6 +289,14 @@ const ResponseBox = React.memo(function ResponseBox({
{/* text output */}
<Row label={t('core.chat.response.text output')} value={activeModule?.textOutput} />
{/* tool call */}
{activeModule?.toolDetail && activeModule?.toolDetail.length > 0 && (
<Row
label={t('core.chat.response.Tool call response detail')}
rawDom={<ResponseBox response={activeModule.toolDetail} showDetail={showDetail} />}
/>
)}
</Box>
</>
);

View File

@ -0,0 +1,23 @@
import Avatar from '@/components/Avatar';
import { Box } from '@chakra-ui/react';
import { useTheme } from '@chakra-ui/system';
import React from 'react';
/**
 * Small framed avatar used in the chat message list.
 * Human messages get a white frame; AI messages get a tinted (primary.50) frame.
 */
const ChatAvatar = ({ src, type }: { src?: string; type: 'Human' | 'AI' }) => {
  const theme = useTheme();
  // The only visual difference between the two roles is the frame background.
  const frameBg = type === 'Human' ? 'white' : 'primary.50';
  // Responsive size: smaller on mobile, larger on desktop.
  const frameSize = ['28px', '34px'];
  return (
    <Box
      w={frameSize}
      h={frameSize}
      p={'2px'}
      borderRadius={'sm'}
      border={theme.borders.base}
      boxShadow={'0 0 5px rgba(0,0,0,0.1)'}
      bg={frameBg}
    >
      <Avatar src={src} w={'100%'} h={'100%'} />
    </Box>
  );
};
export default React.memo(ChatAvatar);

View File

@ -0,0 +1,236 @@
import { useCopyData } from '@/web/common/hooks/useCopyData';
import { useAudioPlay } from '@/web/common/utils/voice';
import { Flex, FlexProps, Image, css, useTheme } from '@chakra-ui/react';
import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
import { AppTTSConfigType } from '@fastgpt/global/core/module/type';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import React from 'react';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { formatChatValue2InputType } from '../utils';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
/**
 * Props for the per-message control bar (copy / retry / delete / TTS / mark / feedback).
 * All handlers are optional; an icon is rendered only when its handler is provided.
 */
export type ChatControllerProps = {
  isChatting: boolean;
  chat: ChatSiteItemType;
  // Optional setter used to cache a generated TTS buffer back onto the history item.
  setChatHistories?: React.Dispatch<React.SetStateAction<ChatSiteItemType[]>>;
  showVoiceIcon?: boolean;
  ttsConfig?: AppTTSConfigType;
  onRetry?: () => void;
  onDelete?: () => void;
  onMark?: () => void;
  onReadUserDislike?: () => void;
  onCloseUserLike?: () => void;
  onAddUserLike?: () => void;
  onAddUserDislike?: () => void;
};

/**
 * Horizontal icon bar attached to a single chat message.
 * Renders, in order: copy, (retry, delete), TTS playback, mark, and user feedback icons.
 * Outbound-link auth props (shareId/outLinkUid/teamId/teamToken) are forwarded to the
 * audio-play hook so TTS works in shared/team contexts as well.
 */
const ChatController = ({
  isChatting,
  chat,
  setChatHistories,
  showVoiceIcon,
  ttsConfig,
  onReadUserDislike,
  onCloseUserLike,
  onMark,
  onRetry,
  onDelete,
  onAddUserDislike,
  onAddUserLike,
  shareId,
  outLinkUid,
  teamId,
  teamToken
}: OutLinkChatAuthProps & ChatControllerProps & FlexProps) => {
  const theme = useTheme();
  const { t } = useTranslation();
  const { copyData } = useCopyData();
  const { audioLoading, audioPlaying, hasAudio, playAudio, cancelAudio } = useAudioPlay({
    ttsConfig,
    shareId,
    outLinkUid,
    teamId,
    teamToken
  });
  // Shared look for every icon in the bar; each icon draws its own right border divider.
  const controlIconStyle = {
    w: '14px',
    cursor: 'pointer',
    p: '5px',
    bg: 'white',
    borderRight: theme.borders.base
  };
  const controlContainerStyle = {
    className: 'control',
    color: 'myGray.400',
    display: 'flex'
  };

  return (
    <Flex
      {...controlContainerStyle}
      borderRadius={'sm'}
      overflow={'hidden'}
      border={theme.borders.base}
      // the last child has no right border (the container's border closes the bar)
      css={css({
        '& > *:last-child, & > *:last-child svg': {
          borderRight: 'none',
          borderRadius: 'md'
        }
      })}
    >
      {/* copy the plain-text content of the message */}
      <MyTooltip label={t('common.Copy')}>
        <MyIcon
          {...controlIconStyle}
          name={'copy'}
          _hover={{ color: 'primary.600' }}
          onClick={() => copyData(formatChatValue2InputType(chat.value).text || '')}
        />
      </MyTooltip>
      {/* retry/delete are hidden while a response is streaming */}
      {!!onDelete && !isChatting && (
        <>
          {onRetry && (
            <MyTooltip label={t('core.chat.retry')}>
              <MyIcon
                {...controlIconStyle}
                name={'common/retryLight'}
                _hover={{ color: 'green.500' }}
                onClick={onRetry}
              />
            </MyTooltip>
          )}
          <MyTooltip label={t('common.Delete')}>
            <MyIcon
              {...controlIconStyle}
              name={'delete'}
              _hover={{ color: 'red.600' }}
              onClick={onDelete}
            />
          </MyTooltip>
        </>
      )}
      {/* TTS control: loading spinner -> stop button while playing -> play button otherwise */}
      {showVoiceIcon &&
        hasAudio &&
        (audioLoading ? (
          <MyTooltip label={t('common.Loading')}>
            <MyIcon {...controlIconStyle} name={'common/loading'} />
          </MyTooltip>
        ) : audioPlaying ? (
          <Flex alignItems={'center'}>
            <MyTooltip label={t('core.chat.tts.Stop Speech')}>
              <MyIcon
                {...controlIconStyle}
                borderRight={'none'}
                name={'core/chat/stopSpeech'}
                color={'#E74694'}
                onClick={() => cancelAudio()}
              />
            </MyTooltip>
            <Image src="/icon/speaking.gif" w={'23px'} alt={''} borderRight={theme.borders.base} />
          </Flex>
        ) : (
          <MyTooltip label={t('core.app.TTS')}>
            <MyIcon
              {...controlIconStyle}
              name={'common/voiceLight'}
              _hover={{ color: '#E74694' }}
              onClick={async () => {
                // Reuse a previously generated buffer when available; otherwise synthesize
                // from the message text.
                const response = await playAudio({
                  buffer: chat.ttsBuffer,
                  chatItemId: chat.dataId,
                  text: formatChatValue2InputType(chat.value).text || ''
                });

                if (!setChatHistories || !response.buffer) return;
                // Cache the synthesized buffer on the history item so replays are instant.
                setChatHistories((state) =>
                  state.map((item) =>
                    item.dataId === chat.dataId
                      ? {
                          ...item,
                          ttsBuffer: response.buffer
                        }
                      : item
                  )
                );
              }}
            />
          </MyTooltip>
        ))}
      {!!onMark && (
        <MyTooltip label={t('core.chat.Mark')}>
          <MyIcon
            {...controlIconStyle}
            name={'core/app/markLight'}
            _hover={{ color: '#67c13b' }}
            onClick={onMark}
          />
        </MyTooltip>
      )}
      {/* like/dislike feedback is only meaningful on AI messages */}
      {chat.obj === ChatRoleEnum.AI && (
        <>
          {/* admin view: clear an existing "like" left by the user */}
          {!!onCloseUserLike && chat.userGoodFeedback && (
            <MyTooltip label={t('core.chat.feedback.Close User Like')}>
              <MyIcon
                {...controlIconStyle}
                color={'white'}
                bg={'green.500'}
                fontWeight={'bold'}
                name={'core/chat/feedback/goodLight'}
                onClick={onCloseUserLike}
              />
            </MyTooltip>
          )}
          {/* admin view: inspect an existing "dislike" left by the user */}
          {!!onReadUserDislike && chat.userBadFeedback && (
            <MyTooltip label={t('core.chat.feedback.Read User dislike')}>
              <MyIcon
                {...controlIconStyle}
                color={'white'}
                bg={'#FC9663'}
                fontWeight={'bold'}
                name={'core/chat/feedback/badLight'}
                onClick={onReadUserDislike}
              />
            </MyTooltip>
          )}
          {/* user view: toggle like — filled style when feedback already given */}
          {!!onAddUserLike && (
            <MyIcon
              {...controlIconStyle}
              {...(!!chat.userGoodFeedback
                ? {
                    color: 'white',
                    bg: 'green.500',
                    fontWeight: 'bold'
                  }
                : {
                    _hover: { color: 'green.600' }
                  })}
              name={'core/chat/feedback/goodLight'}
              onClick={onAddUserLike}
            />
          )}
          {/* user view: toggle dislike — filled style when feedback already given */}
          {!!onAddUserDislike && (
            <MyIcon
              {...controlIconStyle}
              {...(!!chat.userBadFeedback
                ? {
                    color: 'white',
                    bg: '#FC9663',
                    fontWeight: 'bold',
                    onClick: onAddUserDislike
                  }
                : {
                    _hover: { color: '#FB7C3C' },
                    onClick: onAddUserDislike
                  })}
              name={'core/chat/feedback/badLight'}
            />
          )}
        </>
      )}
    </Flex>
  );
};

export default React.memo(ChatController);

View File

@ -0,0 +1,236 @@
import {
Box,
BoxProps,
Card,
Flex,
useTheme,
Accordion,
AccordionItem,
AccordionButton,
AccordionPanel,
AccordionIcon,
Button,
Image,
Grid
} from '@chakra-ui/react';
import React, { useMemo } from 'react';
import ChatController, { type ChatControllerProps } from './ChatController';
import ChatAvatar from './ChatAvatar';
import { MessageCardStyle } from '../constants';
import { formatChatValue2InputType } from '../utils';
import Markdown, { CodeClassName } from '@/components/Markdown';
import styles from '../index.module.scss';
import MyIcon from '@fastgpt/web/components/common/Icon';
import {
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatStatusEnum
} from '@fastgpt/global/core/chat/constants';
import FilesBlock from './FilesBox';
// Background/dot colors for the streaming status pill, keyed by chat status.
// running and finish intentionally share the same green styling.
const colorMap = {
  [ChatStatusEnum.loading]: {
    bg: 'myGray.100',
    color: 'myGray.600'
  },
  [ChatStatusEnum.running]: {
    bg: 'green.50',
    color: 'green.700'
  },
  [ChatStatusEnum.finish]: {
    bg: 'green.50',
    color: 'green.700'
  }
};

/**
 * Renders one chat message row: the control bar + avatar (+ status pill on the
 * last AI message) above the message bubble.
 * Human messages show uploaded files followed by markdown text; AI messages
 * render each value item in order — text as markdown, tool calls as collapsible
 * accordions with pretty-printed params/response JSON.
 */
const ChatItem = ({
  type,
  avatar,
  statusBoxData,
  children,
  isLastChild,
  questionGuides = [],
  ...chatControllerProps
}: {
  type: ChatRoleEnum.Human | ChatRoleEnum.AI;
  avatar?: string;
  // Live status of the currently running module (only shown on the last AI item).
  statusBoxData?: {
    status: `${ChatStatusEnum}`;
    name: string;
  };
  isLastChild?: boolean;
  // Suggested follow-up questions appended to the last finished AI message.
  questionGuides?: string[];
  children?: React.ReactNode;
} & ChatControllerProps) => {
  const theme = useTheme();
  // Mirror-image styling: human bubbles align right, AI bubbles align left.
  const styleMap: BoxProps =
    type === ChatRoleEnum.Human
      ? {
          order: 0,
          borderRadius: '8px 0 8px 8px',
          justifyContent: 'flex-end',
          textAlign: 'right',
          bg: 'primary.100'
        }
      : {
          order: 1,
          borderRadius: '0 8px 8px 8px',
          justifyContent: 'flex-start',
          textAlign: 'left',
          bg: 'myGray.50'
        };

  const { chat, isChatting } = chatControllerProps;
  const ContentCard = useMemo(() => {
    if (type === 'Human') {
      const { text, files = [] } = formatChatValue2InputType(chat.value);

      return (
        <>
          {files.length > 0 && <FilesBlock files={files} />}
          <Markdown source={text} isChatting={false} />
        </>
      );
    }

    /* AI */
    return (
      <Flex flexDirection={'column'} gap={2}>
        {chat.value.map((value, i) => {
          const key = `${chat.dataId}-ai-${i}`;
          if (value.text) {
            let source = value.text?.content || '';

            // Append the question-guide fence only once streaming has finished,
            // and only on the newest message.
            if (isLastChild && !isChatting && questionGuides.length > 0) {
              source = `${source}
\`\`\`${CodeClassName.questionGuide}
${JSON.stringify(questionGuides)}`;
            }

            return <Markdown key={key} source={source} isChatting={isLastChild && isChatting} />;
          }
          if (value.type === ChatItemValueTypeEnum.tool && value.tools) {
            return (
              <Box key={key}>
                {value.tools.map((tool) => {
                  // Pretty-print params/response when they are valid JSON;
                  // fall back to the raw string otherwise.
                  const toolParams = (() => {
                    try {
                      return JSON.stringify(JSON.parse(tool.params), null, 2);
                    } catch (error) {
                      return tool.params;
                    }
                  })();
                  const toolResponse = (() => {
                    try {
                      return JSON.stringify(JSON.parse(tool.response), null, 2);
                    } catch (error) {
                      return tool.response;
                    }
                  })();

                  return (
                    <Box key={tool.id}>
                      <Accordion allowToggle>
                        <AccordionItem borderTop={'none'} borderBottom={'none'}>
                          <AccordionButton
                            w={'auto'}
                            bg={'white'}
                            borderRadius={'md'}
                            borderWidth={'1px'}
                            borderColor={'myGray.200'}
                            boxShadow={'1'}
                            _hover={{
                              bg: 'auto',
                              color: 'primary.600'
                            }}
                          >
                            <Image src={tool.toolAvatar} alt={''} w={'14px'} mr={2} />
                            <Box mr={1}>{tool.toolName}</Box>
                            {/* spinner while the tool call has not returned yet */}
                            {isChatting && !tool.response && (
                              <MyIcon name={'common/loading'} w={'14px'} />
                            )}
                            <AccordionIcon color={'myGray.600'} ml={5} />
                          </AccordionButton>
                          <AccordionPanel
                            py={0}
                            px={0}
                            mt={0}
                            borderRadius={'md'}
                            overflow={'hidden'}
                            maxH={'500px'}
                            overflowY={'auto'}
                          >
                            {toolParams && (
                              <Markdown
                                source={`~~~json#Input
${toolParams}`}
                              />
                            )}
                            {toolResponse && (
                              <Markdown
                                source={`~~~json#Response
${toolResponse}`}
                              />
                            )}
                          </AccordionPanel>
                        </AccordionItem>
                      </Accordion>
                    </Box>
                  );
                })}
              </Box>
            );
          }
          // NOTE(review): value items that are neither text nor tool render nothing here.
        })}
      </Flex>
    );
  }, [chat.dataId, chat.value, isChatting, isLastChild, questionGuides, type]);

  const chatStatusMap = useMemo(() => {
    if (!statusBoxData?.status) return;
    return colorMap[statusBoxData.status];
  }, [statusBoxData?.status]);

  return (
    <>
      {/* control icon */}
      <Flex w={'100%'} alignItems={'center'} gap={2} justifyContent={styleMap.justifyContent}>
        {/* hide the control bar on the AI message that is still streaming */}
        {isChatting && type === ChatRoleEnum.AI && isLastChild ? null : (
          <Box order={styleMap.order} ml={styleMap.ml}>
            <ChatController {...chatControllerProps} />
          </Box>
        )}
        <ChatAvatar src={avatar} type={type} />

        {/* animated status pill, only next to the last message while running */}
        {!!chatStatusMap && statusBoxData && isLastChild && (
          <Flex alignItems={'center'} px={3} py={'1.5px'} borderRadius="md" bg={chatStatusMap.bg}>
            <Box
              className={styles.statusAnimation}
              bg={chatStatusMap.color}
              w="8px"
              h="8px"
              borderRadius={'50%'}
              mt={'1px'}
            />
            <Box ml={2} color={'myGray.600'}>
              {statusBoxData.name}
            </Box>
          </Flex>
        )}
      </Flex>
      {/* content */}
      <Box mt={['6px', 2]} textAlign={styleMap.textAlign}>
        <Card
          className="markdown"
          {...MessageCardStyle}
          bg={styleMap.bg}
          borderRadius={styleMap.borderRadius}
          textAlign={'left'}
        >
          {ContentCard}
          {children}
        </Card>
      </Box>
    </>
  );
};

export default ChatItem;

View File

@ -0,0 +1,23 @@
import Markdown from '@/components/Markdown';
import { useMarkdown } from '@/web/common/hooks/useMarkdown';
import { Box, Card } from '@chakra-ui/react';
import React from 'react';
/**
 * Placeholder shown when the conversation has no history yet:
 * version release notes on top, followed by the FAQ/troubleshooting card.
 */
const Empty = () => {
  const { data: versionIntro } = useMarkdown({ url: '/versionIntro.md' });
  const { data: chatProblem } = useMarkdown({ url: '/chatProblem.md' });

  return (
    <Box pt={6} w={'85%'} maxW={'600px'} m={'auto'} alignItems={'center'} justifyContent={'center'}>
      {/* release notes for the current version */}
      <Card p={4} mb={10} minH={'200px'}>
        <Markdown source={versionIntro} />
      </Card>
      {/* common questions / troubleshooting */}
      <Card p={4} minH={'600px'}>
        <Markdown source={chatProblem} />
      </Card>
    </Box>
  );
};

export default React.memo(Empty);

View File

@ -0,0 +1,22 @@
import { Box, Flex, Grid } from '@chakra-ui/react';
import MdImage from '@/components/Markdown/img/Image';
import { UserInputFileItemType } from '@/components/ChatBox/type';
/**
 * Preview grid for the files attached to a user chat message.
 * Currently only image files are rendered; any other file type is skipped.
 */
const FilesBlock = ({ files }: { files: UserInputFileItemType[] }) => {
  return (
    <Grid gridTemplateColumns={['1fr', '1fr 1fr']} gap={4}>
      {files.map(({ id, type, url }) => {
        if (type === 'image') {
          return (
            // Key on the stable file id rather than the array index: the input
            // file list supports removal, and index keys would make React
            // re-associate previews with the wrong files after a deletion.
            <Box key={id} rounded={'md'} flex={'1 0 0'} minW={'120px'}>
              <MdImage src={url} />
            </Box>
          );
        }
        return null;
      })}
    </Grid>
  );
};
export default FilesBlock;

View File

@ -0,0 +1,119 @@
import { VariableItemType } from '@fastgpt/global/core/module/type';
import React, { useState } from 'react';
import { UseFormReturn } from 'react-hook-form';
import { useTranslation } from 'next-i18next';
import { Box, Button, Card, Input, Textarea } from '@chakra-ui/react';
import ChatAvatar from './ChatAvatar';
import { MessageCardStyle } from '../constants';
import { VariableInputEnum } from '@fastgpt/global/core/module/constants';
import MySelect from '@fastgpt/web/components/common/MySelect';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { ChatBoxInputFormType } from '../type.d';
/**
 * Pre-chat form that collects the app's configured variables before the first
 * message can be sent. Fields are registered on the shared chatForm under the
 * `variables.<key>` path; inputs are disabled once the chat has started
 * (variableIsFinish).
 */
const VariableInput = ({
  appAvatar,
  variableModules,
  variableIsFinish,
  chatForm,
  onSubmitVariables
}: {
  appAvatar?: string;
  variableModules: VariableItemType[];
  // true once variables were submitted — renders the form read-only and hides the button
  variableIsFinish: boolean;
  onSubmitVariables: (e: Record<string, any>) => void;
  chatForm: UseFormReturn<ChatBoxInputFormType>;
}) => {
  const { t } = useTranslation();
  // Dummy state toggled after setValue to force a re-render so the controlled
  // MySelect picks up the new value.
  const [refresh, setRefresh] = useState(false);
  const { register, setValue, handleSubmit: handleSubmitChat, watch } = chatForm;
  const variables = watch('variables');

  return (
    <Box py={3}>
      {/* avatar */}
      <ChatAvatar src={appAvatar} type={'AI'} />
      {/* message */}
      <Box textAlign={'left'}>
        <Card
          order={2}
          mt={2}
          w={'400px'}
          {...MessageCardStyle}
          bg={'white'}
          boxShadow={'0 0 8px rgba(0,0,0,0.15)'}
        >
          {variableModules.map((item) => (
            <Box key={item.id} mb={4}>
              {/* label with a red asterisk for required fields */}
              <Box as={'label'} display={'inline-block'} position={'relative'} mb={1}>
                {item.label}
                {item.required && (
                  <Box
                    position={'absolute'}
                    top={'-2px'}
                    right={'-10px'}
                    color={'red.500'}
                    fontWeight={'bold'}
                  >
                    *
                  </Box>
                )}
              </Box>
              {/* one control per variable type: input / textarea / select */}
              {item.type === VariableInputEnum.input && (
                <Input
                  isDisabled={variableIsFinish}
                  bg={'myWhite.400'}
                  {...register(`variables.${item.key}`, {
                    required: item.required
                  })}
                />
              )}
              {item.type === VariableInputEnum.textarea && (
                <Textarea
                  isDisabled={variableIsFinish}
                  bg={'myWhite.400'}
                  {...register(`variables.${item.key}`, {
                    required: item.required
                  })}
                  rows={5}
                  maxLength={4000}
                />
              )}
              {item.type === VariableInputEnum.select && (
                <MySelect
                  width={'100%'}
                  isDisabled={variableIsFinish}
                  list={(item.enums || []).map((item) => ({
                    label: item.value,
                    value: item.value
                  }))}
                  {...register(`variables.${item.key}`, {
                    required: item.required
                  })}
                  value={variables[item.key]}
                  // NOTE(review): `onchange` (lowercase) appears to be MySelect's
                  // custom prop, not the DOM onChange — confirm against MySelect.
                  onchange={(e) => {
                    setValue(`variables.${item.key}`, e);
                    setRefresh((state) => !state);
                  }}
                />
              )}
            </Box>
          ))}
          {!variableIsFinish && (
            <Button
              leftIcon={<MyIcon name={'core/chat/chatFill'} w={'16px'} />}
              size={'sm'}
              maxW={'100px'}
              onClick={handleSubmitChat((data) => {
                onSubmitVariables(data);
              })}
            >
              {t('core.chat.Start Chat')}
            </Button>
          )}
        </Card>
      </Box>
    </Box>
  );
};

export default React.memo(VariableInput);

View File

@ -0,0 +1,28 @@
import { Box, Card } from '@chakra-ui/react';
import React from 'react';
import { MessageCardStyle } from '../constants';
import Markdown from '@/components/Markdown';
import ChatAvatar from './ChatAvatar';
/**
 * Opening card shown at the top of a conversation: the app avatar followed by
 * the configured welcome text, rendered through the markdown "guide" fence.
 */
const WelcomeBox = ({ appAvatar, welcomeText }: { appAvatar?: string; welcomeText: string }) => {
  // Wrap the welcome text in the special ~~~guide fence so Markdown renders it
  // with the guide-block styling.
  const guideSource = `~~~guide \n${welcomeText}`;

  return (
    <Box py={3}>
      {/* app avatar */}
      <ChatAvatar src={appAvatar} type={'AI'} />
      {/* welcome message bubble */}
      <Box textAlign={'left'}>
        <Card
          order={2}
          mt={2}
          {...MessageCardStyle}
          bg={'white'}
          boxShadow={'0 0 8px rgba(0,0,0,0.15)'}
        >
          <Markdown source={guideSource} isChatting={false} />
        </Card>
      </Box>
    </Box>
  );
};

export default WelcomeBox;

View File

@ -0,0 +1,13 @@
import { BoxProps } from '@chakra-ui/react';
// Minimum height of the chat input textarea (it auto-grows with content).
export const textareaMinH = '22px';

// Shared bubble styling applied to every chat message card (human and AI).
export const MessageCardStyle: BoxProps = {
  px: 4,
  py: 3,
  borderRadius: '0 8px 8px 8px',
  boxShadow: 'none',
  display: 'inline-block',
  // keep the bubble narrower than the row so the avatar fits beside it
  maxW: ['calc(100% - 25px)', 'calc(100% - 40px)'],
  color: 'myGray.900'
};

View File

@ -0,0 +1,78 @@
import { ExportChatType } from '@/types/chat';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
import { useCallback } from 'react';
import { htmlTemplate } from '@/constants/common';
import { fileDownload } from '@/web/common/file/utils';
/**
 * Chat export helpers. `onExportChat` downloads the current conversation as
 * markdown, HTML, or PDF. HTML/PDF export scrapes the rendered DOM under
 * #history rather than re-rendering from data.
 */
export const useChatBox = () => {
  const onExportChat = useCallback(
    ({ type, history }: { type: ExportChatType; history: ChatItemType[] }) => {
      // Build an HTML document from the rendered chat DOM (#history),
      // stripping the code-block header toolbars.
      const getHistoryHtml = () => {
        const historyDom = document.getElementById('history');
        if (!historyDom) return;
        const dom = Array.from(historyDom.children).map((child, i) => {
          const avatar = `<img src="${
            child.querySelector<HTMLImageElement>('.avatar')?.src
          }" alt="" />`;

          const chatContent = child.querySelector<HTMLDivElement>('.markdown');

          if (!chatContent) {
            return '';
          }

          // Clone before mutating so the on-screen DOM is untouched.
          const chatContentClone = chatContent.cloneNode(true) as HTMLDivElement;

          const codeHeader = chatContentClone.querySelectorAll('.code-header');
          codeHeader.forEach((childElement: any) => {
            childElement.remove();
          });

          return `<div class="chat-item">
            ${avatar}
            ${chatContentClone.outerHTML}
          </div>`;
        });
        const html = htmlTemplate.replace('{{CHAT_CONTENT}}', dom.join('\n'));
        return html;
      };

      // One download strategy per export type.
      const map: Record<ExportChatType, () => void> = {
        md: () => {
          fileDownload({
            // NOTE(review): item.value looks like it may be an array of value
            // items in the new chat schema; joining it directly could emit
            // "[object Object]" — confirm against ChatItemType.
            text: history.map((item) => item.value).join('\n\n'),
            type: 'text/markdown',
            filename: 'chat.md'
          });
        },
        html: () => {
          const html = getHistoryHtml();
          html &&
            fileDownload({
              text: html,
              type: 'text/html',
              filename: '聊天记录.html'
            });
        },
        pdf: () => {
          const html = getHistoryHtml();

          html &&
            // @ts-ignore html2pdf is provided as a page-level global script
            html2pdf(html, {
              margin: 0,
              filename: `聊天记录.pdf`
            });
        }
      };

      map[type]();
    },
    []
  );

  return {
    onExportChat
  };
};

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,53 @@
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import {
ChatItemValueItemType,
ChatSiteItemType,
ToolModuleResponseItemType
} from '@fastgpt/global/core/chat/type';
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
// Payload emitted while an AI response streams in; `event` selects which of
// the optional fields is populated for that SSE event.
export type generatingMessageProps = {
  event: `${SseResponseEventEnum}`;
  text?: string;
  name?: string;
  status?: 'running' | 'finish';
  tool?: ToolModuleResponseItemType;
};
// A file attached to the chat input by the user.
export type UserInputFileItemType = {
  id: string;
  rawFile?: File; // original browser File object, when attached locally
  type: `${ChatFileTypeEnum}`;
  name: string;
  icon: string; // img is base64
  url?: string; // remote URL, when available
};
// Form state backing the ChatBox input area (presumably react-hook-form — verify at usage site).
export type ChatBoxInputFormType = {
  input: string;
  files: UserInputFileItemType[];
  variables: Record<string, any>;
  chatStarted: boolean;
};
// Normalized content of one chat input submission.
export type ChatBoxInputType = {
  text?: string;
  files?: UserInputFileItemType[];
};
// Arguments handed to the caller-supplied function that starts a chat request.
export type StartChatFnProps = {
  chatList: ChatSiteItemType[];
  messages: ChatCompletionMessageParam[];
  controller: AbortController; // lets the caller abort the in-flight request
  variables: Record<string, any>;
  generatingMessage: (e: generatingMessageProps) => void; // streaming callback
};
// Imperative handle exposed by the ChatBox component via ref.
export type ComponentRef = {
  getChatHistories: () => ChatSiteItemType[];
  resetVariables: (data?: Record<string, any>) => void;
  resetHistory: (history: ChatSiteItemType[]) => void;
  scrollToBottom: (behavior?: 'smooth' | 'auto') => void;
  sendPrompt: (question: string) => void;
};

View File

@ -0,0 +1,33 @@
import { ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { ChatBoxInputType, UserInputFileItemType } from './type';
import { getNanoid } from '@fastgpt/global/common/string/tools';
/**
 * Convert a chat item's value list into the shape the ChatBox input expects:
 * the concatenated text content plus a normalized file list.
 * Malformed (non-array) input degrades to an empty result with a console error.
 */
export const formatChatValue2InputType = (value: ChatItemValueItemType[]): ChatBoxInputType => {
  if (!Array.isArray(value)) {
    console.error('value is error', value);
    return { text: '', files: [] };
  }

  // Concatenate every non-empty text segment in order.
  let text = '';
  for (const item of value) {
    if (item.text?.content) {
      text += item.text.content;
    }
  }

  // Collect file entries, assigning each a fresh client-side id.
  const files: UserInputFileItemType[] = value.flatMap((item) =>
    item.type === 'file' && item.file
      ? [
          {
            id: getNanoid(),
            type: item.file.type,
            name: item.file.name,
            icon: '',
            url: item.file.url
          }
        ]
      : []
  );

  return { text, files };
};

View File

@ -1,8 +1,9 @@
import React from 'react';
import React, { useMemo } from 'react';
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import { Box, Flex, useColorModeValue } from '@chakra-ui/react';
import { Box, Flex } from '@chakra-ui/react';
import Icon from '@fastgpt/web/components/common/Icon';
import { useCopyData } from '@/web/common/hooks/useCopyData';
import { useTranslation } from 'next-i18next';
const codeLight: { [key: string]: React.CSSProperties } = {
'code[class*=language-]': {
@ -294,24 +295,41 @@ const CodeLight = ({
inline?: boolean;
match: RegExpExecArray | null;
}) => {
const { t } = useTranslation();
const { copyData } = useCopyData();
if (!inline) {
const codeBoxName = useMemo(() => {
const input = match?.['input'] || '';
if (!input) return match?.[1];
const splitInput = input.split('#');
return splitInput[1] || match?.[1];
}, [match]);
return (
<Box my={3} borderRadius={'md'} overflow={'overlay'} backgroundColor={'#222'}>
<Box
my={3}
borderRadius={'md'}
overflow={'overlay'}
bg={'myGray.900'}
boxShadow={
'0px 0px 1px 0px rgba(19, 51, 107, 0.08), 0px 1px 2px 0px rgba(19, 51, 107, 0.05)'
}
>
<Flex
className="code-header"
py={2}
px={5}
backgroundColor={useColorModeValue('#323641', 'gray.600')}
color={'#fff'}
bg={'myGray.600'}
color={'white'}
fontSize={'sm'}
userSelect={'none'}
>
<Box flex={1}>{match?.[1]}</Box>
<Box flex={1}>{codeBoxName}</Box>
<Flex cursor={'pointer'} onClick={() => copyData(String(children))} alignItems={'center'}>
<Icon name={'copy'} width={15} height={15} fill={'#fff'}></Icon>
<Box ml={1}></Box>
<Icon name={'copy'} width={15} height={15}></Icon>
<Box ml={1}>{t('common.Copy')}</Box>
</Flex>
</Flex>
<SyntaxHighlighter style={codeLight as any} language={match?.[1]} PreTag="pre">

View File

@ -1,34 +0,0 @@
import { Box, Flex, Grid } from '@chakra-ui/react';
import MdImage from '../img/Image';
import { useMemo } from 'react';
// Renders a newline-delimited list of JSON image descriptors ({"src": ...})
// as a responsive grid of markdown images.
// NOTE(review): this file is removed by this commit (replaced by the `files`
// code-block type in the Markdown index).
const ImageBlock = ({ images }: { images: string }) => {
  // Parse each non-empty line; malformed JSON degrades to an empty src
  // instead of throwing.
  const formatData = useMemo(
    () =>
      images
        .split('\n')
        .filter((item) => item)
        .map((item) => {
          try {
            return JSON.parse(item) as { src: string };
          } catch (error) {
            return { src: '' };
          }
        }),
    [images]
  );
  return (
    <Grid gridTemplateColumns={['1fr', '1fr 1fr']} gap={4}>
      {formatData.map(({ src }) => {
        return (
          <Box key={src} rounded={'md'} flex={'1 0 0'} minW={'120px'}>
            <MdImage src={src} />
          </Box>
        );
      })}
    </Grid>
  );
};
export default ImageBlock;

View File

@ -24,7 +24,6 @@ const EChartsCodeBlock = dynamic(() => import('./img/EChartsCodeBlock'));
const ChatGuide = dynamic(() => import('./chat/Guide'));
const QuestionGuide = dynamic(() => import('./chat/QuestionGuide'));
const ImageBlock = dynamic(() => import('./chat/Image'));
export enum CodeClassName {
guide = 'guide',
@ -32,10 +31,16 @@ export enum CodeClassName {
mermaid = 'mermaid',
echarts = 'echarts',
quote = 'quote',
img = 'img'
files = 'files'
}
const Markdown = ({ source, isChatting = false }: { source: string; isChatting?: boolean }) => {
const Markdown = ({
source = '',
isChatting = false
}: {
source?: string;
isChatting?: boolean;
}) => {
const components = useMemo<any>(
() => ({
img: Image,
@ -91,9 +96,7 @@ const Code = React.memo(function Code(e: any) {
if (codeType === CodeClassName.echarts) {
return <EChartsCodeBlock code={strChildren} />;
}
if (codeType === CodeClassName.img) {
return <ImageBlock images={strChildren} />;
}
return (
<CodeLight className={className} inline={inline} match={match}>
{children}

View File

@ -103,7 +103,7 @@ const QuoteItem = ({
fontSize={'sm'}
whiteSpace={'pre-wrap'}
wordBreak={'break-all'}
_hover={{ '& .hover-data': { display: 'flex' } }}
_hover={{ '& .hover-data': { visibility: 'visible' } }}
h={'100%'}
display={'flex'}
flexDirection={'column'}
@ -218,7 +218,8 @@ const QuoteItem = ({
<MyTooltip label={t('core.dataset.data.Edit')}>
<Box
className="hover-data"
display={['flex', 'none']}
visibility={'hidden'}
display={'flex'}
alignItems={'center'}
justifyContent={'center'}
>
@ -245,7 +246,7 @@ const QuoteItem = ({
<Link
as={NextLink}
className="hover-data"
display={'none'}
visibility={'hidden'}
alignItems={'center'}
color={'primary.500'}
href={`/dataset/detail?datasetId=${quoteItem.datasetId}&currentTab=dataCard&collectionId=${quoteItem.collectionId}`}

View File

@ -86,10 +86,12 @@ const DatasetParamsModal = ({
const cfbBgDesc = watch('datasetSearchExtensionBg');
const chatModelSelectList = (() =>
llmModelList.map((item) => ({
value: item.model,
label: item.name
})))();
llmModelList
.filter((model) => model.usedInQueryExtension)
.map((item) => ({
value: item.model,
label: item.name
})))();
const searchModeList = useMemo(() => {
const list = Object.values(DatasetSearchModeMap);

View File

@ -13,7 +13,8 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import { streamFetch } from '@/web/common/api/fetch';
import MyTooltip from '@/components/MyTooltip';
import { useUserStore } from '@/web/support/user/useUserStore';
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
import ChatBox from '@/components/ChatBox';
import type { ComponentRef, StartChatFnProps } from '@/components/ChatBox/type.d';
import { getGuideModule } from '@fastgpt/global/core/module/utils';
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';

View File

@ -12,7 +12,10 @@ import type {
FlowModuleItemType,
FlowModuleTemplateType
} from '@fastgpt/global/core/module/type.d';
import type { FlowNodeChangeProps } from '@fastgpt/global/core/module/node/type';
import type {
FlowNodeChangeProps,
FlowNodeInputItemType
} from '@fastgpt/global/core/module/node/type';
import React, {
type SetStateAction,
type Dispatch,
@ -20,13 +23,18 @@ import React, {
useCallback,
createContext,
useRef,
useEffect
useEffect,
useMemo
} from 'react';
import { customAlphabet } from 'nanoid';
import { appModule2FlowEdge, appModule2FlowNode } from '@/utils/adapt';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { EDGE_TYPE, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleIOValueTypeEnum } from '@fastgpt/global/core/module/constants';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum
} from '@fastgpt/global/core/module/constants';
import { useTranslation } from 'next-i18next';
import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
import { EventNameEnum, eventBus } from '@/web/common/utils/eventbus';
@ -34,6 +42,14 @@ import { EventNameEnum, eventBus } from '@/web/common/utils/eventbus';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
type OnChange<ChangesType> = (changes: ChangesType[]) => void;
type requestEventType =
| 'onChangeNode'
| 'onCopyNode'
| 'onResetNode'
| 'onDelNode'
| 'onDelConnect'
| 'setNodes';
export type useFlowProviderStoreType = {
reactFlowWrapper: null | React.RefObject<HTMLDivElement>;
mode: 'app' | 'plugin';
@ -57,14 +73,16 @@ export type useFlowProviderStoreType = {
onDelConnect: (id: string) => void;
onConnect: ({ connect }: { connect: Connection }) => any;
initData: (modules: ModuleItemType[]) => void;
splitToolInputs: (
inputs: FlowNodeInputItemType[],
moduleId: string
) => {
isTool: boolean;
toolInputs: FlowNodeInputItemType[];
commonInputs: FlowNodeInputItemType[];
};
hasToolNode: boolean;
};
type requestEventType =
| 'onChangeNode'
| 'onCopyNode'
| 'onResetNode'
| 'onDelNode'
| 'onDelConnect'
| 'setNodes';
const StateContext = createContext<useFlowProviderStoreType>({
reactFlowWrapper: null,
@ -116,7 +134,18 @@ const StateContext = createContext<useFlowProviderStoreType>({
},
onResetNode: function (e): void {
throw new Error('Function not implemented.');
}
},
splitToolInputs: function (
inputs: FlowNodeInputItemType[],
moduleId: string
): {
isTool: boolean;
toolInputs: FlowNodeInputItemType[];
commonInputs: FlowNodeInputItemType[];
} {
throw new Error('Function not implemented.');
},
hasToolNode: false
});
export const useFlowProviderStore = () => useContext(StateContext);
@ -135,6 +164,10 @@ export const FlowProvider = ({
const [nodes = [], setNodes, onNodesChange] = useNodesState<FlowModuleItemType>([]);
const [edges, setEdges, onEdgesChange] = useEdgesState([]);
const hasToolNode = useMemo(() => {
return !!nodes.find((node) => node.data.flowType === FlowNodeTypeEnum.tools);
}, [nodes]);
const onFixView = useCallback(() => {
const btn = document.querySelector('.react-flow__controls-fitview') as HTMLButtonElement;
@ -180,10 +213,13 @@ export const FlowProvider = ({
const type = source?.outputs.find(
(output) => output.key === connect.sourceHandle
)?.valueType;
console.log(type);
if (source?.flowType === FlowNodeTypeEnum.classifyQuestion && !type) {
return ModuleIOValueTypeEnum.boolean;
}
if (source?.flowType === FlowNodeTypeEnum.tools) {
return ModuleIOValueTypeEnum.tools;
}
if (source?.flowType === FlowNodeTypeEnum.pluginInput) {
return source?.inputs.find((input) => input.key === connect.sourceHandle)?.valueType;
}
@ -193,14 +229,17 @@ export const FlowProvider = ({
const targetType = nodes
.find((node) => node.id === connect.target)
?.data?.inputs.find((input) => input.key === connect.targetHandle)?.valueType;
console.log(source, targetType);
if (!sourceType || !targetType) {
if (
connect.sourceHandle === ModuleOutputKeyEnum.selectedTools &&
connect.targetHandle === ModuleOutputKeyEnum.selectedTools
) {
} else if (!sourceType || !targetType) {
return toast({
status: 'warning',
title: t('app.Connection is invalid')
});
}
if (
} else if (
sourceType !== ModuleIOValueTypeEnum.any &&
targetType !== ModuleIOValueTypeEnum.any &&
sourceType !== targetType
@ -215,16 +254,13 @@ export const FlowProvider = ({
addEdge(
{
...connect,
type: EDGE_TYPE,
data: {
onDelete: onDelConnect
}
type: EDGE_TYPE
},
state
)
);
},
[nodes, onDelConnect, setEdges, t, toast]
[nodes, setEdges, t, toast]
);
const onDelNode = useCallback(
@ -359,6 +395,26 @@ export const FlowProvider = ({
[setNodes]
);
/* If the module is connected by a tool, the tool input and the normal input are separated */
const splitToolInputs = useCallback(
(inputs: FlowNodeInputItemType[], moduleId: string) => {
const isTool = !!edges.find(
(edge) =>
edge.targetHandle === ModuleOutputKeyEnum.selectedTools && edge.target === moduleId
);
return {
isTool,
toolInputs: inputs.filter((item) => isTool && item.toolDescription),
commonInputs: inputs.filter((item) => {
if (!isTool) return true;
return !item.toolDescription && item.key !== ModuleInputKeyEnum.switch;
})
};
},
[edges]
);
// reset a node data. delete edge and replace it
const onResetNode = useCallback(
({ id, module }: { id: string; module: FlowModuleTemplateType }) => {
@ -465,7 +521,9 @@ export const FlowProvider = ({
onDelEdge,
onDelConnect,
onConnect,
initData
initData,
splitToolInputs,
hasToolNode
};
return <StateContext.Provider value={value}>{children}</StateContext.Provider>;

View File

@ -17,7 +17,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { getPreviewPluginModule } from '@/web/core/plugin/api';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { moduleTemplatesList } from '@/web/core/modules/template/system';
import { moduleTemplatesList } from '@fastgpt/global/core/module/template/constants';
export type ModuleTemplateProps = {
templates: FlowModuleTemplateType[];

View File

@ -3,6 +3,7 @@ import { BezierEdge, getBezierPath, EdgeLabelRenderer, EdgeProps } from 'reactfl
import { onDelConnect, useFlowProviderStore } from '../../FlowProvider';
import { Flex } from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
const ButtonEdge = (props: EdgeProps) => {
const { nodes } = useFlowProviderStore();
@ -15,6 +16,8 @@ const ButtonEdge = (props: EdgeProps) => {
sourcePosition,
targetPosition,
selected,
sourceHandleId,
animated,
style = {}
} = props;
@ -34,6 +37,8 @@ const ButtonEdge = (props: EdgeProps) => {
targetPosition
});
const isToolEdge = sourceHandleId === ModuleOutputKeyEnum.selectedTools;
const memoEdgeLabel = useMemo(() => {
return (
<EdgeLabelRenderer>
@ -60,29 +65,31 @@ const ButtonEdge = (props: EdgeProps) => {
<MyIcon
name="closeSolid"
w={'100%'}
color={active ? 'primary.800' : 'myGray.400'}
></MyIcon>
</Flex>
<Flex
alignItems={'center'}
justifyContent={'center'}
position={'absolute'}
transform={`translate(-78%, -50%) translate(${targetX}px,${targetY}px)`}
pointerEvents={'all'}
w={'16px'}
h={'16px'}
bg={'white'}
zIndex={active ? 1000 : 0}
>
<MyIcon
name={'common/rightArrowLight'}
w={'100%'}
color={active ? 'primary.800' : 'myGray.400'}
color={active ? 'primary.700' : 'myGray.400'}
></MyIcon>
</Flex>
{!isToolEdge && (
<Flex
alignItems={'center'}
justifyContent={'center'}
position={'absolute'}
transform={`translate(-78%, -50%) translate(${targetX}px,${targetY}px)`}
pointerEvents={'all'}
w={'16px'}
h={'16px'}
bg={'white'}
zIndex={active ? 1000 : 0}
>
<MyIcon
name={'common/rightArrowLight'}
w={'100%'}
color={active ? 'primary.700' : 'myGray.400'}
></MyIcon>
</Flex>
)}
</EdgeLabelRenderer>
);
}, [id, labelX, labelY, active, targetX, targetY]);
}, [labelX, labelY, active, isToolEdge, targetX, targetY, id]);
const memoBezierEdge = useMemo(() => {
const edgeStyle: React.CSSProperties = {
@ -96,7 +103,7 @@ const ButtonEdge = (props: EdgeProps) => {
};
return <BezierEdge {...props} style={edgeStyle} />;
}, [props, active, style]);
}, [style, active, props]);
return (
<>

View File

@ -4,7 +4,7 @@ import { BoxProps } from '@chakra-ui/react';
const Container = ({ children, ...props }: BoxProps) => {
return (
<Box px={4} py={3} position={'relative'} {...props}>
<Box px={'16px'} py={'10px'} position={'relative'} {...props}>
{children}
</Box>
);

View File

@ -2,7 +2,13 @@ import React from 'react';
import { Box, useTheme } from '@chakra-ui/react';
import { useTranslation } from 'next-i18next';
const Divider = ({ text }: { text?: 'Input' | 'Output' | string }) => {
const Divider = ({
text,
showBorderBottom = true
}: {
text?: 'Input' | 'Output' | string;
showBorderBottom?: boolean;
}) => {
const theme = useTheme();
const { t } = useTranslation();
@ -14,10 +20,10 @@ const Divider = ({ text }: { text?: 'Input' | 'Output' | string }) => {
bg={'#f8f8f8'}
py={isDivider ? '0' : 2}
borderTop={theme.borders.base}
borderBottom={theme.borders.base}
borderBottom={showBorderBottom ? theme.borders.base : 0}
fontSize={'lg'}
>
{text ? t(`common.${text}`) : ''}
{text}
</Box>
);
};

View File

@ -5,14 +5,29 @@ import { FlowModuleItemType } from '@fastgpt/global/core/module/type.d';
import Container from '../modules/Container';
import RenderInput from '../render/RenderInput';
import RenderOutput from '../render/RenderOutput';
import { useFlowProviderStore } from '../../FlowProvider';
import Divider from '../modules/Divider';
import RenderToolInput from '../render/RenderToolInput';
import { useTranslation } from 'next-i18next';
const NodeAnswer = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
const { t } = useTranslation();
const { moduleId, inputs, outputs } = data;
const { splitToolInputs } = useFlowProviderStore();
const { toolInputs, commonInputs } = splitToolInputs(inputs, moduleId);
return (
<NodeCard minW={'400px'} selected={selected} {...data}>
<Container borderTop={'2px solid'} borderTopColor={'myGray.200'}>
<RenderInput moduleId={moduleId} flowInputList={inputs} />
{toolInputs.length > 0 && (
<>
<Divider text={t('core.module.tool.Tool input')} />
<Container>
<RenderToolInput moduleId={moduleId} inputs={toolInputs} />
</Container>
</>
)}
<RenderInput moduleId={moduleId} flowInputList={commonInputs} />
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
</Container>
</NodeCard>

View File

@ -23,7 +23,7 @@ const NodeCQNode = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
return (
<NodeCard minW={'400px'} selected={selected} {...data}>
<Divider text="Input" />
<Divider text={t('common.Input')} />
<Container>
<RenderInput
moduleId={moduleId}

View File

@ -26,117 +26,132 @@ import ExtractFieldModal, { defaultField } from './ExtractFieldModal';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { FlowNodeOutputTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleIOValueTypeEnum } from '@fastgpt/global/core/module/constants';
import { onChangeNode } from '../../../FlowProvider';
import { onChangeNode, useFlowProviderStore } from '../../../FlowProvider';
import RenderToolInput from '../../render/RenderToolInput';
const NodeExtract = ({ data }: NodeProps<FlowModuleItemType>) => {
const { inputs, outputs, moduleId } = data;
const { splitToolInputs } = useFlowProviderStore();
const { toolInputs, commonInputs } = splitToolInputs(inputs, moduleId);
const { t } = useTranslation();
const [editExtractFiled, setEditExtractField] = useState<ContextExtractAgentItemType>();
return (
<NodeCard minW={'400px'} {...data}>
<Divider text="Input" />
<Container>
<RenderInput
moduleId={moduleId}
flowInputList={inputs}
CustomComponent={{
[ModuleInputKeyEnum.extractKeys]: ({
value: extractKeys = [],
...props
}: {
value?: ContextExtractAgentItemType[];
}) => (
<Box>
<Flex alignItems={'center'}>
<Box flex={'1 0 0'}>{t('core.module.extract.Target field')}</Box>
<Button
size={'sm'}
variant={'whitePrimary'}
leftIcon={<AddIcon fontSize={'10px'} />}
onClick={() => setEditExtractField(defaultField)}
{toolInputs.length > 0 && (
<>
<Divider text={t('core.module.tool.Tool input')} />
<Container>
<RenderToolInput moduleId={moduleId} inputs={toolInputs} />
</Container>
</>
)}
<>
<Divider text={t('common.Input')} />
<Container>
<RenderInput
moduleId={moduleId}
flowInputList={commonInputs}
CustomComponent={{
[ModuleInputKeyEnum.extractKeys]: ({
value: extractKeys = [],
...props
}: {
value?: ContextExtractAgentItemType[];
}) => (
<Box>
<Flex alignItems={'center'}>
<Box flex={'1 0 0'}>{t('core.module.extract.Target field')}</Box>
<Button
size={'sm'}
variant={'whitePrimary'}
leftIcon={<AddIcon fontSize={'10px'} />}
onClick={() => setEditExtractField(defaultField)}
>
{t('core.module.extract.Add field')}
</Button>
</Flex>
<Box
mt={2}
borderRadius={'md'}
overflow={'hidden'}
borderWidth={'1px'}
borderBottom="none"
>
{t('core.module.extract.Add field')}
</Button>
</Flex>
<Box
mt={2}
borderRadius={'md'}
overflow={'hidden'}
borderWidth={'1px'}
borderBottom="none"
>
<TableContainer>
<Table bg={'white'}>
<Thead>
<Tr>
<Th bg={'myGray.50'}> key</Th>
<Th bg={'myGray.50'}></Th>
<Th bg={'myGray.50'}></Th>
<Th bg={'myGray.50'}></Th>
</Tr>
</Thead>
<Tbody>
{extractKeys.map((item, index) => (
<Tr
key={index}
position={'relative'}
whiteSpace={'pre-wrap'}
wordBreak={'break-all'}
>
<Td>{item.key}</Td>
<Td>{item.desc}</Td>
<Td>{item.required ? '✔' : ''}</Td>
<Td whiteSpace={'nowrap'}>
<MyIcon
mr={3}
name={'common/settingLight'}
w={'16px'}
cursor={'pointer'}
onClick={() => {
setEditExtractField(item);
}}
/>
<MyIcon
name={'delete'}
w={'16px'}
cursor={'pointer'}
onClick={() => {
onChangeNode({
moduleId,
type: 'updateInput',
key: ModuleInputKeyEnum.extractKeys,
value: {
...props,
value: extractKeys.filter(
(extract) => item.key !== extract.key
)
}
});
onChangeNode({
moduleId,
type: 'delOutput',
key: item.key
});
}}
/>
</Td>
<TableContainer>
<Table bg={'white'}>
<Thead>
<Tr>
<Th bg={'myGray.50'}> key</Th>
<Th bg={'myGray.50'}></Th>
<Th bg={'myGray.50'}></Th>
<Th bg={'myGray.50'}></Th>
</Tr>
))}
</Tbody>
</Table>
</TableContainer>
</Thead>
<Tbody>
{extractKeys.map((item, index) => (
<Tr
key={index}
position={'relative'}
whiteSpace={'pre-wrap'}
wordBreak={'break-all'}
>
<Td>{item.key}</Td>
<Td>{item.desc}</Td>
<Td>{item.required ? '✔' : ''}</Td>
<Td whiteSpace={'nowrap'}>
<MyIcon
mr={3}
name={'common/settingLight'}
w={'16px'}
cursor={'pointer'}
onClick={() => {
setEditExtractField(item);
}}
/>
<MyIcon
name={'delete'}
w={'16px'}
cursor={'pointer'}
onClick={() => {
onChangeNode({
moduleId,
type: 'updateInput',
key: ModuleInputKeyEnum.extractKeys,
value: {
...props,
value: extractKeys.filter(
(extract) => item.key !== extract.key
)
}
});
onChangeNode({
moduleId,
type: 'delOutput',
key: item.key
});
}}
/>
</Td>
</Tr>
))}
</Tbody>
</Table>
</TableContainer>
</Box>
</Box>
</Box>
)
}}
/>
</Container>
<Divider text="Output" />
<Container>
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
</Container>
)
}}
/>
</Container>
</>
<>
<Divider text={t('common.Output')} />
<Container>
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
</Container>
</>
{!!editExtractFiled && (
<ExtractFieldModal

View File

@ -1,7 +1,7 @@
import React from 'react';
import MyModal from '@/components/MyModal';
import { ModalBody, Button, ModalFooter, useDisclosure, Textarea, Box } from '@chakra-ui/react';
import { useTranslation } from 'react-i18next';
import { useTranslation } from 'next-i18next';
import { onChangeNode } from '../../../FlowProvider';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { FlowNodeInputItemType } from '@fastgpt/global/core/module/node/type';

Some files were not shown because too many files have changed in this diff Show More