feat: sub tool response

This commit is contained in:
archer 2025-09-11 10:12:14 +08:00
parent 24cf32f8b0
commit 92bdbb228a
No known key found for this signature in database
GPG Key ID: 4446499B846D4A9E
39 changed files with 798 additions and 733 deletions

View File

@ -7,7 +7,7 @@ import type {
UserChatItemType,
UserChatItemValueItemType
} from '../../core/chat/type.d';
import { ChatFileTypeEnum, ChatItemValueTypeEnum, ChatRoleEnum } from '../../core/chat/constants';
import { ChatFileTypeEnum, ChatRoleEnum } from '../../core/chat/constants';
import type {
ChatCompletionContentPart,
ChatCompletionFunctionMessageParam,
@ -62,13 +62,13 @@ export const chats2GPTMessages = ({
} else if (item.obj === ChatRoleEnum.Human) {
const value = item.value
.map((item) => {
if (item.type === ChatItemValueTypeEnum.text) {
if (item.text) {
return {
type: 'text',
text: item.text?.content || ''
};
}
if (item.type === ChatItemValueTypeEnum.file) {
if (item.file) {
if (item.file?.type === ChatFileTypeEnum.image) {
return {
type: 'image_url',
@ -98,9 +98,9 @@ export const chats2GPTMessages = ({
} else {
const aiResults: ChatCompletionMessageParam[] = [];
//AI
//AI: 只需要把根节点转化即可
item.value.forEach((value, i) => {
if (value.type === ChatItemValueTypeEnum.tool && value.tools && reserveTool) {
if (value.tools && reserveTool) {
const tool_calls: ChatCompletionMessageToolCall[] = [];
const toolResponse: ChatCompletionToolMessageParam[] = [];
value.tools.forEach((tool) => {
@ -124,21 +124,14 @@ export const chats2GPTMessages = ({
tool_calls
});
aiResults.push(...toolResponse);
} else if (
value.type === ChatItemValueTypeEnum.text &&
typeof value.text?.content === 'string'
) {
} else if (typeof value.text?.content === 'string') {
if (!value.text.content && item.value.length > 1) {
return;
}
// Concat text
const lastValue = item.value[i - 1];
const lastResult = aiResults[aiResults.length - 1];
if (
lastValue &&
lastValue.type === ChatItemValueTypeEnum.text &&
typeof lastResult?.content === 'string'
) {
if (lastValue && typeof lastResult?.content === 'string') {
lastResult.content += value.text.content;
} else {
aiResults.push({
@ -147,7 +140,7 @@ export const chats2GPTMessages = ({
content: value.text.content
});
}
} else if (value.type === ChatItemValueTypeEnum.interactive) {
} else if (value.interactive) {
aiResults.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.Assistant,
@ -187,180 +180,175 @@ export const GPTMessages2Chats = ({
.map((item) => {
const obj = GPT2Chat[item.role];
const value = (() => {
if (
obj === ChatRoleEnum.System &&
item.role === ChatCompletionRequestMessageRoleEnum.System
) {
const value: SystemChatItemValueItemType[] = [];
if (
obj === ChatRoleEnum.System &&
item.role === ChatCompletionRequestMessageRoleEnum.System
) {
const value: SystemChatItemValueItemType[] = [];
if (Array.isArray(item.content)) {
item.content.forEach((item) => [
if (Array.isArray(item.content)) {
item.content.forEach((item) => [
value.push({
text: {
content: item.text
}
})
]);
} else {
value.push({
text: {
content: item.content
}
});
}
return {
dataId: item.dataId,
obj,
hideInUI: item.hideInUI,
value
};
} else if (
obj === ChatRoleEnum.Human &&
item.role === ChatCompletionRequestMessageRoleEnum.User
) {
const value: UserChatItemValueItemType[] = [];
if (typeof item.content === 'string') {
value.push({
text: {
content: item.content
}
});
} else if (Array.isArray(item.content)) {
item.content.forEach((item) => {
if (item.type === 'text') {
value.push({
type: ChatItemValueTypeEnum.text,
text: {
content: item.text
}
})
]);
} else {
value.push({
type: ChatItemValueTypeEnum.text,
text: {
content: item.content
}
});
}
return value;
} else if (
obj === ChatRoleEnum.Human &&
item.role === ChatCompletionRequestMessageRoleEnum.User
) {
const value: UserChatItemValueItemType[] = [];
if (typeof item.content === 'string') {
value.push({
type: ChatItemValueTypeEnum.text,
text: {
content: item.content
}
});
} else if (Array.isArray(item.content)) {
item.content.forEach((item) => {
if (item.type === 'text') {
value.push({
type: ChatItemValueTypeEnum.text,
text: {
content: item.text
}
});
} else if (item.type === 'image_url') {
value.push({
//@ts-ignore
type: ChatItemValueTypeEnum.file,
file: {
type: ChatFileTypeEnum.image,
name: '',
url: item.image_url.url,
key: item.key
}
});
} else if (item.type === 'file_url') {
value.push({
// @ts-ignore
type: ChatItemValueTypeEnum.file,
file: {
type: ChatFileTypeEnum.file,
name: item.name,
url: item.url,
key: item.key
}
});
}
});
}
return value;
} else if (
obj === ChatRoleEnum.AI &&
item.role === ChatCompletionRequestMessageRoleEnum.Assistant
) {
const value: AIChatItemValueItemType[] = [];
if (typeof item.reasoning_text === 'string' && item.reasoning_text) {
value.push({
type: ChatItemValueTypeEnum.reasoning,
reasoning: {
content: item.reasoning_text
}
});
}
if (item.tool_calls && reserveTool) {
// save tool calls
const toolCalls = item.tool_calls as ChatCompletionMessageToolCall[];
value.push({
//@ts-ignore
type: ChatItemValueTypeEnum.tool,
tools: toolCalls.map((tool) => {
let toolResponse =
messages.find(
(msg) =>
msg.role === ChatCompletionRequestMessageRoleEnum.Tool &&
msg.tool_call_id === tool.id
)?.content || '';
toolResponse =
typeof toolResponse === 'string' ? toolResponse : JSON.stringify(toolResponse);
const toolInfo = getToolInfo?.(tool.function.name);
return {
id: tool.id,
toolName: toolInfo?.name || '',
toolAvatar: toolInfo?.avatar || '',
functionName: tool.function.name,
params: tool.function.arguments,
response: toolResponse as string
};
})
});
}
if (item.function_call && reserveTool) {
const functionCall = item.function_call as ChatCompletionMessageFunctionCall;
const functionResponse = messages.find(
(msg) =>
msg.role === ChatCompletionRequestMessageRoleEnum.Function &&
msg.name === item.function_call?.name
) as ChatCompletionFunctionMessageParam;
if (functionResponse) {
value.push({
//@ts-ignore
type: ChatItemValueTypeEnum.tool,
tools: [
{
id: functionCall.id || '',
toolName: functionCall.toolName || '',
toolAvatar: functionCall.toolAvatar || '',
functionName: functionCall.name,
params: functionCall.arguments,
response: functionResponse.content || ''
}
]
});
}
}
if (item.interactive) {
value.push({
//@ts-ignore
type: ChatItemValueTypeEnum.interactive,
interactive: item.interactive
});
}
if (typeof item.content === 'string' && item.content) {
const lastValue = value[value.length - 1];
if (lastValue && lastValue.type === ChatItemValueTypeEnum.text && lastValue.text) {
lastValue.text.content += item.content;
} else {
} else if (item.type === 'image_url') {
value.push({
type: ChatItemValueTypeEnum.text,
text: {
content: item.content
file: {
type: ChatFileTypeEnum.image,
name: '',
url: item.image_url.url,
key: item.key
}
});
} else if (item.type === 'file_url') {
value.push({
file: {
type: ChatFileTypeEnum.file,
name: item.name,
url: item.url,
key: item.key
}
});
}
}
});
}
return {
dataId: item.dataId,
obj,
hideInUI: item.hideInUI,
value
};
} else if (
obj === ChatRoleEnum.AI &&
item.role === ChatCompletionRequestMessageRoleEnum.Assistant
) {
const value: AIChatItemValueItemType[] = [];
return value;
if (typeof item.reasoning_text === 'string' && item.reasoning_text) {
value.push({
reasoning: {
content: item.reasoning_text
}
});
}
if (item.tool_calls && reserveTool) {
// save tool calls
const toolCalls = item.tool_calls as ChatCompletionMessageToolCall[];
value.push({
tools: toolCalls.map((tool) => {
let toolResponse =
messages.find(
(msg) =>
msg.role === ChatCompletionRequestMessageRoleEnum.Tool &&
msg.tool_call_id === tool.id
)?.content || '';
toolResponse =
typeof toolResponse === 'string' ? toolResponse : JSON.stringify(toolResponse);
const toolInfo = getToolInfo?.(tool.function.name);
return {
id: tool.id,
toolName: toolInfo?.name || '',
toolAvatar: toolInfo?.avatar || '',
functionName: tool.function.name,
params: tool.function.arguments,
response: toolResponse as string
};
})
});
}
if (item.function_call && reserveTool) {
const functionCall = item.function_call as ChatCompletionMessageFunctionCall;
const functionResponse = messages.find(
(msg) =>
msg.role === ChatCompletionRequestMessageRoleEnum.Function &&
msg.name === item.function_call?.name
) as ChatCompletionFunctionMessageParam;
if (functionResponse) {
value.push({
tools: [
{
id: functionCall.id || '',
toolName: functionCall.toolName || '',
toolAvatar: functionCall.toolAvatar || '',
functionName: functionCall.name,
params: functionCall.arguments,
response: functionResponse.content || ''
}
]
});
}
}
if (item.interactive) {
value.push({
interactive: item.interactive
});
}
if (typeof item.content === 'string' && item.content) {
const lastValue = value[value.length - 1];
if (lastValue && lastValue.text) {
lastValue.text.content += item.content;
} else {
value.push({
text: {
content: item.content
}
});
}
}
return [];
})();
return {
dataId: item.dataId,
obj,
hideInUI: item.hideInUI,
value
};
}
return {
dataId: item.dataId,
obj,
hideInUI: item.hideInUI,
value
} as ChatItemType;
value: []
};
})
.filter((item) => item.value.length > 0);
@ -387,7 +375,7 @@ export const chatValue2RuntimePrompt = (value: ChatItemValueItemType[]): Runtime
text: ''
};
value.forEach((item) => {
if (item.type === 'file' && item.file) {
if ('file' in item && item.file) {
prompt.files.push(item.file);
} else if (item.text) {
prompt.text += item.text.content;
@ -403,14 +391,12 @@ export const runtimePrompt2ChatsValue = (
if (prompt.files) {
prompt.files.forEach((file) => {
value.push({
type: ChatItemValueTypeEnum.file,
file
});
});
}
if (prompt.text) {
value.push({
type: ChatItemValueTypeEnum.text,
text: {
content: prompt.text
}
@ -424,7 +410,7 @@ export const getSystemPrompt_ChatItemType = (prompt?: string): ChatItemType[] =>
return [
{
obj: ChatRoleEnum.System,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: prompt } }]
value: [{ text: { content: prompt } }]
}
];
};

View File

@ -21,13 +21,6 @@ export enum ChatFileTypeEnum {
image = 'image',
file = 'file'
}
export enum ChatItemValueTypeEnum {
text = 'text',
file = 'file',
tool = 'tool',
interactive = 'interactive',
reasoning = 'reasoning'
}
export enum ChatSourceEnum {
test = 'test',

View File

@ -1,12 +1,6 @@
import { ClassifyQuestionAgentItemType } from '../workflow/template/system/classifyQuestion/type';
import type { SearchDataResponseItemType } from '../dataset/type';
import type {
ChatFileTypeEnum,
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatSourceEnum,
ChatStatusEnum
} from './constants';
import type { ChatFileTypeEnum, ChatRoleEnum, ChatSourceEnum, ChatStatusEnum } from './constants';
import type { FlowNodeTypeEnum } from '../workflow/node/constant';
import type { NodeInputKeyEnum, NodeOutputKeyEnum } from '../workflow/constants';
import type { DispatchNodeResponseKeyEnum } from '../workflow/runtime/constants';
@ -20,6 +14,7 @@ import type { WorkflowInteractiveResponseType } from '../workflow/template/syste
import type { FlowNodeInputItemType } from '../workflow/type/io';
import type { FlowNodeTemplateType } from '../workflow/type/node.d';
import { ChatCompletionMessageParam } from '../ai/type';
import type { RequireOnlyOne } from '../../common/type/utils';
/* --------- chat ---------- */
export type ChatSchemaType = {
@ -59,7 +54,6 @@ export type UserChatItemFileItemType = {
url: string;
};
export type UserChatItemValueItemType = {
type: ChatItemValueTypeEnum.text | ChatItemValueTypeEnum.file;
text?: {
content: string;
};
@ -72,7 +66,6 @@ export type UserChatItemType = {
};
export type SystemChatItemValueItemType = {
type: ChatItemValueTypeEnum.text;
text?: {
content: string;
};
@ -83,12 +76,16 @@ export type SystemChatItemType = {
};
export type AIChatItemValueItemType = {
id?: string; // Client concat stream response
type:
| ChatItemValueTypeEnum.text
| ChatItemValueTypeEnum.reasoning
| ChatItemValueTypeEnum.tool
| ChatItemValueTypeEnum.interactive;
id?: string;
} & RequireOnlyOne<{
text: {
content: string;
};
reasoning: {
content: string;
};
tool: ToolModuleResponseItemType;
interactive: WorkflowInteractiveResponseType;
text?: {
content: string;
@ -96,12 +93,15 @@ export type AIChatItemValueItemType = {
reasoning?: {
content: string;
};
tools?: ToolModuleResponseItemType[];
interactive?: WorkflowInteractiveResponseType;
};
// Abandon
tools?: ToolModuleResponseItemType[];
}>;
export type AIChatItemType = {
obj: ChatRoleEnum.AI;
value: AIChatItemValueItemType[];
subAppsValue?: Record<string, AIChatItemValueItemType[]>;
memories?: Record<string, any>;
userGoodFeedback?: string;
userBadFeedback?: string;

View File

@ -1,6 +1,6 @@
import { type DispatchNodeResponseType } from '../workflow/runtime/type';
import { FlowNodeTypeEnum } from '../workflow/node/constant';
import { ChatItemValueTypeEnum, ChatRoleEnum, ChatSourceEnum } from './constants';
import { ChatRoleEnum, ChatSourceEnum } from './constants';
import {
type AIChatItemValueItemType,
type ChatHistoryItemResType,
@ -24,7 +24,7 @@ export const concatHistories = (histories1: ChatItemType[], histories2: ChatItem
export const getChatTitleFromChatMessage = (message?: ChatItemType, defaultValue = '新对话') => {
// @ts-ignore
const textMsg = message?.value.find((item) => item.type === ChatItemValueTypeEnum.text);
const textMsg = message?.value.find((item) => 'text' in item && item.text);
if (textMsg?.text?.content) {
return textMsg.text.content.slice(0, 20);
@ -168,14 +168,14 @@ export const removeAIResponseCite = <T extends AIChatItemValueItemType[] | strin
export const removeEmptyUserInput = (input?: UserChatItemValueItemType[]) => {
return (
input?.filter((item) => {
if (item.type === ChatItemValueTypeEnum.text && !item.text?.content?.trim()) {
return false;
if (item.text?.content?.trim()) {
return true;
}
// type 为 'file' 时 key 和 url 不能同时为空
if (item.type === ChatItemValueTypeEnum.file && !item.file?.key && !item.file?.url) {
if (!item.file?.key && !item.file?.url) {
return false;
}
return true;
return false;
}) || []
);
};

View File

@ -11,6 +11,7 @@ export enum SseResponseEventEnum {
toolCall = 'toolCall', // tool start
toolParams = 'toolParams', // tool params return
toolResponse = 'toolResponse', // tool response return
flowResponses = 'flowResponses', // sse response request
updateVariables = 'updateVariables',

View File

@ -1,6 +1,6 @@
import json5 from 'json5';
import { replaceVariable, valToStr } from '../../../common/string/tools';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '../../../core/chat/constants';
import { ChatRoleEnum } from '../../../core/chat/constants';
import type { ChatItemType, NodeOutputItemType } from '../../../core/chat/type';
import { ChatCompletionRequestMessageRoleEnum } from '../../ai/constants';
import {
@ -173,11 +173,7 @@ export const getLastInteractiveValue = (
if (lastAIMessage) {
const lastValue = lastAIMessage.value[lastAIMessage.value.length - 1];
if (
!lastValue ||
lastValue.type !== ChatItemValueTypeEnum.interactive ||
!lastValue.interactive
) {
if (!lastValue || !lastValue.interactive) {
return;
}

View File

@ -2,8 +2,11 @@ import { addLog } from '../../common/system/log';
import { MongoChatItem } from './chatItemSchema';
import { MongoChat } from './chatSchema';
import axios from 'axios';
import { type AIChatItemType, type UserChatItemType } from '@fastgpt/global/core/chat/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import {
type AIChatItemType,
type ChatItemType,
type UserChatItemType
} from '@fastgpt/global/core/chat/type';
export type Metadata = {
[key: string]: {
@ -94,9 +97,9 @@ const pushChatLogInternal = async ({
// Pop last two items
const question = chatItemHuman.value
.map((item) => {
if (item.type === ChatItemValueTypeEnum.text) {
if (item.text) {
return item.text?.content;
} else if (item.type === ChatItemValueTypeEnum.file) {
} else if (item.file) {
if (item.file?.type === 'image') {
return `![${item.file?.name}](${item.file?.url})`;
}

View File

@ -1,7 +1,7 @@
import type { AIChatItemType, UserChatItemType } from '@fastgpt/global/core/chat/type.d';
import { MongoApp } from '../app/schema';
import type { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { MongoChatItem } from './chatItemSchema';
import { MongoChat } from './chatSchema';
import { addLog } from '../../common/system/log';
@ -44,7 +44,7 @@ type Props = {
const beforProcess = (props: Props) => {
// Remove url
props.userContent.value.forEach((item) => {
if (item.type === ChatItemValueTypeEnum.file && item.file?.key) {
if (item.file?.key) {
item.file.url = '';
}
});
@ -61,7 +61,7 @@ const afterProcess = async ({
.map((item) => {
if (item.value && Array.isArray(item.value)) {
return item.value.map((valueItem) => {
if (valueItem.type === ChatItemValueTypeEnum.file && valueItem.file?.key) {
if ('file' in valueItem && valueItem.file?.key) {
return valueItem.file.key;
}
});
@ -349,11 +349,7 @@ export const updateInteractiveChat = async (props: Props) => {
// Update interactive value
const interactiveValue = chatItem.value[chatItem.value.length - 1];
if (
!interactiveValue ||
interactiveValue.type !== ChatItemValueTypeEnum.interactive ||
!interactiveValue.interactive?.params
) {
if (!interactiveValue || !interactiveValue.interactive?.params) {
return;
}

View File

@ -1,4 +1,4 @@
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { getS3ChatSource } from '../../common/s3/sources/chat';
import type { FlowNodeInputItemType } from '@fastgpt/global/core/workflow/type/io';
@ -10,7 +10,7 @@ export const addPreviewUrlToChatItems = async (
) => {
async function addToChatflow(item: ChatItemType) {
for await (const value of item.value) {
if (value.type === ChatItemValueTypeEnum.file && value.file && value.file.key) {
if ('file' in value && value.file?.key) {
value.file.url = await s3ChatSource.createGetChatFileURL({
key: value.file.key,
external: true
@ -23,7 +23,7 @@ export const addPreviewUrlToChatItems = async (
for (let j = 0; j < item.value.length; j++) {
const value = item.value[j];
if (value.type !== ChatItemValueTypeEnum.text) continue;
if (!('text' in value)) continue;
const inputValueString = value.text?.content || '';
const parsedInputValue = JSON.parse(inputValueString) as FlowNodeInputItemType[];

View File

@ -165,6 +165,7 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
return chats2GPTMessages({ messages: chatHistories, reserveId: false });
})();
const userMessages = chats2GPTMessages({
messages: [
{
@ -224,12 +225,11 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
event: SseResponseEventEnum.toolCall,
data: {
tool: {
id: call.id,
id: `${nodeId}/${call.function.name}`,
toolName: toolNode?.name || call.function.name,
toolAvatar: toolNode?.avatar || '',
functionName: call.function.name,
params: call.function.arguments ?? '',
response: ''
params: call.function.arguments ?? ''
}
}
});
@ -240,11 +240,7 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
event: SseResponseEventEnum.toolParams,
data: {
tool: {
id: call.id,
toolName: '',
toolAvatar: '',
params,
response: ''
params
}
}
});
@ -253,15 +249,31 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
handleToolResponse: async ({ call, messages }) => {
const toolId = call.function.name;
const childWorkflowStreamResponse = getWorkflowChildResponseWrite({
subAppId: `${nodeId}/${toolId}`,
id: call.id,
fn: workflowStreamResponse
});
const onReasoning = ({ text }: { text: string }) => {
childWorkflowStreamResponse?.({
event: SseResponseEventEnum.answer,
data: textAdaptGptResponse({
reasoning_content: text
})
});
};
const onStreaming = ({ text }: { text: string }) => {
childWorkflowStreamResponse?.({
event: SseResponseEventEnum.answer,
data: textAdaptGptResponse({
text
})
});
};
const {
response,
usages = [],
isEnd,
streamResponse = true
isEnd
} = await (async () => {
try {
if (toolId === SubAppIds.stop) {
@ -278,28 +290,14 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
temperature,
top_p: aiChatTopP,
stream,
onStreaming({ text }) {
//TODO: 需要一个新的 plan sse event
childWorkflowStreamResponse?.({
event: SseResponseEventEnum.toolResponse,
data: {
tool: {
id: call.id,
toolName: '',
toolAvatar: '',
params: '',
response: text
}
}
});
}
onReasoning,
onStreaming
});
return {
response,
usages,
isEnd: false,
streamResponse: false
isEnd: false
};
} else if (toolId === SubAppIds.model) {
const { systemPrompt, task } = parseToolArgs<{
@ -314,26 +312,13 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
stream,
systemPrompt,
task,
onStreaming({ text }) {
childWorkflowStreamResponse?.({
event: SseResponseEventEnum.toolResponse,
data: {
tool: {
id: call.id,
toolName: '',
toolAvatar: '',
params: '',
response: text
}
}
});
}
onReasoning,
onStreaming
});
return {
response,
usages,
isEnd: false,
streamResponse: false
isEnd: false
};
} else if (toolId === SubAppIds.fileRead) {
const { file_indexes } = parseToolArgs<{
@ -434,12 +419,11 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
) {
const fn =
node.flowNodeType === FlowNodeTypeEnum.appModule ? dispatchApp : dispatchPlugin;
console.log(requestParams, 22);
const { response, usages } = await fn({
...props,
node,
// stream: false,
workflowStreamResponse: undefined,
workflowStreamResponse: childWorkflowStreamResponse,
callParams: {
appId: node.pluginId,
version: node.version,
@ -470,20 +454,16 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
})();
// Push stream response
if (streamResponse) {
childWorkflowStreamResponse?.({
event: SseResponseEventEnum.toolResponse,
data: {
tool: {
id: call.id,
toolName: '',
toolAvatar: '',
params: '',
response
}
workflowStreamResponse?.({
id: call.id,
event: SseResponseEventEnum.toolResponse,
data: {
tool: {
id: call.id,
response
}
});
}
}
});
// TODO: 推送账单

View File

@ -15,7 +15,6 @@ import {
} from '@fastgpt/global/core/workflow/runtime/utils';
import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
import { getUserChatInfoAndAuthTeamPoints } from '../../../../../../../support/permission/auth/team';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { getChildAppRuntimeById } from '../../../../../../app/plugin/controller';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { getPluginRunUserQuery } from '@fastgpt/global/core/workflow/utils';
@ -92,7 +91,6 @@ export const dispatchApp = async (props: Props): Promise<DispatchSubAppResponse>
variables: childrenRunVariables,
query: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: userChatInput
}

View File

@ -14,6 +14,7 @@ type ModelAgentConfig = {
type DispatchModelAgentProps = ModelAgentConfig & {
systemPrompt: string;
task: string;
onReasoning: ResponseEvents['onReasoning'];
onStreaming: ResponseEvents['onStreaming'];
};
@ -29,6 +30,7 @@ export async function dispatchModelAgent({
stream,
systemPrompt,
task,
onReasoning,
onStreaming
}: DispatchModelAgentProps): Promise<DispatchPlanAgentResponse> {
const modelData = getLLMModel(model);
@ -56,6 +58,7 @@ export async function dispatchModelAgent({
top_p,
stream
},
onReasoning,
onStreaming
});

View File

@ -20,6 +20,7 @@ type PlanAgentConfig = {
type DispatchPlanAgentProps = PlanAgentConfig & {
messages: ChatCompletionMessageParam[];
tools: ChatCompletionTool[];
onReasoning: ResponseEvents['onReasoning'];
onStreaming: ResponseEvents['onStreaming'];
};
@ -36,6 +37,7 @@ export const dispatchPlanAgent = async ({
temperature,
top_p,
stream,
onReasoning,
onStreaming
}: DispatchPlanAgentProps): Promise<DispatchPlanAgentResponse> => {
const modelData = getLLMModel(model);
@ -63,6 +65,7 @@ export const dispatchPlanAgent = async ({
toolCallMode: modelData.toolChoice ? 'toolChoice' : 'prompt',
parallel_tool_calls: true
},
onReasoning,
onStreaming
});

View File

@ -1,6 +1,6 @@
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/workflow/template/system/classifyQuestion/type';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
@ -114,7 +114,6 @@ const completions = async ({
obj: ChatRoleEnum.System,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: getCQSystemPrompt({
systemPrompt,
@ -132,7 +131,6 @@ const completions = async ({
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: userChatInput
}

View File

@ -1,7 +1,7 @@
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { filterGPTMessageByMaxContext } from '../../../ai/llm/utils';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/workflow/template/system/contextExtract/type';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import {
@ -196,7 +196,6 @@ const toolChoice = async (props: ActionProps) => {
obj: ChatRoleEnum.System,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: getExtractJsonToolPrompt({
systemPrompt: description,
@ -211,7 +210,6 @@ const toolChoice = async (props: ActionProps) => {
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content
}
@ -300,7 +298,6 @@ const completions = async (props: ActionProps) => {
obj: ChatRoleEnum.System,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: getExtractJsonPrompt({
systemPrompt: description,
@ -316,7 +313,6 @@ const completions = async (props: ActionProps) => {
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content
}

View File

@ -17,7 +17,6 @@ import { formatToolResponse, initToolCallEdges, initToolNodes, parseToolArgs } f
import { computedMaxToken } from '../../../../ai/utils';
import { sliceStrStartEnd } from '@fastgpt/global/common/string/tools';
import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { createLLMResponse } from '../../../../ai/llm/request';
import { toolValueTypeList, valueTypeJsonSchemaMap } from '@fastgpt/global/core/workflow/constants';
@ -338,12 +337,12 @@ export const runToolCall = async (
}
});
},
onToolParam({ tool, params }) {
onToolParam({ call, params }) {
workflowStreamResponse?.({
event: SseResponseEventEnum.toolParams,
data: {
tool: {
id: tool.id,
id: call.id,
toolName: '',
toolAvatar: '',
params,
@ -474,7 +473,7 @@ export const runToolCall = async (
const toolChildAssistants = flatToolsResponseData
.map((item) => item.assistantResponses)
.flat()
.filter((item) => item.type !== ChatItemValueTypeEnum.interactive); // 交互节点留着下次记录
.filter((item) => !item.interactive); // 交互节点留着下次记录
const concatAssistantResponses = [
...assistantResponses,
...toolNodeAssistant.value,

View File

@ -1,5 +1,4 @@
import { replaceVariable, sliceStrStartEnd } from '@fastgpt/global/common/string/tools';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import type {
AIChatItemValueItemType,
UserChatItemValueItemType
@ -18,7 +17,7 @@ import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/co
// Assistant process
export const filterToolResponseToPreview = (response: AIChatItemValueItemType[]) => {
return response.map((item) => {
if (item.type === ChatItemValueTypeEnum.tool) {
if (item.tools) {
const formatTools = item.tools?.map((tool) => {
return {
...tool,
@ -106,15 +105,15 @@ export const toolCallMessagesAdapt = ({
if (skip) return userInput;
const files = userInput.filter((item) => item.type === 'file');
const files = userInput.filter((item) => item.file);
if (files.length > 0) {
const filesCount = files.filter((file) => file.file?.type === 'file').length;
const imgCount = files.filter((file) => file.file?.type === 'image').length;
if (userInput.some((item) => item.type === 'text')) {
if (userInput.some((item) => item.text)) {
return userInput.map((item) => {
if (item.type === 'text') {
if (item.text) {
const text = item.text?.content || '';
return {
@ -131,7 +130,6 @@ export const toolCallMessagesAdapt = ({
// Every input is a file
return [
{
type: ChatItemValueTypeEnum.text,
text: {
content: getMultiplePrompt({ fileCount: filesCount, imgCount, question: '' })
}

View File

@ -24,7 +24,6 @@ import type {
} from '@fastgpt/global/core/workflow/runtime/type';
import type { RuntimeNodeItemType } from '@fastgpt/global/core/workflow/runtime/type.d';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { filterPublicNodeResponseData } from '@fastgpt/global/core/chat/utils';
import {
checkNodeRunStatus,
@ -136,7 +135,7 @@ export async function dispatchWorkFlow({
// Add preview url to chat items
await addPreviewUrlToChatItems(histories, 'chatFlow');
for (const item of query) {
if (item.type !== ChatItemValueTypeEnum.file || !item.file?.key) continue;
if (!item.file?.key) continue;
item.file.url = await getS3ChatSource().createGetChatFileURL({
key: item.file.key,
external: true
@ -703,7 +702,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
} else {
if (reasoningText) {
this.chatAssistantResponse.push({
type: ChatItemValueTypeEnum.reasoning,
reasoning: {
content: reasoningText
}
@ -711,7 +709,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
}
if (answerText) {
this.chatAssistantResponse.push({
type: ChatItemValueTypeEnum.text,
text: {
content: answerText
}
@ -972,7 +969,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
}
return {
type: ChatItemValueTypeEnum.interactive,
interactive: interactiveResult
};
}
@ -1120,10 +1116,10 @@ const mergeAssistantResponseAnswerText = (response: AIChatItemValueItemType[]) =
// 合并连续的text
for (let i = 0; i < response.length; i++) {
const item = response[i];
if (item.type === ChatItemValueTypeEnum.text) {
if (item.text) {
let text = item.text?.content || '';
const lastItem = result[result.length - 1];
if (lastItem && lastItem.type === ChatItemValueTypeEnum.text && lastItem.text?.content) {
if (lastItem && lastItem.text?.content) {
lastItem.text.content += text;
continue;
}
@ -1134,7 +1130,6 @@ const mergeAssistantResponseAnswerText = (response: AIChatItemValueItemType[]) =
// If result is empty, auto add a text message
if (result.length === 0) {
result.push({
type: ChatItemValueTypeEnum.text,
text: { content: '' }
});
}

View File

@ -43,6 +43,7 @@ export type DispatchFlowResponse = {
export type WorkflowResponseType = (e: {
id?: string;
subAppId?: string;
event: SseResponseEventEnum;
data: Record<string, any>;
}) => void;

View File

@ -40,7 +40,7 @@ export const getWorkflowResponseWrite = ({
id?: string;
showNodeStatus?: boolean;
}) => {
const fn: WorkflowResponseType = ({ id, event, data }) => {
const fn: WorkflowResponseType = ({ id, subAppId, event, data }) => {
if (!res || res.closed || !streamResponse) return;
// Forbid show detail
@ -64,6 +64,7 @@ export const getWorkflowResponseWrite = ({
event: detail ? event : undefined,
data: JSON.stringify({
...data,
...(subAppId && detail && { subAppId }),
...(id && detail && { responseValueId: id })
})
});
@ -72,14 +73,16 @@ export const getWorkflowResponseWrite = ({
};
export const getWorkflowChildResponseWrite = ({
id,
subAppId,
fn
}: {
id: string;
subAppId: string;
fn?: WorkflowResponseType;
}): WorkflowResponseType | undefined => {
if (!fn) return;
return (e: Parameters<WorkflowResponseType>[0]) => {
return fn({ ...e, id });
return fn({ ...e, id, subAppId });
};
};

View File

@ -2,8 +2,9 @@ import Avatar from '@fastgpt/web/components/common/Avatar';
import { Box } from '@chakra-ui/react';
import { useTheme } from '@chakra-ui/system';
import React from 'react';
import type { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
const ChatAvatar = ({ src, type }: { src?: string; type: 'Human' | 'AI' }) => {
const ChatAvatar = ({ src, type }: { src?: string; type: `${ChatRoleEnum}` }) => {
const theme = useTheme();
return (
<Box

View File

@ -1,16 +1,12 @@
import { Box, type BoxProps, Card, Flex } from '@chakra-ui/react';
import React, { useCallback, useEffect, useMemo, useRef } from 'react';
import React, { useMemo } from 'react';
import ChatController, { type ChatControllerProps } from './ChatController';
import ChatAvatar from './ChatAvatar';
import { MessageCardStyle } from '../constants';
import { formatChatValue2InputType } from '../utils';
import Markdown from '@/components/Markdown';
import styles from '../index.module.scss';
import {
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatStatusEnum
} from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import FilesBlock from './FilesBox';
import { ChatBoxContext } from '../Provider';
import { useContextSelector } from 'use-context-selector';
@ -20,9 +16,11 @@ import { useCopyData } from '@fastgpt/web/hooks/useCopyData';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import type {
AIChatItemType,
UserChatItemValueItemType} from '@fastgpt/global/core/chat/type';
import {
type AIChatItemValueItemType,
type ChatItemValueItemType
type AIChatItemValueItemType
} from '@fastgpt/global/core/chat/type';
import { CodeClassNameEnum } from '@/components/Markdown/utils';
import { isEqual } from 'lodash';
@ -55,7 +53,7 @@ const colorMap = {
}
};
type BasicProps = {
type Props = {
avatar?: string;
statusBoxData?: {
status: `${ChatStatusEnum}`;
@ -65,10 +63,6 @@ type BasicProps = {
children?: React.ReactNode;
} & ChatControllerProps;
type Props = BasicProps & {
type: ChatRoleEnum.Human | ChatRoleEnum.AI;
};
const RenderQuestionGuide = ({ questionGuides }: { questionGuides: string[] }) => {
return (
<Markdown
@ -79,7 +73,7 @@ ${JSON.stringify(questionGuides)}`}
};
const HumanContentCard = React.memo(
function HumanContentCard({ chatValue }: { chatValue: ChatItemValueItemType[] }) {
function HumanContentCard({ chatValue }: { chatValue: UserChatItemValueItemType[] }) {
const { text, files = [] } = formatChatValue2InputType(chatValue);
return (
<Flex flexDirection={'column'} gap={4}>
@ -92,6 +86,7 @@ const HumanContentCard = React.memo(
);
const AIContentCard = React.memo(function AIContentCard({
chatValue,
subAppsValue = {},
dataId,
isLastChild,
isChatting,
@ -100,6 +95,7 @@ const AIContentCard = React.memo(function AIContentCard({
}: {
dataId: string;
chatValue: AIChatItemValueItemType[];
subAppsValue?: AIChatItemType['subAppsValue'];
isLastChild: boolean;
isChatting: boolean;
questionGuides: string[];
@ -115,6 +111,7 @@ const AIContentCard = React.memo(function AIContentCard({
chatItemDataId={dataId}
key={key}
value={value}
subAppValue={value.tool ? subAppsValue[value.tool.id] : undefined}
isLastResponseValue={isLastChild && i === chatValue.length - 1}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
@ -129,12 +126,12 @@ const AIContentCard = React.memo(function AIContentCard({
});
const ChatItem = (props: Props) => {
const { type, avatar, statusBoxData, children, isLastChild, questionGuides = [], chat } = props;
const { avatar, statusBoxData, children, isLastChild, questionGuides = [], chat } = props;
const { isPc } = useSystem();
const styleMap: BoxProps = {
...(type === ChatRoleEnum.Human
...(chat.obj === ChatRoleEnum.Human
? {
order: 0,
borderRadius: '8px 0 8px 8px',
@ -183,57 +180,63 @@ const ChatItem = (props: Props) => {
2. Auto-complete the last textnode
*/
const splitAiResponseResults = useMemo(() => {
if (chat.obj !== ChatRoleEnum.AI) return [chat.value];
if (chat.obj === ChatRoleEnum.Human) return [chat.value];
// Remove empty text node
const filterList = chat.value.filter((item, i) => {
if (item.type === ChatItemValueTypeEnum.text && !item.text?.content?.trim()) {
return false;
}
return item;
});
const groupedValues: AIChatItemValueItemType[][] = [];
let currentGroup: AIChatItemValueItemType[] = [];
filterList.forEach((value) => {
if (value.type === 'interactive') {
if (currentGroup.length > 0) {
groupedValues.push(currentGroup);
currentGroup = [];
if (chat.obj === ChatRoleEnum.AI) {
// Remove empty text node
const filterList = chat.value.filter((item, i) => {
if (item.text && !item.text.content?.trim()) {
return false;
}
if (item.reasoning && !item.reasoning.content?.trim()) {
return false;
}
return item;
});
groupedValues.push([value]);
} else {
currentGroup.push(value);
}
});
const groupedValues: AIChatItemValueItemType[][] = [];
let currentGroup: AIChatItemValueItemType[] = [];
if (currentGroup.length > 0) {
groupedValues.push(currentGroup);
}
// Check last group is interactive, Auto add a empty text node(animation)
const lastGroup = groupedValues[groupedValues.length - 1];
if (isChatting || groupedValues.length === 0) {
if (
(lastGroup &&
lastGroup[lastGroup.length - 1] &&
lastGroup[lastGroup.length - 1].type === ChatItemValueTypeEnum.interactive) ||
groupedValues.length === 0
) {
groupedValues.push([
{
type: ChatItemValueTypeEnum.text,
text: {
content: ''
}
filterList.forEach((value) => {
if (value.interactive) {
if (currentGroup.length > 0) {
groupedValues.push(currentGroup);
currentGroup = [];
}
]);
groupedValues.push([value]);
} else {
currentGroup.push(value);
}
});
if (currentGroup.length > 0) {
groupedValues.push(currentGroup);
}
// Check last group is interactive, Auto add a empty text node(animation)
const lastGroup = groupedValues[groupedValues.length - 1];
if (isChatting || groupedValues.length === 0) {
if (
(lastGroup &&
lastGroup[lastGroup.length - 1] &&
lastGroup[lastGroup.length - 1].interactive) ||
groupedValues.length === 0
) {
groupedValues.push([
{
text: {
content: ''
}
}
]);
}
}
return groupedValues;
}
return groupedValues;
return [];
}, [chat.obj, chat.value, isChatting]);
const setCiteModalData = useContextSelector(ChatItemContext, (v) => v.setCiteModalData);
@ -278,6 +281,8 @@ const ChatItem = (props: Props) => {
}
);
const aiSubApps = 'subApps' in chat ? chat.subApps : undefined;
return (
<Box
_hover={{
@ -288,11 +293,11 @@ const ChatItem = (props: Props) => {
>
{/* control icon */}
<Flex w={'100%'} alignItems={'center'} gap={2} justifyContent={styleMap.justifyContent}>
{isChatting && type === ChatRoleEnum.AI && isLastChild ? null : (
{isChatting && chat.obj === ChatRoleEnum.AI && isLastChild ? null : (
<Flex order={styleMap.order} ml={styleMap.ml} align={'center'} gap={'0.62rem'}>
{chat.time && (isPc || isChatLog) && (
<Box
order={type === ChatRoleEnum.AI ? 2 : 0}
order={chat.obj === ChatRoleEnum.AI ? 2 : 0}
className={'time-label'}
fontSize={styleMap.fontSize}
color={styleMap.color}
@ -307,7 +312,7 @@ const ChatItem = (props: Props) => {
<ChatController {...props} isLastChild={isLastChild} />
</Flex>
)}
<ChatAvatar src={avatar} type={type} />
<ChatAvatar src={avatar} type={chat.obj} />
{/* Workflow status */}
{!!chatStatusMap && statusBoxData && isLastChild && showNodeStatus && (
@ -334,88 +339,91 @@ const ChatItem = (props: Props) => {
)}
</Flex>
{/* content */}
{splitAiResponseResults.map((value, i) => (
<Box
key={i}
mt={['6px', 2]}
className="chat-box-card"
textAlign={styleMap.textAlign}
_hover={{
'& .footer-copy': {
display: 'block'
}
}}
>
<Card
{...MessageCardStyle}
bg={styleMap.bg}
borderRadius={styleMap.borderRadius}
textAlign={'left'}
{splitAiResponseResults.map((value, i) => {
return (
<Box
key={i}
mt={['6px', 2]}
className="chat-box-card"
textAlign={styleMap.textAlign}
_hover={{
'& .footer-copy': {
display: 'block'
}
}}
>
{type === ChatRoleEnum.Human && <HumanContentCard chatValue={value} />}
{type === ChatRoleEnum.AI && (
<>
<AIContentCard
chatValue={value as AIChatItemValueItemType[]}
dataId={chat.dataId}
isLastChild={isLastChild && i === splitAiResponseResults.length - 1}
isChatting={isChatting}
questionGuides={questionGuides}
onOpenCiteModal={onOpenCiteModal}
/>
{i === splitAiResponseResults.length - 1 && (
<ResponseTags
showTags={!isLastChild || !isChatting}
historyItem={chat}
<Card
{...MessageCardStyle}
bg={styleMap.bg}
borderRadius={styleMap.borderRadius}
textAlign={'left'}
>
{chat.obj === ChatRoleEnum.Human && <HumanContentCard chatValue={value} />}
{chat.obj === ChatRoleEnum.AI && (
<>
<AIContentCard
chatValue={value as AIChatItemValueItemType[]}
subAppsValue={chat.subAppsValue}
dataId={chat.dataId}
isLastChild={isLastChild && i === splitAiResponseResults.length - 1}
isChatting={isChatting}
questionGuides={questionGuides}
onOpenCiteModal={onOpenCiteModal}
/>
)}
</>
)}
{/* Example: Response tags. A set of dialogs only needs to be displayed once*/}
{i === splitAiResponseResults.length - 1 && (
<>
{/* error message */}
{!!chat.errorMsg && (
<Box mt={2}>
<ChatBoxDivider icon={'common/errorFill'} text={t('chat:error_message')} />
<Box fontSize={'xs'} color={'myGray.500'}>
{chat.errorMsg}
{i === splitAiResponseResults.length - 1 && (
<ResponseTags
showTags={!isLastChild || !isChatting}
historyItem={chat}
onOpenCiteModal={onOpenCiteModal}
/>
)}
</>
)}
{/* Example: Response tags. A set of dialogs only needs to be displayed once*/}
{i === splitAiResponseResults.length - 1 && (
<>
{/* error message */}
{!!chat.errorMsg && (
<Box mt={2}>
<ChatBoxDivider icon={'common/errorFill'} text={t('chat:error_message')} />
<Box fontSize={'xs'} color={'myGray.500'}>
{chat.errorMsg}
</Box>
</Box>
)}
{children}
</>
)}
{/* 对话框底部的复制按钮 */}
{chat.obj == ChatRoleEnum.AI &&
!('interactive' in value[0]) &&
(!isChatting || (isChatting && !isLastChild)) && (
<Box
className="footer-copy"
display={['block', 'none']}
position={'absolute'}
bottom={0}
right={0}
transform={'translateX(100%)'}
>
<MyTooltip label={t('common:Copy')}>
<MyIcon
w={'1rem'}
cursor="pointer"
p="5px"
bg="white"
name={'copy'}
color={'myGray.500'}
_hover={{ color: 'primary.600' }}
onClick={() => copyData(formatChatValue2InputType(value).text ?? '')}
/>
</MyTooltip>
</Box>
)}
{children}
</>
)}
{/* 对话框底部的复制按钮 */}
{type == ChatRoleEnum.AI &&
value[0]?.type !== 'interactive' &&
(!isChatting || (isChatting && !isLastChild)) && (
<Box
className="footer-copy"
display={['block', 'none']}
position={'absolute'}
bottom={0}
right={0}
transform={'translateX(100%)'}
>
<MyTooltip label={t('common:Copy')}>
<MyIcon
w={'1rem'}
cursor="pointer"
p="5px"
bg="white"
name={'copy'}
color={'myGray.500'}
_hover={{ color: 'primary.600' }}
onClick={() => copyData(formatChatValue2InputType(value).text ?? '')}
/>
</MyTooltip>
</Box>
)}
</Card>
</Box>
))}
</Card>
</Box>
);
})}
</Box>
);
};

View File

@ -3,7 +3,6 @@ import { type ChatItemType } from '@fastgpt/global/core/chat/type';
import { useCallback } from 'react';
import { htmlTemplate } from '@/web/core/chat/constants';
import { fileDownload } from '@/web/common/file/utils';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { useTranslation } from 'next-i18next';
export const useChatBox = () => {
const { t } = useTranslation();
@ -47,13 +46,13 @@ export const useChatBox = () => {
.map((item) => {
let result = `Role: ${item.obj}\n`;
const content = item.value.map((item) => {
if (item.type === ChatItemValueTypeEnum.text) {
if (item.text) {
return item.text?.content;
} else if (item.type === ChatItemValueTypeEnum.file) {
} else if ('file' in item && item.file) {
return `
![${item.file?.name}](${item.file?.url})
`;
} else if (item.type === ChatItemValueTypeEnum.tool) {
} else if ('tools' in item && item.tools) {
return `
\`\`\`Tool
${JSON.stringify(item.tools, null, 2)}

View File

@ -35,11 +35,7 @@ import ChatInput from './Input/ChatInput';
import ChatBoxDivider from '../../Divider';
import { type OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import {
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatStatusEnum
} from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import {
getInteractiveByHistories,
formatChatValue2InputType,
@ -239,159 +235,255 @@ const ChatBox = ({
status,
name,
tool,
subAppId,
interactive,
autoTTSResponse,
variables,
nodeResponse,
durationSeconds
durationSeconds,
autoTTSResponse
}: generatingMessageProps & { autoTTSResponse?: boolean }) => {
setChatRecords((state) =>
state.map((item, index) => {
if (index !== state.length - 1) return item;
if (item.obj !== ChatRoleEnum.AI) return item;
autoTTSResponse && splitText2Audio(formatChatValue2InputType(item.value).text || '');
if (subAppId) {
let subAppValue = cloneDeep(item.subAppsValue?.[subAppId]);
if (!subAppValue) {
console.log("Can't find the sub app");
return item;
}
const updateIndex = (() => {
if (!responseValueId) return item.value.length - 1;
const index = item.value.findIndex((item) => item.id === responseValueId);
if (index !== -1) return index;
return item.value.length - 1;
})();
const updateValue: AIChatItemValueItemType = cloneDeep(item.value[updateIndex]);
const updateIndex = (() => {
if (!responseValueId) return subAppValue.length - 1;
const index = subAppValue.findIndex((item) => item.id === responseValueId);
if (index !== -1) return index;
return subAppValue.length - 1;
})();
const updateValue = subAppValue[updateIndex];
if (
event === SseResponseEventEnum.answer ||
event === SseResponseEventEnum.fastAnswer
) {
if (reasoningText) {
if (updateValue?.reasoning) {
updateValue.reasoning.content += reasoningText;
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
reasoning: {
content: reasoningText
}
};
subAppValue = [
...subAppValue.slice(0, updateIndex),
val,
...subAppValue.slice(updateIndex + 1)
];
}
}
if (text) {
if (updateValue?.text) {
updateValue.text.content += text;
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
text: {
content: text
}
};
subAppValue = [
...subAppValue.slice(0, updateIndex),
val,
...subAppValue.slice(updateIndex + 1)
];
}
}
}
if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
id: responseValueId,
tool
};
subAppValue = [
...subAppValue.slice(0, updateIndex),
val,
...subAppValue.slice(updateIndex + 1)
];
}
if (event === SseResponseEventEnum.toolParams && tool && updateValue?.tool) {
if (tool.params) {
updateValue.tool.params += tool.params;
}
return item;
}
if (event === SseResponseEventEnum.toolResponse && tool && updateValue?.tool) {
if (tool.response) {
updateValue.tool.response += tool.response;
}
return item;
}
if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
return {
...item,
responseData: item.responseData
? [...item.responseData, nodeResponse]
: [nodeResponse]
subAppsValue: {
...item.subAppsValue,
[subAppId]: subAppValue
}
};
} else if (event === SseResponseEventEnum.flowNodeStatus && status) {
return {
...item,
status,
moduleName: name
};
} else if (reasoningText) {
if (updateValue.type === ChatItemValueTypeEnum.reasoning && updateValue.reasoning) {
updateValue.reasoning.content += reasoningText;
} else {
autoTTSResponse && splitText2Audio(formatChatValue2InputType(item.value).text || '');
const updateIndex = (() => {
if (!responseValueId) return item.value.length - 1;
const index = item.value.findIndex((item) => item.id === responseValueId);
if (index !== -1) return index;
return item.value.length - 1;
})();
const updateValue: AIChatItemValueItemType = cloneDeep(item.value[updateIndex]);
updateValue.id = responseValueId;
console.log(event, tool, updateValue);
if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
responseData: item.responseData
? [...item.responseData, nodeResponse]
: [nodeResponse]
};
} else {
}
if (event === SseResponseEventEnum.flowNodeStatus && status) {
return {
...item,
status,
moduleName: name
};
}
if (
event === SseResponseEventEnum.answer ||
event === SseResponseEventEnum.fastAnswer
) {
if (reasoningText) {
if (updateValue?.reasoning) {
updateValue.reasoning.content += reasoningText;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
reasoning: {
content: reasoningText
}
};
return {
...item,
value: [...item.value, val]
};
}
}
if (text) {
if (updateValue?.text) {
updateValue.text.content += text;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else {
const newValue: AIChatItemValueItemType = {
id: responseValueId,
text: {
content: text
}
};
return {
...item,
value: item.value.concat(newValue)
};
}
}
}
// Tool call
if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.reasoning,
reasoning: {
content: reasoningText
id: responseValueId,
tool: {
...tool,
response: ''
}
};
return {
...item,
subAppsValue: {
...item.subAppsValue,
[tool.id]: []
},
value: [...item.value, val]
};
}
} else if (
(event === SseResponseEventEnum.answer || event === SseResponseEventEnum.fastAnswer) &&
text
) {
if (!updateValue || !updateValue.text) {
const newValue: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.text,
text: {
content: text
}
if (event === SseResponseEventEnum.toolParams && tool && updateValue?.tool) {
if (tool.params) {
updateValue.tool.params += tool.params;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
}
return item;
}
if (event === SseResponseEventEnum.toolResponse && tool && updateValue?.tool) {
if (tool.response) {
// replace tool response
updateValue.tool.response += tool.response;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
}
return item;
}
if (event === SseResponseEventEnum.updateVariables && variables) {
resetVariables({ variables });
}
if (event === SseResponseEventEnum.interactive && interactive) {
const val: AIChatItemValueItemType = {
interactive
};
return {
...item,
value: item.value.concat(newValue)
};
} else {
updateValue.text.content += text;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
value: item.value.concat(val)
};
}
if (event === SseResponseEventEnum.workflowDuration && durationSeconds) {
return {
...item,
durationSeconds: item.durationSeconds
? +(item.durationSeconds + durationSeconds).toFixed(2)
: durationSeconds
};
}
} else if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
id: responseValueId,
type: ChatItemValueTypeEnum.tool,
tools: [tool]
};
return {
...item,
value: item.value.concat(val)
};
} else if (
event === SseResponseEventEnum.toolParams &&
tool &&
updateValue.type === ChatItemValueTypeEnum.tool &&
updateValue?.tools
) {
updateValue.tools = updateValue.tools.map((item) => {
if (item.id === tool.id) {
item.params += tool.params;
}
return item;
});
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else if (
event === SseResponseEventEnum.toolResponse &&
tool &&
updateValue.type === ChatItemValueTypeEnum.tool &&
updateValue?.tools
) {
// replace tool response
updateValue.tools = updateValue.tools.map((item) => {
if (item.id === tool.id) {
item.response = item.response ? item.response + tool.response : tool.response;
}
return item;
});
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else if (event === SseResponseEventEnum.updateVariables && variables) {
resetVariables({ variables });
} else if (event === SseResponseEventEnum.interactive) {
const val: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.interactive,
interactive
};
return {
...item,
value: item.value.concat(val)
};
} else if (event === SseResponseEventEnum.workflowDuration && durationSeconds) {
return {
...item,
durationSeconds: item.durationSeconds
? +(item.durationSeconds + durationSeconds).toFixed(2)
: durationSeconds
};
}
return item;
@ -524,7 +616,6 @@ const ChatBox = ({
hideInUI,
value: [
...files.map((file) => ({
type: ChatItemValueTypeEnum.file,
file: {
type: file.type,
name: file.name,
@ -536,7 +627,6 @@ const ChatBox = ({
...(text
? [
{
type: ChatItemValueTypeEnum.text,
text: {
content: text
}
@ -551,7 +641,6 @@ const ChatBox = ({
obj: ChatRoleEnum.AI,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: ''
}
@ -1045,7 +1134,6 @@ const ChatBox = ({
<Box py={item.hideInUI ? 0 : 6}>
{item.obj === ChatRoleEnum.Human && !item.hideInUI && (
<ChatItem
type={item.obj}
avatar={userAvatar}
chat={item}
onRetry={retryInput(item.dataId)}
@ -1055,7 +1143,6 @@ const ChatBox = ({
)}
{item.obj === ChatRoleEnum.AI && (
<ChatItem
type={item.obj}
avatar={appAvatar}
chat={item}
isLastChild={index === chatRecords.length - 1}

View File

@ -5,7 +5,7 @@ import {
} from '@fastgpt/global/core/chat/type';
import { type ChatBoxInputType, type UserInputFileItemType } from './type';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { ChatItemValueTypeEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import { extractDeepestInteractive } from '@fastgpt/global/core/workflow/runtime/utils';
import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
@ -26,7 +26,7 @@ export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): Chat
const files =
(value
?.map((item) =>
item.type === 'file' && item.file
'file' in item && item.file
? {
id: item.file.url,
type: item.file.type,
@ -57,7 +57,7 @@ export const getInteractiveByHistories = (
if (
lastMessageValue &&
lastMessageValue.type === ChatItemValueTypeEnum.interactive &&
'interactive' in lastMessageValue &&
!!lastMessageValue?.interactive?.params
) {
const finalInteractive = extractDeepestInteractive(lastMessageValue.interactive);
@ -88,13 +88,10 @@ export const setInteractiveResultToHistories = (
if (i !== histories.length - 1) return item;
const value = item.value.map((val, i) => {
if (
i !== item.value.length - 1 ||
val.type !== ChatItemValueTypeEnum.interactive ||
!val.interactive
) {
if (i !== item.value.length - 1) {
return val;
}
if (!('interactive' in val) || !val.interactive) return val;
const finalInteractive = extractDeepestInteractive(val.interactive);
if (finalInteractive.type === 'userSelect') {

View File

@ -9,7 +9,7 @@ import { type FieldValues } from 'react-hook-form';
import { PluginRunBoxTabEnum } from './constants';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { type generatingMessageProps } from '../type';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { useTranslation } from 'next-i18next';
@ -99,7 +99,6 @@ const PluginRunContextProvider = ({
) {
if (!lastValue || !lastValue.text) {
const newValue: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.text,
text: {
content: text
}
@ -117,19 +116,13 @@ const PluginRunContextProvider = ({
}
} else if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.tool,
tools: [tool]
};
return {
...item,
value: item.value.concat(val)
};
} else if (
event === SseResponseEventEnum.toolParams &&
tool &&
lastValue.type === ChatItemValueTypeEnum.tool &&
lastValue?.tools
) {
} else if (event === SseResponseEventEnum.toolParams && tool && lastValue?.tools) {
lastValue.tools = lastValue.tools.map((item) => {
if (item.id === tool.id) {
item.params += tool.params;
@ -145,7 +138,7 @@ const PluginRunContextProvider = ({
return {
...item,
value: item.value.map((val) => {
if (val.type === ChatItemValueTypeEnum.tool && val.tools) {
if (val.tools) {
const tools = val.tools.map((item) =>
item.id === tool.id ? { ...item, response: tool.response } : item
);
@ -205,7 +198,6 @@ const PluginRunContextProvider = ({
obj: ChatRoleEnum.AI,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: ''
}

View File

@ -2,6 +2,7 @@ import type { StreamResponseType } from '@/web/common/api/fetch';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import type {
ChatHistoryItemResType,
AIChatItemValueItemType,
ToolModuleResponseItemType
} from '@fastgpt/global/core/chat/type';
import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
@ -11,6 +12,7 @@ import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workf
export type generatingMessageProps = {
event: SseResponseEventEnum;
responseValueId?: string;
subAppId?: string;
text?: string;
reasoningText?: string;
name?: string;

View File

@ -10,11 +10,9 @@ import {
Flex,
HStack
} from '@chakra-ui/react';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import type {
AIChatItemValueItemType,
ToolModuleResponseItemType,
UserChatItemValueItemType
ToolModuleResponseItemType
} from '@fastgpt/global/core/chat/type';
import React, { useState, useCallback, useMemo } from 'react';
import MyIcon from '@fastgpt/web/components/common/Icon';
@ -128,75 +126,91 @@ const RenderText = React.memo(function RenderText({
const RenderTool = React.memo(
function RenderTool({
showAnimation,
tools
tool,
subAppValue,
chatItemDataId,
isChatting,
onOpenCiteModal
}: {
showAnimation: boolean;
tools: ToolModuleResponseItemType[];
tool: ToolModuleResponseItemType;
subAppValue?: AIChatItemValueItemType[];
chatItemDataId: string;
isChatting: boolean;
onOpenCiteModal?: (e?: OnOpenCiteModalProps) => void;
}) {
const { t } = useSafeTranslation();
const [userOnchange, setUserOnchange] = useState(false);
const formatJson = (string: string) => {
try {
return JSON.stringify(JSON.parse(string), null, 2);
} catch (error) {
return string;
}
};
const params = formatJson(tool.params);
const response = formatJson(tool.response);
return (
<Box>
{tools.map((tool) => {
const formatJson = (string: string) => {
try {
return JSON.stringify(JSON.parse(string), null, 2);
} catch (error) {
return string;
}
};
const toolParams = formatJson(tool.params);
const toolResponse = formatJson(tool.response);
return (
<Accordion
key={tool.id}
allowToggle
_notLast={{ mb: 2 }}
index={userOnchange ? undefined : showAnimation ? 0 : undefined}
onChange={() => {
setUserOnchange(true);
}}
>
<AccordionItem borderTop={'none'} borderBottom={'none'}>
<AccordionButton {...accordionButtonStyle}>
<Avatar src={tool.toolAvatar} w={'1.25rem'} h={'1.25rem'} borderRadius={'sm'} />
<Box mx={2} fontSize={'sm'} color={'myGray.900'}>
{t(tool.toolName)}
</Box>
{showAnimation && !tool.response && <MyIcon name={'common/loading'} w={'14px'} />}
<AccordionIcon color={'myGray.600'} ml={5} />
</AccordionButton>
<AccordionPanel
py={0}
px={0}
mt={3}
borderRadius={'md'}
overflow={'hidden'}
maxH={'500px'}
overflowY={'auto'}
>
{toolParams && toolParams !== '{}' && (
<Box mb={3}>
<Markdown
source={`~~~json#Input
${toolParams}`}
/>
</Box>
)}
{toolResponse && (
<Markdown
source={`~~~json#Response
${toolResponse}`}
/>
)}
</AccordionPanel>
</AccordionItem>
</Accordion>
);
})}
</Box>
<Accordion
key={tool.id}
allowToggle
index={userOnchange ? undefined : showAnimation ? 0 : undefined}
onChange={() => {
setUserOnchange(true);
}}
>
<AccordionItem borderTop={'none'} borderBottom={'none'}>
<AccordionButton {...accordionButtonStyle}>
<Avatar src={tool.toolAvatar} w={'1.25rem'} h={'1.25rem'} borderRadius={'sm'} />
<Box mx={2} fontSize={'sm'} color={'myGray.900'}>
{t(tool.toolName)}
</Box>
{showAnimation && !tool.response && <MyIcon name={'common/loading'} w={'14px'} />}
<AccordionIcon color={'myGray.600'} ml={5} />
</AccordionButton>
<AccordionPanel
py={0}
px={0}
mt={3}
borderRadius={'md'}
overflow={'hidden'}
maxH={'500px'}
overflowY={'auto'}
>
{params && params !== '{}' && (
<Box mb={3}>
<Markdown
source={`~~~json#Input
${params}`}
/>
</Box>
)}
{response && (
<Markdown
source={`~~~json#Response
${response}`}
/>
)}
{subAppValue && (
<Box bg={'white'} p={2}>
{subAppValue.map((value, index) => (
<AIResponseBox
key={index}
chatItemDataId={chatItemDataId}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
isLastResponseValue={index === subAppValue.length - 1}
value={value}
/>
))}
</Box>
)}
</AccordionPanel>
</AccordionItem>
</Accordion>
);
},
(prevProps, nextProps) => isEqual(prevProps, nextProps)
@ -297,17 +311,19 @@ const RenderPaymentPauseInteractive = React.memo(function RenderPaymentPauseInte
const AIResponseBox = ({
chatItemDataId,
value,
subAppValue,
isLastResponseValue,
isChatting,
onOpenCiteModal
}: {
chatItemDataId: string;
value: UserChatItemValueItemType | AIChatItemValueItemType;
value: AIChatItemValueItemType;
subAppValue?: AIChatItemValueItemType[];
isLastResponseValue: boolean;
isChatting: boolean;
onOpenCiteModal?: (e?: OnOpenCiteModalProps) => void;
}) => {
if (value.type === ChatItemValueTypeEnum.text && value.text) {
if ('text' in value && value.text) {
return (
<RenderText
chatItemDataId={chatItemDataId}
@ -317,7 +333,7 @@ const AIResponseBox = ({
/>
);
}
if (value.type === ChatItemValueTypeEnum.reasoning && value.reasoning) {
if ('reasoning' in value && value.reasoning) {
return (
<RenderResoningContent
isChatting={isChatting}
@ -326,10 +342,19 @@ const AIResponseBox = ({
/>
);
}
if (value.type === ChatItemValueTypeEnum.tool && value.tools) {
return <RenderTool showAnimation={isChatting} tools={value.tools} />;
if ('tool' in value && value.tool) {
return (
<RenderTool
showAnimation={isChatting}
tool={value.tool}
subAppValue={subAppValue}
chatItemDataId={chatItemDataId}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
/>
);
}
if (value.type === ChatItemValueTypeEnum.interactive && value.interactive) {
if ('interactive' in value && value.interactive) {
const finalInteractive = extractDeepestInteractive(value.interactive);
if (finalInteractive.type === 'userSelect') {
return <RenderUserSelectInteractive interactive={finalInteractive} />;
@ -341,6 +366,22 @@ const AIResponseBox = ({
return <RenderPaymentPauseInteractive interactive={finalInteractive} />;
}
}
// Abandon
if ('tools' in value && value.tools) {
return value.tools.map((tool) => (
<Box key={tool.id} _notLast={{ mb: 2 }}>
<RenderTool
showAnimation={isChatting}
tool={tool}
subAppValue={subAppValue}
chatItemDataId={chatItemDataId}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
/>
</Box>
));
}
return null;
};
export default React.memo(AIResponseBox);

View File

@ -50,7 +50,7 @@ export const appWorkflow2AgentForm = ({
defaultAppForm.aiSettings.aiChatTopP = inputMap.get(NodeInputKeyEnum.aiChatTopP);
const subApps = inputMap.get(NodeInputKeyEnum.subApps) as FlowNodeTemplateType[];
console.log(subApps);
if (subApps) {
subApps.forEach((subApp) => {
defaultAppForm.selectedTools.push(subApp);

View File

@ -12,7 +12,7 @@ import {
} from '@/components/core/chat/components/Interactive/InteractiveComponents';
import { type UserInputInteractive } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { type ChatItemType, type UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import PopoverConfirm from '@fastgpt/web/components/common/MyPopover/PopoverConfirm';
import { WorkflowActionsContext } from '../../../../context/workflowActionsContext';
import { WorkflowDebugContext } from '../../../../context/workflowDebugContext';
@ -102,7 +102,6 @@ const NodeDebugResponse = ({ nodeId, debugResult }: NodeDebugResponseProps) => {
const updatedQuery: UserChatItemValueItemType[] = [
{
type: ChatItemValueTypeEnum.text,
text: { content: userContent }
}
];
@ -112,7 +111,6 @@ const NodeDebugResponse = ({ nodeId, debugResult }: NodeDebugResponseProps) => {
obj: ChatRoleEnum.AI,
value: [
{
type: ChatItemValueTypeEnum.interactive,
interactive: {
...interactive,
entryNodeIds: workflowDebugData.entryNodeIds || [],

View File

@ -41,11 +41,8 @@ import { WORKFLOW_MAX_RUN_TIMES } from '@fastgpt/service/core/workflow/constants
import { getWorkflowToolInputsFromStoreNodes } from '@fastgpt/global/core/app/tool/workflowTool/utils';
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import {
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatSourceEnum
} from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum, ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { saveChat, updateInteractiveChat } from '@fastgpt/service/core/chat/saveChat';
import { getLocale } from '@fastgpt/service/common/middle/i18n';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
@ -108,7 +105,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: { content: 'tool test' }
}
]

View File

@ -8,7 +8,7 @@ import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { authChatCrud } from '@/service/support/permission/auth/chat';
import { MongoApp } from '@fastgpt/service/core/app/schema';
import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import {
filterPublicNodeResponseData,
removeAIResponseCite
@ -85,7 +85,7 @@ async function handler(
});
if (showNodeStatus === false) {
item.value = item.value.filter((v) => v.type !== ChatItemValueTypeEnum.tool);
item.value = item.value.filter((v) => v.tools);
}
}
});

View File

@ -6,7 +6,7 @@ import { NextAPI } from '@/service/middleware/entry';
import { type ApiRequestProps } from '@fastgpt/service/type/next';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { MongoChatItemResponse } from '@fastgpt/service/core/chat/chatItemResponseSchema';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getS3ChatSource } from '@fastgpt/service/common/s3/sources/chat';
async function handler(req: ApiRequestProps<DeleteChatItemProps>, res: NextApiResponse) {
@ -39,7 +39,7 @@ async function handler(req: ApiRequestProps<DeleteChatItemProps>, res: NextApiRe
if (item?.obj === ChatRoleEnum.Human && delFile) {
const s3ChatSource = getS3ChatSource();
for (const value of item.value) {
if (value.type === ChatItemValueTypeEnum.file && value.file?.key) {
if (value.file?.key) {
await s3ChatSource.deleteChatFileByKey(value.file.key);
}
}

View File

@ -2,11 +2,7 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import { getNextTimeByCronStringAndTimezone } from '@fastgpt/global/common/string/time';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { delay, retryFn } from '@fastgpt/global/common/system/utils';
import {
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatSourceEnum
} from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum, ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { type UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import {
@ -48,7 +44,6 @@ export const getScheduleTriggerApp = async () => {
const { nodes, edges, chatConfig } = await retryFn(() => getAppLatestVersion(app._id, app));
const userQuery: UserChatItemValueItemType[] = [
{
type: ChatItemValueTypeEnum.text,
text: {
content: app.scheduledTriggerConfig?.defaultPrompt
}

View File

@ -20,11 +20,7 @@ import {
updateWorkflowToolInputByVariables
} from '@fastgpt/service/core/app/tool/workflowTool/utils';
import { getWorkflowToolInputsFromStoreNodes } from '@fastgpt/global/core/app/tool/workflowTool/utils';
import {
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatSourceEnum
} from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum, ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import {
getWorkflowEntryNodeIds,
storeEdges2RuntimeEdges,
@ -188,7 +184,6 @@ export const callMcpServerTool = async ({ key, toolName, inputs }: toolCallProps
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: variables.question
}

View File

@ -25,19 +25,27 @@ export type StreamResponseType = {
type ResponseQueueItemType =
| {
responseValueId?: string;
subAppId?: string;
event: SseResponseEventEnum.fastAnswer | SseResponseEventEnum.answer;
text?: string;
reasoningText?: string;
}
| { responseValueId?: string; event: SseResponseEventEnum.interactive; [key: string]: any }
| {
responseValueId?: string;
subAppId?: string;
event: SseResponseEventEnum.interactive;
[key: string]: any;
}
| {
responseValueId?: string;
subAppId?: string;
event:
| SseResponseEventEnum.toolCall
| SseResponseEventEnum.toolParams
| SseResponseEventEnum.toolResponse;
[key: string]: any;
tools: any;
};
// Marker error type with no extra state — exists only so catch sites can
// `instanceof`-distinguish it from ordinary errors. NOTE(review): presumably
// thrown inside streamFetch to abort instead of retrying the stream — TODO
// confirm against the (off-screen) catch/retry logic.
class FatalError extends Error {}
export const streamFetch = ({
@ -82,7 +90,7 @@ export const streamFetch = ({
if (abortCtrl.signal.aborted) {
responseQueue.forEach((item) => {
onMessage(item);
if (isAnswerEvent(item.event) && item.text) {
if (isAnswerEvent(item.event) && 'text' in item && item.text) {
responseText += item.text;
}
});
@ -94,7 +102,7 @@ export const streamFetch = ({
for (let i = 0; i < fetchCount; i++) {
const item = responseQueue[i];
onMessage(item);
if (isAnswerEvent(item.event) && item.text) {
if (isAnswerEvent(item.event) && 'text' in item && item.text) {
responseText += item.text;
}
}
@ -183,13 +191,14 @@ export const streamFetch = ({
})();
if (typeof parseJson !== 'object') return;
const { responseValueId, ...rest } = parseJson;
const { responseValueId, subAppId, ...rest } = parseJson;
// console.log(parseJson, event);
if (event === SseResponseEventEnum.answer) {
const reasoningText = rest.choices?.[0]?.delta?.reasoning_content || '';
pushDataToQueue({
responseValueId,
subAppId,
event,
reasoningText
});
@ -198,6 +207,7 @@ export const streamFetch = ({
for (const item of text) {
pushDataToQueue({
responseValueId,
subAppId,
event,
text: item
});
@ -206,6 +216,7 @@ export const streamFetch = ({
const reasoningText = rest.choices?.[0]?.delta?.reasoning_content || '';
pushDataToQueue({
responseValueId,
subAppId,
event,
reasoningText
});
@ -213,6 +224,7 @@ export const streamFetch = ({
const text = rest.choices?.[0]?.delta?.content || '';
pushDataToQueue({
responseValueId,
subAppId,
event,
text
});
@ -223,6 +235,7 @@ export const streamFetch = ({
) {
pushDataToQueue({
responseValueId,
subAppId,
event,
...rest
});
@ -239,6 +252,7 @@ export const streamFetch = ({
} else if (event === SseResponseEventEnum.interactive) {
pushDataToQueue({
responseValueId,
subAppId,
event,
...rest
});

View File

@ -1,5 +1,5 @@
import { describe, expect, it } from 'vitest';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import {
@ -19,7 +19,7 @@ describe('transformPreviewHistories', () => {
const histories: ChatItemType[] = [
{
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
responseData: [
{
...mockResponseData,
@ -33,7 +33,7 @@ describe('transformPreviewHistories', () => {
expect(result[0]).toEqual({
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
responseData: undefined,
llmModuleAccount: 1,
totalQuoteList: [],
@ -45,7 +45,7 @@ describe('transformPreviewHistories', () => {
const histories: ChatItemType[] = [
{
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
responseData: [
{
...mockResponseData,
@ -59,7 +59,7 @@ describe('transformPreviewHistories', () => {
expect(result[0]).toEqual({
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
responseData: undefined,
llmModuleAccount: 1,
totalQuoteList: undefined,
@ -72,7 +72,7 @@ describe('addStatisticalDataToHistoryItem', () => {
it('should return original item if obj is not AI', () => {
const item: ChatItemType = {
obj: ChatRoleEnum.Human,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }]
value: [{ text: { content: 'test response' } }]
};
expect(addStatisticalDataToHistoryItem(item)).toBe(item);
@ -81,7 +81,7 @@ describe('addStatisticalDataToHistoryItem', () => {
it('should return original item if totalQuoteList is already defined', () => {
const item: ChatItemType = {
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
totalQuoteList: []
};
@ -91,7 +91,7 @@ describe('addStatisticalDataToHistoryItem', () => {
it('should return original item if responseData is undefined', () => {
const item: ChatItemType = {
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }]
value: [{ text: { content: 'test response' } }]
};
expect(addStatisticalDataToHistoryItem(item)).toBe(item);
@ -100,7 +100,7 @@ describe('addStatisticalDataToHistoryItem', () => {
it('should calculate statistics correctly', () => {
const item: ChatItemType = {
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
responseData: [
{
...mockResponseData,
@ -141,7 +141,7 @@ describe('addStatisticalDataToHistoryItem', () => {
it('should handle empty arrays and undefined values', () => {
const item: ChatItemType = {
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
responseData: [
{
...mockResponseData,
@ -163,7 +163,7 @@ describe('addStatisticalDataToHistoryItem', () => {
it('should handle nested plugin and loop details', () => {
const item: ChatItemType = {
obj: ChatRoleEnum.AI,
value: [{ type: ChatItemValueTypeEnum.text, text: { content: 'test response' } }],
value: [{ text: { content: 'test response' } }],
responseData: [
{
...mockResponseData,

View File

@ -313,7 +313,7 @@ describe('valueTypeFormat', () => {
});
import { getHistories } from '@fastgpt/service/core/workflow/dispatch/utils';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
describe('getHistories test', async () => {
@ -322,7 +322,6 @@ describe('getHistories test', async () => {
obj: ChatRoleEnum.System,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你好'
}
@ -333,7 +332,6 @@ describe('getHistories test', async () => {
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你好'
}
@ -344,7 +342,6 @@ describe('getHistories test', async () => {
obj: ChatRoleEnum.AI,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你好2'
}
@ -355,7 +352,6 @@ describe('getHistories test', async () => {
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你好3'
}
@ -366,7 +362,6 @@ describe('getHistories test', async () => {
obj: ChatRoleEnum.AI,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你好4'
}
@ -392,7 +387,6 @@ describe('getHistories test', async () => {
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你好'
}
@ -407,7 +401,6 @@ describe('getHistories test', async () => {
obj: ChatRoleEnum.Human,
value: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你好'
}

View File

@ -6,7 +6,6 @@ import {
getWorkflowEntryNodeIds,
storeNodes2RuntimeNodes
} from '@fastgpt/global/core/workflow/runtime/utils';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
vi.mock(import('@fastgpt/service/common/string/tiktoken'), async (importOriginal) => {
@ -60,7 +59,6 @@ const testWorkflow = async (path: string) => {
variables,
query: [
{
type: ChatItemValueTypeEnum.text,
text: {
content: '你是谁'
}