response ui

archer 2025-11-19 13:23:07 +08:00
parent 3e5208ef4c
commit 73c40a5527
No known key found for this signature in database
GPG Key ID: 4446499B846D4A9E
11 changed files with 297 additions and 290 deletions

View File

@@ -87,6 +87,10 @@ export type SystemChatItemType = {
export type AIChatItemValueItemType = {
id?: string;
stepCall?: {
taskId: string;
stepId: string;
};
} & RequireOnlyOne<{
text: {
content: string;
@@ -96,9 +100,12 @@ export type AIChatItemValueItemType = {
};
tool: ToolModuleResponseItemType;
interactive: WorkflowInteractiveResponseType;
agentPlan: AgentPlanType;
// Abandon
// Agent
agentPlan: AgentPlanType;
stepTitle: string;
// @deprecated
tools: ToolModuleResponseItemType[];
}>;
export type AIChatItemType = {

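Note: the hunk above tags streamed AI values with an optional `stepCall` reference and adds a `stepTitle` member. A minimal sketch of how a consumer could group a flat value list by that tag; the local types are simplified stand-ins for the real `AIChatItemValueItemType`, not the library definitions.

```ts
// Simplified stand-ins for the types touched above; only the fields needed for
// step grouping are modelled here (an assumption, not the full library type).
type StepCallRef = { taskId: string; stepId: string };

type ChatValueItem = {
  id?: string;
  stepCall?: StepCallRef;
  stepTitle?: string;
  text?: { content: string };
};

// Group a flat list of streamed values by the step that produced them.
// Untagged values (e.g. the plan itself) fall into a shared 'root' bucket.
function groupByStep(values: ChatValueItem[]): Map<string, ChatValueItem[]> {
  const groups = new Map<string, ChatValueItem[]>();
  for (const value of values) {
    const key = value.stepCall ? `${value.stepCall.taskId}/${value.stepCall.stepId}` : 'root';
    const bucket = groups.get(key) ?? [];
    bucket.push(value);
    groups.set(key, bucket);
  }
  return groups;
}
```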
View File

@@ -15,8 +15,10 @@ export enum SseResponseEventEnum {
flowResponses = 'flowResponses', // sse response request
updateVariables = 'updateVariables',
interactive = 'interactive', // user select
agentPlan = 'agentPlan' // agent plan
interactive = 'interactive',
agentPlan = 'agentPlan', // agent plan
stepCall = 'stepCall' // step call
}
export enum DispatchNodeResponseKeyEnum {

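With the new `stepCall` enum member, and given that the response writer later in this commit spreads `stepCall` into the serialized payload when `detail` is enabled, a `stepCall` frame plausibly carries `stepTitle` plus the `{ taskId, stepId }` reference. The concrete field values below are illustrative only, not captured traffic.

```ts
// Hypothetical stepCall SSE payload as the client might receive it (values made up).
const rawFrame =
  '{"stepTitle":"Search the knowledge base","stepCall":{"taskId":"a1B2c3","stepId":"step-1"},"responseValueId":"resp_01"}';

const frame = JSON.parse(rawFrame) as {
  stepTitle: string;
  stepCall?: { taskId: string; stepId: string };
  responseValueId?: string;
};

// A client would typically open a new per-step group keyed by taskId/stepId here.
console.log(`step ${frame.stepCall?.stepId ?? '?'} started: ${frame.stepTitle}`);
```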
View File

@@ -35,6 +35,7 @@ import { stepCall } from './master/call';
import type { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { addLog } from '../../../../../common/system/log';
import { checkTaskComplexity } from './master/taskComplexity';
import { getNanoid } from '@fastgpt/global/common/string/tools';
export type DispatchAgentModuleProps = ModuleDispatchProps<{
[NodeInputKeyEnum.history]?: ChatItemType[];
@@ -345,8 +346,9 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
/* ===== Master agent: execute the plan step by step ===== */
if (!agentPlan) return Promise.reject('没有 plan');
let assistantResponses: AIChatItemValueItemType[] = [];
const assistantResponses: AIChatItemValueItemType[] = [];
const taskId = getNanoid(6);
while (agentPlan.steps!.filter((item) => !item.response)!.length) {
for await (const step of agentPlan?.steps) {
if (step.response) continue;
@@ -354,21 +356,27 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
// Temp code
workflowStreamResponse?.({
event: SseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: `\n # ${step.id}: ${step.title}\n`
})
});
const tmpAssistantResponses: AIChatItemValueItemType = {
text: {
content: `\n # ${step.id}: ${step.title}\n`
event: SseResponseEventEnum.stepCall,
stepCall: {
taskId,
stepId: step.id
},
data: {
stepTitle: step.title
}
};
assistantResponses.push(tmpAssistantResponses);
});
assistantResponses.push({
stepCall: {
taskId,
stepId: step.id
},
stepTitle: step.title
});
// Step call
const result = await stepCall({
...props,
taskId,
getSubAppInfo,
steps: agentPlan.steps, // Pass in all steps, not just the ones that have not run yet
subAppList,
@@ -387,7 +395,15 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
.flat();
step.response = result.rawResponse;
step.summary = result.summary;
assistantResponses.push(...assistantResponse);
assistantResponses.push(
...assistantResponse.map((item) => ({
...item,
stepCall: {
taskId,
stepId: step.id
}
}))
);
}
// Call replan

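The loop above announces each step with a `stepCall` event, runs it, and then tags every assistant value the step produced with the same `{ taskId, stepId }`. A condensed sketch of that tagging pattern with the dispatch machinery stubbed out; `runStep` is a placeholder for the real `stepCall()` helper, and the types are trimmed to what the hunks show.

```ts
type StepCallRef = { taskId: string; stepId: string };
type PlanStep = { id: string; title: string; response?: string };
type ValueItem = { stepCall?: StepCallRef; stepTitle?: string; text?: { content: string } };

// Placeholder for the real per-step executor (stepCall in ./master/call).
async function runStep(step: PlanStep): Promise<ValueItem[]> {
  return [{ text: { content: `result of "${step.title}"` } }];
}

async function runPlan(steps: PlanStep[], taskId: string): Promise<ValueItem[]> {
  const out: ValueItem[] = [];
  for (const step of steps) {
    if (step.response) continue; // skip steps that already ran

    // 1. Announce the step so the UI can open a group for it.
    out.push({ stepCall: { taskId, stepId: step.id }, stepTitle: step.title });

    // 2. Execute, then attribute everything the step produced to it.
    const produced = await runStep(step);
    out.push(...produced.map((v) => ({ ...v, stepCall: { taskId, stepId: step.id } })));
    step.response = 'done';
  }
  return out;
}
```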
View File

@@ -29,6 +29,7 @@ import { getStepDependon } from './dependon';
import { getResponseSummary } from './responseSummary';
export const stepCall = async ({
taskId,
getSubAppInfo,
subAppList,
steps,
@@ -37,6 +38,7 @@ export const stepCall = async ({
subAppsMap,
...props
}: DispatchAgentModuleProps & {
taskId: string;
getSubAppInfo: GetSubAppInfoFnType;
subAppList: ChatCompletionTool[];
steps: AgentPlanStepType[];
@@ -58,6 +60,10 @@ export const stepCall = async ({
usagePush,
params: { userChatInput, systemPrompt, model, temperature, aiChatTopP }
} = props;
const stepCallParams = {
taskId,
stepId: step.id
};
// Get depends on step ids
if (!step.depends_on) {
@@ -128,6 +134,7 @@ export const stepCall = async ({
onReasoning({ text }) {
workflowStreamResponse?.({
stepCall: stepCallParams,
event: SseResponseEventEnum.answer,
data: textAdaptGptResponse({
reasoning_content: text
@@ -136,6 +143,7 @@ export const stepCall = async ({
},
onStreaming({ text }) {
workflowStreamResponse?.({
stepCall: stepCallParams,
event: SseResponseEventEnum.answer,
data: textAdaptGptResponse({
text
@@ -146,6 +154,7 @@ export const stepCall = async ({
const subApp = getSubAppInfo(call.function.name);
workflowStreamResponse?.({
id: call.id,
stepCall: stepCallParams,
event: SseResponseEventEnum.toolCall,
data: {
tool: {
@@ -161,6 +170,7 @@ export const stepCall = async ({
onToolParam({ tool, params }) {
workflowStreamResponse?.({
id: tool.id,
stepCall: stepCallParams,
event: SseResponseEventEnum.toolParams,
data: {
tool: {
@@ -174,8 +184,9 @@ export const stepCall = async ({
handleToolResponse: async ({ call, messages }) => {
const toolId = call.function.name;
const childWorkflowStreamResponse = getWorkflowChildResponseWrite({
subAppId: `${nodeId}/${toolId}`,
id: call.id,
subAppId: `${nodeId}/${toolId}`,
stepCall: stepCallParams,
fn: workflowStreamResponse
});

View File

@@ -5,6 +5,13 @@ export type AgentPlanStepType = {
depends_on?: string[];
response?: string;
summary?: string;
status?: 'pending' | 'running' | 'completed';
tools?: {
id: string;
name: string;
avatar?: string;
}[];
log?: string;
};
export type AgentPlanType = {
task: string;

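For reference, a step carrying the new optional UI fields could look like the following. The local type is a simplified copy (the `id`/`title` fields are assumed from how steps are used elsewhere in this commit), and the values are made up.

```ts
// Simplified local copy of AgentPlanStepType; id/title are inferred from usage
// (step.id, step.title) in dispatchRunAgent, not from the hunk above.
type AgentPlanStep = {
  id: string;
  title: string;
  depends_on?: string[];
  response?: string;
  summary?: string;
  status?: 'pending' | 'running' | 'completed';
  tools?: { id: string; name: string; avatar?: string }[];
  log?: string;
};

// Illustrative step mid-execution.
const runningStep: AgentPlanStep = {
  id: 'step-2',
  title: 'Summarize the search results',
  depends_on: ['step-1'],
  status: 'running',
  tools: [{ id: 'kbSearch', name: 'Dataset search' }],
  log: 'Calling dataset search...'
};
```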
View File

@@ -44,6 +44,11 @@ export type DispatchFlowResponse = {
export type WorkflowResponseType = (e: {
id?: string;
subAppId?: string;
stepCall?: {
taskId: string;
stepId: string;
};
event: SseResponseEventEnum;
data: Record<string, any>;
}) => void;

View File

@@ -40,7 +40,7 @@ export const getWorkflowResponseWrite = ({
id?: string;
showNodeStatus?: boolean;
}) => {
const fn: WorkflowResponseType = ({ id, subAppId, event, data }) => {
const fn: WorkflowResponseType = ({ id, subAppId, stepCall, event, data }) => {
if (!res || res.closed || !streamResponse) return;
// Forbid show detail
@@ -65,6 +65,7 @@
data: JSON.stringify({
...data,
...(subAppId && detail && { subAppId }),
...(stepCall && detail && { stepCall }),
...(id && detail && { responseValueId: id })
})
});
@@ -74,15 +75,20 @@
export const getWorkflowChildResponseWrite = ({
id,
subAppId,
stepCall,
fn
}: {
id: string;
subAppId: string;
stepCall: {
taskId: string;
stepId: string;
};
fn?: WorkflowResponseType;
}): WorkflowResponseType | undefined => {
if (!fn) return;
return (e: Parameters<WorkflowResponseType>[0]) => {
return fn({ ...e, id, subAppId });
return fn({ ...e, id, subAppId, stepCall });
};
};

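`getWorkflowChildResponseWrite` now pins a `stepCall` alongside `id` and `subAppId`, so every event a sub-app emits is attributed to the step that invoked it. A minimal re-implementation of that wrapper pattern for illustration; the function name, event type, and field set are trimmed stand-ins, not the module's real API surface.

```ts
type StepCallRef = { taskId: string; stepId: string };

type ResponseEvent = {
  id?: string;
  subAppId?: string;
  stepCall?: StepCallRef;
  event: string;
  data: Record<string, any>;
};
type ResponseWrite = (e: ResponseEvent) => void;

// Wrap a parent writer so every forwarded event carries a fixed id/subAppId/stepCall.
function getChildResponseWrite({
  id,
  subAppId,
  stepCall,
  fn
}: {
  id: string;
  subAppId: string;
  stepCall: StepCallRef;
  fn?: ResponseWrite;
}): ResponseWrite | undefined {
  if (!fn) return;
  return (e) => fn({ ...e, id, subAppId, stepCall });
}

// Usage: everything the sub-app streams is attributed to the calling step.
const write = getChildResponseWrite({
  id: 'call_1',
  subAppId: 'node_1/kbSearch',
  stepCall: { taskId: 'a1B2c3', stepId: 'step-1' },
  fn: (e) => console.log(e.event, e.stepCall)
});
write?.({ event: 'answer', data: { text: 'hello' } });
```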
View File

@@ -184,11 +184,9 @@ const ChatItem = ({ hasPlanCheck, ...props }: Props) => {
1. The interactive node is divided into n dialog boxes.
2. Auto-complete the last textnode
*/
const { responses: splitAiResponseResults } = useMemo(() => {
const splitAiResponseResults = useMemo(() => {
if (chat.obj === ChatRoleEnum.Human) {
return {
responses: [chat.value]
};
return [chat.value];
}
if (chat.obj === ChatRoleEnum.AI) {
@@ -205,6 +203,7 @@ const ChatItem = ({ hasPlanCheck, ...props }: Props) => {
const groupedValues: AIChatItemValueItemType[][] = [];
let currentGroup: AIChatItemValueItemType[] = [];
let currentTaskGroup: AIChatItemValueItemType[] = [];
filterList.forEach((value) => {
// Each time an interactive node is encountered, push a brand-new group
@@ -246,14 +245,10 @@ const ChatItem = ({ hasPlanCheck, ...props }: Props) => {
}
}
return {
responses: groupedValues
};
return groupedValues;
}
return {
responses: []
};
return [];
}, [chat.obj, chat.value, isChatting]);
const setCiteModalData = useContextSelector(ChatItemContext, (v) => v.setCiteModalData);

View File

@@ -241,15 +241,18 @@ const ChatBox = ({
const generatingMessage = useMemoizedFn(
({
responseValueId,
subAppId,
stepCall,
event,
text = '',
reasoningText,
status,
name,
tool,
subAppId,
interactive,
agentPlan,
stepTitle,
variables,
nodeResponse,
durationSeconds,
@@ -260,251 +263,177 @@ const ChatBox = ({
if (index !== state.length - 1) return item;
if (item.obj !== ChatRoleEnum.AI) return item;
if (subAppId) {
let subAppValue = cloneDeep(item.subAppsValue?.[subAppId]);
if (!subAppValue) {
console.log("Can't find the sub app");
return item;
}
if (autoTTSResponse) {
splitText2Audio(formatChatValue2InputType(item.value).text || '');
}
const updateIndex = (() => {
if (!responseValueId) return subAppValue.length - 1;
const index = subAppValue.findIndex((item) => item.id === responseValueId);
if (index !== -1) return index;
return subAppValue.length - 1;
})();
const updateValue = subAppValue[updateIndex];
const updateIndex = (() => {
if (!responseValueId) return item.value.length - 1;
const index = item.value.findIndex((item) => item.id === responseValueId);
if (index !== -1) return index;
return item.value.length - 1;
})();
const updateValue: AIChatItemValueItemType = cloneDeep(item.value[updateIndex]);
updateValue.id = responseValueId;
if (
event === SseResponseEventEnum.answer ||
event === SseResponseEventEnum.fastAnswer
) {
if (reasoningText) {
if (updateValue?.reasoning) {
updateValue.reasoning.content += reasoningText;
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
reasoning: {
content: reasoningText
}
};
subAppValue = [
...subAppValue.slice(0, updateIndex),
val,
...subAppValue.slice(updateIndex + 1)
];
}
}
if (text) {
if (updateValue?.text) {
updateValue.text.content += text;
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
text: {
content: text
}
};
subAppValue = [
...subAppValue.slice(0, updateIndex),
val,
...subAppValue.slice(updateIndex + 1)
];
}
if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
return {
...item,
responseData: item.responseData
? [...item.responseData, nodeResponse]
: [nodeResponse]
};
}
if (event === SseResponseEventEnum.flowNodeStatus && status) {
return {
...item,
status,
moduleName: name
};
}
if (event === SseResponseEventEnum.answer || event === SseResponseEventEnum.fastAnswer) {
if (reasoningText) {
if (updateValue?.reasoning) {
updateValue.reasoning.content += reasoningText;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
stepCall,
reasoning: {
content: reasoningText
}
};
return {
...item,
value: [...item.value, val]
};
}
}
if (text) {
if (updateValue?.text) {
updateValue.text.content += text;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else {
const newValue: AIChatItemValueItemType = {
id: responseValueId,
stepCall,
text: {
content: text
}
};
return {
...item,
value: item.value.concat(newValue)
};
}
}
}
if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
id: responseValueId,
tool
// Tool call
if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
id: responseValueId,
stepCall,
tool: {
...tool,
response: ''
}
};
return {
...item,
value: [...item.value, val]
};
}
if (event === SseResponseEventEnum.toolParams && tool && updateValue?.tool) {
if (tool.params) {
updateValue.tool.params += tool.params;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
subAppValue = [
...subAppValue.slice(0, updateIndex),
val,
...subAppValue.slice(updateIndex + 1)
];
}
if (event === SseResponseEventEnum.toolParams && tool && updateValue?.tool) {
if (tool.params) {
updateValue.tool.params += tool.params;
}
return item;
}
if (event === SseResponseEventEnum.toolResponse && tool && updateValue?.tool) {
if (tool.response) {
updateValue.tool.response += tool.response;
}
return item;
return item;
}
if (event === SseResponseEventEnum.toolResponse && tool && updateValue?.tool) {
if (tool.response) {
// replace tool response
updateValue.tool.response += tool.response;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
}
return item;
}
if (event === SseResponseEventEnum.updateVariables && variables) {
resetVariables({ variables });
}
if (event === SseResponseEventEnum.interactive && interactive) {
const val: AIChatItemValueItemType = {
interactive
};
return {
...item,
subAppsValue: {
...item.subAppsValue,
[subAppId]: subAppValue
}
stepCall,
value: item.value.concat(val)
};
} else {
autoTTSResponse && splitText2Audio(formatChatValue2InputType(item.value).text || '');
}
const updateIndex = (() => {
if (!responseValueId) return item.value.length - 1;
const index = item.value.findIndex((item) => item.id === responseValueId);
if (index !== -1) return index;
return item.value.length - 1;
})();
const updateValue: AIChatItemValueItemType = cloneDeep(item.value[updateIndex]);
updateValue.id = responseValueId;
if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
return {
...item,
responseData: item.responseData
? [...item.responseData, nodeResponse]
: [nodeResponse]
};
}
if (event === SseResponseEventEnum.flowNodeStatus && status) {
return {
...item,
status,
moduleName: name
};
}
if (
event === SseResponseEventEnum.answer ||
event === SseResponseEventEnum.fastAnswer
) {
if (reasoningText) {
if (updateValue?.reasoning) {
updateValue.reasoning.content += reasoningText;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
reasoning: {
content: reasoningText
}
};
return {
...item,
value: [...item.value, val]
};
}
}
if (text) {
if (updateValue?.text) {
updateValue.text.content += text;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
} else {
const newValue: AIChatItemValueItemType = {
id: responseValueId,
text: {
content: text
}
};
return {
...item,
value: item.value.concat(newValue)
};
}
}
}
// Tool call
if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
// Agent
if (event === SseResponseEventEnum.agentPlan && agentPlan) {
return {
...item,
value: item.value.concat({
id: responseValueId,
tool: {
...tool,
response: ''
}
};
return {
...item,
subAppsValue: {
...item.subAppsValue,
[tool.id]: []
},
value: [...item.value, val]
};
}
if (event === SseResponseEventEnum.toolParams && tool && updateValue?.tool) {
if (tool.params) {
updateValue.tool.params += tool.params;
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
}
return item;
}
if (event === SseResponseEventEnum.toolResponse && tool && updateValue?.tool) {
if (tool.response) {
// replace tool response
updateValue.tool.response += tool.response;
stepCall,
agentPlan
})
};
}
if (event === SseResponseEventEnum.stepCall && stepTitle) {
return {
...item,
value: item.value.concat({
id: responseValueId,
stepCall,
stepTitle
})
};
}
return {
...item,
value: [
...item.value.slice(0, updateIndex),
updateValue,
...item.value.slice(updateIndex + 1)
]
};
}
return item;
}
if (event === SseResponseEventEnum.updateVariables && variables) {
resetVariables({ variables });
}
if (event === SseResponseEventEnum.interactive && interactive) {
const val: AIChatItemValueItemType = {
interactive
};
return {
...item,
value: item.value.concat(val)
};
}
if (event === SseResponseEventEnum.agentPlan && agentPlan) {
return {
...item,
value: item.value.concat({
agentPlan
})
};
}
if (event === SseResponseEventEnum.workflowDuration && durationSeconds) {
return {
...item,
durationSeconds: item.durationSeconds
? +(item.durationSeconds + durationSeconds).toFixed(2)
: durationSeconds
};
}
if (event === SseResponseEventEnum.workflowDuration && durationSeconds) {
return {
...item,
durationSeconds: item.durationSeconds
? +(item.durationSeconds + durationSeconds).toFixed(2)
: durationSeconds
};
}
return item;

View File

@@ -14,6 +14,7 @@ export type generatingMessageProps = {
event: SseResponseEventEnum;
responseValueId?: string;
subAppId?: string;
text?: string;
reasoningText?: string;
name?: string;
@@ -21,6 +22,11 @@ export type generatingMessageProps = {
tool?: ToolModuleResponseItemType;
interactive?: WorkflowInteractiveResponseType;
agentPlan?: AgentPlanType;
stepCall?: {
taskId: string;
stepId: string;
};
stepTitle?: string;
variables?: Record<string, any>;
nodeResponse?: ChatHistoryItemResType;
durationSeconds?: number;

View File

@@ -13,6 +13,7 @@ import { getWebReqUrl } from '@fastgpt/web/common/system/utils';
import type { OnOptimizePromptProps } from '@/components/common/PromptEditor/OptimizerPopover';
import type { OnOptimizeCodeProps } from '@/pageComponents/app/detail/WorkflowComponents/Flow/nodes/NodeCode/Copilot';
import type { AgentPlanType } from '@fastgpt/service/core/workflow/dispatch/ai/agent/sub/plan/type';
import { AIChatItemValueItemType } from '@fastgpt/global/core/chat/type';
type StreamFetchProps = {
url?: string;
@@ -23,35 +24,42 @@ type StreamFetchProps = {
export type StreamResponseType = {
responseText: string;
};
type ResponseQueueItemType =
| {
responseValueId?: string;
subAppId?: string;
event: SseResponseEventEnum.fastAnswer | SseResponseEventEnum.answer;
text?: string;
reasoningText?: string;
}
| {
responseValueId?: string;
subAppId?: string;
event: SseResponseEventEnum.interactive;
[key: string]: any;
}
| {
responseValueId?: string;
subAppId?: string;
event: SseResponseEventEnum.agentPlan;
agentPlan: AgentPlanType;
}
| {
responseValueId?: string;
subAppId?: string;
event:
| SseResponseEventEnum.toolCall
| SseResponseEventEnum.toolParams
| SseResponseEventEnum.toolResponse;
tools: any;
};
type CommonResponseType = {
responseValueId?: string;
subAppId?: string;
stepCall?: {
taskId: string;
stepId: string;
};
};
type ResponseQueueItemType = CommonResponseType &
(
| {
event: SseResponseEventEnum.fastAnswer | SseResponseEventEnum.answer;
text?: string;
reasoningText?: string;
}
| {
event: SseResponseEventEnum.interactive;
[key: string]: any;
}
| {
event: SseResponseEventEnum.agentPlan;
agentPlan: AgentPlanType;
}
| {
event: SseResponseEventEnum.stepCall;
stepTitle: string;
}
| {
event:
| SseResponseEventEnum.toolCall
| SseResponseEventEnum.toolParams
| SseResponseEventEnum.toolResponse;
tools: any;
}
);
class FatalError extends Error {}
@@ -198,7 +206,7 @@ export const streamFetch = ({
})();
if (typeof parseJson !== 'object') return;
const { responseValueId, subAppId, ...rest } = parseJson;
const { responseValueId, subAppId, stepCall, ...rest } = parseJson;
// console.log(parseJson, event);
if (event === SseResponseEventEnum.answer) {
@@ -206,6 +214,7 @@ export const streamFetch = ({
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
reasoningText
});
@@ -215,6 +224,7 @@ export const streamFetch = ({
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
text: item
});
@@ -224,6 +234,7 @@ export const streamFetch = ({
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
reasoningText
});
@@ -232,6 +243,7 @@ export const streamFetch = ({
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
text
});
@@ -243,6 +255,7 @@ export const streamFetch = ({
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
...rest
});
@@ -260,6 +273,7 @@ export const streamFetch = ({
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
...rest
});
@@ -267,9 +281,18 @@ export const streamFetch = ({
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
agentPlan: rest.agentPlan
});
} else if (event === SseResponseEventEnum.stepCall) {
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
stepTitle: rest.stepTitle
});
} else if (event === SseResponseEventEnum.error) {
if (rest.statusText === TeamErrEnum.aiPointsNotEnough) {
useSystemStore.getState().setNotSufficientModalType(TeamErrEnum.aiPointsNotEnough);