perf: chat ui

archer 2025-12-01 17:29:33 +08:00
parent 2986a540a3
commit 6281e2c949
12 changed files with 117 additions and 207 deletions

View File

@ -15,7 +15,6 @@ import type { FlowNodeInputItemType } from '../workflow/type/io';
import type { FlowNodeTemplateType } from '../workflow/type/node.d';
import { ChatCompletionMessageParam } from '../ai/type';
import type { RequireOnlyOne } from '../../common/type/utils';
import type { AgentPlanType } from '../../../service/core/workflow/dispatch/ai/agent/sub/plan/type';
/* --------- chat ---------- */
export type ChatSchemaType = {
@ -87,10 +86,7 @@ export type SystemChatItemType = {
export type AIChatItemValueItemType = {
id?: string;
stepCall?: {
taskId: string;
stepId: string;
};
stepId?: string;
} & RequireOnlyOne<{
text: {
content: string;
@ -102,8 +98,16 @@ export type AIChatItemValueItemType = {
interactive: WorkflowInteractiveResponseType;
// Agent
agentPlan: AgentPlanType;
stepTitle: string;
agentPlan: {
replan?: boolean;
steps: {
id: string;
title: string;
description: string;
status: 'pending' | 'running' | 'completed';
value: AIChatItemValueItemType[];
}[];
};
// @deprecated
tools: ToolModuleResponseItemType[];
@ -111,7 +115,6 @@ export type AIChatItemValueItemType = {
export type AIChatItemType = {
obj: ChatRoleEnum.AI;
value: AIChatItemValueItemType[];
subAppsValue?: Record<string, AIChatItemValueItemType[]>;
memories?: Record<string, any>;
userGoodFeedback?: string;
userBadFeedback?: string;

View File

@ -17,8 +17,7 @@ export enum SseResponseEventEnum {
interactive = 'interactive',
agentPlan = 'agentPlan', // agent plan
stepCall = 'stepCall' // step call
agentPlan = 'agentPlan' // agent plan
}
export enum DispatchNodeResponseKeyEnum {

View File

@ -200,7 +200,18 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
...(plan
? [
{
agentPlan: plan
agentPlan: {
replan: false,
steps: plan.steps.map((step) => {
return {
id: step.id,
title: step.title,
description: step.description,
status: 'pending' as const,
value: []
};
})
}
}
]
: [])
@ -284,7 +295,18 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
...(rePlan
? [
{
agentPlan: plan
agentPlan: {
replan: false,
steps: plan.steps.map((step) => {
return {
id: step.id,
title: step.title,
description: step.description,
status: 'pending' as const,
value: []
};
})
}
}
]
: [])
@ -348,35 +370,14 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
const assistantResponses: AIChatItemValueItemType[] = [];
const taskId = getNanoid(6);
while (agentPlan.steps!.filter((item) => !item.response)!.length) {
for await (const step of agentPlan?.steps) {
if (step.response) continue;
addLog.debug(`Step call: ${step.id}`, step);
// Temp code
workflowStreamResponse?.({
event: SseResponseEventEnum.stepCall,
stepCall: {
taskId,
stepId: step.id
},
data: {
stepTitle: step.title
}
});
assistantResponses.push({
stepCall: {
taskId,
stepId: step.id
},
stepTitle: step.title
});
// Step call
const result = await stepCall({
...props,
taskId,
getSubAppInfo,
steps: agentPlan.steps, // Pass in all steps, not just the ones that have not yet been executed
subAppList,
@ -398,10 +399,7 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
assistantResponses.push(
...assistantResponse.map((item) => ({
...item,
stepCall: {
taskId,
stepId: step.id
}
stepId: step.id
}))
);
}

View File

@ -29,7 +29,6 @@ import { getStepDependon } from './dependon';
import { getResponseSummary } from './responseSummary';
export const stepCall = async ({
taskId,
getSubAppInfo,
subAppList,
steps,
@ -38,7 +37,6 @@ export const stepCall = async ({
subAppsMap,
...props
}: DispatchAgentModuleProps & {
taskId: string;
getSubAppInfo: GetSubAppInfoFnType;
subAppList: ChatCompletionTool[];
steps: AgentPlanStepType[];
@ -60,10 +58,6 @@ export const stepCall = async ({
usagePush,
params: { userChatInput, systemPrompt, model, temperature, aiChatTopP }
} = props;
const stepCallParams = {
taskId,
stepId: step.id
};
// Get depends on step ids
if (!step.depends_on) {
@ -134,7 +128,7 @@ export const stepCall = async ({
onReasoning({ text }) {
workflowStreamResponse?.({
stepCall: stepCallParams,
stepId: step.id,
event: SseResponseEventEnum.answer,
data: textAdaptGptResponse({
reasoning_content: text
@ -143,7 +137,7 @@ export const stepCall = async ({
},
onStreaming({ text }) {
workflowStreamResponse?.({
stepCall: stepCallParams,
stepId: step.id,
event: SseResponseEventEnum.answer,
data: textAdaptGptResponse({
text
@ -154,7 +148,7 @@ export const stepCall = async ({
const subApp = getSubAppInfo(call.function.name);
workflowStreamResponse?.({
id: call.id,
stepCall: stepCallParams,
stepId: step.id,
event: SseResponseEventEnum.toolCall,
data: {
tool: {
@ -170,7 +164,7 @@ export const stepCall = async ({
onToolParam({ tool, params }) {
workflowStreamResponse?.({
id: tool.id,
stepCall: stepCallParams,
stepId: step.id,
event: SseResponseEventEnum.toolParams,
data: {
tool: {
@ -185,8 +179,7 @@ export const stepCall = async ({
const toolId = call.function.name;
const childWorkflowStreamResponse = getWorkflowChildResponseWrite({
id: call.id,
subAppId: `${nodeId}/${toolId}`,
stepCall: stepCallParams,
stepId: step.id,
fn: workflowStreamResponse
});

View File

@ -5,13 +5,6 @@ export type AgentPlanStepType = {
depends_on?: string[];
response?: string;
summary?: string;
status?: 'pending' | 'running' | 'completed';
tools?: {
id: string;
name: string;
avatar?: string;
}[];
log?: string;
};
export type AgentPlanType = {
task: string;

View File

@ -43,11 +43,7 @@ export type DispatchFlowResponse = {
export type WorkflowResponseType = (e: {
id?: string;
subAppId?: string;
stepCall?: {
taskId: string;
stepId: string;
};
stepId?: string;
event: SseResponseEventEnum;
data: Record<string, any>;

View File

@ -40,7 +40,7 @@ export const getWorkflowResponseWrite = ({
id?: string;
showNodeStatus?: boolean;
}) => {
const fn: WorkflowResponseType = ({ id, subAppId, stepCall, event, data }) => {
const fn: WorkflowResponseType = ({ id, stepId, event, data }) => {
if (!res || res.closed || !streamResponse) return;
// Forbid show detail
@ -64,8 +64,7 @@ export const getWorkflowResponseWrite = ({
event: detail ? event : undefined,
data: JSON.stringify({
...data,
...(subAppId && detail && { subAppId }),
...(stepCall && detail && { stepCall }),
...(stepId && detail && { stepId }),
...(id && detail && { responseValueId: id })
})
});
@ -74,21 +73,16 @@ export const getWorkflowResponseWrite = ({
};
export const getWorkflowChildResponseWrite = ({
id,
subAppId,
stepCall,
stepId,
fn
}: {
id: string;
subAppId: string;
stepCall: {
taskId: string;
stepId: string;
};
stepId: string;
fn?: WorkflowResponseType;
}): WorkflowResponseType | undefined => {
if (!fn) return;
return (e: Parameters<WorkflowResponseType>[0]) => {
return fn({ ...e, id, subAppId, stepCall });
return fn({ ...e, id, stepId });
};
};

View File

@ -86,7 +86,6 @@ const HumanContentCard = React.memo(
);
const AIContentCard = React.memo(function AIContentCard({
chatValue,
subAppsValue = {},
dataId,
isLastChild,
isChatting,
@ -95,7 +94,6 @@ const AIContentCard = React.memo(function AIContentCard({
}: {
dataId: string;
chatValue: AIChatItemValueItemType[];
subAppsValue?: AIChatItemType['subAppsValue'];
isLastChild: boolean;
isChatting: boolean;
questionGuides: string[];
@ -104,14 +102,13 @@ const AIContentCard = React.memo(function AIContentCard({
return (
<Flex flexDirection={'column'} gap={2}>
{chatValue.map((value, i) => {
const key = value.id || `${dataId}-ai-${i}`;
const key = `${dataId}-ai-${i}`;
return (
<AIResponseBox
chatItemDataId={dataId}
key={key}
value={value}
subAppValue={value.tool ? subAppsValue[value.tool.id] : undefined}
isLastResponseValue={isLastChild && i === chatValue.length - 1}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
@ -190,24 +187,18 @@ const ChatItem = ({ hasPlanCheck, ...props }: Props) => {
}
if (chat.obj === ChatRoleEnum.AI) {
// Remove empty text node
const filterList = chat.value.filter((item, i) => {
if (item.text && !item.text.content?.trim()) {
return false;
}
if (item.reasoning && !item.reasoning.content?.trim()) {
return false;
}
return item;
});
const groupedValues: AIChatItemValueItemType[][] = [];
let currentGroup: AIChatItemValueItemType[] = [];
let currentTaskGroup: AIChatItemValueItemType[] = [];
chat.value.forEach((value) => {
if (value.text && !value.text.content?.trim()) {
return false;
}
if (value.reasoning && !value.reasoning.content?.trim()) {
return false;
}
filterList.forEach((value) => {
// Whenever an interactive node is encountered, push a brand-new group
if (value.interactive) {
// Whenever an interactive node is encountered, push a brand-new group
if (value.interactive.type === 'agentPlanCheck') {
return;
}
@ -250,7 +241,7 @@ const ChatItem = ({ hasPlanCheck, ...props }: Props) => {
return [];
}, [chat.obj, chat.value, isChatting]);
console.log(chat.value, splitAiResponseResults, 232);
const setCiteModalData = useContextSelector(ChatItemContext, (v) => v.setCiteModalData);
const onOpenCiteModal = useMemoizedFn(
(item?: {
@ -410,7 +401,6 @@ const ChatItem = ({ hasPlanCheck, ...props }: Props) => {
<>
<AIContentCard
chatValue={value as AIChatItemValueItemType[]}
subAppsValue={chat.subAppsValue}
dataId={chat.dataId}
isLastChild={isLastChild && i === splitAiResponseResults.length - 1}
isChatting={isChatting}

View File

@ -241,8 +241,7 @@ const ChatBox = ({
const generatingMessage = useMemoizedFn(
({
responseValueId,
subAppId,
stepCall,
stepId,
event,
text = '',
@ -252,7 +251,6 @@ const ChatBox = ({
tool,
interactive,
agentPlan,
stepTitle,
variables,
nodeResponse,
durationSeconds,
@ -273,8 +271,8 @@ const ChatBox = ({
if (index !== -1) return index;
return item.value.length - 1;
})();
const updateValue: AIChatItemValueItemType = cloneDeep(item.value[updateIndex]);
updateValue.id = responseValueId;
const updateValue: AIChatItemValueItemType = item.value[updateIndex];
updateValue.stepId = stepId;
if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
return {
@ -306,7 +304,7 @@ const ChatBox = ({
} else {
const val: AIChatItemValueItemType = {
id: responseValueId,
stepCall,
stepId,
reasoning: {
content: reasoningText
}
@ -331,7 +329,7 @@ const ChatBox = ({
} else {
const newValue: AIChatItemValueItemType = {
id: responseValueId,
stepCall,
stepId,
text: {
content: text
}
@ -348,7 +346,7 @@ const ChatBox = ({
if (event === SseResponseEventEnum.toolCall && tool) {
const val: AIChatItemValueItemType = {
id: responseValueId,
stepCall,
stepId,
tool: {
...tool,
response: ''
@ -400,7 +398,7 @@ const ChatBox = ({
return {
...item,
stepCall,
stepId,
value: item.value.concat(val)
};
}
@ -411,21 +409,11 @@ const ChatBox = ({
...item,
value: item.value.concat({
id: responseValueId,
stepCall,
stepId,
agentPlan
})
};
}
if (event === SseResponseEventEnum.stepCall && stepTitle) {
return {
...item,
value: item.value.concat({
id: responseValueId,
stepCall,
stepTitle
})
};
}
if (event === SseResponseEventEnum.workflowDuration && durationSeconds) {
return {

View File

@ -2,18 +2,16 @@ import type { StreamResponseType } from '@/web/common/api/fetch';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import type {
ChatHistoryItemResType,
AIChatItemValueItemType,
ToolModuleResponseItemType
type AIChatItemValueItemType,
type ToolModuleResponseItemType
} from '@fastgpt/global/core/chat/type';
import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
import type { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import type { AgentPlanType } from '@fastgpt/service/core/workflow/dispatch/ai/agent/sub/plan/type';
export type generatingMessageProps = {
event: SseResponseEventEnum;
responseValueId?: string;
subAppId?: string;
stepId?: string;
text?: string;
reasoningText?: string;
@ -21,12 +19,7 @@ export type generatingMessageProps = {
status?: 'running' | 'finish';
tool?: ToolModuleResponseItemType;
interactive?: WorkflowInteractiveResponseType;
agentPlan?: AgentPlanType;
stepCall?: {
taskId: string;
stepId: string;
};
stepTitle?: string;
agentPlan?: AIChatItemValueItemType['agentPlan'];
variables?: Record<string, any>;
nodeResponse?: ChatHistoryItemResType;
durationSeconds?: number;

View File

@ -128,20 +128,10 @@ const RenderText = React.memo(function RenderText({
const RenderTool = React.memo(
function RenderTool({
showAnimation,
tool,
subAppValue,
chatItemDataId,
isChatting,
onOpenCiteModal
tool
}: {
showAnimation: boolean;
tool: ToolModuleResponseItemType;
subAppValue?: AIChatItemValueItemType[];
chatItemDataId: string;
isChatting: boolean;
onOpenCiteModal?: (e?: OnOpenCiteModalProps) => void;
}) {
const { t } = useSafeTranslation();
const formatJson = (string: string) => {
@ -188,20 +178,6 @@ ${params}`}
${response}`}
/>
)}
{subAppValue && subAppValue.length > 0 && (
<Box bg={'white'} p={2}>
{subAppValue.map((value, index) => (
<AIResponseBox
key={index}
chatItemDataId={chatItemDataId}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
isLastResponseValue={index === subAppValue.length - 1}
value={value}
/>
))}
</Box>
)}
</AccordionPanel>
</AccordionItem>
</Accordion>
@ -332,15 +308,21 @@ const RenderPaymentPauseInteractive = React.memo(function RenderPaymentPauseInte
});
const RenderAgentPlan = React.memo(function RenderAgentPlan({
agentPlan
agentPlan,
chatItemDataId,
isChatting,
onOpenCiteModal
}: {
agentPlan: AgentPlanType;
agentPlan: NonNullable<AIChatItemValueItemType['agentPlan']>;
chatItemDataId: string;
isChatting: boolean;
onOpenCiteModal?: (e?: OnOpenCiteModalProps) => void;
}) {
const { t } = useTranslation();
return (
<Box>
<Box fontSize={'xl'} color={'myGray.900'} fontWeight={'bold'}>
{agentPlan.task}
</Box>
<Box>
{agentPlan.steps.map((step, index) => (
@ -349,6 +331,22 @@ const RenderAgentPlan = React.memo(function RenderAgentPlan({
{`${index + 1}. ${step.title}`}
</Box>
<Box>{step.description}</Box>
{/* <Flex flexDirection={'column'}>
{step.value.map((value, i) => {
const key = `${step.id}-ai-${i}`;
return (
<AIResponseBox
chatItemDataId={chatItemDataId}
key={key}
value={value}
isLastResponseValue={index === step.value.length - 1}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
/>
);
})}
</Flex> */}
</Box>
))}
</Box>
@ -361,14 +359,12 @@ const RenderAgentPlan = React.memo(function RenderAgentPlan({
const AIResponseBox = ({
chatItemDataId,
value,
subAppValue,
isLastResponseValue,
isChatting,
onOpenCiteModal
}: {
chatItemDataId: string;
value: AIChatItemValueItemType;
subAppValue?: AIChatItemValueItemType[];
isLastResponseValue: boolean;
isChatting: boolean;
onOpenCiteModal?: (e?: OnOpenCiteModalProps) => void;
@ -393,16 +389,7 @@ const AIResponseBox = ({
);
}
if ('tool' in value && value.tool) {
return (
<RenderTool
showAnimation={isChatting}
tool={value.tool}
subAppValue={subAppValue}
chatItemDataId={chatItemDataId}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
/>
);
return <RenderTool showAnimation={isChatting} tool={value.tool} />;
}
if ('interactive' in value && value.interactive) {
const interactive = extractDeepestInteractive(value.interactive);
@ -425,21 +412,21 @@ const AIResponseBox = ({
}
}
if ('agentPlan' in value && value.agentPlan) {
return <RenderAgentPlan agentPlan={value.agentPlan} />;
return (
<RenderAgentPlan
agentPlan={value.agentPlan}
chatItemDataId={chatItemDataId}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
/>
);
}
// Abandon
if ('tools' in value && value.tools) {
return value.tools.map((tool) => (
<Box key={tool.id} _notLast={{ mb: 2 }}>
<RenderTool
showAnimation={isChatting}
tool={tool}
subAppValue={subAppValue}
chatItemDataId={chatItemDataId}
isChatting={isChatting}
onOpenCiteModal={onOpenCiteModal}
/>
<RenderTool showAnimation={isChatting} tool={tool} />
</Box>
));
}

View File

@ -12,8 +12,7 @@ import { formatTime2YMDHMW } from '@fastgpt/global/common/string/time';
import { getWebReqUrl } from '@fastgpt/web/common/system/utils';
import type { OnOptimizePromptProps } from '@/components/common/PromptEditor/OptimizerPopover';
import type { OnOptimizeCodeProps } from '@/pageComponents/app/detail/WorkflowComponents/Flow/nodes/NodeCode/Copilot';
import type { AgentPlanType } from '@fastgpt/service/core/workflow/dispatch/ai/agent/sub/plan/type';
import { AIChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import type { AIChatItemValueItemType } from '@fastgpt/global/core/chat/type';
type StreamFetchProps = {
url?: string;
@ -27,11 +26,7 @@ export type StreamResponseType = {
type CommonResponseType = {
responseValueId?: string;
subAppId?: string;
stepCall?: {
taskId: string;
stepId: string;
};
stepId?: string;
};
type ResponseQueueItemType = CommonResponseType &
(
@ -46,11 +41,7 @@ type ResponseQueueItemType = CommonResponseType &
}
| {
event: SseResponseEventEnum.agentPlan;
agentPlan: AgentPlanType;
}
| {
event: SseResponseEventEnum.stepCall;
stepTitle: string;
agentPlan: AIChatItemValueItemType['agentPlan'];
}
| {
event:
@ -206,15 +197,14 @@ export const streamFetch = ({
})();
if (typeof parseJson !== 'object') return;
const { responseValueId, subAppId, stepCall, ...rest } = parseJson;
const { responseValueId, stepId, ...rest } = parseJson;
// console.log(parseJson, event);
if (event === SseResponseEventEnum.answer) {
const reasoningText = rest.choices?.[0]?.delta?.reasoning_content || '';
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
stepId,
event,
reasoningText
});
@ -223,8 +213,7 @@ export const streamFetch = ({
for (const item of text) {
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
stepId,
event,
text: item
});
@ -233,8 +222,7 @@ export const streamFetch = ({
const reasoningText = rest.choices?.[0]?.delta?.reasoning_content || '';
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
stepId,
event,
reasoningText
});
@ -242,8 +230,7 @@ export const streamFetch = ({
const text = rest.choices?.[0]?.delta?.content || '';
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
stepId,
event,
text
});
@ -254,8 +241,7 @@ export const streamFetch = ({
) {
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
stepId,
event,
...rest
});
@ -272,27 +258,17 @@ export const streamFetch = ({
} else if (event === SseResponseEventEnum.interactive) {
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
stepId,
event,
...rest
});
} else if (event === SseResponseEventEnum.agentPlan) {
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
stepId,
event,
agentPlan: rest.agentPlan
});
} else if (event === SseResponseEventEnum.stepCall) {
pushDataToQueue({
responseValueId,
subAppId,
stepCall,
event,
stepTitle: rest.stepTitle
});
} else if (event === SseResponseEventEnum.error) {
if (rest.statusText === TeamErrEnum.aiPointsNotEnough) {
useSystemStore.getState().setNotSufficientModalType(TeamErrEnum.aiPointsNotEnough);