This commit is contained in:
archer 2025-12-04 15:37:22 +08:00
parent 9ed3a350a4
commit 17fade23d9
No known key found for this signature in database
GPG Key ID: 4446499B846D4A9E
62 changed files with 2989 additions and 130 deletions

View File

@ -35,7 +35,7 @@
"scope": "typescriptreact",
"prefix": "context",
"body": [
"import React, { ReactNode } from 'react';",
"import React, { type ReactNode } from 'react';",
"import { createContext } from 'use-context-selector';",
"",
"type ContextType = {$1};",

View File

@ -4,9 +4,10 @@ import type {
ChatItemValueItemType,
RuntimeUserPromptType,
SystemChatItemValueItemType,
UserChatItemFileItemType,
UserChatItemType,
UserChatItemValueItemType
} from '../../core/chat/type.d';
} from './type';
import { ChatFileTypeEnum, ChatRoleEnum } from '../../core/chat/constants';
import type {
ChatCompletionContentPart,
@ -18,7 +19,7 @@ import type {
} from '../../core/ai/type.d';
import { ChatCompletionRequestMessageRoleEnum } from '../../core/ai/constants';
const GPT2Chat = {
export const GPT2Chat = {
[ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System,
[ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
[ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
@ -385,9 +386,10 @@ export const chatValue2RuntimePrompt = (value: ChatItemValueItemType[]): Runtime
return prompt;
};
export const runtimePrompt2ChatsValue = (
prompt: RuntimeUserPromptType
): UserChatItemType['value'] => {
export const runtimePrompt2ChatsValue = (prompt: {
files?: UserChatItemFileItemType[];
text?: string;
}): UserChatItemType['value'] => {
const value: UserChatItemType['value'] = [];
if (prompt.files) {
prompt.files.forEach((file) => {

View File

@ -0,0 +1,132 @@
import { ChatCompletionRequestMessageRoleEnum } from '../../ai/constants';
import type {
  ChatCompletionContentPart,
  ChatCompletionFunctionMessageParam,
  ChatCompletionMessageFunctionCall,
  ChatCompletionMessageParam,
  ChatCompletionMessageToolCall,
  ChatCompletionToolMessageParam
} from '../../ai/type';
import { ChatFileTypeEnum, ChatRoleEnum } from '../constants';
import { GPT2Chat, simpleUserContentPart } from '../adapt';
import type {
  AIChatItemValueItemType,
  SystemChatItemValueItemType,
  UserChatItemValueItemType
} from '../type';
import type { HelperBotChatItemType } from './type';
/**
 * Convert helper-bot chat history items into OpenAI ChatCompletion messages.
 *
 * - System items become system messages (skipped when text is empty).
 * - Human items become user messages; each value is mapped to a text /
 *   image_url / file_url content part, and a lone text part is collapsed
 *   by simpleUserContentPart.
 * - AI items become assistant messages; tool calls are kept only when
 *   `reserveTool` is true, and consecutive text values are concatenated
 *   into the previous assistant message.
 *
 * @param messages - helper-bot chat history (system / human / AI items)
 * @param reserveTool - keep assistant tool_calls and tool responses (default false)
 * @returns messages in ChatCompletion request format
 */
export const helperChats2GPTMessages = ({
  messages,
  reserveTool = false
}: {
  messages: HelperBotChatItemType[];
  reserveTool?: boolean;
}): ChatCompletionMessageParam[] => {
  let results: ChatCompletionMessageParam[] = [];

  messages.forEach((item) => {
    if (item.obj === ChatRoleEnum.System) {
      // System: only push when there is non-empty text content.
      const content = item.value?.[0]?.text?.content;
      if (content) {
        results.push({
          role: ChatCompletionRequestMessageRoleEnum.System,
          content
        });
      }
    } else if (item.obj === ChatRoleEnum.Human) {
      // Human: map each value to a ChatCompletion content part.
      const value = item.value
        .map((item) => {
          if (item.text) {
            return {
              type: 'text',
              text: item.text?.content || ''
            };
          }
          if (item.file) {
            if (item.file?.type === ChatFileTypeEnum.image) {
              return {
                type: 'image_url',
                key: item.file.key,
                image_url: {
                  url: item.file.url
                }
              };
            } else if (item.file?.type === ChatFileTypeEnum.file) {
              return {
                type: 'file_url',
                name: item.file?.name || '',
                url: item.file.url,
                key: item.file.key
              };
            }
          }
        })
        .filter(Boolean) as ChatCompletionContentPart[];
      results.push({
        role: ChatCompletionRequestMessageRoleEnum.User,
        content: simpleUserContentPart(value)
      });
    } else {
      // AI: only the root values need to be converted.
      const aiResults: ChatCompletionMessageParam[] = [];

      item.value.forEach((value, i) => {
        if ('tool' in value && reserveTool) {
          // Emit the tool call as an assistant message, followed by the
          // matching tool-response message.
          const tool_calls: ChatCompletionMessageToolCall[] = [
            {
              id: value.tool.id,
              type: 'function',
              function: {
                name: value.tool.functionName,
                arguments: value.tool.params
              }
            }
          ];
          const toolResponse: ChatCompletionToolMessageParam[] = [
            {
              tool_call_id: value.tool.id,
              role: ChatCompletionRequestMessageRoleEnum.Tool,
              content: value.tool.response
            }
          ];
          aiResults.push({
            role: ChatCompletionRequestMessageRoleEnum.Assistant,
            tool_calls
          });
          aiResults.push(...toolResponse);
        } else if ('text' in value && typeof value.text?.content === 'string') {
          // BUGFIX: check the *type* of the content. The original compared the
          // content itself against the literal 'string', so every assistant
          // text value was silently dropped.
          if (!value.text.content && item.value.length > 1) {
            // Skip empty text fragments when other values exist.
            return;
          }
          // Concat consecutive text values onto the previous assistant message.
          const lastValue = item.value[i - 1];
          const lastResult = aiResults[aiResults.length - 1];
          if (lastValue && typeof lastResult?.content === 'string') {
            lastResult.content += value.text.content;
          } else {
            aiResults.push({
              role: ChatCompletionRequestMessageRoleEnum.Assistant,
              content: value.text.content
            });
          }
        }
      });

      // Auto add an empty assistant message so the AI turn is never dropped.
      results = results.concat(
        aiResults.length > 0
          ? aiResults
          : [
              {
                role: ChatCompletionRequestMessageRoleEnum.Assistant,
                content: ''
              }
            ]
      );
    }
  });

  return results;
};

View File

@ -0,0 +1,85 @@
import { ObjectIdSchema } from '../../../common/type/mongo';
import { z } from 'zod';
import { ChatRoleEnum } from '../constants';
import {
UserChatItemSchema,
SystemChatItemSchema,
type ChatItemObjItemType,
type ChatItemValueItemType,
ToolModuleResponseItemSchema
} from '../type';
// Helper-bot variants; currently only the top-agent builder assistant.
export enum HelperBotTypeEnum {
  topAgent = 'topAgent'
}
// Zod mirror of the enum for runtime validation of the `type` field.
export const HelperBotTypeEnumSchema = z.enum(Object.values(HelperBotTypeEnum));
export type HelperBotTypeEnumType = z.infer<typeof HelperBotTypeEnumSchema>;
export const HelperBotChatSchema = z.object({
_id: ObjectIdSchema,
chatId: z.string(),
type: HelperBotTypeEnum,
userId: z.string(),
createTime: z.date(),
updateTime: z.date()
});
export type HelperBotChatType = z.infer<typeof HelperBotChatSchema>;
// AI schema: one AI value is exactly one of text / reasoning / tool.
const AIChatItemValueItemSchema = z.union([
  z.object({
    text: z.object({
      content: z.string()
    })
  }),
  z.object({
    reasoning: z.object({
      content: z.string()
    })
  }),
  z.object({
    tool: ToolModuleResponseItemSchema
  })
]);
const AIChatItemSchema = z.object({
  obj: z.literal(ChatRoleEnum.AI),
  value: z.array(AIChatItemValueItemSchema)
});
// A chat item is a user, system, or AI item, discriminated by `obj`.
const HelperBotChatRoleSchema = z.union([
  UserChatItemSchema,
  SystemChatItemSchema,
  AIChatItemSchema
]);
// Stored chat item: common persistence fields intersected with the role shape.
export const HelperBotChatItemSchema = z
  .object({
    _id: ObjectIdSchema,
    userId: z.string(),
    chatId: z.string(),
    dataId: z.string(),
    createTime: z.date(),
    // Free-form per-item memory store; may be absent.
    memories: z.record(z.string(), z.any()).nullish()
  })
  .and(HelperBotChatRoleSchema);
export type HelperBotChatItemType = z.infer<typeof HelperBotChatItemSchema>;
/* Shape sent to the client UI (server-only fields stripped) */
export const HelperBotChatItemSiteSchema = z
  .object({
    _id: ObjectIdSchema,
    dataId: z.string(),
    createTime: z.date()
  })
  .and(HelperBotChatRoleSchema);
export type HelperBotChatItemSiteType = z.infer<typeof HelperBotChatItemSiteSchema>;
/* Bot-specific parameters (top-agent builder) */
export const topAgentParamsSchema = z.object({
  role: z.string().nullish(),
  taskObject: z.string().nullish(),
  selectedTools: z.array(z.string()).nullish(),
  selectedDatasets: z.array(z.string()).nullish(),
  fileUpload: z.boolean().nullish()
});
export type TopAgentParamsType = z.infer<typeof topAgentParamsSchema>;

View File

@ -1,20 +1,29 @@
import { ClassifyQuestionAgentItemType } from '../workflow/template/system/classifyQuestion/type';
import type { SearchDataResponseItemType } from '../dataset/type';
import type { ChatFileTypeEnum, ChatRoleEnum, ChatSourceEnum, ChatStatusEnum } from './constants';
import type { ChatSourceEnum, ChatStatusEnum } from './constants';
import { ChatFileTypeEnum, ChatRoleEnum } from './constants';
import type { FlowNodeTypeEnum } from '../workflow/node/constant';
import type { NodeInputKeyEnum, NodeOutputKeyEnum } from '../workflow/constants';
import { NodeOutputKeyEnum } from '../workflow/constants';
import type { DispatchNodeResponseKeyEnum } from '../workflow/runtime/constants';
import type { AppSchema, VariableItemType } from '../app/type';
import { AppChatConfigType } from '../app/type';
import type { AppSchema as AppType } from '@fastgpt/global/core/app/type.d';
import { DatasetSearchModeEnum } from '../dataset/constants';
import type { DispatchNodeResponseType } from '../workflow/runtime/type.d';
import type { DispatchNodeResponseType } from '../workflow/runtime/type';
import type { ChatBoxInputType } from '../../../../projects/app/src/components/core/chat/ChatContainer/ChatBox/type';
import type { WorkflowInteractiveResponseType } from '../workflow/template/system/interactive/type';
import type { FlowNodeInputItemType } from '../workflow/type/io';
import type { FlowNodeTemplateType } from '../workflow/type/node.d';
import { ChatCompletionMessageParam } from '../ai/type';
import type { RequireOnlyOne } from '../../common/type/utils';
import z from 'zod';
/* One tool run response */
export type ToolRunResponseItemType = any;
/* tool module response */
export const ToolModuleResponseItemSchema = z.object({
id: z.string(),
toolName: z.string(),
toolAvatar: z.string(),
params: z.string(),
response: z.string(),
functionName: z.string()
});
export type ToolModuleResponseItemType = z.infer<typeof ToolModuleResponseItemSchema>;
/* --------- chat ---------- */
export type ChatSchemaType = {
@ -47,33 +56,57 @@ export type ChatWithAppSchema = Omit<ChatSchemaType, 'appId'> & {
};
/* --------- chat item ---------- */
export type UserChatItemFileItemType = {
type: `${ChatFileTypeEnum}`;
name?: string;
key?: string;
url: string;
};
export type UserChatItemValueItemType = {
text?: {
content: string;
};
file?: UserChatItemFileItemType;
};
export type UserChatItemType = {
obj: ChatRoleEnum.Human;
value: UserChatItemValueItemType[];
hideInUI?: boolean;
};
// User
export const UserChatItemFileItemSchema = z.object({
type: z.enum(Object.values(ChatFileTypeEnum)),
name: z.string().optional(),
key: z.string().optional(),
url: z.string()
});
export type UserChatItemFileItemType = z.infer<typeof UserChatItemFileItemSchema>;
export type SystemChatItemValueItemType = {
text?: {
content: string;
};
};
export type SystemChatItemType = {
obj: ChatRoleEnum.System;
value: SystemChatItemValueItemType[];
};
export const UserChatItemValueItemSchema = z.object({
text: z
.object({
content: z.string()
})
.optional(),
file: UserChatItemFileItemSchema.optional()
});
export type UserChatItemValueItemType = z.infer<typeof UserChatItemValueItemSchema>;
export const UserChatItemSchema = z.object({
obj: z.literal(ChatRoleEnum.Human),
value: z.array(UserChatItemValueItemSchema),
hideInUI: z.boolean().optional()
});
export type UserChatItemType = z.infer<typeof UserChatItemSchema>;
// System
export const SystemChatItemValueItemSchema = z.object({
text: z
.object({
content: z.string()
})
.nullish()
});
export type SystemChatItemValueItemType = z.infer<typeof SystemChatItemValueItemSchema>;
export const SystemChatItemSchema = z.object({
obj: z.literal(ChatRoleEnum.System),
value: z.array(SystemChatItemValueItemSchema)
});
export type SystemChatItemType = z.infer<typeof SystemChatItemSchema>;
// AI
export const AdminFbkSchema = z.object({
feedbackDataId: z.string(),
datasetId: z.string(),
collectionId: z.string(),
q: z.string(),
a: z.string().optional()
});
export type AdminFbkType = z.infer<typeof AdminFbkSchema>;
export type AIChatItemValueItemType = {
id?: string;
@ -136,14 +169,6 @@ export type ChatItemSchema = ChatItemObjItemType & {
time: Date;
};
export type AdminFbkType = {
feedbackDataId: string;
datasetId: string;
collectionId: string;
q: string;
a?: string;
};
export type ResponseTagItemType = {
totalQuoteList?: SearchDataResponseItemType[];
llmModuleAccount?: number;
@ -180,8 +205,8 @@ export type ChatItemResponseSchemaType = {
/* --------- team chat --------- */
export type ChatAppListSchema = {
apps: AppType[];
teamInfo: teamInfoSchema;
apps: AppSchema[];
teamInfo: any;
uid?: string;
};
@ -206,30 +231,22 @@ export type ChatHistoryItemResType = DispatchNodeResponseType & {
};
/* ---------- node outputs ------------ */
export type NodeOutputItemType = {
nodeId: string;
key: NodeOutputKeyEnum;
value: any;
};
export const NodeOutputItemSchema = z.object({
nodeId: z.string(),
key: z.enum(Object.values(NodeOutputKeyEnum)),
value: z.any()
});
export type NodeOutputItemType = z.infer<typeof NodeOutputItemSchema>;
/* One tool run response */
export type ToolRunResponseItemType = any;
/* tool module response */
export type ToolModuleResponseItemType = {
id: string;
toolName: string; // tool name
toolAvatar: string;
params: string; // tool params
response: string;
functionName: string;
};
export const ToolCiteLinksSchema = z.object({
name: z.string(),
url: z.string()
});
export type ToolCiteLinksType = z.infer<typeof ToolCiteLinksSchema>;
export type ToolCiteLinksType = {
name: string;
url: string;
};
/* dispatch run time */
export type RuntimeUserPromptType = {
files: UserChatItemValueItemType['file'][];
text: string;
};
export const RuntimeUserPromptSchema = z.object({
files: z.array(UserChatItemFileItemSchema),
text: z.string()
});
export type RuntimeUserPromptType = z.infer<typeof RuntimeUserPromptSchema>;

View File

@ -6,7 +6,7 @@ import {
type ChatHistoryItemResType,
type ChatItemType,
type UserChatItemValueItemType
} from './type.d';
} from './type';
import { sliceStrStartEnd } from '../../common/string/tools';
import { PublishChannelEnum } from '../../support/outLink/constant';
import { removeDatasetCiteText } from '../ai/llm/utils';

View File

@ -0,0 +1,62 @@
import { PaginationPropsSchema, PaginationResponseSchema } from '../../../type';
import {
type HelperBotChatItemSiteType,
HelperBotTypeEnumSchema,
topAgentParamsSchema
} from '../../../../core/chat/helperBot/type';
import { z } from 'zod';
import type { PaginationResponse } from '../../../../../web/common/fetch/type';
// Paginated fetch of helper-bot chat records
export const GetHelperBotChatRecordsParamsSchema = z
  .object({
    type: HelperBotTypeEnumSchema,
    chatId: z.string()
  })
  .and(PaginationPropsSchema);
export type GetHelperBotChatRecordsParamsType = z.infer<typeof GetHelperBotChatRecordsParamsSchema>;
export type GetHelperBotChatRecordsResponseType = PaginationResponse<HelperBotChatItemSiteType>;
// Delete a single conversation round
export const DeleteHelperBotChatParamsSchema = z.object({
  type: HelperBotTypeEnumSchema,
  chatId: z.string(),
  chatItemId: z.string()
});
export type DeleteHelperBotChatParamsType = z.infer<typeof DeleteHelperBotChatParamsSchema>;
// Get a presigned URL for uploading a file
export const GetHelperBotFilePresignParamsSchema = z.object({
  type: HelperBotTypeEnumSchema,
  chatId: z.string(),
  filename: z.string()
});
export type GetHelperBotFilePresignParamsType = z.infer<typeof GetHelperBotFilePresignParamsSchema>;
// Get a preview link for an uploaded file
export const GetHelperBotFilePreviewParamsSchema = z.object({
  key: z.string().min(1)
});
export type GetHelperBotFilePreviewParamsType = z.infer<typeof GetHelperBotFilePreviewParamsSchema>;
export const GetHelperBotFilePreviewResponseSchema = z.string();
// Completions request body. `metadata` is a discriminated union on the bot
// type, so each bot variant carries its own typed parameters.
export const HelperBotCompletionsParamsSchema = z.object({
  chatId: z.string(),
  chatItemId: z.string(),
  query: z.string(),
  files: z.array(
    z.object({
      type: z.enum(['image', 'file']),
      key: z.string(),
      url: z.string().optional(),
      name: z.string()
    })
  ),
  metadata: z.discriminatedUnion('type', [
    z.object({
      type: z.literal('topAgent'),
      data: topAgentParamsSchema
    })
  ])
});
export type HelperBotCompletionsParamsType = z.infer<typeof HelperBotCompletionsParamsSchema>;

View File

@ -0,0 +1,79 @@
import { z } from 'zod';
import type { OpenAPIPath } from '../../../type';
import {
DeleteHelperBotChatParamsSchema,
GetHelperBotChatRecordsParamsSchema,
HelperBotCompletionsParamsSchema
} from './api';
import { TagsMap } from '../../../tag';
// OpenAPI path definitions for the helper-bot endpoints.
export const HelperBotPath: OpenAPIPath = {
  // Paginated chat-record listing
  '/core/chat/helperBot/getRecords': {
    get: {
      summary: '分页获取记录',
      description: '分页获取记录',
      tags: [TagsMap.helperBot],
      requestParams: {
        query: GetHelperBotChatRecordsParamsSchema
      },
      responses: {
        200: {
          description: '成功返回记录列表',
          content: {
            'application/json': {
              schema: z.array(z.any())
            }
          }
        }
      }
    }
  },
  // Delete one conversation round
  '/core/chat/helperBot/deleteRecord': {
    delete: {
      summary: '删除单组对话',
      description: '删除单组对话',
      tags: [TagsMap.helperBot],
      requestBody: {
        content: {
          'application/json': {
            schema: DeleteHelperBotChatParamsSchema
          }
        }
      },
      responses: {
        200: {
          description: '成功删除记录',
          content: {
            'application/json': {
              schema: z.any()
            }
          }
        }
      }
    }
  },
  // Helper-bot chat completions (streaming response)
  '/core/chat/helperBot/completions': {
    post: {
      summary: '辅助助手对话接口',
      description: '辅助助手对话接口',
      tags: [TagsMap.helperBot],
      requestBody: {
        content: {
          'application/json': {
            schema: HelperBotCompletionsParamsSchema
          }
        }
      },
      responses: {
        200: {
          description: '成功返回处理结果',
          content: {
            'application/stream+json': {
              schema: z.any()
            }
          }
        }
      }
    }
  }
};

View File

@ -5,11 +5,12 @@ import { z } from 'zod';
import { CreatePostPresignedUrlResultSchema } from '../../../../service/common/s3/type';
import { PresignChatFileGetUrlSchema, PresignChatFilePostUrlSchema } from '../../../core/chat/api';
import { TagsMap } from '../../tag';
import { HelperBotPath } from './helperBot';
export const ChatPath: OpenAPIPath = {
...ChatSettingPath,
...ChatFavouriteAppPath,
...HelperBotPath,
'/core/chat/presignChatFileGetUrl': {
post: {
summary: '获取对话文件预签名 URL',

View File

@ -33,6 +33,10 @@ export const openAPIDocument = createDocument({
{
name: 'ApiKey',
tags: [TagsMap.apiKey]
},
{
name: '系统接口',
tags: [TagsMap.helperBot]
}
]
});

View File

@ -6,5 +6,6 @@ export const TagsMap = {
pluginAdmin: '管理员插件管理',
pluginToolAdmin: '管理员系统工具管理',
pluginTeam: '团队插件管理',
apiKey: 'APIKey'
apiKey: 'APIKey',
helperBot: '辅助助手'
};

View File

@ -27,3 +27,26 @@ export const formatSuccessResponse = <T>(data: T) => {
data
});
};
// Pagination request: exactly one of `offset` / `pageNum` must be provided
// (XOR enforced by the refine below). Values may arrive as number or string.
export const PaginationPropsSchema = z
  .object({
    pageSize: z.union([z.number(), z.string()]),
    // offset and pageNum are mutually exclusive — pass exactly one
    offset: z.union([z.number(), z.string()]).optional(),
    pageNum: z.union([z.number(), z.string()]).optional()
  })
  .refine(
    (data) => (typeof data.offset !== 'undefined') !== (typeof data.pageNum !== 'undefined'),
    { message: 'offset 和 pageNum 必须且只能传一个' }
  );
export type PaginationPropsType = z.infer<typeof PaginationPropsSchema>;
// Builds a paginated-response schema around an item schema.
export const PaginationResponseSchema = <T extends z.ZodType>(item: T) =>
  z.object({
    total: z.number(),
    list: z.array(item)
  });
export type PaginationResponseType<T = any> = {
  total: number;
  list: T[];
};

View File

@ -1,4 +1,4 @@
import type { HistoryItemType } from '../../core/chat/type.d';
import type { HistoryItemType } from '../../core/chat/type';
import type { OutLinkSchema } from './type.d';
export type AuthOutLinkInitProps = {

View File

@ -0,0 +1,121 @@
import { parseFileExtensionFromUrl } from '@fastgpt/global/common/string/tools';
import { S3PrivateBucket } from '../../buckets/private';
import { S3Sources } from '../../type';
import {
type CheckHelperBotFileKeys,
type DelChatFileByPrefixParams,
DelChatFileByPrefixSchema,
HelperBotFileUploadSchema
} from './type';
import { differenceInHours } from 'date-fns';
import { S3Buckets } from '../../constants';
import path from 'path';
import { getFileS3Key } from '../../utils';
/**
 * S3 access layer for helper-bot chat file uploads (private bucket).
 * Process-wide singleton — obtain via getS3HelperBotSource() or
 * S3HelperBotSource.getInstance().
 */
export class S3HelperBotSource {
  private bucket: S3PrivateBucket;
  private static instance: S3HelperBotSource;

  constructor() {
    this.bucket = new S3PrivateBucket();
  }

  /** Lazily created singleton accessor. */
  static getInstance() {
    return (this.instance ??= new S3HelperBotSource());
  }

  /**
   * Parse an S3 object URL into its filename, extension (without the dot)
   * and the prefix under which parsed derivatives of the file are stored.
   * Returns empty strings when the URL is not a helper-bot key in the
   * private bucket, or cannot be parsed at all.
   */
  static parseFileUrl(url: string | URL) {
    try {
      const parseUrl = new URL(url);
      const pathname = decodeURIComponent(parseUrl.pathname);
      // Not an S3 key of this source
      if (!pathname.startsWith(`/${S3Buckets.private}/${S3Sources.helperBot}/`)) {
        return {
          filename: '',
          extension: '',
          imageParsePrefix: ''
        };
      }
      const filename = pathname.split('/').pop() || 'file';
      const extension = path.extname(filename);
      return {
        filename,
        extension: extension.replace('.', ''),
        imageParsePrefix: `${pathname.replace(`/${S3Buckets.private}/`, '').replace(extension, '')}-parsed`
      };
    } catch (error) {
      return {
        filename: '',
        extension: '',
        imageParsePrefix: ''
      };
    }
  }

  // NOTE(review): keys built by getFileS3Key.helperBot have the layout
  // `helperBot/<type>/<userId>/<chatId>/<filename>` — verify the segment
  // order/offset expected here before relying on the parsed fields.
  parseKey(key: string) {
    const [type, chatId, userId, filename] = key.split('/');
    return { type, chatId, userId, filename };
  }

  // Stream the object body.
  getFileStream(key: string) {
    return this.bucket.getObject(key);
  }

  // Raw object stat (size + metadata).
  getFileStat(key: string) {
    return this.bucket.statObject(key);
  }

  // Filename / extension / content info derived from the object stat;
  // falls back to empty values when the object does not exist.
  async getFileMetadata(key: string) {
    const stat = await this.getFileStat(key);
    if (!stat) return { filename: '', extension: '', contentLength: 0, contentType: '' };
    const contentLength = stat.size;
    const filename: string = decodeURIComponent(stat.metaData['origin-filename']);
    const extension = parseFileExtensionFromUrl(filename);
    const contentType: string = stat.metaData['content-type'];
    return {
      filename,
      extension,
      contentType,
      contentLength
    };
  }

  /** Presigned GET url; `external` selects the externally reachable host. */
  async createGetFileURL(params: { key: string; expiredHours?: number; external: boolean }) {
    const { key, expiredHours = 1, external = false } = params; // defaults to one hour
    if (external) {
      return await this.bucket.createExternalUrl({ key, expiredHours });
    }
    return await this.bucket.createPreviewUrl({ key, expiredHours });
  }

  /** Presigned POST url for uploading a chat file. */
  async createUploadFileURL(params: CheckHelperBotFileKeys) {
    const { type, chatId, userId, filename, expiredTime } = HelperBotFileUploadSchema.parse(params);
    const { fileKey } = getFileS3Key.helperBot({ type, chatId, userId, filename });
    // BUGFIX: differenceInHours(laterDate, earlierDate). The original passed
    // (new Date(), expiredTime), which returns a negative/zero hour count for
    // any future expiry, producing an already-expired upload URL.
    return await this.bucket.createPostPresignedUrl(
      { rawKey: fileKey, filename },
      { expiredHours: expiredTime ? differenceInHours(expiredTime, new Date()) : 24 }
    );
  }

  // Queue deletion of every object under this chat's prefix.
  deleteFilesByPrefix(params: DelChatFileByPrefixParams) {
    const { type, chatId, userId } = DelChatFileByPrefixSchema.parse(params);
    const prefix = [S3Sources.helperBot, type, userId, chatId].filter(Boolean).join('/');
    return this.bucket.addDeleteJob({ prefix });
  }

  // Queue deletion of a single object.
  deleteFileByKey(key: string) {
    return this.bucket.addDeleteJob({ key });
  }
}
/** Convenience accessor for the shared S3 helper-bot source singleton. */
export function getS3HelperBotSource() {
  const source = S3HelperBotSource.getInstance();
  return source;
}

View File

@ -0,0 +1,18 @@
import { z } from 'zod';
import { HelperBotTypeEnumSchema } from '@fastgpt/global/core/chat/helperBot/type';
export const HelperBotFileUploadSchema = z.object({
type: HelperBotTypeEnumSchema,
chatId: z.string().nonempty(),
userId: z.string().nonempty(),
filename: z.string().nonempty(),
expiredTime: z.date().optional()
});
export type CheckHelperBotFileKeys = z.infer<typeof HelperBotFileUploadSchema>;
export const DelChatFileByPrefixSchema = z.object({
type: HelperBotTypeEnumSchema,
chatId: z.string().nonempty().optional(),
userId: z.string().nonempty().optional()
});
export type DelChatFileByPrefixParams = z.infer<typeof DelChatFileByPrefixSchema>;

View File

@ -17,7 +17,7 @@ export type ExtensionType = keyof typeof Mimes;
export type S3OptionsType = typeof defaultS3Options;
export const S3SourcesSchema = z.enum(['avatar', 'chat', 'dataset', 'temp']);
export const S3SourcesSchema = z.enum(['avatar', 'chat', 'dataset', 'temp', 'helperBot']);
export const S3Sources = S3SourcesSchema.enum;
export type S3SourceType = z.infer<typeof S3SourcesSchema>;

View File

@ -11,6 +11,7 @@ import { getNanoid } from '@fastgpt/global/common/string/tools';
import path from 'node:path';
import type { ParsedFileContentS3KeyParams } from './sources/dataset/type';
import { EndpointUrl } from '@fastgpt/global/common/file/constants';
import type { HelperBotTypeEnumType } from '@fastgpt/global/core/chat/helperBot/type';
// S3文件名最大长度配置
export const S3_FILENAME_MAX_LENGTH = 50;
@ -194,6 +195,25 @@ export const getFileS3Key = {
};
},
// Build the S3 object key (and the prefix for parsed derivatives) of a
// helper-bot chat upload.
helperBot: ({
  type,
  chatId,
  userId,
  filename
}: {
  type: HelperBotTypeEnumType;
  chatId: string;
  userId: string;
  filename: string;
}) => {
  const { formatedFilename, extension } = getFormatedFilename(filename);
  // Key layout: helperBot/<type>/<userId>/<chatId>
  const basePrefix = [S3Sources.helperBot, type, userId, chatId].filter(Boolean).join('/');
  return {
    fileKey: [basePrefix, `${formatedFilename}${extension ? `.${extension}` : ''}`].join('/'),
    fileParsedPrefix: [basePrefix, `${formatedFilename}-parsed`].join('/')
  };
},
// 上传数据集的文件的解析结果的图片的 Key
dataset: (params: ParsedFileContentS3KeyParams) => {
const { datasetId, filename } = params;

View File

@ -0,0 +1,42 @@
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { connectionMongo, getMongoModel } from '../../../common/mongo';
const { Schema } = connectionMongo;
import { helperBotChatItemCollectionName } from './constants';
import { HelperBotTypeEnum } from '@fastgpt/global/core/chat/helperBot/type';
import type { HelperBotChatItemType } from '@fastgpt/global/core/chat/helperBot/type';
// Mongoose schema for one helper-bot chat message, uniquely keyed by
// (userId, chatId, dataId, obj).
const HelperBotChatItemSchema = new Schema({
  userId: {
    type: String,
    // BUGFIX: the mongoose option is `required` — `require` is not a schema
    // option and is silently ignored, so these fields were never validated.
    required: true
  },
  chatId: {
    type: String,
    required: true
  },
  dataId: {
    type: String,
    required: true
  },
  createTime: {
    type: Date,
    default: () => new Date()
  },
  obj: {
    type: String,
    required: true,
    enum: Object.values(ChatRoleEnum)
  },
  value: {
    type: Array,
    required: true
  },
  // Free-form per-message memory store
  memories: Object
});
HelperBotChatItemSchema.index({ userId: 1, chatId: 1, dataId: 1, obj: 1 }, { unique: true });
export const MongoHelperBotChatItem = getMongoModel<HelperBotChatItemType>(
  helperBotChatItemCollectionName,
  HelperBotChatItemSchema
);

View File

@ -0,0 +1,36 @@
import { connectionMongo, getMongoModel } from '../../../common/mongo';
const { Schema } = connectionMongo;
import { helperBotChatCollectionName } from './constants';
import { HelperBotTypeEnum } from '@fastgpt/global/core/chat/helperBot/type';
import type { HelperBotChatType } from '../../../../global/core/chat/helperBot/type';
// Mongoose schema for a helper-bot chat session, uniquely keyed by
// (type, userId, chatId).
const HelperBotChatSchema = new Schema({
  type: {
    type: String,
    required: true,
    enum: Object.values(HelperBotTypeEnum)
  },
  userId: {
    type: String,
    // BUGFIX: the mongoose option is `required` — `require` is not a schema
    // option and is silently ignored (note `type` above already used the
    // correct spelling).
    required: true
  },
  chatId: {
    type: String,
    required: true
  },
  createTime: {
    type: Date,
    default: () => new Date()
  },
  updateTime: {
    type: Date,
    default: () => new Date()
  }
});
HelperBotChatSchema.index({ type: 1, userId: 1, chatId: 1 }, { unique: true });
export const MongoHelperBotChat = getMongoModel<HelperBotChatType>(
  helperBotChatCollectionName,
  HelperBotChatSchema
);

View File

@ -0,0 +1,2 @@
// Mongo collection names for helper-bot sessions and their messages.
export const helperBotChatCollectionName = 'helper_bot_chats';
export const helperBotChatItemCollectionName = 'helper_bot_chat_items';

View File

@ -0,0 +1,6 @@
import { HelperBotTypeEnum } from '@fastgpt/global/core/chat/helperBot/type';
import { dispatchTopAgent } from './topAgent';
// Maps each helper-bot type to its dispatch handler.
export const dispatchMap = {
  [HelperBotTypeEnum.topAgent]: dispatchTopAgent
};

View File

@ -0,0 +1,10 @@
import type { HelperBotDispatchParamsType } from '../type';
import { helperChats2GPTMessages } from '@fastgpt/global/core/chat/helperBot/adaptor';
// Top-agent dispatch handler: converts the chat history into ChatCompletion
// messages (tool calls stripped).
// NOTE(review): work in progress — `messages` is built but never used and the
// function returns nothing; `query`, `files` and `metadata` are also unused.
// Confirm the intended completion call / response before release.
export const dispatchTopAgent = async (props: HelperBotDispatchParamsType) => {
  const { query, files, metadata, histories } = props;
  const messages = helperChats2GPTMessages({
    messages: histories,
    reserveTool: false
  });
};

View File

@ -0,0 +1,413 @@
export const getPrompt = ({ resourceList }: { resourceList: string }) => {
return `<!-- 流程搭建模板设计系统 -->
<role>
********Agent执行流程模板
****
****
-
-
-
-
</role>
<mission>
****
1.
2.
3.
4. 100%
****
- 使
-
-
</mission>
<info_collection_phase>
****
****
1. ****
-
-
-
-
2. ****
- ****
* 使
*
* 使
- ****
*
*
*
- ****
*
*
*
3. ****
- 使
-
-
4. ****
-
-
- ///
5. ****
-
-
-
****
- ****
- ****
- ****
- ****
- ****
****
1. /type字段
2.
3.
4.
****
**使JSON格式**
JSON
{
"reasoning": "为什么问这个问题的推理过程:基于什么考虑、希望收集什么信息、对后续有什么帮助",
"question": "实际向用户提出的问题内容"
}
{
"reasoning": "需要首先了解任务的基本定位和目标场景,这将决定后续需要确认的工具类型和能力边界",
"question": "我想了解一下您希望这个流程模板实现什么功能?能否详细描述一下具体要处理什么样的任务或问题?"
}
{
"reasoning": "需要确认参数化设计的重点方向,这将影响流程模板的灵活性设计",
"question": "关于流程的参数化设计,用户最需要调整的是:\\nA. 输入数据源(不同类型的数据库/文件)\\nB. 处理参数(阈值、过滤条件、算法选择)\\nC. 输出格式(报告类型、文件格式、目标系统)\\nD. 执行环境(触发方式、频率、并发度)\\n\\n请选择最符合的选项或输入您的详细回答"
}
1. 3-4
2. "其他"
3. 便
4. 使
- ///
- ///
- ///
- ///
- ///
-
-
- 使
</info_collection_phase>
<capability_boundary_enforcement>
****
****
1. ****使
2. ****
3. ****
4. ****
****
-
-
- 使
-
****
- 使
-
-
-
****
</capability_boundary_enforcement>
<plan_generation_phase>
<resource_definitions>
****
** (Tools)**
-
- API
-
-
** (Knowledges)**
-
-
-
-
** (System Features)**
-
-
-
-
****
- = "做事情"
- = "查信息"
- = "改变模式"
****
-
-
- /
- 使
</resource_definitions>
****
"""
${resourceList}
"""
****
1. JSON格式输出
2. **** -
3.
4. **使** -
**🚨 使**
****
1. "## 可用资源列表"
2. ID后面都有标签[] []
3. type
- [] "type": "tool"
- [] "type": "knowledge"
****
- 使[{"id": "...", "type": "..."}]
- ID必须完全匹配列表中的ID
- 使["...", "..."]
- type
****
1. ID
2. [] "type": "tool"
3. [] "type": "knowledge"
4. id type
****
-
- 使"数据库工具"ID
- "可能"
-
- [] type: "tool"
- type
-
****
🔍
-
-
-
📋
-
-
-
🎯
-
-
-
* file_upload
* file_upload
🔧
-
-
- ID都在可用列表中
- resources
****
**JSON**
JSON
{
"task_analysis": {
"goal": "任务的核心目标描述",
"role": "该流程的角色信息",
"key_features": "收集到的信息,对任务的深度理解和定位"
},
"reasoning": "详细说明所有资源的选择理由:工具、知识库和系统功能如何协同工作来完成任务目标",
"resources": {
"tools": [
{"id": "工具ID", "type": "tool"}
],
"knowledges": [
{"id": "知识库ID", "type": "knowledge"}
],
"system_features": {
"file_upload": {
"enabled": true/false,
"purpose": "说明原因enabled=true时必填",
"file_types": ["可选的文件类型"]
}
}
}
}
****
- task_analysis: 提供对任务的深度理解和角色定义
- reasoning: 说明所有资源++
- resources: 资源配置对象
* tools: 工具数组 id type"tool"
* knowledges: 知识库数组 id type"knowledge"
* system_features: 系统功能配置对象
- file_upload.enabled: 是否需要文件上传
- file_upload.purpose: 为什么需要enabled=true时必填
- file_upload.file_types: 建议的文件类型["pdf", "xlsx"]
** 1**
{
"task_analysis": {
"goal": "分析用户的财务报表数据",
"role": "财务数据分析专家"
},
"reasoning": "使用数据分析工具处理Excel数据需要用户上传自己的财务报表文件",
"resources": {
"tools": [
{"id": "data_analysis/tool", "type": "tool"}
],
"knowledges": [],
"system_features": {
"file_upload": {
"enabled": true,
"purpose": "需要您上传财务报表文件Excel或PDF格式进行数据提取和分析",
"file_types": ["xlsx", "xls", "pdf"]
}
}
}
}
** 2**
{
"reasoning": "使用搜索工具获取实时信息,结合知识库的专业知识",
"resources": {
"tools": [
{"id": "metaso/metasoSearch", "type": "tool"}
],
"knowledges": [
{"id": "travel_kb", "type": "knowledge"}
],
"system_features": {
"file_upload": {
"enabled": false
}
}
}
}
** 1**使
{
"tools": [...] // ❌ 错误:应该使用 resources.tools
}
** 2**system_features
{
"resources": {
"system_features": {
"file_upload": {
"enabled": true
// ❌ 错误:启用时缺少 purpose 字段
}
}
}
}
****
- 使 \`\`\`json 或其他代码块标记
- 使 tools 使 resources
-
- 使 resources toolsknowledgessystem_features
- file_upload.enabled=true purpose
- knowledges tools
- JSON内容
1.
2.
3. resources
4. resources
5. tools knowledges
6. type准确性type为"tool"type为"knowledge"
7. file_upload.enabled=true时必须提供purpose字段
8. JSON
</plan_generation_phase>
<phase_transition>
****
-
- 6
-
-
****
****
\`\`\`
\`\`\`
"基于我们刚才的交流,我已经收集到足够的信息来为您制定执行计划。现在让我根据您的需求和实际情况,生成详细的任务分解方案。"
****
\`\`\`
...
\`\`\`
</phase_transition>
<conversation_rules>
****
- 1-2
- JSON格式的执行计划
****
- "直接生成流程"
-
- 3-4
****
-
-
-
-
</conversation_rules>`;
};

View File

@ -0,0 +1,16 @@
import { z } from 'zod';
import { HelperBotCompletionsParamsSchema } from '../../../../../global/openapi/core/chat/helperBot/api';
import { HelperBotChatItemSchema } from '@fastgpt/global/core/chat/helperBot/type';
import { WorkflowResponseFnSchema } from '../../../workflow/dispatch/type';
// Arguments handed to a helper-bot dispatch handler; file/metadata shapes are
// reused from the completions request schema.
export const HelperBotDispatchParamsSchema = z.object({
  query: z.string(),
  files: HelperBotCompletionsParamsSchema.shape.files,
  metadata: HelperBotCompletionsParamsSchema.shape.metadata,
  histories: z.array(HelperBotChatItemSchema),
  workflowResponseWrite: WorkflowResponseFnSchema
});
export type HelperBotDispatchParamsType = z.infer<typeof HelperBotDispatchParamsSchema>;
// Dispatch response shape — still empty (handlers are WIP).
export const HelperBotDispatchResponseSchema = z.object({});
export type HelperBotDispatchResponseType = z.infer<typeof HelperBotDispatchResponseSchema>;

View File

@ -1,6 +1,6 @@
import { connectionMongo, getMongoModel } from '../../common/mongo';
const { Schema } = connectionMongo;
import { type ChatSchemaType } from '@fastgpt/global/core/chat/type.d';
import { type ChatSchemaType } from '@fastgpt/global/core/chat/type';
import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import {
TeamCollectionName,

View File

@ -2,7 +2,7 @@ import type {
AIChatItemType,
ChatHistoryItemResType,
UserChatItemType
} from '@fastgpt/global/core/chat/type.d';
} from '@fastgpt/global/core/chat/type';
import { MongoApp } from '../app/schema';
import type { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';

View File

@ -1,5 +1,5 @@
/* Abandoned */
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
import { type SelectAppItemType } from '@fastgpt/global/core/workflow/template/system/abandoned/runApp/type';
import { runWorkflow } from '../index';

View File

@ -1,5 +1,5 @@
import { filterGPTMessageByMaxContext } from '../../../ai/llm/utils';
import type { ChatItemType, UserChatItemValueItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType, UserChatItemFileItemType } from '@fastgpt/global/core/chat/type';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
@ -319,7 +319,7 @@ async function getMultiInput({
runningUserInfo
}: {
histories: ChatItemType[];
inputFiles: UserChatItemValueItemType['file'][];
inputFiles: UserChatItemFileItemType[];
fileLinks?: string[];
stringQuoteText?: string; // file quote
requestOrigin?: string;
@ -371,7 +371,9 @@ async function getMultiInput({
return {
documentQuoteText: text,
userFiles: fileLinks.map((url) => parseUrlToFileType(url)).filter(Boolean)
userFiles: fileLinks
.map((url) => parseUrlToFileType(url))
.filter(Boolean) as UserChatItemFileItemType[]
};
}
@ -402,7 +404,7 @@ async function getChatMessages({
systemPrompt: string;
userChatInput: string;
userFiles: UserChatItemValueItemType['file'][];
userFiles: UserChatItemFileItemType[];
documentQuoteText?: string; // document quote
}) {
// Dataset prompt ====>

View File

@ -1,5 +1,5 @@
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/workflow/template/system/classifyQuestion/type';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';

View File

@ -1,6 +1,6 @@
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { filterGPTMessageByMaxContext } from '../../../ai/llm/utils';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/workflow/template/system/contextExtract/type';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';

View File

@ -9,7 +9,11 @@ import { getLLMModel } from '../../../../ai/model';
import { filterToolNodeIdByEdges, getNodeErrResponse, getHistories } from '../../utils';
import { runToolCall } from './toolCall';
import { type DispatchToolModuleProps, type ToolNodeItemType } from './type';
import { type ChatItemType, type UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import type {
UserChatItemFileItemType,
ChatItemType,
UserChatItemValueItemType
} from '@fastgpt/global/core/chat/type';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import {
GPTMessages2Chats,
@ -121,10 +125,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
fileLinks,
inputFiles: globalFiles,
hasReadFilesTool,
usageId,
appId: props.runningAppInfo.id,
chatId: props.chatId,
uId: props.uid
usageId
});
const concatenateSystemPrompt = [
@ -284,10 +285,7 @@ const getMultiInput = async ({
customPdfParse,
inputFiles,
hasReadFilesTool,
usageId,
appId,
chatId,
uId
usageId
}: {
runningUserInfo: ChatDispatchProps['runningUserInfo'];
histories: ChatItemType[];
@ -295,12 +293,9 @@ const getMultiInput = async ({
requestOrigin?: string;
maxFiles: number;
customPdfParse?: boolean;
inputFiles: UserChatItemValueItemType['file'][];
inputFiles: UserChatItemFileItemType[];
hasReadFilesTool: boolean;
usageId?: string;
appId: string;
chatId?: string;
uId: string;
}) => {
// Not file quote
if (!fileLinks || hasReadFilesTool) {
@ -334,7 +329,9 @@ const getMultiInput = async ({
return {
documentQuoteText: text,
userFiles: fileLinks.map((url) => parseUrlToFileType(url)).filter(Boolean)
userFiles: fileLinks
.map((url) => parseUrlToFileType(url))
.filter(Boolean) as UserChatItemFileItemType[]
};
};

View File

@ -1,4 +1,4 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
import { runWorkflow } from '../index';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';

View File

@ -5,7 +5,7 @@ import type {
ChatHistoryItemResType,
NodeOutputItemType,
ToolRunResponseItemType
} from '@fastgpt/global/core/chat/type.d';
} from '@fastgpt/global/core/chat/type';
import type { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeInputKeyEnum, VariableInputEnum } from '@fastgpt/global/core/workflow/constants';
import {

View File

@ -1,4 +1,4 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';

View File

@ -3,15 +3,18 @@ import type {
ChatHistoryItemResType,
ToolRunResponseItemType
} from '@fastgpt/global/core/chat/type';
import { ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import type {
DispatchNodeResponseKeyEnum,
SseResponseEventEnum
} from '@fastgpt/global/core/workflow/runtime/constants';
import type { RuntimeNodeItemType } from '@fastgpt/global/core/workflow/runtime/type';
import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import type {
InteractiveNodeResponseType,
WorkflowInteractiveResponseType
} from '@fastgpt/global/core/workflow/template/system/interactive/type';
import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
import type { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import z from 'zod';
export type WorkflowDebugResponse = {
memoryEdges: RuntimeEdgeItemType[];
@ -41,10 +44,16 @@ export type DispatchFlowResponse = {
durationSeconds: number;
};
export type WorkflowResponseType = (e: {
id?: string;
stepId?: string;
export const WorkflowResponseFnSchema = z.function({
input: z.tuple([
z.object({
id: z.string().optional(),
stepId: z.string().optional(),
event: z.custom<SseResponseEventEnum>(),
data: z.record(z.string(), z.any())
})
]),
output: z.void()
});
event: SseResponseEventEnum;
data: Record<string, any>;
}) => void;
export type WorkflowResponseType = z.infer<typeof WorkflowResponseFnSchema>;

View File

@ -1,6 +1,6 @@
import { getErrText } from '@fastgpt/global/common/error/utils';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ChatItemType, UserChatItemFileItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType, UserChatItemFileItemType } from '@fastgpt/global/core/chat/type';
import { NodeOutputKeyEnum, VariableInputEnum } from '@fastgpt/global/core/workflow/constants';
import type { VariableItemType } from '@fastgpt/global/core/app/type';
import { encryptSecret } from '../../../common/secret/aes256gcm';

View File

@ -1,13 +1,13 @@
import type { RequireOnlyOne } from '@fastgpt/global/common/type/utils';
type PaginationProps<T = {}> = T & {
export type PaginationProps<T = {}> = T & {
pageSize: number | string;
} & RequireOnlyOne<{
offset: number | string;
pageNum: number | string;
}>;
type PaginationResponse<T = {}> = {
export type PaginationResponse<T = {}> = {
total: number;
list: T[];
};

View File

@ -2,7 +2,6 @@ import React, { type ReactNode, type RefObject, useMemo, useRef, useState } from
import { Box, type BoxProps } from '@chakra-ui/react';
import { useToast } from './useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { type PaginationProps, type PaginationResponse } from '../common/fetch/type';
import {
useBoolean,
useLockFn,
@ -15,6 +14,7 @@ import {
import MyBox from '../components/common/MyBox';
import { useTranslation } from 'next-i18next';
import { useRequest2 } from './useRequest';
import type { PaginationPropsType, PaginationResponseType } from '@fastgpt/global/openapi/type';
type ItemHeight<T> = (index: number, data: T) => number;
const thresholdVal = 100;
@ -31,8 +31,8 @@ export type ScrollListType = ({
} & BoxProps) => React.JSX.Element;
export function useVirtualScrollPagination<
TParams extends PaginationProps,
TData extends PaginationResponse
TParams extends PaginationPropsType,
TData extends PaginationResponseType
>(
api: (data: TParams) => Promise<TData>,
{
@ -179,8 +179,8 @@ export function useVirtualScrollPagination<
}
export function useScrollPagination<
TParams extends PaginationProps,
TData extends PaginationResponse
TParams extends PaginationPropsType,
TData extends PaginationResponseType
>(
api: (data: TParams) => Promise<TData>,
{

View File

@ -239,7 +239,7 @@ const ChatItem = ({ hasPlanCheck, ...props }: Props) => {
return [];
}, [chat.obj, chat.value, isChatting]);
console.log(chat.value, splitAiResponseResults, 232);
const setCiteModalData = useContextSelector(ChatItemContext, (v) => v.setCiteModalData);
const onOpenCiteModal = useMemoizedFn(
(item?: {

View File

@ -6,7 +6,7 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import DatasetSelectModal, { useDatasetSelect } from '@/components/core/dataset/SelectModal';
import dynamic from 'next/dynamic';
import { type AdminFbkType } from '@fastgpt/global/core/chat/type.d';
import { type AdminFbkType } from '@fastgpt/global/core/chat/type';
import SelectCollections from '@/web/core/dataset/components/SelectCollections';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';

View File

@ -11,7 +11,7 @@ import type {
AIChatItemValueItemType,
ChatSiteItemType,
UserChatItemValueItemType
} from '@fastgpt/global/core/chat/type.d';
} from '@fastgpt/global/core/chat/type';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { Box, Button, Checkbox, Flex } from '@chakra-ui/react';

View File

@ -0,0 +1,400 @@
import type { FlexProps } from '@chakra-ui/react';
import { Box, Flex, Textarea, useBoolean } from '@chakra-ui/react';
import React, { useRef, useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import {
type ChatBoxInputFormType,
type ChatBoxInputType,
type SendPromptFnType
} from '../ChatContainer/ChatBox/type';
import { textareaMinH } from '../ChatContainer/ChatBox/constants';
import { useFieldArray, type UseFormReturn } from 'react-hook-form';
import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import dynamic from 'next/dynamic';
import { useContextSelector } from 'use-context-selector';
import { WorkflowRuntimeContext } from '../ChatContainer/context/workflowRuntimeContext';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { documentFileType } from '@fastgpt/global/common/file/constants';
import FilePreview from '../ChatContainer/components/FilePreview';
import { useFileUpload } from './hooks/useFileUpload';
import ComplianceTip from '@/components/common/ComplianceTip/index';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { HelperBotContext } from './context';
import type { onSendMessageFnType } from './type';
// Accepts any image, plus any file whose name ends with one of the
// extensions listed in documentFileType (a comma-separated string).
const fileTypeFilter = (file: File) => {
  if (file.type.includes('image')) return true;
  const allowedExtensions = documentFileType.split(',').map((ext) => ext.trim());
  return allowedExtensions.some((ext) => file.name.endsWith(ext));
};
/**
 * Chat input bar for the helper-bot panel: an auto-growing textarea with
 * file selection / paste / drag-and-drop upload and a combined send-or-stop
 * button. Send is only enabled when there is text or at least one file and
 * no upload is still in flight.
 */
const ChatInput = ({
  chatId,
  onSendMessage,
  onStop,
  TextareaDom,
  chatForm,
  isChatting
}: {
  chatId: string;
  onSendMessage: onSendMessageFnType;
  onStop: () => void;
  TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
  chatForm: UseFormReturn<ChatBoxInputFormType>;
  isChatting: boolean;
}) => {
  const { t } = useTranslation();
  const { toast } = useToast();
  const { isPc } = useSystem();
  const { setValue, watch, control } = chatForm;
  const inputValue = watch('input');
  const type = useContextSelector(HelperBotContext, (v) => v.type);
  const fileSelectConfig = useContextSelector(HelperBotContext, (v) => v.fileSelectConfig);
  const [focusing, { on: onFocus, off: offFocus }] = useBoolean();
  // Files live in the chat form as a field array so they survive re-renders.
  const fileCtrl = useFieldArray({
    control,
    name: 'files'
  });
  const {
    File,
    onOpenSelectFile,
    fileList,
    onSelectFile,
    selectFileIcon,
    selectFileLabel,
    showSelectFile,
    showSelectImg,
    showSelectVideo,
    showSelectAudio,
    showSelectCustomFileExtension,
    removeFiles,
    replaceFiles,
    hasFileUploading
  } = useFileUpload({
    fileSelectConfig,
    fileCtrl,
    type,
    chatId
  });
  // Something to send once either text or at least one file is present.
  const havInput = !!inputValue || fileList.length > 0;
  const canSendMessage = havInput && !hasFileUploading;
  const canUploadFile =
    showSelectFile ||
    showSelectImg ||
    showSelectVideo ||
    showSelectAudio ||
    showSelectCustomFileExtension;
  /* on send: forward trimmed text + current files, then clear the file list.
     The text field itself is cleared by the caller's form handling. */
  const handleSend = useCallback(
    async (val?: string) => {
      if (!canSendMessage) return;
      const textareaValue = val || TextareaDom.current?.value || '';
      onSendMessage({
        query: textareaValue.trim(),
        files: fileList
      });
      replaceFiles([]);
    },
    [TextareaDom, canSendMessage, fileList, onSendMessage, replaceFiles]
  );
  // Memoized textarea: auto-grows up to 128px, Ctrl/Alt+Enter inserts a
  // newline, plain Enter sends (on PC or when embedded in an iframe), and
  // pasted files are intercepted and routed through the upload flow.
  const RenderTextarea = useMemo(
    () => (
      <Flex direction={'column'} mt={fileList.length > 0 ? 1 : 0}>
        {/* Textarea */}
        <Flex w={'100%'}>
          {/* Prompt Container */}
          <Textarea
            ref={TextareaDom}
            py={0}
            mx={[2, 4]}
            px={2}
            border={'none'}
            _focusVisible={{
              border: 'none'
            }}
            placeholder={
              isPc ? t('common:core.chat.Type a message') : t('chat:input_placeholder_phone')
            }
            resize={'none'}
            rows={1}
            height={[5, 6]}
            lineHeight={[5, 6]}
            maxHeight={[24, 32]}
            minH={'50px'}
            mb={0}
            maxLength={-1}
            overflowY={'hidden'}
            overflowX={'hidden'}
            whiteSpace={'pre-wrap'}
            wordBreak={'break-word'}
            boxShadow={'none !important'}
            color={'myGray.900'}
            fontWeight={400}
            fontSize={'1rem'}
            letterSpacing={'0.5px'}
            w={'100%'}
            _placeholder={{
              color: '#707070',
              fontSize: 'sm'
            }}
            value={inputValue}
            onChange={(e) => {
              // Manual auto-grow: reset to min height, then fit to content
              // (capped at maxHeight).
              const textarea = e.target;
              textarea.style.height = textareaMinH;
              const maxHeight = 128;
              const newHeight = Math.min(textarea.scrollHeight, maxHeight);
              textarea.style.height = `${newHeight}px`;
              // Only show scrollbar when content exceeds max height
              if (textarea.scrollHeight > maxHeight) {
                textarea.style.overflowY = 'auto';
              } else {
                textarea.style.overflowY = 'hidden';
              }
              setValue('input', textarea.value);
            }}
            onKeyDown={(e) => {
              // enter send.(pc or iframe && enter and unPress shift)
              const isEnter = e.key === 'Enter';
              if (isEnter && TextareaDom.current && (e.ctrlKey || e.altKey)) {
                // Add a new line
                const index = TextareaDom.current.selectionStart;
                const val = TextareaDom.current.value;
                TextareaDom.current.value = `${val.slice(0, index)}\n${val.slice(index)}`;
                TextareaDom.current.selectionStart = index + 1;
                TextareaDom.current.selectionEnd = index + 1;
                TextareaDom.current.style.height = textareaMinH;
                TextareaDom.current.style.height = `${TextareaDom.current.scrollHeight}px`;
                return;
              }
              // Select all content
              // @ts-ignore
              e.key === 'a' && e.ctrlKey && e.target?.select();
              // NOTE(review): keyCode is deprecated; `isEnter` above already
              // uses e.key — presumably keyCode is kept intentionally, confirm.
              if ((isPc || window !== parent) && e.keyCode === 13 && !e.shiftKey) {
                handleSend();
                e.preventDefault();
              }
            }}
            onPaste={(e) => {
              // Intercept pasted files (images/documents) and upload them
              // instead of letting the browser insert them as text.
              const clipboardData = e.clipboardData;
              if (clipboardData && canUploadFile) {
                const items = clipboardData.items;
                const files = Array.from(items)
                  .map((item) => (item.kind === 'file' ? item.getAsFile() : undefined))
                  .filter((file) => {
                    return file && fileTypeFilter(file);
                  }) as File[];
                onSelectFile({ files });
                if (files.length > 0) {
                  e.preventDefault();
                  e.stopPropagation();
                }
              }
            }}
            onFocus={onFocus}
            onBlur={offFocus}
          />
        </Flex>
      </Flex>
    ),
    [
      fileList.length,
      TextareaDom,
      isPc,
      t,
      inputValue,
      onFocus,
      offFocus,
      setValue,
      handleSend,
      canUploadFile,
      onSelectFile
    ]
  );
  // Memoized bottom toolbar: file picker on the right, then a divider and the
  // send button, which doubles as a stop button while a reply is streaming.
  const RenderButtonGroup = useMemo(() => {
    const iconSize = {
      w: isPc ? '20px' : '16px',
      h: isPc ? '20px' : '16px'
    };
    return (
      <Flex
        alignItems={'flex-start'}
        justifyContent={'space-between'}
        w={'100%'}
        mt={0}
        pr={[3, 4]}
        pl={[3, 4]}
        h={[8, 9]}
        gap={[0, 1]}
      >
        <Box flex={1} />
        {/* Right button group */}
        <Flex alignItems={'center'} gap={[0, 1]}>
          {/* Attachment Group */}
          <Flex alignItems={'center'} h={[8, 9]}>
            {/* file selector button */}
            {canUploadFile && (
              <Flex
                alignItems={'center'}
                justifyContent={'center'}
                w={[8, 9]}
                h={[8, 9]}
                p={[1, 2]}
                borderRadius={'sm'}
                cursor={'pointer'}
                _hover={{ bg: 'rgba(0, 0, 0, 0.04)' }}
                onClick={(e) => {
                  e.stopPropagation();
                  onOpenSelectFile();
                }}
              >
                <MyTooltip label={selectFileLabel}>
                  <MyIcon name={selectFileIcon as any} {...iconSize} color={'#707070'} />
                </MyTooltip>
                {/* Hidden <input type="file"> rendered by useSelectFile */}
                <File onSelect={(files) => onSelectFile({ files })} />
              </Flex>
            )}
          </Flex>
          {/* Divider Container */}
          {canUploadFile && (
            <Flex alignItems={'center'} justifyContent={'center'} w={2} h={4} mr={2}>
              <Box w={'2px'} h={5} bg={'myGray.200'} />
            </Flex>
          )}
          {/* Send Button Container */}
          <Flex alignItems={'center'} w={[8, 9]} h={[8, 9]} borderRadius={'lg'}>
            <Flex
              alignItems={'center'}
              justifyContent={'center'}
              w={[7, 9]}
              h={[7, 9]}
              p={[1, 2]}
              bg={
                isChatting ? 'primary.50' : canSendMessage ? 'primary.500' : 'rgba(17, 24, 36, 0.1)'
              }
              borderRadius={['md', 'lg']}
              cursor={isChatting ? 'pointer' : canSendMessage ? 'pointer' : 'not-allowed'}
              onClick={(e) => {
                e.stopPropagation();
                // While streaming, the same button stops the generation.
                if (isChatting) {
                  return onStop();
                }
                return handleSend();
              }}
            >
              {isChatting ? (
                <MyIcon {...iconSize} name={'stop'} color={'primary.600'} />
              ) : (
                <MyTooltip label={t('common:core.chat.Send Message')}>
                  <MyIcon name={'core/chat/sendFill'} {...iconSize} color={'white'} />
                </MyTooltip>
              )}
            </Flex>
          </Flex>
        </Flex>
      </Flex>
    );
  }, [
    isPc,
    canUploadFile,
    selectFileLabel,
    selectFileIcon,
    File,
    isChatting,
    canSendMessage,
    t,
    onOpenSelectFile,
    onSelectFile,
    handleSend,
    onStop
  ]);
  // Stronger border/shadow while the textarea is focused (or hovered).
  const activeStyles: FlexProps = {
    boxShadow: '0px 5px 20px -4px rgba(19, 51, 107, 0.13)',
    border: '0.5px solid rgba(0, 0, 0, 0.24)'
  };
  return (
    <Box
      onDragOver={(e) => e.preventDefault()}
      onDrop={(e) => {
        // Drag-and-drop upload: accept supported files, warn about the rest.
        e.preventDefault();
        if (!canUploadFile) return;
        const files = Array.from(e.dataTransfer.files);
        const droppedFiles = files.filter((file) => fileTypeFilter(file));
        if (droppedFiles.length > 0) {
          onSelectFile({ files: droppedFiles });
        }
        const invalidFileName = files
          .filter((file) => !fileTypeFilter(file))
          .map((file) => file.name)
          .join(', ');
        if (invalidFileName) {
          toast({
            status: 'warning',
            title: t('chat:unsupported_file_type'),
            description: invalidFileName
          });
        }
      }}
    >
      {/* Real Chat Input */}
      <Flex
        direction={'column'}
        minH={['96px', '120px']}
        pt={fileList.length > 0 ? '0' : [3, 4]}
        pb={3}
        position={'relative'}
        borderRadius={['xl', 'xxl']}
        bg={'white'}
        overflow={'display'}
        {...(focusing
          ? activeStyles
          : {
              _hover: activeStyles,
              border: '0.5px solid rgba(0, 0, 0, 0.18)',
              boxShadow: `0px 5px 16px -4px rgba(19, 51, 107, 0.08)`
            })}
        onClick={() => TextareaDom?.current?.focus()}
      >
        <Box flex={1}>
          {/* file preview */}
          <Box px={[2, 3]}>
            <FilePreview fileList={fileList} removeFiles={removeFiles} />
          </Box>
          {RenderTextarea}
        </Box>
        <Box>{RenderButtonGroup}</Box>
      </Flex>
      <ComplianceTip type={'chat'} />
    </Box>
  );
};
export default React.memo(ChatInput);

View File

@ -0,0 +1,21 @@
import { POST, GET, DELETE } from '@/web/common/api/request';
import type {
GetHelperBotChatRecordsParamsType,
DeleteHelperBotChatParamsType,
GetHelperBotChatRecordsResponseType,
GetHelperBotFilePresignParamsType,
GetHelperBotFilePreviewParamsType
} from '@fastgpt/global/openapi/core/chat/helperBot/api';
import type { CreatePostPresignedUrlResult } from '@fastgpt/service/common/s3/type';
/** Fetch the chat record list for a helper-bot session. */
export const getHelperBotChatRecords = (data: GetHelperBotChatRecordsParamsType) => {
  return GET<GetHelperBotChatRecordsResponseType>('/core/chat/helperBot/getRecords', data);
};
/** Delete a single helper-bot chat record. */
export const deleteHelperBotChatRecord = (data: DeleteHelperBotChatParamsType) => {
  return DELETE('/core/chat/helperBot/deleteRecord', data);
};
/** Request a presigned S3 POST for uploading a chat file. */
export const getHelperBotFilePresign = (data: GetHelperBotFilePresignParamsType) => {
  return POST<CreatePostPresignedUrlResult>('/core/chat/helperBot/getFilePresign', data);
};
/** Exchange an uploaded file's S3 key for a preview URL. */
export const getHelperBotFilePreview = (data: GetHelperBotFilePreviewParamsType) => {
  return POST<string>('/core/chat/helperBot/getFilePreview', data);
};

View File

@ -0,0 +1,177 @@
import React, { useMemo } from 'react';
import type { HelperBotChatItemSiteType } from '@fastgpt/global/core/chat/helperBot/type';
import {
Box,
Accordion,
AccordionButton,
AccordionIcon,
AccordionItem,
AccordionPanel,
Button,
Flex,
HStack
} from '@chakra-ui/react';
import AIResponseBox from '../../components/AIResponseBox';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import Markdown from '@/components/Markdown';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIconButton from '@fastgpt/web/components/common/Icon/button';
import { useCopyData } from '@fastgpt/web/hooks/useCopyData';
// Shared Chakra style props for the collapsible "reasoning" header button.
const accordionButtonStyle = {
  w: 'auto',
  bg: 'white',
  borderRadius: 'md',
  borderWidth: '1px',
  borderColor: 'myGray.200',
  boxShadow: '1',
  pl: 3,
  pr: 2.5,
  // Keep the background unchanged on hover.
  _hover: {
    bg: 'auto'
  }
};
/**
 * Collapsible block showing the model's reasoning ("think") content.
 * Expanded by default only for the latest response value; shows a loading
 * spinner while the reasoning is still streaming in.
 * NOTE(review): component name has a typo ("Resoning"); renaming would also
 * touch its call site in AIItem below.
 */
const RenderResoningContent = React.memo(function RenderResoningContent({
  content,
  isChatting,
  isLastResponseValue
}: {
  content: string;
  isChatting: boolean;
  isLastResponseValue: boolean;
}) {
  const { t } = useTranslation();
  // Animate only while this is the actively streaming (last) value.
  const showAnimation = isChatting && isLastResponseValue;
  return (
    <Accordion allowToggle defaultIndex={isLastResponseValue ? 0 : undefined}>
      <AccordionItem borderTop={'none'} borderBottom={'none'}>
        <AccordionButton {...accordionButtonStyle} py={1}>
          <HStack mr={2} spacing={1}>
            <MyIcon name={'core/chat/think'} w={'0.85rem'} />
            <Box fontSize={'sm'}>{t('chat:ai_reasoning')}</Box>
          </HStack>
          {showAnimation && <MyIcon name={'common/loading'} w={'0.85rem'} />}
          <AccordionIcon color={'myGray.600'} ml={5} />
        </AccordionButton>
        <AccordionPanel
          py={0}
          pr={0}
          pl={3}
          mt={2}
          borderLeft={'2px solid'}
          borderColor={'myGray.300'}
          color={'myGray.500'}
        >
          <Markdown source={content} showAnimation={showAnimation} />
        </AccordionPanel>
      </AccordionItem>
    </Accordion>
  );
});
/**
 * Markdown renderer for a plain text chat value. Streams with a typing
 * animation while the message is still being generated.
 */
const RenderText = React.memo(function RenderText({
  showAnimation,
  text
}: {
  showAnimation: boolean;
  text: string;
}) {
  // Normalize falsy input to an empty string; memoized so Markdown only
  // re-parses when the text actually changes.
  const source = useMemo(() => text || '', [text]);
  return <Markdown source={source} showAnimation={showAnimation} />;
});
/**
 * Renders a single AI message in the helper-bot chat: reasoning segments as
 * a collapsible block, text segments as markdown, plus a hover controller
 * exposing copy (and a not-yet-wired delete) action.
 */
const AIItem = ({
  chat,
  isChatting,
  isLastChild
}: {
  chat: HelperBotChatItemSiteType;
  isChatting: boolean;
  isLastChild: boolean;
}) => {
  const { t } = useTranslation();
  const { copyData } = useCopyData();
  return (
    <Box
      _hover={{
        '& .controler': {
          display: 'flex'
        }
      }}
    >
      <Box
        px={4}
        py={3}
        borderRadius={'sm'}
        display="inline-block"
        maxW={['calc(100% - 25px)', 'calc(100% - 40px)']}
        color={'myGray.900'}
        bg={'myGray.100'}
      >
        {chat.value.map((value, i) => {
          if ('text' in value && value.text) {
            return (
              <RenderText
                key={i}
                showAnimation={isChatting && isLastChild}
                text={value.text.content}
              />
            );
          }
          if ('reasoning' in value && value.reasoning) {
            return (
              <RenderResoningContent
                key={i}
                isChatting={isChatting}
                isLastResponseValue={isLastChild}
                content={value.reasoning.content}
              />
            );
          }
          // Unknown value kinds are not rendered.
          return null;
        })}
      </Box>
      {/* Controller */}
      <Flex h={'26px'} mt={1}>
        <Flex className="controler" display={['flex', 'none']} alignItems={'center'} gap={1}>
          <MyTooltip label={t('common:Copy')}>
            <MyIconButton
              icon="copy"
              color={'myGray.500'}
              onClick={() => {
                // Join only the text segments. `value.text` is an object
                // ({ content: string }) — see the render above — so read
                // `.content`; joining the object itself would yield
                // "[object Object]".
                const text = chat.value
                  .map((value) => ('text' in value && value.text ? value.text.content : ''))
                  .join('');
                return copyData(text);
              }}
            />
          </MyTooltip>
          <MyTooltip label={t('common:Delete')}>
            <MyIconButton
              icon="delete"
              color={'myGray.500'}
              hoverColor={'red.600'}
              hoverBg={'red.50'}
              // TODO: wire up chat item deletion
            />
          </MyTooltip>
        </Flex>
      </Flex>
    </Box>
  );
};
export default AIItem;

View File

@ -0,0 +1,68 @@
import React from 'react';
import type { HelperBotChatItemSiteType } from '@fastgpt/global/core/chat/helperBot/type';
import { formatChatValue2InputType } from '../../ChatContainer/ChatBox/utils';
import { Box, Card, Flex } from '@chakra-ui/react';
import Markdown from '@/components/Markdown';
import FileBlock from '../../ChatContainer/ChatBox/components/FilesBox';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import { useCopyData } from '@fastgpt/web/hooks/useCopyData';
import IconButton from '@/pageComponents/account/team/OrgManage/IconButton';
import MyIconButton from '@fastgpt/web/components/common/Icon/button';
/**
 * Renders a single user (human) message in the helper-bot chat:
 * file attachments followed by the message text, right-aligned, with a
 * hover controller exposing copy (and a not-yet-wired delete) action.
 */
const HumanItem = ({ chat }: { chat: HelperBotChatItemSiteType }) => {
  const { t } = useTranslation();
  const { copyData } = useCopyData();
  // Split the structured chat value into plain text + file attachments.
  const { text, files = [] } = formatChatValue2InputType(chat.value);
  return (
    <Flex
      direction={'column'}
      alignItems={'end'}
      _hover={{
        '& .controler': {
          display: 'flex'
        }
      }}
    >
      <Box
        px={4}
        py={3}
        borderRadius={'sm'}
        display="inline-block"
        textAlign="right"
        maxW={['calc(100% - 25px)', 'calc(100% - 40px)']}
        color={'myGray.900'}
        bg={'primary.100'}
        order={0}
      >
        <Flex flexDirection={'column'} gap={4}>
          {files.length > 0 && <FileBlock files={files} />}
          {text && <Markdown source={text} />}
        </Flex>
      </Box>
      {/* Controller */}
      <Flex h={'26px'} mt={1}>
        <Flex className="controler" display={['flex', 'none']} alignItems={'center'} gap={1}>
          <MyTooltip label={t('common:Copy')}>
            <MyIconButton icon="copy" color={'myGray.500'} onClick={() => copyData(text ?? '')} />
          </MyTooltip>
          <MyTooltip label={t('common:Delete')}>
            <MyIconButton
              icon="delete"
              color={'myGray.500'}
              hoverColor={'red.600'}
              hoverBg={'red.50'}
              // TODO: wire up chat item deletion. The previous handler was a
              // copy/paste slip that copied the message instead of deleting it.
            />
          </MyTooltip>
        </Flex>
      </Flex>
    </Flex>
  );
};
export default HumanItem;

View File

@ -0,0 +1,45 @@
import { useMemoEnhance } from '@fastgpt/web/hooks/useMemoEnhance';
import React, { useState, type ReactNode } from 'react';
import { createContext } from 'use-context-selector';
import {
HelperBotTypeEnum,
type HelperBotTypeEnumType,
type TopAgentParamsType
} from '@fastgpt/global/core/chat/helperBot/type';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import type { AppFileSelectConfigType } from '@fastgpt/global/core/app/type';
/** Props accepted by the helper-bot provider and exposed through its context. */
export type HelperBotProps = {
  // Rendered when the conversation has no messages yet.
  emptyDom?: ReactNode;
  // Per-app file selection limits/types; uploads disabled when absent.
  fileSelectConfig?: AppFileSelectConfigType;
} & {
  type: HelperBotTypeEnumType;
  metadata: TopAgentParamsType;
  // Invoked when the user applies the helper bot's suggested configuration.
  onApply: (e: TopAgentParamsType) => void;
};
// The context currently exposes the props unchanged.
type HelperBotContextType = HelperBotProps & {};
// Default value used when no provider is mounted; onApply deliberately throws
// because calling it outside a provider is a programming error.
export const HelperBotContext = createContext<HelperBotContextType>({
  type: HelperBotTypeEnum.topAgent,
  metadata: {
    role: '',
    taskObject: '',
    selectedTools: [],
    selectedDatasets: [],
    fileUpload: false
  },
  onApply: function (e: TopAgentParamsType): void {
    throw new Error('Function not implemented.');
  }
});
const HelperBotContextProvider = ({
  children,
  ...params
}: { children: ReactNode } & HelperBotProps) => {
  // presumably memoizes by deep/shallow param comparison so consumers only
  // re-render when params actually change — confirm useMemoEnhance semantics.
  const contextValue: HelperBotContextType = useMemoEnhance(() => params, [params]);
  return <HelperBotContext.Provider value={contextValue}>{children}</HelperBotContext.Provider>;
};
export default HelperBotContextProvider;

View File

@ -0,0 +1,263 @@
import { useCallback, useMemo } from 'react';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useTranslation } from 'next-i18next';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { formatFileSize } from '@fastgpt/global/common/file/tools';
import { clone } from 'lodash';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { type UseFieldArrayReturn } from 'react-hook-form';
import type { ChatBoxInputFormType, UserInputFileItemType } from '../../ChatContainer/ChatBox/type';
import { type AppFileSelectConfigType } from '@fastgpt/global/core/app/type';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { type OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { getPresignedChatFileGetUrl, getUploadChatFilePresignedUrl } from '@/web/common/file/api';
import { POST } from '@/web/common/api/request';
import { getUploadFileType } from '@fastgpt/global/core/app/constants';
import { parseS3UploadError } from '@fastgpt/global/common/error/s3';
import type { HelperBotTypeEnumType } from '@fastgpt/global/core/chat/helperBot/type';
import { getHelperBotFilePresign, getHelperBotFilePreview } from '../api';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
// Configuration for the useFileUpload hook.
type UseFileUploadOptions = {
  fileSelectConfig?: AppFileSelectConfigType; // per-app file type/count limits; uploads disabled when absent
  fileCtrl: UseFieldArrayReturn<ChatBoxInputFormType, 'files', 'id'>; // form field array holding the selected files
  type: HelperBotTypeEnumType; // helper-bot variant, forwarded to the presign endpoint
  chatId: string; // session id used to scope uploaded files
};
export const useFileUpload = (props: UseFileUploadOptions) => {
const { fileSelectConfig, fileCtrl, type, chatId } = props;
const { toast } = useToast();
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
const {
update: updateFiles,
remove: removeFiles,
fields: fileList,
replace: replaceFiles,
append: appendFiles
} = fileCtrl;
const hasFileUploading = fileList.some((item) => !item.url);
const showSelectFile = fileSelectConfig?.canSelectFile;
const showSelectImg = fileSelectConfig?.canSelectImg;
const showSelectVideo = fileSelectConfig?.canSelectVideo;
const showSelectAudio = fileSelectConfig?.canSelectAudio;
const showSelectCustomFileExtension = fileSelectConfig?.canSelectCustomFileExtension;
const canUploadFile =
showSelectFile ||
showSelectImg ||
showSelectVideo ||
showSelectAudio ||
showSelectCustomFileExtension;
const maxSelectFiles = fileSelectConfig?.maxFiles ?? 10;
const maxSize = (feConfigs?.uploadFileMaxSize || 1024) * 1024 * 1024; // nkb
const canSelectFileAmount = maxSelectFiles - fileList.length;
const { icon: selectFileIcon, label: selectFileLabel } = useMemo(() => {
if (canUploadFile) {
return {
icon: 'core/chat/fileSelect',
label: t('chat:select_file')
};
}
return {};
}, [canUploadFile, t]);
const fileType = useMemo(() => {
return getUploadFileType({
canSelectFile: showSelectFile,
canSelectImg: showSelectImg,
canSelectVideo: showSelectVideo,
canSelectAudio: showSelectAudio,
canSelectCustomFileExtension: showSelectCustomFileExtension,
customFileExtensionList: fileSelectConfig?.customFileExtensionList
});
}, [
fileSelectConfig?.customFileExtensionList,
showSelectAudio,
showSelectCustomFileExtension,
showSelectFile,
showSelectImg,
showSelectVideo
]);
const { File, onOpen: onOpenSelectFile } = useSelectFile({
fileType,
multiple: true,
maxCount: canSelectFileAmount
});
const onSelectFile = useCallback(
async ({ files }: { files: File[] }) => {
if (!files || files.length === 0) {
return [];
}
// Filter max files
if (files.length > maxSelectFiles) {
files = files.slice(0, maxSelectFiles);
toast({
status: 'warning',
title: t('chat:file_amount_over', { max: maxSelectFiles })
});
}
// Filter files by max size
const filterFilesByMaxSize = files.filter((file) => file.size <= maxSize);
if (filterFilesByMaxSize.length < files.length) {
toast({
status: 'warning',
title: t('file:some_file_size_exceeds_limit', { maxSize: formatFileSize(maxSize) })
});
}
// Convert files to UserInputFileItemType
const loadFiles = await Promise.all(
filterFilesByMaxSize.map(
(file) =>
new Promise<UserInputFileItemType>((resolve, reject) => {
if (file.type.includes('image')) {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => {
const item: UserInputFileItemType = {
id: getNanoid(6),
rawFile: file,
type: ChatFileTypeEnum.image,
name: file.name,
icon: reader.result as string,
status: 0
};
resolve(item);
};
reader.onerror = () => {
reject(reader.error);
};
} else {
resolve({
id: getNanoid(6),
rawFile: file,
type: ChatFileTypeEnum.file,
name: file.name,
icon: getFileIcon(file.name),
status: 0
});
}
})
)
);
appendFiles(loadFiles);
return loadFiles;
},
[maxSelectFiles, appendFiles, toast, t, maxSize]
);
  /**
   * Upload every pending file (status 0) to S3 via presigned POST, then store
   * the resulting preview URL and object key on the item. Files whose upload
   * fails are removed from the list and reported with a toast.
   */
  const uploadFiles = useCallback(async () => {
    const filterFiles = fileList.filter((item) => item.status === 0);
    if (filterFiles.length === 0) return;

    // Mark everything as "uploading" up front so the UI reflects progress state.
    replaceFiles(fileList.map((item) => ({ ...item, status: 1 })));

    let errorFileIndex: number[] = [];

    await Promise.allSettled(
      filterFiles.map(async (file) => {
        const copyFile = clone(file);
        copyFile.status = 1;
        if (!copyFile.rawFile) return;

        try {
          // NOTE(review): the `!` after findIndex is a no-op — it returns a
          // number, and an unmatched id would yield -1 here.
          const fileIndex = fileList.findIndex((item) => item.id === file.id)!;

          // Get Upload Post Presigned URL
          const { url, fields, maxSize } = await getHelperBotFilePresign({
            type,
            chatId,
            filename: copyFile.rawFile.name
          });

          // Upload File to S3 using the presigned form fields
          const formData = new FormData();
          Object.entries(fields).forEach(([k, v]) => formData.set(k, v));
          formData.set('file', copyFile.rawFile);

          await POST(url, formData, {
            headers: {
              'Content-Type': 'multipart/form-data; charset=utf-8'
            },
            onUploadProgress: (e) => {
              if (!e.total) return;
              const percent = Math.round((e.loaded / e.total) * 100);
              copyFile.process = percent;
              updateFiles(fileIndex, copyFile);
            }
          }).catch((error) => Promise.reject(parseS3UploadError({ t, error, maxSize })));

          const previewUrl = await getHelperBotFilePreview({
            key: fields.key
          });

          // Update file url and key
          copyFile.url = previewUrl;
          copyFile.key = fields.key;
          updateFiles(fileIndex, copyFile);
        } catch (error) {
          // Remember the index so the failed file can be removed after all settle.
          errorFileIndex.push(fileList.findIndex((item) => item.id === file.id)!);

          toast({
            status: 'warning',
            title: t(
              getErrText(error, t('common:error.upload_file_error_filename', { name: file.name }))
            )
          });
        }
      })
    );

    removeFiles(errorFileIndex);
  }, [chatId, fileList, removeFiles, replaceFiles, t, toast, type, updateFiles]);
  // Display order: documents first, then images (pairs of the same type keep
  // their relative order).
  const sortFileList = useMemo(() => {
    // Sort: Document, image
    const sortResult = clone(fileList).sort((a, b) => {
      if (a.type === ChatFileTypeEnum.image && b.type === ChatFileTypeEnum.file) {
        return 1;
      } else if (a.type === ChatFileTypeEnum.file && b.type === ChatFileTypeEnum.image) {
        return -1;
      }
      return 0;
    });
    return sortResult;
  }, [fileList]);

  // Upload files
  // Auto-run: re-triggers uploads whenever the file list or chat target changes.
  useRequest2(uploadFiles, {
    manual: false,
    errorToast: t('common:upload_file_error'),
    refreshDeps: [fileList, type, chatId]
  });

  return {
    File,
    onOpenSelectFile,
    fileList: sortFileList,
    onSelectFile,
    uploadFiles,
    selectFileIcon,
    selectFileLabel,
    showSelectFile,
    showSelectImg,
    showSelectVideo,
    showSelectAudio,
    showSelectCustomFileExtension,
    removeFiles,
    replaceFiles,
    hasFileUploading
  };
};

View File

@ -0,0 +1,413 @@
import React, { useCallback, useRef, useState } from 'react';
import HelperBotContextProvider, { type HelperBotProps } from './context';
import type {
AIChatItemValueItemType,
ChatItemType,
ChatSiteItemType
} from '@fastgpt/global/core/chat/type';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { ChatRoleEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import type { getPaginationRecordsBody } from '@/pages/api/core/chat/getPaginationRecords';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import type { PaginationResponse } from '@fastgpt/web/common/fetch/type';
import { Box } from '@chakra-ui/react';
import HumanItem from './components/HumanItem';
import AIItem from './components/AIItem';
import { useMemoEnhance } from '@fastgpt/web/hooks/useMemoEnhance';
import { getHelperBotChatRecords } from './api';
import type {
GetHelperBotChatRecordsParamsType,
GetHelperBotChatRecordsResponseType
} from '@fastgpt/global/openapi/core/chat/helperBot/api';
import ChatInput from './Chatinput';
import type { ChatBoxInputFormType } from '../ChatContainer/ChatBox/type';
import { useForm } from 'react-hook-form';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useTranslation } from 'next-i18next';
import { useMemoizedFn, useThrottleFn } from 'ahooks';
import type { HelperBotChatItemSiteType } from '@fastgpt/global/core/chat/helperBot/type';
import type { onSendMessageParamsType } from './type';
import { textareaMinH } from '../ChatContainer/ChatBox/constants';
import { streamFetch } from '@/web/common/api/fetch';
import type { generatingMessageProps } from '../ChatContainer/type';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
/**
 * Helper-bot chat panel: renders the scrollable record list plus the input box
 * and drives the SSE completion request for new messages.
 *
 * NOTE(review): several pieces look unfinished —
 * - the AI reply is seeded with a hard-coded mock answer (see newChatList);
 * - `responseText` from streamFetch is unused and errors are swallowed;
 * - `onStop` passed to ChatInput is a no-op (abortRequest is never wired up);
 * - files are read from the form AFTER resetInputVal({}) cleared them, so the
 *   request likely always sends an empty files array — confirm intent.
 */
const ChatBox = ({ type, metadata, ...props }: HelperBotProps) => {
  const { toast } = useToast();
  const { t } = useTranslation();

  const ScrollContainerRef = useRef<HTMLDivElement>(null);

  // Message state management
  // chatId is generated once per mount; setChatId is currently never called.
  const [chatId, setChatId] = useState<string>(getNanoid(12));
  const [isChatting, setIsChatting] = useState(false);

  const chatForm = useForm<ChatBoxInputFormType>({
    defaultValues: {
      input: '',
      files: [],
      chatStarted: false,
      variables: {}
    }
  });
  const { setValue } = chatForm;

  // Stable params object for the history pagination request.
  // NOTE(review): deps are [] — later changes to chatId/type/metadata would not
  // be reflected; confirm useMemoEnhance semantics make this intentional.
  const requestParams = useMemoEnhance(() => {
    return {
      chatId,
      type,
      metadata
    };
  }, []);

  // Scroll the record list to the bottom; retries every 500ms until the
  // container ref is mounted.
  const scrollToBottom = useCallback((behavior: 'smooth' | 'auto' = 'smooth', delay = 0) => {
    setTimeout(() => {
      if (!ScrollContainerRef.current) {
        setTimeout(() => {
          scrollToBottom(behavior);
        }, 500);
      } else {
        ScrollContainerRef.current.scrollTo({
          top: ScrollContainerRef.current.scrollHeight,
          behavior
        });
      }
    }, delay);
  }, []);

  // Paginated chat history, loaded by scrolling to the top.
  const {
    data: chatRecords,
    setData: setChatRecords,
    ScrollData
  } = useScrollPagination(
    async (
      data: GetHelperBotChatRecordsParamsType
    ): Promise<GetHelperBotChatRecordsResponseType> => {
      const res = await getHelperBotChatRecords(data);

      // First load scroll to bottom
      if (Number(data.offset) === 0) {
        scrollToBottom('auto');
      }

      return {
        ...res,
        list: res.list
      };
    },
    {
      pageSize: 10,
      refreshDeps: [requestParams],
      params: requestParams,
      scrollLoadType: 'top',
      showErrorToast: false
    }
  );

  // Abort controller for the in-flight completion request.
  const chatController = useRef(new AbortController());
  const abortRequest = useMemoizedFn((signal: string = 'stop') => {
    chatController.current?.abort(signal);
  });

  // Reset the input box (text + files) and restore the textarea height.
  const resetInputVal = useMemoizedFn(({ query = '', files = [] }: onSendMessageParamsType) => {
    if (!TextareaDom.current) return;
    setValue('files', files);
    setValue('input', query);
    sessionStorage.removeItem(`chatInput_${chatId}`);

    setTimeout(() => {
      /* reset the textarea back to its minimum height */
      if (TextareaDom.current) {
        TextareaDom.current.style.height =
          query === '' ? textareaMinH : `${TextareaDom.current.scrollHeight}px`;
      }
    }, 100);
  });

  // Text area
  const TextareaDom = useRef<HTMLTextAreaElement>(null);

  // Message request
  // Keep the view pinned to the bottom while streaming, unless the user has
  // scrolled up more than ~150px (then only a forced call scrolls).
  const { run: generatingScroll } = useThrottleFn(
    (force?: boolean) => {
      if (!ScrollContainerRef.current) return;
      const isBottom =
        ScrollContainerRef.current.scrollTop + ScrollContainerRef.current.clientHeight + 150 >=
        ScrollContainerRef.current.scrollHeight;

      if (isBottom || force) {
        scrollToBottom('auto');
      }
    },
    {
      wait: 100
    }
  );

  // Apply one SSE event to the LAST record (only when it is an AI record):
  // append answer/reasoning text, or create/extend tool-call entries.
  // NOTE(review): `updateValue` is mutated in place before the spread — safe
  // only as long as no other consumer holds a reference to the old value item.
  const generatingMessage = useMemoizedFn(
    ({ event, text = '', reasoningText, tool }: generatingMessageProps) => {
      setChatRecords((state) =>
        state.map((item, index) => {
          if (index !== state.length - 1) return item;
          if (item.obj !== ChatRoleEnum.AI) return item;

          const updateIndex = item.value.length - 1;
          const updateValue: AIChatItemValueItemType = item.value[updateIndex];

          if (event === SseResponseEventEnum.answer || event === SseResponseEventEnum.fastAnswer) {
            // Reasoning chunks: extend the trailing reasoning item or start one.
            if (reasoningText) {
              if (updateValue?.reasoning) {
                updateValue.reasoning.content += reasoningText;
                return {
                  ...item,
                  value: [
                    ...item.value.slice(0, updateIndex),
                    updateValue,
                    ...item.value.slice(updateIndex + 1)
                  ]
                };
              } else {
                const val: AIChatItemValueItemType = {
                  reasoning: {
                    content: reasoningText
                  }
                };
                return {
                  ...item,
                  value: [...item.value, val]
                };
              }
            }
            // Answer text chunks: extend the trailing text item or start one.
            if (text) {
              if (updateValue?.text) {
                updateValue.text.content += text;
                return {
                  ...item,
                  value: [
                    ...item.value.slice(0, updateIndex),
                    updateValue,
                    ...item.value.slice(updateIndex + 1)
                  ]
                };
              } else {
                const newValue: AIChatItemValueItemType = {
                  text: {
                    content: text
                  }
                };
                return {
                  ...item,
                  value: item.value.concat(newValue)
                };
              }
            }
          }

          // Tool call
          if (event === SseResponseEventEnum.toolCall && tool) {
            const val: AIChatItemValueItemType = {
              tool: {
                ...tool,
                response: ''
              }
            };
            return {
              ...item,
              value: [...item.value, val]
            };
          }
          if (event === SseResponseEventEnum.toolParams && tool && updateValue?.tool) {
            if (tool.params) {
              updateValue.tool.params += tool.params;
              return {
                ...item,
                value: [
                  ...item.value.slice(0, updateIndex),
                  updateValue,
                  ...item.value.slice(updateIndex + 1)
                ]
              };
            }
            return item;
          }
          if (event === SseResponseEventEnum.toolResponse && tool && updateValue?.tool) {
            if (tool.response) {
              // replace tool response
              updateValue.tool.response += tool.response;
              return {
                ...item,
                value: [
                  ...item.value.slice(0, updateIndex),
                  updateValue,
                  ...item.value.slice(updateIndex + 1)
                ]
              };
            }
            return item;
          }

          return item;
        })
      );
      generatingScroll(false);
    }
  );

  // Validate input, optimistically append the Human + (placeholder) AI records,
  // then stream the completion and patch the AI record via generatingMessage.
  const handleSendMessage = useMemoizedFn(async ({ query = '' }: onSendMessageParamsType) => {
    // Init check
    if (isChatting) {
      return toast({
        title: t('chat:is_chatting'),
        status: 'warning'
      });
    }
    abortRequest();

    query = query.trim();
    if (!query) {
      toast({
        title: t('chat:content_empty'),
        status: 'warning'
      });
      return;
    }

    const chatItemDataId = getNanoid(24);
    // NOTE(review): the AI record below is seeded with a hard-coded mock answer
    // (and partially garbled text) — looks like leftover development scaffolding.
    const newChatList: HelperBotChatItemSiteType[] = [
      ...chatRecords,
      {
        _id: getNanoid(24),
        createTime: new Date(),
        dataId: chatItemDataId,
        obj: ChatRoleEnum.Human,
        value: [
          {
            text: {
              content: query
            }
          }
        ]
      },
      {
        _id: getNanoid(24),
        createTime: new Date(),
        dataId: chatItemDataId,
        obj: ChatRoleEnum.AI,
        value: [
          {
            text: {
              content: `我无法直接通过“读取静态网页工具”获取 GitHub动态站点上的实时信息因此不能自动抓取 fastgpt 的 star 数量。
- **FastGPTfastgpt-dev/FastGPT**
GitHub https://github.com/fastgpt-dev/FastGPT
** star ** star
1. star 2025
2. GitHub API
3. `
            }
          }
        ]
      }
    ];
    setChatRecords(newChatList);

    // NOTE(review): resetInputVal({}) clears the form's files BEFORE they are
    // read below — the request presumably always sends an empty array; confirm.
    resetInputVal({});
    scrollToBottom();
    setIsChatting(true);

    try {
      const abortSignal = new AbortController();
      chatController.current = abortSignal;

      // NOTE(review): responseText is unused and the catch swallows all errors.
      const { responseText } = await streamFetch({
        url: '/api/core/chat/helperBot/completions',
        data: {
          chatId,
          chatItemId: chatItemDataId,
          query,
          files: chatForm.getValues('files').map((item) => ({
            type: item.type,
            key: item.key,
            // url: item.url,
            name: item.name
          })),
          metadata: {
            type: 'topAgent',
            data: {}
          }
        },
        onMessage: generatingMessage,
        abortCtrl: abortSignal
      });
    } catch (error) {}

    setIsChatting(false);
  });

  return (
    <MyBox display={'flex'} flexDirection={'column'} h={'100%'} position={'relative'}>
      {/* Scrollable history: human/AI items rendered per record */}
      <ScrollData
        ScrollContainerRef={ScrollContainerRef}
        flex={'1 0 0'}
        h={0}
        w={'100%'}
        overflow={'overlay'}
        px={[4, 0]}
        pb={3}
      >
        {chatRecords.map((item, index) => (
          <Box
            key={item._id}
            px={[3, 5]}
            w={'100%'}
            maxW={['auto', 'min(1000px, 100%)']}
            mx="auto"
            _notLast={{
              mb: 2
            }}
          >
            {item.obj === ChatRoleEnum.Human && <HumanItem chat={item} />}
            {item.obj === ChatRoleEnum.AI && (
              <AIItem
                chat={item}
                isChatting={isChatting}
                isLastChild={index === chatRecords.length - 1}
              />
            )}
          </Box>
        ))}
      </ScrollData>
      {/* Input bar; onStop is currently a no-op (see component note) */}
      <Box
        px={[3, 5]}
        m={['0 auto 10px', '10px auto']}
        w={'100%'}
        maxW={['auto', 'min(820px, 100%)']}
      >
        <ChatInput
          TextareaDom={TextareaDom}
          chatId={chatId}
          chatForm={chatForm}
          isChatting={isChatting}
          onSendMessage={handleSendMessage}
          onStop={() => {}}
        />
      </Box>
    </MyBox>
  );
};
/**
 * Public entry point: wraps the chat box in its context provider so child
 * components can read the helper-bot props via context.
 */
function HelperBotEntry(props: HelperBotProps) {
  return (
    <HelperBotContextProvider {...props}>
      <ChatBox {...props} />
    </HelperBotContextProvider>
  );
}

export default HelperBotEntry;

View File

@ -0,0 +1,7 @@
import type { UserInputFileItemType } from '../ChatContainer/ChatBox/type';

/** Payload accepted by the helper-bot send-message handler. */
export type onSendMessageParamsType = {
  // Plain-text user query; call sites default it to ''.
  query?: string;
  // Files already selected in the chat input, if any.
  files?: UserInputFileItemType[];
};

/** Send-message callback signature shared by ChatInput and the chat box. */
export type onSendMessageFnType = (e: onSendMessageParamsType) => Promise<any>;

View File

@ -1,6 +1,6 @@
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Box, Flex, type BoxProps, useDisclosure, HStack } from '@chakra-ui/react';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
import { moduleTemplatesFlat } from '@fastgpt/global/core/workflow/template/constants';
import MyModal from '@fastgpt/web/components/common/MyModal';
import Markdown from '@/components/Markdown';

View File

@ -0,0 +1,161 @@
import { Box, Flex, IconButton } from '@chakra-ui/react';
import { useTranslation } from 'next-i18next';
import React, { useEffect, useMemo } from 'react';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useSafeState } from 'ahooks';
import type { AppFormEditFormType } from '@fastgpt/global/core/app/type';
import { useContextSelector } from 'use-context-selector';
import { AppContext } from '../../context';
import { useChatTest } from '../../useChatTest';
import ChatItemContextProvider, { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
import ChatRecordContextProvider from '@/web/core/chat/context/chatRecordContext';
import { useChatStore } from '@/web/core/chat/context/useChatStore';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { cardStyles } from '../../constants';
import ChatQuoteList from '@/pageComponents/chat/ChatQuoteList';
import VariablePopover from '@/components/core/chat/ChatContainer/components/VariablePopover';
import { ChatTypeEnum } from '@/components/core/chat/ChatContainer/ChatBox/constants';
import type { Form2WorkflowFnType } from '../FormComponent/type';
import FillRowTabs from '@fastgpt/web/components/common/Tabs/FillRowTabs';
import HelperBot from '@/components/core/chat/HelperBot';
import { HelperBotTypeEnum } from '@fastgpt/global/core/chat/helperBot/type';
/** Props shared by ChatTest and its Render wrapper. */
type Props = {
  appForm: AppFormEditFormType;
  setRenderEdit: React.Dispatch<React.SetStateAction<boolean>>;
  form2WorkflowFn: Form2WorkflowFnType;
};

/**
 * Right-hand panel of the app editor: a tab bar switching between the
 * helper-bot view and the normal chat-debug view, plus an optional
 * dataset-citation side panel.
 *
 * NOTE(review): the first tab label is a hard-coded Chinese string instead of
 * a t() key — inconsistent with the second tab; confirm an i18n key exists.
 */
const ChatTest = ({ appForm, setRenderEdit, form2WorkflowFn }: Props) => {
  const { t } = useTranslation();
  const [activeTab, setActiveTab] = useSafeState<'helper' | 'chat_debug'>('helper');

  const { appDetail } = useContextSelector(AppContext, (v) => v);
  const datasetCiteData = useContextSelector(ChatItemContext, (v) => v.datasetCiteData);
  const setCiteModalData = useContextSelector(ChatItemContext, (v) => v.setCiteModalData);

  // agentForm2AppWorkflow dependent allDatasets
  const isVariableVisible = useContextSelector(ChatItemContext, (v) => v.isVariableVisible);

  const [workflowData, setWorkflowData] = useSafeState({
    nodes: appDetail.modules || [],
    edges: appDetail.edges || []
  });

  // Re-derive the runtime workflow whenever the form changes.
  // NOTE(review): form2WorkflowFn is missing from the dep array — confirm it is
  // referentially stable at the call site.
  useEffect(() => {
    const { nodes, edges } = form2WorkflowFn(appForm, t);
    setWorkflowData({ nodes, edges });
  }, [appForm, setWorkflowData, t]);

  // Hide the editor's edit pane while the citation panel is open.
  useEffect(() => {
    setRenderEdit(!datasetCiteData);
  }, [datasetCiteData, setRenderEdit]);

  const { ChatContainer, restartChat } = useChatTest({
    ...workflowData,
    chatConfig: appForm.chatConfig,
    isReady: true
  });

  return (
    <Flex h={'full'} gap={2}>
      <MyBox
        flex={'1 0 0'}
        w={0}
        display={'flex'}
        position={'relative'}
        flexDirection={'column'}
        h={'full'}
        py={4}
        {...cardStyles}
        boxShadow={'3'}
      >
        {/* Tab bar + restart button */}
        <Flex px={[2, 5]} pb={2}>
          <FillRowTabs<'helper' | 'chat_debug'>
            py={1}
            list={[
              {
                label: '辅助生成',
                value: 'helper'
              },
              {
                label: t('app:chat_debug'),
                value: 'chat_debug'
              }
            ]}
            value={activeTab}
            onChange={(value) => {
              setActiveTab(value);
            }}
          />
          {!isVariableVisible && activeTab === 'chat_debug' && (
            <VariablePopover chatType={ChatTypeEnum.test} />
          )}
          <Box flex={1} />
          <MyTooltip label={t('common:core.chat.Restart')}>
            <IconButton
              className="chat"
              size={'smSquare'}
              icon={<MyIcon name={'common/clearLight'} w={'14px'} />}
              variant={'whiteDanger'}
              borderRadius={'md'}
              aria-label={'delete'}
              onClick={(e) => {
                e.stopPropagation();
                restartChat();
              }}
            />
          </MyTooltip>
        </Flex>
        {/* Active tab body */}
        <Box flex={1}>
          {activeTab === 'helper' && (
            <HelperBot type={HelperBotTypeEnum.topAgent} metadata={{}} onApply={() => {}} />
          )}
          {activeTab === 'chat_debug' && <ChatContainer />}
        </Box>
      </MyBox>
      {/* Dataset citation side panel (shown only when a citation is selected) */}
      {datasetCiteData && (
        <Box flex={'1 0 0'} w={0} maxW={'560px'} {...cardStyles} boxShadow={'3'}>
          <ChatQuoteList
            rawSearch={datasetCiteData.rawSearch}
            metadata={datasetCiteData.metadata}
            onClose={() => setCiteModalData(undefined)}
          />
        </Box>
      )}
    </Flex>
  );
};
/**
 * Wrapper that supplies the chat-item and chat-record contexts required by
 * ChatTest, keyed to the current chat and app. Memoized at module level.
 */
const Render = (props: Props) => {
  const { chatId } = useChatStore();
  const { appDetail } = useContextSelector(AppContext, (v) => v);

  // Stable params object so the record provider only refetches when the
  // chat or app actually changes.
  const recordParams = useMemo(() => {
    return {
      chatId,
      appId: appDetail._id
    };
  }, [appDetail._id, chatId]);

  return (
    <ChatItemContextProvider
      showRouteToDatasetDetail={true}
      isShowReadRawSource={true}
      isResponseDetail={true}
      showNodeStatus
    >
      <ChatRecordContextProvider params={recordParams}>
        <ChatTest {...props} />
      </ChatRecordContextProvider>
    </ChatItemContextProvider>
  );
};

export default React.memo(Render);

View File

@ -1,7 +1,7 @@
import React, { useState } from 'react';
import { Box } from '@chakra-ui/react';
import ChatTest from '../FormComponent/ChatTest';
import ChatTest from './ChatTest';
import AppCard from '../FormComponent/AppCard';
import EditForm from './EditForm';
import { type AppFormEditFormType } from '@fastgpt/global/core/app/type';

View File

@ -249,6 +249,7 @@ const EditForm = ({
{t('common:Choose')}
</Button>
<Button
mr={'-5px'}
variant={'transparentBase'}
leftIcon={<MyIcon name={'edit'} w={'14px'} />}
iconSpacing={1}

View File

@ -1,6 +1,6 @@
import React from 'react';
import { useChatBox } from '@/components/core/chat/ChatContainer/ChatBox/hooks/useChatBox';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { Box, IconButton } from '@chakra-ui/react';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';

View File

@ -0,0 +1,55 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import {
HelperBotCompletionsParamsSchema,
type HelperBotCompletionsParamsType
} from '@fastgpt/global/openapi/core/chat/helperBot/api';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoHelperBotChatItem } from '@fastgpt/service/core/chat/HelperBot/chatItemSchema';
import { getWorkflowResponseWrite } from '@fastgpt/service/core/workflow/dispatch/utils';
import { dispatchMap } from '@fastgpt/service/core/chat/HelperBot/dispatch/index';
export type completionsBody = HelperBotCompletionsParamsType;

/**
 * Helper-bot completion endpoint (SSE streaming).
 *
 * NOTE(review): work in progress — `teamId`, `histories` and
 * `workflowResponseWrite` are built but never used, `result` is discarded,
 * and the "Save chat" / "Push usage" steps are still TODO.
 */
async function handler(req: ApiRequestProps<completionsBody>, res: ApiResponseType<any>) {
  const { chatId, chatItemId, query, files, metadata } = HelperBotCompletionsParamsSchema.parse(
    req.body
  );
  const { teamId, userId } = await authCert({ req, authToken: true });

  // Last 40 chat items, fetched newest-first then reversed to chronological order.
  const histories = await MongoHelperBotChatItem.find({
    userId,
    chatId
  })
    .sort({ _id: -1 })
    .limit(40)
    .lean();
  histories.reverse();

  const workflowResponseWrite = getWorkflowResponseWrite({
    res,
    detail: true,
    streamResponse: true,
    id: chatId,
    showNodeStatus: true
  });

  // Dispatch to the type-specific helper-bot implementation
  const fn = dispatchMap[metadata.type];
  const result = await fn({});

  // Save chat
  // Push usage
}

export default NextAPI(handler);

// Allow large payloads/responses on this streaming route.
export const config = {
  api: {
    bodyParser: {
      sizeLimit: '20mb'
    },
    responseLimit: '20mb'
  }
};

View File

@ -0,0 +1,28 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import type { DeleteHelperBotChatParamsType } from '@fastgpt/global/openapi/core/chat/helperBot/api';
import { authHelperBotChatCrud } from '@/service/support/permission/auth/chat';
import { MongoHelperBotChatItem } from '@fastgpt/service/core/chat/HelperBot/chatItemSchema';
export type deleteRecordQuery = DeleteHelperBotChatParamsType;
export type deleteRecordBody = {};
export type deleteRecordResponse = {};

/**
 * Delete all helper-bot chat items matching chatItemId in the caller's chat.
 * Access control is ownership-based: the delete query is scoped to the
 * authenticated userId.
 *
 * NOTE(review): `chat` is fetched but unused and may be null — no existence
 * check is performed before deleting; confirm that is intended.
 */
async function handler(
  req: ApiRequestProps<deleteRecordBody, deleteRecordQuery>,
  res: ApiResponseType<any>
): Promise<deleteRecordResponse> {
  const { type, chatId, chatItemId } = req.query;

  const { chat, userId } = await authHelperBotChatCrud({
    type,
    chatId,
    req,
    authToken: true
  });

  await MongoHelperBotChatItem.deleteMany({ userId, chatId, chatItemId });

  return {};
}

export default NextAPI(handler);

View File

@ -0,0 +1,36 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import type { GetHelperBotFilePresignParamsType } from '@fastgpt/global/openapi/core/chat/helperBot/api';
import type { CreatePostPresignedUrlResult } from '@fastgpt/service/common/s3/type';
import { authHelperBotChatCrud } from '@/service/support/permission/auth/chat';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getS3HelperBotSource } from '../../../../../../../../packages/service/common/s3/sources/helperbot/index';
export type getFilePresignQuery = {};
export type getFilePresignBody = GetHelperBotFilePresignParamsType;
export type getFilePresignResponse = CreatePostPresignedUrlResult;

/**
 * Issue an S3 POST-presigned URL so the client can upload a helper-bot chat
 * file directly to object storage. The upload key is built from
 * type/chatId/userId/filename, namespacing uploads per authenticated user.
 */
async function handler(
  req: ApiRequestProps<getFilePresignBody, getFilePresignQuery>,
  res: ApiResponseType<any>
): Promise<getFilePresignResponse> {
  const { type, chatId, filename } = req.body;

  const { userId } = await authCert({
    req,
    authToken: true
  });

  const data = await getS3HelperBotSource().createUploadFileURL({
    type,
    chatId,
    userId,
    filename
  });

  return data;
}

export default NextAPI(handler);

View File

@ -0,0 +1,24 @@
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import type { GetHelperBotFilePreviewParamsType } from '@fastgpt/global/openapi/core/chat/helperBot/api';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getS3HelperBotSource } from '@fastgpt/service/common/s3/sources/helperbot';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
/**
 * Resolve a presigned GET URL for a previously uploaded helper-bot file.
 * The S3 key embeds the uploader's userId; only that user may preview the file.
 *
 * @returns an externally reachable presigned GET URL for the object
 * @throws ChatErrEnum.unAuthChat when the key belongs to another user
 */
async function handler(req: ApiRequestProps<GetHelperBotFilePreviewParamsType>): Promise<string> {
  const { key } = req.body;

  const { userId } = await authCert({
    req,
    authToken: true
  });

  // Ownership check: reject access to files uploaded by a different user.
  // (Only the userId component of the parsed key is needed here.)
  const { userId: uid } = getS3HelperBotSource().parseKey(key);
  if (userId !== uid) {
    return Promise.reject(ChatErrEnum.unAuthChat);
  }

  return await getS3HelperBotSource().createGetFileURL({ key, external: true });
}

export default NextAPI(handler);

View File

@ -0,0 +1,43 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import type {
GetHelperBotChatRecordsParamsType,
GetHelperBotChatRecordsResponseType
} from '@fastgpt/global/openapi/core/chat/helperBot/api';
import { authHelperBotChatCrud } from '@/service/support/permission/auth/chat';
import { MongoHelperBotChatItem } from '../../../../../../../../packages/service/core/chat/HelperBot/chatItemSchema';
import { parsePaginationRequest } from '@fastgpt/service/common/api/pagination';
export type getRecordsQuery = GetHelperBotChatRecordsParamsType;
export type getRecordsBody = {};
export type getRecordsResponse = GetHelperBotChatRecordsResponseType;

/**
 * Paginated helper-bot chat history for the authenticated user's chat.
 * Items are fetched newest-first for pagination, then reversed so callers
 * receive each page in chronological (oldest → newest) order.
 */
async function handler(
  req: ApiRequestProps<getRecordsBody, getRecordsQuery>,
  res: ApiResponseType<any>
): Promise<getRecordsResponse> {
  const { type, chatId } = req.query;

  const { userId } = await authHelperBotChatCrud({
    type,
    chatId,
    req,
    authToken: true
  });

  const { offset, pageSize } = parsePaginationRequest(req);

  const [histories, total] = await Promise.all([
    MongoHelperBotChatItem.find({ userId, chatId })
      .sort({ _id: -1 })
      .skip(offset)
      // Bug fix: was hard-coded to .limit(20), ignoring the requested pageSize.
      .limit(pageSize)
      .lean(),
    MongoHelperBotChatItem.countDocuments({ userId, chatId })
  ]);
  histories.reverse();

  return {
    total,
    list: histories
  };
}

export default NextAPI(handler);

View File

@ -11,6 +11,9 @@ import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { getFlatAppResponses } from '@/global/core/chat/utils';
import { MongoChatItemResponse } from '@fastgpt/service/core/chat/chatItemResponseSchema';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { HelperBotTypeEnum } from '@fastgpt/global/core/chat/helperBot/type';
import { MongoHelperBotChat } from '@fastgpt/service/core/chat/HelperBot/chatSchema';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
/*
chat的权限
@ -264,3 +267,18 @@ export const authCollectionInChat = async ({
} catch (error) {}
return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
};
/**
 * Resolve the authenticated user and look up their helper-bot chat document.
 *
 * @returns chat — the matching chat, or null when none exists yet for
 *          (type, userId, chatId); callers must handle the null case.
 * @returns userId — the authenticated user's id.
 *
 * NOTE(review): despite the "Crud" name, only token authentication is checked
 * here; access control is solely the userId scoping of the query.
 */
export const authHelperBotChatCrud = async ({
  type,
  chatId,
  ...props
}: AuthModeType & {
  type: `${HelperBotTypeEnum}`;
  chatId: string;
}) => {
  const { userId } = await authCert(props);

  const chat = await MongoHelperBotChat.findOne({ type, userId, chatId }).lean();

  return { chat, userId };
};

View File

@ -1,5 +1,5 @@
import { GET, POST, DELETE, PUT } from '@/web/common/api/request';
import type { ChatHistoryItemType, ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import type { ChatHistoryItemType, ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
import type { getResDataQuery } from '@/pages/api/core/chat/getResData';
import type {
CloseCustomFeedbackParams,

View File

@ -14,7 +14,8 @@
"**/*.d.ts",
"../../packages/**/*.d.ts",
"../../test/list.test.ts",
"../../packages/service/core/workflow/dispatch/ai/agent/type.ts"
"../../packages/service/core/workflow/dispatch/ai/agent/type.ts",
"../../packages/service/core/workflow/dispatch/type.ts"
],
"exclude": ["**/*.test.ts"]
}