External dataset (#1485)

* fix: revert version

* feat: external collection

* import context

* external ui

* doc

* fix: ts

* clear invalid data

* feat: rename sub name

* fix: node if else edge remove

* fix: init

* api size

* fix: if else node refresh
Authored by Archer on 2024-05-15 10:19:51 +08:00; committed by GitHub
parent fb04889a31
commit cd876251b7
74 changed files with 1882 additions and 1353 deletions

View File

@ -40,18 +40,11 @@
"",
"type ContextType = {$1};",
"",
"type ContextValueType = {};",
"",
"export const Context = createContext<ContextType>({});",
"",
"export const ContextProvider = ({",
" children,",
" value",
"}: {",
" children: ReactNode;",
" value: ContextValueType;",
"}) => {",
" return <Context.Provider value={value}>{children}</Context.Provider>;",
"export const ContextProvider = ({ children }: { children: ReactNode }) => {",
" const contextValue: ContextType = {};",
" return <Context.Provider value={contextValue}>{children}</Context.Provider>;",
"};",
],
"description": "FastGPT usecontext template"

View File

@ -36,6 +36,8 @@ COPY --from=mainDeps /app/projects/$name/node_modules ./projects/$name/node_modu
RUN [ -z "$proxy" ] || sed -i 's/dl-cdn.alpinelinux.org/mirrors.ustc.edu.cn/g' /etc/apk/repositories
RUN apk add --no-cache libc6-compat && npm install -g pnpm@8.6.0
ENV NODE_OPTIONS="--max-old-space-size=4096"
RUN pnpm --filter=$name build
# --------- runner -----------

View File

@ -118,4 +118,5 @@ OneAPI 的 API Key 配置错误,需要修改`OPENAI_API_KEY`环境变量,并
### bad_response_status_code bad response status code 503
1. 模型服务不可用
2. ....
2. 模型接口参数异常温度、max token等可能不适配
3. ....

View File

@ -35,4 +35,5 @@ curl --location --request POST 'https://{{host}}/api/admin/clearInvalidData' \
## V4.8.1 更新说明
1. 新增 - 知识库重新选择向量模型重建
2. 修复 - 定时器清理脏数据任务
2. 修复 - 工作流删除节点的动态输入和输出时候,没有正确的删除连接线,导致可能出现逻辑异常。
3. 修复 - 定时器清理脏数据任务

View File

@ -11,14 +11,16 @@ export type DatasetUpdateBody = {
intro?: string;
permission?: DatasetSchemaType['permission'];
agentModel?: LLMModelItemType;
websiteConfig?: DatasetSchemaType['websiteConfig'];
status?: DatasetSchemaType['status'];
websiteConfig?: DatasetSchemaType['websiteConfig'];
externalReadUrl?: DatasetSchemaType['externalReadUrl'];
};
/* ================= collection ===================== */
export type DatasetCollectionChunkMetadataType = {
parentId?: string;
trainingType?: `${TrainingModeEnum}`;
trainingType?: TrainingModeEnum;
chunkSize?: number;
chunkSplitter?: string;
qaPrompt?: string;
@ -78,7 +80,7 @@ export type PostWebsiteSyncParams = {
export type PushDatasetDataProps = {
collectionId: string;
data: PushDatasetDataChunkProps[];
trainingMode: `${TrainingModeEnum}`;
trainingMode: TrainingModeEnum;
prompt?: string;
billId?: string;
};

View File

@ -0,0 +1,6 @@
/* sourceId = `${prefix}-${id}`; the id part is a fileId (local), a link url (link), or an externalId (external) */
export enum CollectionSourcePrefixEnum {
local = 'local',
link = 'link',
external = 'external'
}
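
The comment above suggests a collection's sourceId is the prefix joined to a per-type identifier: a fileId for local files, the link URL for web links, and an externalId for externally imported files. A minimal sketch of building and parsing such an id, assuming a simple prefix-dash-id format; the helper names are illustrative and not part of this PR:

// Hypothetical helpers for the assumed `${prefix}-${id}` format.
export const buildSourceId = (prefix: CollectionSourcePrefixEnum, id: string) => `${prefix}-${id}`;

export const parseSourceId = (sourceId: string) => {
  // Split only on the first dash so ids that contain dashes stay intact.
  const [prefix, ...rest] = sourceId.split('-');
  return { prefix: prefix as CollectionSourcePrefixEnum, id: rest.join('-') };
};

// buildSourceId(CollectionSourcePrefixEnum.external, 'doc_123') -> 'external-doc_123'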

View File

@ -2,23 +2,29 @@
export enum DatasetTypeEnum {
folder = 'folder',
dataset = 'dataset',
websiteDataset = 'websiteDataset' // deep link
websiteDataset = 'websiteDataset', // deep link
externalFile = 'externalFile'
}
export const DatasetTypeMap = {
[DatasetTypeEnum.folder]: {
icon: 'common/folderFill',
label: 'core.dataset.Folder Dataset',
label: 'Folder Dataset',
collectionLabel: 'common.Folder'
},
[DatasetTypeEnum.dataset]: {
icon: 'core/dataset/commonDataset',
label: 'core.dataset.Common Dataset',
label: 'Common Dataset',
collectionLabel: 'common.File'
},
[DatasetTypeEnum.websiteDataset]: {
icon: 'core/dataset/websiteDataset',
label: 'core.dataset.Website Dataset',
label: 'Website Dataset',
collectionLabel: 'common.Website'
},
[DatasetTypeEnum.externalFile]: {
icon: 'core/dataset/commonDataset',
label: 'External File',
collectionLabel: 'common.File'
}
};
@ -77,7 +83,8 @@ export enum ImportDataSourceEnum {
fileLocal = 'fileLocal',
fileLink = 'fileLink',
fileCustom = 'fileCustom',
csvTable = 'csvTable'
csvTable = 'csvTable',
externalFile = 'externalFile'
}
export enum TrainingModeEnum {

View File

@ -22,13 +22,16 @@ export type DatasetSchemaType = {
vectorModel: string;
agentModel: string;
intro: string;
type: `${DatasetTypeEnum}`;
type: DatasetTypeEnum;
status: `${DatasetStatusEnum}`;
permission: `${PermissionTypeEnum}`;
// metadata
websiteConfig?: {
url: string;
selector: string;
};
externalReadUrl?: string;
};
export type DatasetCollectionSchemaType = {
@ -42,16 +45,18 @@ export type DatasetCollectionSchemaType = {
createTime: Date;
updateTime: Date;
trainingType: `${TrainingModeEnum}`;
trainingType: TrainingModeEnum;
chunkSize: number;
chunkSplitter?: string;
qaPrompt?: string;
fileId?: string;
rawLink?: string;
sourceId?: string; // relate CollectionSourcePrefixEnum
fileId?: string; // local file id
rawLink?: string; // link url
rawTextLength?: number;
hashRawText?: string;
externalSourceUrl?: string; // external import url
metadata?: {
webPageSelector?: string;
relatedImgId?: string; // The id of the associated image collections
@ -93,7 +98,7 @@ export type DatasetTrainingSchemaType = {
billId: string;
expireAt: Date;
lockTime: Date;
mode: `${TrainingModeEnum}`;
mode: TrainingModeEnum;
model: string;
prompt: string;
dataId?: string;
@ -112,13 +117,19 @@ export type DatasetDataWithCollectionType = Omit<DatasetDataSchemaType, 'collect
};
/* ================= dataset ===================== */
export type DatasetSimpleItemType = {
_id: string;
avatar: string;
name: string;
vectorModel: VectorModelItemType;
};
export type DatasetListItemType = {
_id: string;
parentId: string;
avatar: string;
name: string;
intro: string;
type: `${DatasetTypeEnum}`;
type: DatasetTypeEnum;
isOwner: boolean;
canWrite: boolean;
permission: `${PermissionTypeEnum}`;

View File

@ -46,7 +46,7 @@ export function getDefaultIndex(props?: { q?: string; a?: string; dataId?: strin
};
}
export const predictDataLimitLength = (mode: `${TrainingModeEnum}`, data: any[]) => {
export const predictDataLimitLength = (mode: TrainingModeEnum, data: any[]) => {
if (mode === TrainingModeEnum.qa) return data.length * 20;
if (mode === TrainingModeEnum.auto) return data.length * 5;
return data.length;
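
A recurring change in this PR is narrowing parameter and field types from the template-literal form `${TrainingModeEnum}` to the enum type itself. The template-literal type accepts any bare string whose value matches an enum member, while the enum type accepts only the members themselves, so call sites must use TrainingModeEnum.xxx. A small sketch of the difference (member list abbreviated for the example):

enum TrainingModeEnum {
  qa = 'qa',
  auto = 'auto'
}

// Before: accepts any matching string literal.
function loose(mode: `${TrainingModeEnum}`) {
  return mode;
}
// After: accepts only enum members.
function strict(mode: TrainingModeEnum) {
  return mode;
}

loose('qa'); // ok
// strict('qa');             // type error after this PR's change
strict(TrainingModeEnum.qa); // ok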

View File

@ -18,6 +18,7 @@ export const AssignedAnswerModule: FlowNodeTemplateType = {
intro:
'该模块可以直接回复一段指定的内容。常用于引导、提示。非字符串内容传入时,会转成字符串进行输出。',
version: '481',
isTool: true,
inputs: [
{
key: NodeInputKeyEnum.answerText,

View File

@ -16,11 +16,6 @@ const DatasetCollectionSchema = new Schema({
ref: DatasetColCollectionName,
default: null
},
userId: {
// abandoned
type: Schema.Types.ObjectId,
ref: 'user'
},
teamId: {
type: Schema.Types.ObjectId,
ref: TeamCollectionName,
@ -54,6 +49,7 @@ const DatasetCollectionSchema = new Schema({
default: () => new Date()
},
// chunk field
trainingType: {
type: String,
enum: Object.keys(TrainingTypeMap),
@ -70,20 +66,21 @@ const DatasetCollectionSchema = new Schema({
type: String
},
sourceId: String,
// local file collection
fileId: {
type: Schema.Types.ObjectId,
ref: 'dataset.files'
},
rawLink: {
type: String
},
// web link collection
rawLink: String,
rawTextLength: {
type: Number
},
hashRawText: {
type: String
},
// external collection
// metadata
rawTextLength: Number,
hashRawText: String,
externalSourceUrl: String, // external import url
metadata: {
type: Object,
default: {}

View File

@ -89,7 +89,8 @@ const DatasetSchema = new Schema({
default: 'body'
}
}
}
},
externalReadUrl: String
});
try {

View File

@ -14,7 +14,7 @@ import {
} from '@fastgpt/global/support/wallet/sub/constants';
import type { TeamSubSchema } from '@fastgpt/global/support/wallet/sub/type';
export const subCollectionName = 'team.subscriptions';
export const subCollectionName = 'team_subscriptions';
const SubSchema = new Schema({
teamId: {

View File

@ -1,11 +1,11 @@
import { DragHandleIcon } from '@chakra-ui/icons';
import { Box } from '@chakra-ui/react';
import { Box, BoxProps } from '@chakra-ui/react';
import React from 'react';
import { DraggableProvided } from 'react-beautiful-dnd';
const DragIcon = ({ provided }: { provided: DraggableProvided }) => {
const DragIcon = ({ provided, ...props }: { provided: DraggableProvided } & BoxProps) => {
return (
<Box {...provided.dragHandleProps}>
<Box {...provided.dragHandleProps} {...props}>
<DragHandleIcon color={'myGray.500'} _hover={{ color: 'primary.600' }} />
</Box>
);

View File

@ -1,12 +1,6 @@
{
"parser": "@typescript-eslint/parser", // 使 TypeScript
"plugins": ["@typescript-eslint"], // TypeScript
"extends": "next/core-web-vitals",
"rules": {
"react-hooks/rules-of-hooks": 0,
"@typescript-eslint/consistent-type-imports": "warn" // "error"
"react-hooks/rules-of-hooks": 0
}
}

View File

@ -1,4 +1,5 @@
{
"Add new": "Add new",
"App": "App",
"Export": "Export",
"Folder": "Folder",
@ -509,18 +510,14 @@
"Choose Dataset": "Associate dataset",
"Chunk amount": "Number of chunks",
"Collection": "Dataset",
"Common Dataset": "Common dataset",
"Common Dataset Desc": "Can be built by importing files, web links, or manual entry",
"Create dataset": "Create a dataset",
"Dataset": "Dataset",
"Dataset ID": "Dataset ID",
"Dataset Type": "Dataset type",
"Delete Confirm": "Confirm to delete this dataset? Data cannot be recovered after deletion, please confirm!",
"Delete Website Tips": "Confirm to delete this site?",
"Empty Dataset": "",
"Empty Dataset Tips": "No datasets yet, go create one!",
"File collection": "File dataset",
"Folder Dataset": "Folder",
"Folder placeholder": "This is a directory",
"Go Dataset": "Go to dataset",
"Intro Placeholder": "This dataset has no introduction~",
@ -540,8 +537,6 @@
"Table collection": "Table dataset",
"Text collection": "Text dataset",
"Total chunks": "Total chunks: {{total}}",
"Website Dataset": "Web site synchronization",
"Website Dataset Desc": "Web site synchronization allows you to use a web page link to build a dataset",
"collection": {
"Click top config website": "Click to configure website",
"Collection name": "Dataset name",

View File

@ -1,6 +1,17 @@
{
"Common Dataset": "Common dataset",
"Common Dataset Desc": "Can be built by importing files, web links, or manual entry",
"Confirm to rebuild embedding tip": "Are you sure to switch the knowledge base index? Switching index is a very heavy operation that requires re-indexing all the data in your knowledge base, which may take a long time. Please ensure that the remaining points in your account are sufficient.",
"External file": "External file",
"External file Dataset Desc": "You can import files from an external file library to build a knowledge base. Files are not stored twice",
"External id": "File id",
"External read url": "External read url",
"External url": "File read url",
"Folder Dataset": "Folder",
"Rebuild embedding start tip": "The task of switching index models has begun",
"Rebuilding index count": "Rebuilding count: {{count}}",
"The knowledge base has indexes that are being trained or being rebuilt": "The knowledge base has indexes that are being trained or being rebuilt"
"The knowledge base has indexes that are being trained or being rebuilt": "The knowledge base has indexes that are being trained or being rebuilt",
"Website Dataset": "Web site",
"Website Dataset Desc": "Web site synchronization allows you to use a web page link to build a dataset",
"filename": "filename"
}

View File

@ -1,4 +1,5 @@
{
"Add new": "新增",
"App": "应用",
"Export": "导出",
"Folder": "文件夹",
@ -509,8 +510,6 @@
"Choose Dataset": "关联知识库",
"Chunk amount": "分段数",
"Collection": "数据集",
"Common Dataset": "通用知识库",
"Common Dataset Desc": "可通过导入文件、网页链接或手动录入形式构建知识库",
"Create dataset": "创建一个知识库",
"Dataset": "知识库",
"Dataset ID": "知识库 ID",
@ -520,7 +519,6 @@
"Empty Dataset": "",
"Empty Dataset Tips": "还没有知识库,快去创建一个吧!",
"File collection": "文件数据集",
"Folder Dataset": "文件夹",
"Folder placeholder": "这是一个目录",
"Go Dataset": "前往知识库",
"Intro Placeholder": "这个知识库还没有介绍~",
@ -540,8 +538,6 @@
"Table collection": "表格数据集",
"Text collection": "文本数据集",
"Total chunks": "总分段: {{total}}",
"Website Dataset": "Web 站点同步",
"Website Dataset Desc": "Web 站点同步允许你直接使用一个网页链接构建知识库",
"collection": {
"Click top config website": "点击配置网站",
"Collection name": "数据集名称",

View File

@ -1,6 +1,17 @@
{
"Common Dataset": "通用知识库",
"Common Dataset Desc": "可通过导入文件、网页链接或手动录入形式构建知识库",
"Confirm to rebuild embedding tip": "确认为知识库切换索引?\n切换索引是一个非常重量的操作需要对您知识库内所有数据进行重新索引时间可能较长请确保账号内剩余积分充足。",
"External File": "外部文件库",
"External file Dataset Desc": "可以从外部文件库导入文件构建知识库,文件不会进行二次存储",
"External id": "文件阅读ID",
"External read url": "外部预览地址",
"External url": "文件访问URL",
"Folder Dataset": "文件夹",
"Rebuild embedding start tip": "切换索引模型任务已开始",
"Rebuilding index count": "重建中索引数量: {{count}}",
"The knowledge base has indexes that are being trained or being rebuilt": "知识库有训练中或正在重建的索引"
"The knowledge base has indexes that are being trained or being rebuilt": "知识库有训练中或正在重建的索引",
"Website Dataset": "Web 站点同步",
"Website Dataset Desc": "Web 站点同步允许你直接使用一个网页链接构建知识库",
"filename": "文件名"
}

View File

@ -0,0 +1,14 @@
import Head from 'next/head';
import React from 'react';
const NextHead = ({ title, icon, desc }: { title?: string; icon?: string; desc?: string }) => {
return (
<Head>
<title>{title}</title>
{desc && <meta name="description" content={desc} />}
{icon && <link rel="icon" href={icon} />}
</Head>
);
};
export default NextHead;

View File

@ -1,12 +1,12 @@
import { Box, Flex, FlexProps } from '@chakra-ui/react';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import React from 'react';
import { DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
import { useI18n } from '@/web/context/I18n';
const DatasetTypeTag = ({ type, ...props }: { type: `${DatasetTypeEnum}` } & FlexProps) => {
const { t } = useTranslation();
const DatasetTypeTag = ({ type, ...props }: { type: DatasetTypeEnum } & FlexProps) => {
const { datasetT } = useI18n();
const item = DatasetTypeMap[type] || DatasetTypeMap['dataset'];
@ -22,7 +22,8 @@ const DatasetTypeTag = ({ type, ...props }: { type: `${DatasetTypeEnum}` } & Fle
{...props}
>
<MyIcon name={item.icon as any} w={'16px'} mr={2} color={'myGray.400'} />
<Box>{t(item.label)}</Box>
{/* @ts-ignore */}
<Box>{datasetT(item.label)}</Box>
</Flex>
);
};

View File

@ -50,16 +50,11 @@ const ListItem = ({
}) => {
const { t } = useTranslation();
const { getZoom } = useReactFlow();
const onDelEdge = useContextSelector(WorkflowContext, (v) => v.onDelEdge);
const handleId = getHandleId(nodeId, 'source', getElseIFLabel(conditionIndex));
return (
<Box
ref={provided.innerRef}
{...provided.draggableProps}
style={{
...provided.draggableProps.style,
opacity: snapshot.isDragging ? 0.8 : 1
}}
>
const Render = useMemo(() => {
return (
<Flex
alignItems={'center'}
position={'relative'}
@ -68,7 +63,10 @@ const ListItem = ({
>
<Container w={snapshot.isDragging ? '' : 'full'} className="nodrag">
<Flex mb={4} alignItems={'center'}>
{ifElseList.length > 1 && <DragIcon provided={provided} />}
<DragIcon
visibility={ifElseList.length > 1 ? 'visible' : 'hidden'}
provided={provided}
/>
<Box color={'black'} fontSize={'lg'} ml={2}>
{getElseIFLabel(conditionIndex)}
</Box>
@ -109,6 +107,10 @@ const ListItem = ({
color={'myGray.400'}
onClick={() => {
onUpdateIfElseList(ifElseList.filter((_, index) => index !== conditionIndex));
onDelEdge({
nodeId,
sourceHandle: handleId
});
}}
/>
)}
@ -185,21 +187,21 @@ const ListItem = ({
onChange={(e) => {
onUpdateIfElseList(
ifElseList.map((ifElse, index) => {
if (index === conditionIndex) {
return {
...ifElse,
list: ifElse.list.map((item, index) => {
if (index === i) {
return {
...item,
value: e
};
}
return item;
})
};
}
return ifElse;
return {
...ifElse,
list:
index === conditionIndex
? ifElse.list.map((item, index) => {
if (index === i) {
return {
...item,
value: e
};
}
return item;
})
: ifElse.list
};
})
);
}}
@ -263,12 +265,38 @@ const ListItem = ({
{!snapshot.isDragging && (
<SourceHandle
nodeId={nodeId}
handleId={getHandleId(nodeId, 'source', getElseIFLabel(conditionIndex))}
handleId={handleId}
position={Position.Right}
translate={[18, 0]}
/>
)}
</Flex>
);
}, [
conditionIndex,
conditionItem.condition,
conditionItem.list,
getZoom,
handleId,
ifElseList,
nodeId,
onDelEdge,
onUpdateIfElseList,
provided,
snapshot.isDragging,
t
]);
return (
<Box
ref={provided.innerRef}
{...provided.draggableProps}
style={{
...provided.draggableProps.style,
opacity: snapshot.isDragging ? 0.8 : 1
}}
>
{Render}
</Box>
);
};
@ -387,35 +415,39 @@ const ConditionValueInput = ({
return output.valueType;
}, [nodeList, variable]);
if (valueType === WorkflowIOValueTypeEnum.boolean) {
return (
<MySelect
list={[
{ label: 'True', value: 'true' },
{ label: 'False', value: 'false' }
]}
onchange={onChange}
value={value}
placeholder={'选择值'}
isDisabled={
condition === VariableConditionEnum.isEmpty ||
condition === VariableConditionEnum.isNotEmpty
}
/>
);
} else {
return (
<MyInput
value={value}
placeholder={'输入值'}
w={'100%'}
bg={'white'}
isDisabled={
condition === VariableConditionEnum.isEmpty ||
condition === VariableConditionEnum.isNotEmpty
}
onChange={(e) => onChange(e.target.value)}
/>
);
}
const Render = useMemo(() => {
if (valueType === WorkflowIOValueTypeEnum.boolean) {
return (
<MySelect
list={[
{ label: 'True', value: 'true' },
{ label: 'False', value: 'false' }
]}
onchange={onChange}
value={value}
placeholder={'选择值'}
isDisabled={
condition === VariableConditionEnum.isEmpty ||
condition === VariableConditionEnum.isNotEmpty
}
/>
);
} else {
return (
<MyInput
value={value}
placeholder={'输入值'}
w={'100%'}
bg={'white'}
isDisabled={
condition === VariableConditionEnum.isEmpty ||
condition === VariableConditionEnum.isNotEmpty
}
onChange={(e) => onChange(e.target.value)}
/>
);
}
}, [condition, onChange, value, valueType]);
return Render;
};

View File

@ -1,4 +1,4 @@
import React, { useCallback, useMemo, useState } from 'react';
import React, { useCallback, useMemo } from 'react';
import NodeCard from '../render/NodeCard';
import { useTranslation } from 'next-i18next';
import { Box, Button, Flex } from '@chakra-ui/react';
@ -9,7 +9,7 @@ import { IfElseListItemType } from '@fastgpt/global/core/workflow/template/syste
import { useContextSelector } from 'use-context-selector';
import { WorkflowContext } from '../../../context';
import Container from '../../components/Container';
import DndDrag, { Draggable, DropResult } from '@fastgpt/web/components/common/DndDrag/index';
import DndDrag, { Draggable } from '@fastgpt/web/components/common/DndDrag/index';
import { SourceHandle } from '../render/Handle';
import { getHandleId } from '@fastgpt/global/core/workflow/utils';
import ListItem from './ListItem';
@ -19,6 +19,7 @@ const NodeIfElse = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const { t } = useTranslation();
const { nodeId, inputs = [] } = data;
const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
const elseHandleId = getHandleId(nodeId, 'source', IfElseResultEnum.ELSE);
const ifElseList = useMemo(
() =>
@ -49,7 +50,7 @@ const NodeIfElse = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
<NodeCard selected={selected} maxW={'1000px'} {...data}>
<Box px={4} cursor={'default'}>
<DndDrag<IfElseListItemType>
onDragEndCb={(list) => onUpdateIfElseList(list)}
onDragEndCb={(list: IfElseListItemType[]) => onUpdateIfElseList(list)}
dataList={ifElseList}
renderClone={(provided, snapshot, rubric) => (
<ListItem
@ -95,7 +96,7 @@ const NodeIfElse = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
</Box>
<SourceHandle
nodeId={nodeId}
handleId={getHandleId(nodeId, 'source', IfElseResultEnum.ELSE)}
handleId={elseHandleId}
position={Position.Right}
translate={[26, 0]}
/>

View File

@ -1,4 +1,4 @@
import React, { useState } from 'react';
import React, { useCallback, useState } from 'react';
import { getPublishList, postRevertVersion } from '@/web/core/app/versionApi';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import CustomRightDrawer from '@fastgpt/web/components/common/MyDrawer/CustomRightDrawer';
@ -14,6 +14,8 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';
import { StoreEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
const PublishHistoriesSlider = () => {
const { t } = useTranslation();
@ -45,29 +47,29 @@ const PublishHistoriesSlider = () => {
setIsShowVersionHistories(false);
});
const onPreview = useMemoizedFn((data: AppVersionSchemaType) => {
const onPreview = useCallback((data: AppVersionSchemaType) => {
setSelectedHistoryId(data._id);
initData({
nodes: data.nodes,
edges: data.edges
});
});
const onCloseSlider = useMemoizedFn(() => {
setSelectedHistoryId(undefined);
initData({
nodes: appDetail.modules,
edges: appDetail.edges
});
onClose();
});
}, []);
const onCloseSlider = useCallback(
(data: { nodes: StoreNodeItemType[]; edges: StoreEdgeItemType[] }) => {
setSelectedHistoryId(undefined);
initData(data);
onClose();
},
[appDetail]
);
const { mutate: onRevert, isLoading: isReverting } = useRequest({
mutationFn: async (data: AppVersionSchemaType) => {
if (!appId) return;
await postRevertVersion(appId, {
versionId: data._id,
editNodes: appDetail.modules,
editNodes: appDetail.modules, // old workflow
editEdges: appDetail.edges
});
@ -77,7 +79,7 @@ const PublishHistoriesSlider = () => {
edges: data.edges
});
onCloseSlider();
onCloseSlider(data);
}
});
@ -86,7 +88,12 @@ const PublishHistoriesSlider = () => {
return (
<>
<CustomRightDrawer
onClose={onCloseSlider}
onClose={() =>
onCloseSlider({
nodes: appDetail.modules,
edges: appDetail.edges
})
}
iconSrc="core/workflow/versionHistories"
title={t('core.workflow.publish.histories')}
maxW={'300px'}

View File

@ -430,8 +430,8 @@ const WorkflowContextProvider = ({
const initData = useMemoizedFn(
async (e: { nodes: StoreNodeItemType[]; edges: StoreEdgeItemType[] }) => {
setNodes(e.nodes?.map((item) => storeNode2FlowNode({ item })));
setEdges(e.edges?.map((item) => storeEdgesRenderEdge({ edge: item })));
setNodes(e.nodes?.map((item) => storeNode2FlowNode({ item })) || []);
setEdges(e.edges?.map((item) => storeEdgesRenderEdge({ edge: item })) || []);
}
);

View File

@ -14,7 +14,7 @@ import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
/* ================= dataset ===================== */
export type CreateDatasetParams = {
parentId?: string;
type: `${DatasetTypeEnum}`;
type: DatasetTypeEnum;
name: string;
intro: string;
avatar: string;
@ -76,7 +76,7 @@ export type SearchTestResponse = {
/* =========== training =========== */
export type PostPreviewFilesChunksProps = {
type: `${ImportDataSourceEnum}`;
type: ImportDataSourceEnum;
sourceId: string;
chunkSize: number;
overlapRatio: number;

View File

@ -10,28 +10,22 @@ import I18nContextProvider from '@/web/context/I18n';
import { useInitApp } from '@/web/context/useInitApp';
import '@/web/styles/reset.scss';
import NextHead from '@/components/common/NextHead';
function App({ Component, pageProps }: AppProps) {
const { feConfigs, scripts, title } = useInitApp();
return (
<>
<Head>
<title>{title}</title>
<meta
name="description"
content={
feConfigs?.systemDescription ||
process.env.SYSTEM_DESCRIPTION ||
`${title} 是一个大模型应用编排系统,提供开箱即用的数据处理、模型调用等能力,可以快速的构建知识库并通过 Flow 可视化进行工作流编排,实现复杂的知识库场景!`
}
/>
<meta
name="viewport"
content="width=device-width,initial-scale=1.0,maximum-scale=1.0,minimum-scale=1.0,user-scalable=no, viewport-fit=cover"
/>
<link rel="icon" href={feConfigs.favicon || process.env.SYSTEM_FAVICON} />
</Head>
<NextHead
title={title}
desc={
feConfigs?.systemDescription ||
process.env.SYSTEM_DESCRIPTION ||
`${title} 是一个大模型应用编排系统,提供开箱即用的数据处理、模型调用等能力,可以快速的构建知识库并通过 Flow 可视化进行工作流编排,实现复杂的知识库场景!`
}
icon={feConfigs?.favicon || process.env.SYSTEM_FAVICON}
/>
{scripts?.map((item, i) => <Script key={i} strategy="lazyOnload" {...item}></Script>)}
<QueryClientContext>

View File

@ -58,17 +58,18 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
const { start = -2, end = -360 * 24 } = req.body as { start: number; end: number };
(async () => {
try {
console.log('执行脏数据清理任务');
// 360天 ~ 2小时前
const end = addHours(new Date(), -2);
const start = addHours(new Date(), -360 * 24);
await checkInvalidDatasetFiles(start, end);
await checkInvalidImg(start, end);
await checkInvalidDatasetData(start, end);
await checkInvalidVector(start, end);
const endTime = addHours(new Date(), start);
const startTime = addHours(new Date(), end);
await checkInvalidDatasetFiles(startTime, endTime);
await checkInvalidImg(startTime, endTime);
await checkInvalidDatasetData(startTime, endTime);
await checkInvalidVector(startTime, endTime);
console.log('执行脏数据清理任务完毕');
} catch (error) {
console.log('执行脏数据清理任务出错了');
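
With this change the cleanup window is no longer hard-coded to "between 360 days and 2 hours ago": the handler now reads start and end from the request body as hour offsets from now (defaulting to -2 and -360 * 24) and converts them with addHours. A hedged sketch of how a caller-supplied window maps onto the actual range; the concrete values are only an example, and addHours is assumed to be the date-fns helper:

import { addHours } from 'date-fns'; // assumed source of addHours

// Example body: clean invalid data created between 30 days ago and 2 hours ago.
const body = { start: -2, end: -24 * 30 };

const endTime = addHours(new Date(), body.start); // 2 hours ago (newest records touched)
const startTime = addHours(new Date(), body.end); // 30 days ago (oldest records touched)
// checkInvalidDatasetFiles(startTime, endTime) and friends then scan [startTime, endTime].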

View File

@ -141,11 +141,18 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
const collections = await connectionMongo.connection.db
.listCollections({ name: 'team.members' })
.toArray();
if (collections.length > 0) {
const sourceCol = connectionMongo.connection.db.collection('team.members');
const targetCol = connectionMongo.connection.db.collection('team_members');
await sourceCol.rename('team_members', { dropTarget: true });
console.log('success rename team.members -> team_members');
if ((await targetCol.countDocuments()) > 1) {
// 除了root
console.log('team_members 中有数据,无法自动将 team.members 迁移到 team_members,请手动操作');
} else {
await sourceCol.rename('team_members', { dropTarget: true });
console.log('success rename team.members -> team_members');
}
}
} catch (error) {
console.log('error rename team.members -> team_members', error);
@ -170,6 +177,27 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
console.log('error rename team.tags -> team_tags', error);
}
try {
const collections = await connectionMongo.connection.db
.listCollections({ name: 'team.subscriptions' })
.toArray();
if (collections.length > 0) {
const sourceCol = connectionMongo.connection.db.collection('team.subscriptions');
const targetCol = connectionMongo.connection.db.collection('team_subscriptions');
if ((await targetCol.countDocuments()) > 0) {
console.log(
'team_subscriptions 中有数据,无法自动将 team.subscriptions 迁移到 team_subscriptions请手动操作'
);
} else {
await sourceCol.rename('team_subscriptions', { dropTarget: true });
console.log('success rename team.subscriptions -> team_subscriptions');
}
}
} catch (error) {
console.log('error rename team.subscriptions -> team_subscriptions', error);
}
jsonRes(res, {
message: 'success'
});

View File

@ -28,7 +28,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
export const config = {
api: {
sizeLimit: '10mb',
bodyParser: {
sizeLimit: '16mb'
}

View File

@ -1,45 +1,31 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
import { authUserRole } from '@fastgpt/service/support/permission/auth/user';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { NextAPI } from '@/service/middle/entry';
/* get all dataset by teamId or tmbId */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
// 凭证校验
const { teamId, tmbId, teamOwner, role } = await authUserRole({ req, authToken: true });
async function handler(
req: NextApiRequest,
res: NextApiResponse<any>
): Promise<DatasetSimpleItemType[]> {
// 凭证校验
const { teamId, tmbId, teamOwner, role } = await authUserRole({ req, authToken: true });
const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, role }),
type: { $ne: DatasetTypeEnum.folder }
}).lean();
const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, role }),
type: { $ne: DatasetTypeEnum.folder }
}).lean();
const data = datasets.map((item) => ({
_id: item._id,
parentId: item.parentId,
avatar: item.avatar,
name: item.name,
intro: item.intro,
type: item.type,
permission: item.permission,
vectorModel: getVectorModel(item.vectorModel),
canWrite: String(item.tmbId) === tmbId,
isOwner: teamOwner || String(item.tmbId) === tmbId
}));
jsonRes<DatasetListItemType[]>(res, {
data
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return datasets.map((item) => ({
_id: item._id,
avatar: item.avatar,
name: item.name,
vectorModel: getVectorModel(item.vectorModel)
}));
}
export default NextAPI(handler);
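
The handler above drops its own connectToDatabase / try-catch / jsonRes plumbing: it simply returns the data and is exported through NextAPI(handler). The wrapper itself is not part of this diff; the following is a speculative sketch of what a middleware like @/service/middle/entry presumably does, so the refactor is easier to read (names and details are assumptions, not the actual implementation):

import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';

type ApiHandler<T> = (req: NextApiRequest, res: NextApiResponse) => Promise<T>;

// Rough shape only: connect, run the handler, serialize the result, catch errors.
export const NextAPI =
  <T>(handler: ApiHandler<T>) =>
  async (req: NextApiRequest, res: NextApiResponse) => {
    try {
      await connectToDatabase();
      const data = await handler(req, res);
      jsonRes<T>(res, { data });
    } catch (error) {
      jsonRes(res, { code: 500, error });
    }
  };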

View File

@ -9,7 +9,7 @@ import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { NextAPI } from '@/service/middle/entry';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const { parentId, type } = req.query as { parentId?: string; type?: `${DatasetTypeEnum}` };
const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
// 凭证校验
const { teamId, tmbId, teamOwner, role, canWrite } = await authUserRole({
req,

View File

@ -8,8 +8,18 @@ import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id, parentId, name, avatar, intro, permission, agentModel, websiteConfig, status } =
req.body as DatasetUpdateBody;
const {
id,
parentId,
name,
avatar,
intro,
permission,
agentModel,
websiteConfig,
externalReadUrl,
status
} = req.body as DatasetUpdateBody;
if (!id) {
throw new Error('缺少参数');
@ -33,7 +43,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
...(agentModel && { agentModel: agentModel.model }),
...(websiteConfig && { websiteConfig }),
...(status && { status }),
...(intro && { intro })
...(intro && { intro }),
...(externalReadUrl && { externalReadUrl })
}
);

View File

@ -9,7 +9,7 @@ import { PluginListItemType } from '@fastgpt/global/core/plugin/controller';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { parentId, type } = req.query as { parentId?: string; type?: `${DatasetTypeEnum}` };
const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
const { teamId } = await authCert({ req, authToken: true });

View File

@ -82,7 +82,7 @@ export default NextAPI(handler);
export const config = {
api: {
bodyParser: {
sizeLimit: '10mb'
sizeLimit: '20mb'
},
responseLimit: '20mb'
}

View File

@ -43,6 +43,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
export const config = {
api: {
bodyParser: {
sizeLimit: '16mb'
},
responseLimit: '16mb'
}
};

View File

@ -523,6 +523,9 @@ const authHeaderRequest = async ({
export const config = {
api: {
bodyParser: {
sizeLimit: '20mb'
},
responseLimit: '20mb'
}
};

View File

@ -269,139 +269,141 @@ const OutLink = ({
}, []);
return (
<PageContainer
{...(isEmbed
? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
: { p: [0, 5] })}
>
<>
<Head>
<title>{appName || chatData.app?.name}</title>
<meta name="description" content={appIntro} />
<link rel="icon" href={appAvatar || chatData.app?.avatar} />
</Head>
<MyBox
isLoading={isFetching}
h={'100%'}
display={'flex'}
flexDirection={['column', 'row']}
bg={'white'}
<PageContainer
{...(isEmbed
? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
: { p: [0, 5] })}
>
{showHistory === '1'
? ((children: React.ReactNode) => {
return isPc ? (
<SideBar>{children}</SideBar>
) : (
<Drawer
isOpen={isOpenSlider}
placement="left"
autoFocus={false}
size={'xs'}
onClose={onCloseSlider}
>
<DrawerOverlay backgroundColor={'rgba(255,255,255,0.5)'} />
<DrawerContent maxWidth={'250px'} boxShadow={'2px 0 10px rgba(0,0,0,0.15)'}>
{children}
</DrawerContent>
</Drawer>
);
})(
<ChatHistorySlider
appName={chatData.app.name}
appAvatar={chatData.app.avatar}
confirmClearText={t('core.chat.Confirm to clear share chat history')}
activeChatId={chatId}
history={histories.map((item) => ({
id: item.chatId,
title: item.title,
customTitle: item.customTitle,
top: item.top
}))}
onClose={onCloseSlider}
onChangeChat={(chatId) => {
router.replace({
query: {
...router.query,
chatId: chatId || ''
}
});
if (!isPc) {
onCloseSlider();
}
}}
onDelHistory={({ chatId }) =>
delOneHistory({ appId: chatData.appId, chatId, shareId, outLinkUid })
}
onClearHistory={() => {
clearHistories({ shareId, outLinkUid });
router.replace({
query: {
...router.query,
chatId: ''
}
});
}}
onSetHistoryTop={(e) => {
updateHistory({
...e,
appId: chatData.appId,
shareId,
outLinkUid
});
}}
onSetCustomTitle={async (e) => {
updateHistory({
appId: chatData.appId,
chatId: e.chatId,
title: e.title,
customTitle: e.title,
shareId,
outLinkUid
});
}}
/>
)
: null}
{/* chat container */}
<Flex
position={'relative'}
h={[0, '100%']}
w={['100%', 0]}
flex={'1 0 0'}
flexDirection={'column'}
<MyBox
isLoading={isFetching}
h={'100%'}
display={'flex'}
flexDirection={['column', 'row']}
bg={'white'}
>
{/* header */}
<ChatHeader
appAvatar={chatData.app.avatar}
appName={chatData.app.name}
history={chatData.history}
showHistory={showHistory === '1'}
onOpenSlider={onOpenSlider}
/>
{/* chat box */}
<Box flex={1}>
<ChatBox
active={!!chatData.app.name}
ref={ChatBoxRef}
{showHistory === '1'
? ((children: React.ReactNode) => {
return isPc ? (
<SideBar>{children}</SideBar>
) : (
<Drawer
isOpen={isOpenSlider}
placement="left"
autoFocus={false}
size={'xs'}
onClose={onCloseSlider}
>
<DrawerOverlay backgroundColor={'rgba(255,255,255,0.5)'} />
<DrawerContent maxWidth={'250px'} boxShadow={'2px 0 10px rgba(0,0,0,0.15)'}>
{children}
</DrawerContent>
</Drawer>
);
})(
<ChatHistorySlider
appName={chatData.app.name}
appAvatar={chatData.app.avatar}
confirmClearText={t('core.chat.Confirm to clear share chat history')}
activeChatId={chatId}
history={histories.map((item) => ({
id: item.chatId,
title: item.title,
customTitle: item.customTitle,
top: item.top
}))}
onClose={onCloseSlider}
onChangeChat={(chatId) => {
router.replace({
query: {
...router.query,
chatId: chatId || ''
}
});
if (!isPc) {
onCloseSlider();
}
}}
onDelHistory={({ chatId }) =>
delOneHistory({ appId: chatData.appId, chatId, shareId, outLinkUid })
}
onClearHistory={() => {
clearHistories({ shareId, outLinkUid });
router.replace({
query: {
...router.query,
chatId: ''
}
});
}}
onSetHistoryTop={(e) => {
updateHistory({
...e,
appId: chatData.appId,
shareId,
outLinkUid
});
}}
onSetCustomTitle={async (e) => {
updateHistory({
appId: chatData.appId,
chatId: e.chatId,
title: e.title,
customTitle: e.title,
shareId,
outLinkUid
});
}}
/>
)
: null}
{/* chat container */}
<Flex
position={'relative'}
h={[0, '100%']}
w={['100%', 0]}
flex={'1 0 0'}
flexDirection={'column'}
>
{/* header */}
<ChatHeader
appAvatar={chatData.app.avatar}
userAvatar={chatData.userAvatar}
userGuideModule={chatData.app?.userGuideModule}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onUpdateVariable={(e) => {}}
onStartChat={startChat}
onDelMessage={(e) =>
delOneHistoryItem({ ...e, appId: chatData.appId, chatId, shareId, outLinkUid })
}
appId={chatData.appId}
chatId={chatId}
shareId={shareId}
outLinkUid={outLinkUid}
appName={chatData.app.name}
history={chatData.history}
showHistory={showHistory === '1'}
onOpenSlider={onOpenSlider}
/>
</Box>
</Flex>
</MyBox>
</PageContainer>
{/* chat box */}
<Box flex={1}>
<ChatBox
active={!!chatData.app.name}
ref={ChatBoxRef}
appAvatar={chatData.app.avatar}
userAvatar={chatData.userAvatar}
userGuideModule={chatData.app?.userGuideModule}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onUpdateVariable={(e) => {}}
onStartChat={startChat}
onDelMessage={(e) =>
delOneHistoryItem({ ...e, appId: chatData.appId, chatId, shareId, outLinkUid })
}
appId={chatData.appId}
chatId={chatId}
shareId={shareId}
outLinkUid={outLinkUid}
/>
</Box>
</Flex>
</MyBox>
</PageContainer>
</>
);
};

View File

@ -0,0 +1,158 @@
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { Dispatch, ReactNode, SetStateAction, useEffect, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { createContext, useContextSelector } from 'use-context-selector';
import { DatasetStatusEnum, DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { useDisclosure } from '@chakra-ui/react';
import { checkTeamWebSyncLimit } from '@/web/support/user/team/api';
import { postCreateTrainingUsage } from '@/web/support/wallet/usage/api';
import { getDatasetCollections, postWebsiteSync } from '@/web/core/dataset/api';
import dynamic from 'next/dynamic';
import { usePagination } from '@fastgpt/web/hooks/usePagination';
import { DatasetCollectionsListItemType } from '@/global/core/dataset/type';
import { useRouter } from 'next/router';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
const WebSiteConfigModal = dynamic(() => import('./WebsiteConfig'));
type CollectionPageContextType = {
openWebSyncConfirm: () => void;
onOpenWebsiteModal: () => void;
collections: DatasetCollectionsListItemType[];
Pagination: () => JSX.Element;
total: number;
getData: (e: number) => void;
isGetting: boolean;
pageNum: number;
pageSize: number;
searchText: string;
setSearchText: Dispatch<SetStateAction<string>>;
};
export const CollectionPageContext = createContext<CollectionPageContextType>({
openWebSyncConfirm: function (): () => void {
throw new Error('Function not implemented.');
},
onOpenWebsiteModal: function (): void {
throw new Error('Function not implemented.');
},
collections: [],
Pagination: function (): JSX.Element {
throw new Error('Function not implemented.');
},
total: 0,
getData: function (e: number): void {
throw new Error('Function not implemented.');
},
isGetting: false,
pageNum: 0,
pageSize: 0,
searchText: '',
setSearchText: function (value: SetStateAction<string>): void {
throw new Error('Function not implemented.');
}
});
const CollectionPageContextProvider = ({ children }: { children: ReactNode }) => {
const { t } = useTranslation();
const router = useRouter();
const { parentId = '' } = router.query as { parentId: string };
const { datasetDetail, datasetId, updateDataset } = useContextSelector(
DatasetPageContext,
(v) => v
);
// website config
const { openConfirm: openWebSyncConfirm, ConfirmModal: ConfirmWebSyncModal } = useConfirm({
content: t('core.dataset.collection.Start Sync Tip')
});
const {
isOpen: isOpenWebsiteModal,
onOpen: onOpenWebsiteModal,
onClose: onCloseWebsiteModal
} = useDisclosure();
const { mutate: onUpdateDatasetWebsiteConfig } = useRequest({
mutationFn: async (websiteConfig: DatasetSchemaType['websiteConfig']) => {
onCloseWebsiteModal();
await checkTeamWebSyncLimit();
const billId = await postCreateTrainingUsage({
name: t('core.dataset.training.Website Sync'),
datasetId: datasetId
});
await postWebsiteSync({ datasetId: datasetId, billId });
await updateDataset({
id: datasetId,
websiteConfig,
status: DatasetStatusEnum.syncing
});
return;
},
errorToast: t('common.Update Failed')
});
// collection list
const [searchText, setSearchText] = useState('');
const {
data: collections,
Pagination,
total,
getData,
isLoading: isGetting,
pageNum,
pageSize
} = usePagination<DatasetCollectionsListItemType>({
api: getDatasetCollections,
pageSize: 20,
params: {
datasetId,
parentId,
searchText
},
defaultRequest: false
});
useEffect(() => {
getData(1);
}, [parentId]);
const contextValue: CollectionPageContextType = {
openWebSyncConfirm: openWebSyncConfirm(onUpdateDatasetWebsiteConfig),
onOpenWebsiteModal,
searchText,
setSearchText,
collections,
Pagination,
total,
getData,
isGetting,
pageNum,
pageSize
};
return (
<CollectionPageContext.Provider value={contextValue}>
{children}
{datasetDetail.type === DatasetTypeEnum.websiteDataset && (
<>
{isOpenWebsiteModal && (
<WebSiteConfigModal
onClose={onCloseWebsiteModal}
onSuccess={onUpdateDatasetWebsiteConfig}
defaultValue={{
url: datasetDetail?.websiteConfig?.url,
selector: datasetDetail?.websiteConfig?.selector
}}
/>
)}
<ConfirmWebSyncModal />
</>
)}
</CollectionPageContext.Provider>
);
};
export default CollectionPageContextProvider;

View File

@ -0,0 +1,55 @@
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import React from 'react';
import { useTranslation } from 'next-i18next';
import { DatasetStatusEnum, DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { Box, Flex } from '@chakra-ui/react';
import { useContextSelector } from 'use-context-selector';
import { CollectionPageContext } from './Context';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
const EmptyCollectionTip = () => {
const { t } = useTranslation();
const onOpenWebsiteModal = useContextSelector(CollectionPageContext, (v) => v.onOpenWebsiteModal);
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
return (
<>
{(datasetDetail.type === DatasetTypeEnum.dataset ||
datasetDetail.type === DatasetTypeEnum.externalFile) && (
<EmptyTip text={t('core.dataset.collection.Empty Tip')} />
)}
{datasetDetail.type === DatasetTypeEnum.websiteDataset && (
<EmptyTip
text={
<Flex>
{datasetDetail.status === DatasetStatusEnum.syncing && (
<>{t('core.dataset.status.syncing')}</>
)}
{datasetDetail.status === DatasetStatusEnum.active && (
<>
{!datasetDetail?.websiteConfig?.url ? (
<>
{t('core.dataset.collection.Website Empty Tip')}
{', '}
<Box
textDecoration={'underline'}
cursor={'pointer'}
onClick={onOpenWebsiteModal}
>
{t('core.dataset.collection.Click top config website')}
</Box>
</>
) : (
<>{t('core.dataset.website.UnValid Website Tip')}</>
)}
</>
)}
</Flex>
}
/>
)}
</>
);
};
export default EmptyCollectionTip;

View File

@ -0,0 +1,399 @@
import React, { useCallback, useRef } from 'react';
import { Box, Flex, MenuButton, Button, Link, useTheme, useDisclosure } from '@chakra-ui/react';
import {
getDatasetCollectionPathById,
postDatasetCollection,
putDatasetCollectionById
} from '@/web/core/dataset/api';
import { useQuery } from '@tanstack/react-query';
import { debounce } from 'lodash';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyInput from '@/components/MyInput';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useRouter } from 'next/router';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
import {
DatasetCollectionTypeEnum,
TrainingModeEnum,
DatasetTypeEnum,
DatasetTypeMap,
DatasetStatusEnum
} from '@fastgpt/global/core/dataset/constants';
import EditFolderModal, { useEditFolder } from '../../../component/EditFolderModal';
import { TabEnum } from '../../index';
import ParentPath from '@/components/common/ParentPaths';
import dynamic from 'next/dynamic';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useContextSelector } from 'use-context-selector';
import { CollectionPageContext } from './Context';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
const FileSourceSelector = dynamic(() => import('../Import/components/FileSourceSelector'));
const Header = ({}: {}) => {
const { t } = useTranslation();
const theme = useTheme();
const { setLoading } = useSystemStore();
const { userInfo } = useUserStore();
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const router = useRouter();
const { parentId = '' } = router.query as { parentId: string; datasetId: string };
const { isPc } = useSystemStore();
const lastSearch = useRef('');
const { searchText, setSearchText, total, getData, pageNum, onOpenWebsiteModal } =
useContextSelector(CollectionPageContext, (v) => v);
// change search
const debounceRefetch = useCallback(
debounce(() => {
getData(1);
lastSearch.current = searchText;
}, 300),
[]
);
const { data: paths = [] } = useQuery(['getDatasetCollectionPathById', parentId], () =>
getDatasetCollectionPathById(parentId)
);
const { editFolderData, setEditFolderData } = useEditFolder();
const { onOpenModal: onOpenCreateVirtualFileModal, EditModal: EditCreateVirtualFileModal } =
useEditTitle({
title: t('dataset.Create manual collection'),
tip: t('dataset.Manual collection Tip'),
canEmpty: false
});
const {
isOpen: isOpenFileSourceSelector,
onOpen: onOpenFileSourceSelector,
onClose: onCloseFileSourceSelector
} = useDisclosure();
const { mutate: onCreateCollection } = useRequest({
mutationFn: async ({
name,
type,
callback,
...props
}: {
name: string;
type: `${DatasetCollectionTypeEnum}`;
callback?: (id: string) => void;
trainingType?: TrainingModeEnum;
rawLink?: string;
chunkSize?: number;
}) => {
setLoading(true);
const id = await postDatasetCollection({
parentId,
datasetId: datasetDetail._id,
name,
type,
...props
});
callback?.(id);
return id;
},
onSuccess() {
getData(pageNum);
},
onSettled() {
setLoading(false);
},
successToast: t('common.Create Success'),
errorToast: t('common.Create Failed')
});
return (
<Flex px={[2, 6]} alignItems={'flex-start'} h={'35px'}>
<Box flex={1}>
<ParentPath
paths={paths.map((path, i) => ({
parentId: path.parentId,
parentName: i === paths.length - 1 ? `${path.parentName}` : path.parentName
}))}
FirstPathDom={
<>
<Box fontWeight={'bold'} fontSize={['sm', 'lg']}>
{t(DatasetTypeMap[datasetDetail?.type]?.collectionLabel)}({total})
</Box>
{datasetDetail?.websiteConfig?.url && (
<Flex fontSize={'sm'}>
{t('core.dataset.website.Base Url')}:
<Link
href={datasetDetail.websiteConfig.url}
target="_blank"
mr={2}
textDecoration={'underline'}
color={'primary.600'}
>
{datasetDetail.websiteConfig.url}
</Link>
</Flex>
)}
</>
}
onClick={(e) => {
router.replace({
query: {
...router.query,
parentId: e
}
});
}}
/>
</Box>
{/* search input */}
{isPc && (
<Flex alignItems={'center'} mr={4}>
<MyInput
bg={'myGray.50'}
w={['100%', '250px']}
size={'sm'}
h={'36px'}
placeholder={t('common.Search') || ''}
value={searchText}
leftIcon={
<MyIcon
name="common/searchLight"
position={'absolute'}
w={'16px'}
color={'myGray.500'}
/>
}
onChange={(e) => {
setSearchText(e.target.value);
debounceRefetch();
}}
onBlur={() => {
if (searchText === lastSearch.current) return;
getData(1);
}}
onKeyDown={(e) => {
if (searchText === lastSearch.current) return;
if (e.key === 'Enter') {
getData(1);
}
}}
/>
</Flex>
)}
{/* diff collection button */}
{userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
<>
{datasetDetail?.type === DatasetTypeEnum.dataset && (
<MyMenu
offset={[0, 5]}
Button={
<MenuButton
_hover={{
color: 'primary.500'
}}
fontSize={['sm', 'md']}
>
<Flex
alignItems={'center'}
px={5}
py={2}
borderRadius={'md'}
cursor={'pointer'}
bg={'primary.500'}
overflow={'hidden'}
color={'white'}
h={['28px', '35px']}
>
<MyIcon name={'common/importLight'} mr={2} w={'14px'} />
<Box>{t('dataset.collections.Create And Import')}</Box>
</Flex>
</MenuButton>
}
menuList={[
{
label: (
<Flex>
<MyIcon name={'common/folderFill'} w={'20px'} mr={2} />
{t('Folder')}
</Flex>
),
onClick: () => setEditFolderData({})
},
{
label: (
<Flex>
<MyIcon name={'core/dataset/manualCollection'} mr={2} w={'20px'} />
{t('core.dataset.Manual collection')}
</Flex>
),
onClick: () => {
onOpenCreateVirtualFileModal({
defaultVal: '',
onSuccess: (name) => {
onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual });
}
});
}
},
{
label: (
<Flex>
<MyIcon name={'core/dataset/fileCollection'} mr={2} w={'20px'} />
{t('core.dataset.Text collection')}
</Flex>
),
onClick: onOpenFileSourceSelector
},
{
label: (
<Flex>
<MyIcon name={'core/dataset/tableCollection'} mr={2} w={'20px'} />
{t('core.dataset.Table collection')}
</Flex>
),
onClick: () =>
router.replace({
query: {
...router.query,
currentTab: TabEnum.import,
source: ImportDataSourceEnum.csvTable
}
})
}
]}
/>
)}
{datasetDetail?.type === DatasetTypeEnum.websiteDataset && (
<>
{datasetDetail?.websiteConfig?.url ? (
<Flex alignItems={'center'}>
{datasetDetail.status === DatasetStatusEnum.active && (
<Button onClick={onOpenWebsiteModal}>{t('common.Config')}</Button>
)}
{datasetDetail.status === DatasetStatusEnum.syncing && (
<Flex
ml={3}
alignItems={'center'}
px={3}
py={1}
borderRadius="md"
border={theme.borders.base}
>
<Box
animation={'zoomStopIcon 0.5s infinite alternate'}
bg={'myGray.700'}
w="8px"
h="8px"
borderRadius={'50%'}
mt={'1px'}
></Box>
<Box ml={2} color={'myGray.600'}>
{t('core.dataset.status.syncing')}
</Box>
</Flex>
)}
</Flex>
) : (
<Button onClick={onOpenWebsiteModal}>{t('core.dataset.Set Website Config')}</Button>
)}
</>
)}
{datasetDetail?.type === DatasetTypeEnum.externalFile && (
<MyMenu
offset={[0, 5]}
Button={
<MenuButton
_hover={{
color: 'primary.500'
}}
fontSize={['sm', 'md']}
>
<Flex
alignItems={'center'}
px={5}
py={2}
borderRadius={'md'}
cursor={'pointer'}
bg={'primary.500'}
overflow={'hidden'}
color={'white'}
h={['28px', '35px']}
>
<MyIcon name={'common/importLight'} mr={2} w={'14px'} />
<Box>{t('dataset.collections.Create And Import')}</Box>
</Flex>
</MenuButton>
}
menuList={[
{
label: (
<Flex>
<MyIcon name={'common/folderFill'} w={'20px'} mr={2} />
{t('Folder')}
</Flex>
),
onClick: () => setEditFolderData({})
},
{
label: (
<Flex>
<MyIcon name={'core/dataset/fileCollection'} mr={2} w={'20px'} />
{t('core.dataset.Text collection')}
</Flex>
),
onClick: () =>
router.replace({
query: {
...router.query,
currentTab: TabEnum.import,
source: ImportDataSourceEnum.externalFile
}
})
}
]}
/>
)}
</>
)}
{/* modal */}
{!!editFolderData && (
<EditFolderModal
onClose={() => setEditFolderData(undefined)}
editCallback={async (name) => {
try {
if (editFolderData.id) {
await putDatasetCollectionById({
id: editFolderData.id,
name
});
getData(pageNum);
} else {
onCreateCollection({
name,
type: DatasetCollectionTypeEnum.folder
});
}
} catch (error) {
return Promise.reject(error);
}
}}
isEdit={!!editFolderData.id}
name={editFolderData.name}
/>
)}
<EditCreateVirtualFileModal iconSrc={'modal/manualDataset'} closeBtnText={''} />
{isOpenFileSourceSelector && <FileSourceSelector onClose={onCloseFileSourceSelector} />}
</Flex>
);
};
export default Header;

View File

@ -1,4 +1,4 @@
import React, { useCallback, useState, useRef, useMemo, useEffect } from 'react';
import React, { useState, useRef, useMemo } from 'react';
import {
Box,
Flex,
@ -9,47 +9,29 @@ import {
Th,
Td,
Tbody,
Image,
MenuButton,
useDisclosure,
Button,
Link,
useTheme
MenuButton
} from '@chakra-ui/react';
import {
getDatasetCollections,
delDatasetCollectionById,
putDatasetCollectionById,
postDatasetCollection,
getDatasetCollectionPathById,
postLinkCollectionSync
} from '@/web/core/dataset/api';
import { useQuery } from '@tanstack/react-query';
import { debounce } from 'lodash';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyInput from '@/components/MyInput';
import dayjs from 'dayjs';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useRouter } from 'next/router';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import {
DatasetCollectionTypeEnum,
TrainingModeEnum,
DatasetTypeEnum,
DatasetTypeMap,
DatasetStatusEnum,
DatasetCollectionSyncResultMap
} from '@fastgpt/global/core/dataset/constants';
import { getCollectionIcon } from '@fastgpt/global/core/dataset/utils';
import EditFolderModal, { useEditFolder } from '../../component/EditFolderModal';
import { TabEnum } from '..';
import ParentPath from '@/components/common/ParentPaths';
import { TabEnum } from '../../index';
import dynamic from 'next/dynamic';
import { useDrag } from '@/web/common/hooks/useDrag';
import SelectCollections from '@/web/core/dataset/components/SelectCollections';
@ -58,27 +40,22 @@ import MyTooltip from '@/components/MyTooltip';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { DatasetCollectionSyncResultEnum } from '@fastgpt/global/core/dataset/constants';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { usePagination } from '@fastgpt/web/hooks/usePagination';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useContextSelector } from 'use-context-selector';
import { CollectionPageContext } from './Context';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
const WebSiteConfigModal = dynamic(() => import('./Import/WebsiteConfig'), {});
const FileSourceSelector = dynamic(() => import('./Import/components/FileSourceSelector'), {});
const Header = dynamic(() => import('./Header'));
const EmptyCollectionTip = dynamic(() => import('./EmptyCollectionTip'));
const CollectionCard = () => {
const BoxRef = useRef<HTMLDivElement>(null);
const lastSearch = useRef('');
const router = useRouter();
const theme = useTheme();
const { toast } = useToast();
const { parentId = '', datasetId } = router.query as { parentId: string; datasetId: string };
const { t } = useTranslation();
const { isPc } = useSystemStore();
const { userInfo } = useUserStore();
const [searchText, setSearchText] = useState('');
const { datasetDetail, updateDataset, startWebsiteSync, loadDatasetDetail } = useDatasetStore();
const { datasetDetail, loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);
const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
content: t('dataset.Confirm to delete the file'),
@ -88,66 +65,18 @@ const CollectionCard = () => {
content: t('core.dataset.collection.Start Sync Tip')
});
const {
isOpen: isOpenFileSourceSelector,
onOpen: onOpenFileSourceSelector,
onClose: onCloseFileSourceSelector
} = useDisclosure();
const {
isOpen: isOpenWebsiteModal,
onOpen: onOpenWebsiteModal,
onClose: onCloseWebsiteModal
} = useDisclosure();
const { onOpenModal: onOpenCreateVirtualFileModal, EditModal: EditCreateVirtualFileModal } =
useEditTitle({
title: t('dataset.Create manual collection'),
tip: t('dataset.Manual collection Tip'),
canEmpty: false
});
const { onOpenModal: onOpenEditTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('Rename')
});
const { editFolderData, setEditFolderData } = useEditFolder();
const [moveCollectionData, setMoveCollectionData] = useState<{ collectionId: string }>();
const {
data: collections,
Pagination,
total,
getData,
isLoading: isGetting,
pageNum,
pageSize
} = usePagination<DatasetCollectionsListItemType>({
api: getDatasetCollections,
pageSize: 20,
params: {
datasetId,
parentId,
searchText
},
defaultRequest: false,
onChange() {
if (BoxRef.current) {
BoxRef.current.scrollTop = 0;
}
}
});
const { collections, Pagination, total, getData, isGetting, pageNum, pageSize } =
useContextSelector(CollectionPageContext, (v) => v);
const { dragStartId, setDragStartId, dragTargetId, setDragTargetId } = useDrag();
// change search
const debounceRefetch = useCallback(
debounce(() => {
getData(1);
lastSearch.current = searchText;
}, 300),
[]
);
// add file icon
// Add file status icon
const formatCollections = useMemo(
() =>
collections.map((collection) => {
@ -180,37 +109,6 @@ const CollectionCard = () => {
[collections, t]
);
const { mutate: onCreateCollection, isLoading: isCreating } = useRequest({
mutationFn: async ({
name,
type,
callback,
...props
}: {
name: string;
type: `${DatasetCollectionTypeEnum}`;
callback?: (id: string) => void;
trainingType?: `${TrainingModeEnum}`;
rawLink?: string;
chunkSize?: number;
}) => {
const id = await postDatasetCollection({
parentId,
datasetId,
name,
type,
...props
});
callback?.(id);
return id;
},
onSuccess() {
getData(pageNum);
},
successToast: t('common.Create Success'),
errorToast: t('common.Create Failed')
});
const { mutate: onUpdateCollectionName } = useRequest({
mutationFn: ({ collectionId, name }: { collectionId: string; name: string }) => {
return putDatasetCollectionById({
@ -237,17 +135,7 @@ const CollectionCard = () => {
successToast: t('common.Delete Success'),
errorToast: t('common.Delete Failed')
});
const { mutate: onUpdateDatasetWebsiteConfig, isLoading: isUpdating } = useRequest({
mutationFn: async (websiteConfig: DatasetSchemaType['websiteConfig']) => {
onCloseWebsiteModal();
await updateDataset({
id: datasetDetail._id,
websiteConfig
});
return startWebsiteSync();
},
errorToast: t('common.Update Failed')
});
const { mutate: onclickStartSync, isLoading: isSyncing } = useRequest({
mutationFn: (collectionId: string) => {
return postLinkCollectionSync(collectionId);
@ -262,22 +150,13 @@ const CollectionCard = () => {
errorToast: t('core.dataset.error.Start Sync Failed')
});
const { data: paths = [] } = useQuery(['getDatasetCollectionPathById', parentId], () =>
getDatasetCollectionPathById(parentId)
);
const hasTrainingData = useMemo(
() => !!formatCollections.find((item) => item.trainingAmount > 0),
[formatCollections]
);
const isLoading = useMemo(
() =>
isCreating ||
isDeleting ||
isUpdating ||
isSyncing ||
(isGetting && collections.length === 0),
[collections.length, isCreating, isDeleting, isGetting, isSyncing, isUpdating]
() => isDeleting || isSyncing || (isGetting && collections.length === 0),
[collections.length, isDeleting, isGetting, isSyncing]
);
useQuery(
@ -285,7 +164,7 @@ const CollectionCard = () => {
() => {
getData(1);
if (datasetDetail.status === DatasetStatusEnum.syncing) {
loadDatasetDetail(datasetId, true);
loadDatasetDetail(datasetDetail._id);
}
return null;
},
@ -295,207 +174,11 @@ const CollectionCard = () => {
}
);
useEffect(() => {
getData(1);
}, [parentId]);
return (
<MyBox isLoading={isLoading} h={'100%'} py={[2, 4]}>
<Flex ref={BoxRef} flexDirection={'column'} py={[1, 3]} h={'100%'}>
{/* header */}
<Flex px={[2, 6]} alignItems={'flex-start'} h={'35px'}>
<Box flex={1}>
<ParentPath
paths={paths.map((path, i) => ({
parentId: path.parentId,
parentName: i === paths.length - 1 ? `${path.parentName}` : path.parentName
}))}
FirstPathDom={
<>
<Box fontWeight={'bold'} fontSize={['sm', 'lg']}>
{t(DatasetTypeMap[datasetDetail?.type]?.collectionLabel)}({total})
</Box>
{datasetDetail?.websiteConfig?.url && (
<Flex fontSize={'sm'}>
{t('core.dataset.website.Base Url')}:
<Link
href={datasetDetail.websiteConfig.url}
target="_blank"
mr={2}
textDecoration={'underline'}
color={'primary.600'}
>
{datasetDetail.websiteConfig.url}
</Link>
</Flex>
)}
</>
}
onClick={(e) => {
router.replace({
query: {
...router.query,
parentId: e
}
});
}}
/>
</Box>
{isPc && (
<Flex alignItems={'center'} mr={4}>
<MyInput
bg={'myGray.50'}
w={['100%', '250px']}
size={'sm'}
h={'36px'}
placeholder={t('common.Search') || ''}
value={searchText}
leftIcon={
<MyIcon
name="common/searchLight"
position={'absolute'}
w={'16px'}
color={'myGray.500'}
/>
}
onChange={(e) => {
setSearchText(e.target.value);
debounceRefetch();
}}
onBlur={() => {
if (searchText === lastSearch.current) return;
getData(1);
}}
onKeyDown={(e) => {
if (searchText === lastSearch.current) return;
if (e.key === 'Enter') {
getData(1);
}
}}
/>
</Flex>
)}
{datasetDetail?.type === DatasetTypeEnum.dataset && (
<>
{userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
<MyMenu
offset={[0, 5]}
Button={
<MenuButton
_hover={{
color: 'primary.500'
}}
fontSize={['sm', 'md']}
>
<Flex
alignItems={'center'}
px={5}
py={2}
borderRadius={'md'}
cursor={'pointer'}
bg={'primary.500'}
overflow={'hidden'}
color={'white'}
h={['28px', '35px']}
>
<MyIcon name={'common/importLight'} mr={2} w={'14px'} />
<Box>{t('dataset.collections.Create And Import')}</Box>
</Flex>
</MenuButton>
}
menuList={[
{
label: (
<Flex>
<MyIcon name={'common/folderFill'} w={'20px'} mr={2} />
{t('Folder')}
</Flex>
),
onClick: () => setEditFolderData({})
},
{
label: (
<Flex>
<MyIcon name={'core/dataset/manualCollection'} mr={2} w={'20px'} />
{t('core.dataset.Manual collection')}
</Flex>
),
onClick: () => {
onOpenCreateVirtualFileModal({
defaultVal: '',
onSuccess: (name) => {
onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual });
}
});
}
},
{
label: (
<Flex>
<MyIcon name={'core/dataset/fileCollection'} mr={2} w={'20px'} />
{t('core.dataset.Text collection')}
</Flex>
),
onClick: onOpenFileSourceSelector
},
{
label: (
<Flex>
<MyIcon name={'core/dataset/tableCollection'} mr={2} w={'20px'} />
{t('core.dataset.Table collection')}
</Flex>
),
onClick: () =>
router.replace({
query: {
...router.query,
currentTab: TabEnum.import,
source: ImportDataSourceEnum.csvTable
}
})
}
]}
/>
)}
</>
)}
{datasetDetail?.type === DatasetTypeEnum.websiteDataset && (
<>
{datasetDetail?.websiteConfig?.url ? (
<Flex alignItems={'center'}>
{datasetDetail.status === DatasetStatusEnum.active && (
<Button onClick={onOpenWebsiteModal}>{t('common.Config')}</Button>
)}
{datasetDetail.status === DatasetStatusEnum.syncing && (
<Flex
ml={3}
alignItems={'center'}
px={3}
py={1}
borderRadius="md"
border={theme.borders.base}
>
<Box
animation={'zoomStopIcon 0.5s infinite alternate'}
bg={'myGray.700'}
w="8px"
h="8px"
borderRadius={'50%'}
mt={'1px'}
></Box>
<Box ml={2} color={'myGray.600'}>
{t('core.dataset.status.syncing')}
</Box>
</Flex>
)}
</Flex>
) : (
<Button onClick={onOpenWebsiteModal}>{t('core.dataset.Set Website Config')}</Button>
)}
</>
)}
</Flex>
<Header />
{/* collection table */}
<TableContainer
@ -731,86 +414,16 @@ const CollectionCard = () => {
<Pagination />
</Flex>
)}
{total === 0 && (
<EmptyTip
text={
datasetDetail.type === DatasetTypeEnum.dataset ? (
t('core.dataset.collection.Empty Tip')
) : (
<Flex>
{datasetDetail.status === DatasetStatusEnum.syncing && (
<>{t('core.dataset.status.syncing')}</>
)}
{datasetDetail.status === DatasetStatusEnum.active && (
<>
{!datasetDetail?.websiteConfig?.url ? (
<>
{t('core.dataset.collection.Website Empty Tip')}
{', '}
<Box
textDecoration={'underline'}
cursor={'pointer'}
onClick={onOpenWebsiteModal}
>
{t('core.dataset.collection.Click top config website')}
</Box>
</>
) : (
<>{t('core.dataset.website.UnValid Website Tip')}</>
)}
</>
)}
</Flex>
)
}
/>
)}
{total === 0 && <EmptyCollectionTip />}
</TableContainer>
<ConfirmDeleteModal />
<ConfirmSyncModal />
<EditTitleModal />
<EditCreateVirtualFileModal iconSrc={'modal/manualDataset'} closeBtnText={''} />
{/* {isOpenFileImportModal && (
<FileImportModal
datasetId={datasetId}
parentId={parentId}
uploadSuccess={() => {
getData(1);
onCloseFileImportModal();
}}
onClose={onCloseFileImportModal}
/>
)} */}
{isOpenFileSourceSelector && <FileSourceSelector onClose={onCloseFileSourceSelector} />}
{!!editFolderData && (
<EditFolderModal
onClose={() => setEditFolderData(undefined)}
editCallback={async (name) => {
try {
if (editFolderData.id) {
await putDatasetCollectionById({
id: editFolderData.id,
name
});
getData(pageNum);
} else {
onCreateCollection({
name,
type: DatasetCollectionTypeEnum.folder
});
}
} catch (error) {
return Promise.reject(error);
}
}}
isEdit={!!editFolderData.id}
name={editFolderData.name}
/>
)}
{!!moveCollectionData && (
<SelectCollections
datasetId={datasetId}
datasetId={datasetDetail._id}
type="folder"
defaultSelectedId={[moveCollectionData.collectionId]}
onClose={() => setMoveCollectionData(undefined)}
@ -828,16 +441,6 @@ const CollectionCard = () => {
}}
/>
)}
{isOpenWebsiteModal && (
<WebSiteConfigModal
onClose={onCloseWebsiteModal}
onSuccess={onUpdateDatasetWebsiteConfig}
defaultValue={{
url: datasetDetail?.websiteConfig?.url,
selector: datasetDetail?.websiteConfig?.selector
}}
/>
)}
</Flex>
</MyBox>
);

View File

@ -0,0 +1,302 @@
import { useRouter } from 'next/router';
import { SetStateAction, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { createContext, useContextSelector } from 'use-context-selector';
import { ImportDataSourceEnum, TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { useMyStep } from '@fastgpt/web/hooks/useStep';
import { Box, Button, Flex, IconButton } from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { TabEnum } from '../Slider';
import { ImportProcessWayEnum } from '@/web/core/dataset/constants';
import { UseFormReturn, useForm } from 'react-hook-form';
import { ImportSourceItemType } from '@/web/core/dataset/type';
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
type TrainingFiledType = {
chunkOverlapRatio: number;
maxChunkSize: number;
minChunkSize: number;
autoChunkSize: number;
chunkSize: number;
showChunkInput: boolean;
showPromptInput: boolean;
charsPointsPrice: number;
priceTip: string;
uploadRate: number;
chunkSizeField?: ChunkSizeFieldType;
};
type DatasetImportContextType = {
importSource: ImportDataSourceEnum;
parentId: string | undefined;
activeStep: number;
goToNext: () => void;
processParamsForm: UseFormReturn<ImportFormType, any>;
sources: ImportSourceItemType[];
setSources: React.Dispatch<React.SetStateAction<ImportSourceItemType[]>>;
} & TrainingFiledType;
type ChunkSizeFieldType = 'embeddingChunkSize';
export type ImportFormType = {
mode: TrainingModeEnum;
way: ImportProcessWayEnum;
embeddingChunkSize: number;
customSplitChar: string;
qaPrompt: string;
webSelector: string;
};
export const DatasetImportContext = createContext<DatasetImportContextType>({
importSource: ImportDataSourceEnum.fileLocal,
goToNext: function (): void {
throw new Error('Function not implemented.');
},
activeStep: 0,
parentId: undefined,
maxChunkSize: 0,
minChunkSize: 0,
showChunkInput: false,
showPromptInput: false,
sources: [],
setSources: function (value: SetStateAction<ImportSourceItemType[]>): void {
throw new Error('Function not implemented.');
},
chunkSize: 0,
chunkOverlapRatio: 0,
uploadRate: 0,
//@ts-ignore
processParamsForm: undefined,
autoChunkSize: 0,
charsPointsPrice: 0,
priceTip: ''
});
const DatasetImportContextProvider = ({ children }: { children: React.ReactNode }) => {
const { t } = useTranslation();
const router = useRouter();
const { source = ImportDataSourceEnum.fileLocal, parentId } = (router.query || {}) as {
source: ImportDataSourceEnum;
parentId?: string;
};
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
// step
const modeSteps: Record<ImportDataSourceEnum, { title: string }[]> = {
[ImportDataSourceEnum.fileLocal]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.fileLink]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.fileCustom]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.csvTable]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.externalFile]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
]
};
const steps = modeSteps[source];
const { activeStep, goToNext, goToPrevious, MyStep } = useMyStep({
defaultStep: 0,
steps
});
// -----
const vectorModel = datasetDetail.vectorModel;
const agentModel = datasetDetail.agentModel;
const processParamsForm = useForm<ImportFormType>({
defaultValues: {
mode: TrainingModeEnum.chunk,
way: ImportProcessWayEnum.auto,
embeddingChunkSize: vectorModel?.defaultToken || 512,
customSplitChar: '',
qaPrompt: Prompt_AgentQA.description,
webSelector: ''
}
});
const [sources, setSources] = useState<ImportSourceItemType[]>([]);
// watch form
const mode = processParamsForm.watch('mode');
const way = processParamsForm.watch('way');
const embeddingChunkSize = processParamsForm.watch('embeddingChunkSize');
const customSplitChar = processParamsForm.watch('customSplitChar');
const modeStaticParams: Record<TrainingModeEnum, TrainingFiledType> = {
[TrainingModeEnum.auto]: {
chunkOverlapRatio: 0.2,
maxChunkSize: 2048,
minChunkSize: 100,
autoChunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
chunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
showChunkInput: false,
showPromptInput: false,
charsPointsPrice: agentModel.charsPointsPrice,
priceTip: t('core.dataset.import.Auto mode Estimated Price Tips', {
price: agentModel.charsPointsPrice
}),
uploadRate: 100
},
[TrainingModeEnum.chunk]: {
chunkSizeField: 'embeddingChunkSize' as ChunkSizeFieldType,
chunkOverlapRatio: 0.2,
maxChunkSize: vectorModel?.maxToken || 512,
minChunkSize: 100,
autoChunkSize: vectorModel?.defaultToken || 512,
chunkSize: embeddingChunkSize,
showChunkInput: true,
showPromptInput: false,
charsPointsPrice: vectorModel.charsPointsPrice,
priceTip: t('core.dataset.import.Embedding Estimated Price Tips', {
price: vectorModel.charsPointsPrice
}),
uploadRate: 150
},
[TrainingModeEnum.qa]: {
chunkOverlapRatio: 0,
maxChunkSize: 8000,
minChunkSize: 3000,
autoChunkSize: agentModel.maxContext * 0.55 || 6000,
chunkSize: agentModel.maxContext * 0.55 || 6000,
showChunkInput: false,
showPromptInput: true,
charsPointsPrice: agentModel.charsPointsPrice,
priceTip: t('core.dataset.import.QA Estimated Price Tips', {
price: agentModel?.charsPointsPrice
}),
uploadRate: 30
}
};
const selectModelStaticParam = modeStaticParams[mode];
const wayStaticPrams = {
[ImportProcessWayEnum.auto]: {
chunkSize: selectModelStaticParam.autoChunkSize,
customSplitChar: ''
},
[ImportProcessWayEnum.custom]: {
chunkSize: modeStaticParams[mode].chunkSize,
customSplitChar
}
};
const chunkSize = wayStaticPrams[way].chunkSize;
const contextValue = {
importSource: source,
parentId,
activeStep,
goToNext,
processParamsForm,
...selectModelStaticParam,
sources,
setSources,
chunkSize
};
return (
<DatasetImportContext.Provider value={contextValue}>
<Flex>
{activeStep === 0 ? (
<Flex alignItems={'center'}>
<IconButton
icon={<MyIcon name={'common/backFill'} w={'14px'} />}
aria-label={''}
size={'smSquare'}
w={'26px'}
h={'26px'}
borderRadius={'50%'}
variant={'whiteBase'}
mr={2}
onClick={() =>
router.replace({
query: {
...router.query,
currentTab: TabEnum.collectionCard
}
})
}
/>
{t('common.Exit')}
</Flex>
) : (
<Button
variant={'whiteBase'}
leftIcon={<MyIcon name={'common/backFill'} w={'14px'} />}
onClick={goToPrevious}
>
{t('common.Last Step')}
</Button>
)}
<Box flex={1} />
</Flex>
{/* step */}
<Box
mt={4}
mb={5}
px={3}
py={[2, 4]}
bg={'myGray.50'}
borderWidth={'1px'}
borderColor={'borderColor.low'}
borderRadius={'md'}
>
<Box maxW={['100%', '900px']} mx={'auto'}>
<MyStep />
</Box>
</Box>
{children}
</DatasetImportContext.Provider>
);
};
export default DatasetImportContextProvider;
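Aside, not part of the diff: the interaction between `modeStaticParams` and `wayStaticPrams` above reduces to a single chunk-size rule. A minimal restatement as a standalone function, with parameter names of my own choosing and the same numeric fallbacks as the provider:

// Sketch only: how DatasetImportContextProvider derives `chunkSize`.
// `vectorDefaultToken` / `agentMaxContext` come from the dataset's models.
type ImportMode = 'auto' | 'chunk' | 'qa';
type ProcessWay = 'auto' | 'custom';

function resolveChunkSize(opts: {
  mode: ImportMode;
  way: ProcessWay;
  embeddingChunkSize: number; // form value, only used for mode=chunk + way=custom
  vectorDefaultToken?: number;
  agentMaxContext: number;
}): number {
  // Fixed size per training mode (same formulas as modeStaticParams).
  const autoChunkSize =
    opts.mode === 'auto'
      ? (opts.vectorDefaultToken ? opts.vectorDefaultToken * 2 : 1024)
      : opts.mode === 'chunk'
        ? opts.vectorDefaultToken || 512
        : opts.agentMaxContext * 0.55 || 6000;

  // way=auto always takes the mode's automatic size.
  if (opts.way === 'auto') return autoChunkSize;

  // way=custom: only chunk mode exposes a user-editable size; auto/qa reuse their fixed size.
  return opts.mode === 'chunk' ? opts.embeddingChunkSize : autoChunkSize;
}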

View File

@ -1,165 +0,0 @@
import React, { useContext, createContext, useState, useMemo, useEffect } from 'react';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { useTranslation } from 'next-i18next';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
import { UseFormReturn, useForm } from 'react-hook-form';
import { ImportProcessWayEnum } from '@/web/core/dataset/constants';
import { ImportSourceItemType } from '@/web/core/dataset/type';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
type ChunkSizeFieldType = 'embeddingChunkSize';
export type FormType = {
mode: `${TrainingModeEnum}`;
way: `${ImportProcessWayEnum}`;
embeddingChunkSize: number;
customSplitChar: string;
qaPrompt: string;
webSelector: string;
};
type useImportStoreType = {
parentId?: string;
processParamsForm: UseFormReturn<FormType, any>;
chunkSizeField?: ChunkSizeFieldType;
maxChunkSize: number;
minChunkSize: number;
showChunkInput: boolean;
showPromptInput: boolean;
sources: ImportSourceItemType[];
setSources: React.Dispatch<React.SetStateAction<ImportSourceItemType[]>>;
chunkSize: number;
chunkOverlapRatio: number;
priceTip: string;
uploadRate: number;
importSource: `${ImportDataSourceEnum}`;
};
const StateContext = createContext<useImportStoreType>({
processParamsForm: {} as any,
sources: [],
setSources: function (value: React.SetStateAction<ImportSourceItemType[]>): void {
throw new Error('Function not implemented.');
},
maxChunkSize: 0,
minChunkSize: 0,
showChunkInput: false,
showPromptInput: false,
chunkSizeField: 'embeddingChunkSize',
chunkSize: 0,
chunkOverlapRatio: 0,
priceTip: '',
uploadRate: 50,
importSource: ImportDataSourceEnum.fileLocal
});
export const useImportStore = () => useContext(StateContext);
const Provider = ({
importSource,
dataset,
parentId,
children
}: {
importSource: `${ImportDataSourceEnum}`;
dataset: DatasetItemType;
parentId?: string;
children: React.ReactNode;
}) => {
const vectorModel = dataset.vectorModel;
const agentModel = dataset.agentModel;
const processParamsForm = useForm<FormType>({
defaultValues: {
mode: TrainingModeEnum.chunk,
way: ImportProcessWayEnum.auto,
embeddingChunkSize: vectorModel?.defaultToken || 512,
customSplitChar: '',
qaPrompt: Prompt_AgentQA.description,
webSelector: ''
}
});
const { t } = useTranslation();
const [sources, setSources] = useState<ImportSourceItemType[]>([]);
// watch form
const mode = processParamsForm.watch('mode');
const way = processParamsForm.watch('way');
const embeddingChunkSize = processParamsForm.watch('embeddingChunkSize');
const customSplitChar = processParamsForm.watch('customSplitChar');
const modeStaticParams = {
[TrainingModeEnum.auto]: {
chunkOverlapRatio: 0.2,
maxChunkSize: 2048,
minChunkSize: 100,
autoChunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
chunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
showChunkInput: false,
showPromptInput: false,
charsPointsPrice: agentModel.charsPointsPrice,
priceTip: t('core.dataset.import.Auto mode Estimated Price Tips', {
price: agentModel.charsPointsPrice
}),
uploadRate: 100
},
[TrainingModeEnum.chunk]: {
chunkSizeField: 'embeddingChunkSize' as ChunkSizeFieldType,
chunkOverlapRatio: 0.2,
maxChunkSize: vectorModel?.maxToken || 512,
minChunkSize: 100,
autoChunkSize: vectorModel?.defaultToken || 512,
chunkSize: embeddingChunkSize,
showChunkInput: true,
showPromptInput: false,
charsPointsPrice: vectorModel.charsPointsPrice,
priceTip: t('core.dataset.import.Embedding Estimated Price Tips', {
price: vectorModel.charsPointsPrice
}),
uploadRate: 150
},
[TrainingModeEnum.qa]: {
chunkOverlapRatio: 0,
maxChunkSize: 8000,
minChunkSize: 3000,
autoChunkSize: agentModel.maxContext * 0.55 || 6000,
chunkSize: agentModel.maxContext * 0.55 || 6000,
showChunkInput: false,
showPromptInput: true,
charsPointsPrice: agentModel.charsPointsPrice,
priceTip: t('core.dataset.import.QA Estimated Price Tips', {
price: agentModel?.charsPointsPrice
}),
uploadRate: 30
}
};
const selectModelStaticParam = useMemo(() => modeStaticParams[mode], [mode]);
const wayStaticPrams = {
[ImportProcessWayEnum.auto]: {
chunkSize: selectModelStaticParam.autoChunkSize,
customSplitChar: ''
},
[ImportProcessWayEnum.custom]: {
chunkSize: modeStaticParams[mode].chunkSize,
customSplitChar
}
};
const chunkSize = wayStaticPrams[way].chunkSize;
const value: useImportStoreType = {
parentId,
processParamsForm,
...selectModelStaticParam,
sources,
setSources,
chunkSize,
importSource
};
return <StateContext.Provider value={value}>{children}</StateContext.Provider>;
};
export default React.memo(Provider);

View File

@ -20,23 +20,20 @@ import LeftRadio from '@fastgpt/web/components/common/Radio/LeftRadio';
import { TrainingTypeMap } from '@fastgpt/global/core/dataset/constants';
import { ImportProcessWayEnum } from '@/web/core/dataset/constants';
import MyTooltip from '@/components/MyTooltip';
import { useImportStore } from '../Provider';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
import Preview from '../components/Preview';
import Tag from '@fastgpt/web/components/common/Tag/index';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
function DataProcess({
showPreviewChunks = true,
goToNext
}: {
showPreviewChunks: boolean;
goToNext: () => void;
}) {
function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean }) {
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
const {
goToNext,
processParamsForm,
chunkSizeField,
minChunkSize,
@ -44,7 +41,7 @@ function DataProcess({
showPromptInput,
maxChunkSize,
priceTip
} = useImportStore();
} = useContextSelector(DatasetImportContext, (v) => v);
const { getValues, setValue, register } = processParamsForm;
const [refresh, setRefresh] = useState(false);

View File

@ -2,15 +2,12 @@ import React from 'react';
import Preview from '../components/Preview';
import { Box, Button, Flex } from '@chakra-ui/react';
import { useTranslation } from 'next-i18next';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const PreviewData = ({
showPreviewChunks,
goToNext
}: {
showPreviewChunks: boolean;
goToNext: () => void;
}) => {
const PreviewData = ({ showPreviewChunks }: { showPreviewChunks: boolean }) => {
const { t } = useTranslation();
const goToNext = useContextSelector(DatasetImportContext, (v) => v.goToNext);
return (
<Flex flexDirection={'column'} h={'100%'}>

View File

@ -11,7 +11,6 @@ import {
Flex,
Button
} from '@chakra-ui/react';
import { useImportStore, type FormType } from '../Provider';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
@ -28,20 +27,23 @@ import {
} from '@/web/core/dataset/api';
import Tag from '@fastgpt/web/components/common/Tag/index';
import { useI18n } from '@/web/context/I18n';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { DatasetImportContext, type ImportFormType } from '../Context';
const Upload = () => {
const { t } = useTranslation();
const { fileT } = useI18n();
const { toast } = useToast();
const router = useRouter();
const { datasetDetail } = useDatasetStore();
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const { importSource, parentId, sources, setSources, processParamsForm, chunkSize } =
useImportStore();
useContextSelector(DatasetImportContext, (v) => v);
const { handleSubmit } = processParamsForm;
const { mutate: startUpload, isLoading } = useRequest({
mutationFn: async ({ mode, customSplitChar, qaPrompt, webSelector }: FormType) => {
mutationFn: async ({ mode, customSplitChar, qaPrompt, webSelector }: ImportFormType) => {
if (sources.length === 0) return;
const filterWaitingSources = sources.filter((item) => item.createStatus === 'waiting');

View File

@ -10,7 +10,7 @@ import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
const FileModeSelector = ({ onClose }: { onClose: () => void }) => {
const { t } = useTranslation();
const router = useRouter();
const [value, setValue] = useState<`${ImportDataSourceEnum}`>(ImportDataSourceEnum.fileLocal);
const [value, setValue] = useState<ImportDataSourceEnum>(ImportDataSourceEnum.fileLocal);
return (
<MyModal

View File

@ -3,17 +3,18 @@ import { Box, Flex, IconButton } from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import { useImportStore } from '../Provider';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import { ImportSourceItemType } from '@/web/core/dataset/type';
import dynamic from 'next/dynamic';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const PreviewRawText = dynamic(() => import('./PreviewRawText'));
const PreviewChunks = dynamic(() => import('./PreviewChunks'));
const Preview = ({ showPreviewChunks }: { showPreviewChunks: boolean }) => {
const { t } = useTranslation();
const { sources } = useImportStore();
const { sources } = useContextSelector(DatasetImportContext, (v) => v);
const [previewRawTextSource, setPreviewRawTextSource] = useState<ImportSourceItemType>();
const [previewChunkSource, setPreviewChunkSource] = useState<ImportSourceItemType>();

View File

@ -4,11 +4,12 @@ import { ImportSourceItemType } from '@/web/core/dataset/type';
import { useQuery } from '@tanstack/react-query';
import MyRightDrawer from '@fastgpt/web/components/common/MyDrawer/MyRightDrawer';
import { getPreviewChunks } from '@/web/core/dataset/api';
import { useImportStore } from '../Provider';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const PreviewChunks = ({
previewSource,
@ -18,7 +19,10 @@ const PreviewChunks = ({
onClose: () => void;
}) => {
const { toast } = useToast();
const { importSource, chunkSize, chunkOverlapRatio, processParamsForm } = useImportStore();
const { importSource, chunkSize, chunkOverlapRatio, processParamsForm } = useContextSelector(
DatasetImportContext,
(v) => v
);
const { data = [], isLoading } = useQuery(
['previewSource'],

View File

@ -4,10 +4,11 @@ import { ImportSourceItemType } from '@/web/core/dataset/type';
import { useQuery } from '@tanstack/react-query';
import { getPreviewFileContent } from '@/web/common/file/api';
import MyRightDrawer from '@fastgpt/web/components/common/MyDrawer/MyRightDrawer';
import { useImportStore } from '../Provider';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const PreviewRawText = ({
previewSource,
@ -17,7 +18,7 @@ const PreviewRawText = ({
onClose: () => void;
}) => {
const { toast } = useToast();
const { importSource } = useImportStore();
const { importSource } = useContextSelector(DatasetImportContext, (v) => v);
const { data, isLoading } = useQuery(
['previewSource', previewSource?.dbFileId],

View File

@ -0,0 +1,188 @@
import React, { useEffect } from 'react';
import dynamic from 'next/dynamic';
import { useTranslation } from 'next-i18next';
import { useFieldArray, useForm } from 'react-hook-form';
import {
Box,
Button,
Flex,
Table,
Thead,
Tbody,
Tr,
Th,
Td,
TableContainer,
Input
} from '@chakra-ui/react';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import MyIcon from '@fastgpt/web/components/common/Icon';
import Loading from '@fastgpt/web/components/common/MyLoading';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { useI18n } from '@/web/context/I18n';
import { SmallAddIcon } from '@chakra-ui/icons';
const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
loading: () => <Loading fixed={false} />
});
const Upload = dynamic(() => import('../commonProgress/Upload'));
const ExternalFileCollection = () => {
const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
return (
<>
{activeStep === 0 && <CustomLinkInput />}
{activeStep === 1 && <DataProcess showPreviewChunks={true} />}
{activeStep === 2 && <Upload />}
</>
);
};
export default React.memo(ExternalFileCollection);
const CustomLinkInput = () => {
const { t } = useTranslation();
const { datasetT, commonT } = useI18n();
const { goToNext, sources, setSources } = useContextSelector(DatasetImportContext, (v) => v);
const { register, reset, handleSubmit, control } = useForm<{
list: {
sourceName: string;
sourceUrl: string;
externalId: string;
}[];
}>({
defaultValues: {
list: [
{
sourceName: '',
sourceUrl: '',
externalId: ''
}
]
}
});
const {
fields: list,
append,
remove,
update
} = useFieldArray({
control,
name: 'list'
});
useEffect(() => {
if (sources.length > 0) {
reset({
list: sources.map((item) => ({
sourceName: item.sourceName,
sourceUrl: item.sourceUrl || '',
externalId: item.externalId || ''
}))
});
}
}, []);
return (
<Box>
<TableContainer>
<Table bg={'white'}>
<Thead>
<Tr bg={'myGray.50'}>
<Th>{datasetT('External url')}</Th>
<Th>{datasetT('External id')}</Th>
<Th>{datasetT('filename')}</Th>
<Th></Th>
</Tr>
</Thead>
<Tbody>
{list.map((item, index) => (
<Tr key={item.id}>
<Td>
<Input
{...register(`list.${index}.sourceUrl`, {
required: index !== list.length - 1,
onBlur(e) {
const val = (e.target.value || '') as string;
if (val.includes('.') && !list[index]?.sourceName) {
const sourceName = val.split('/').pop() || '';
update(index, {
...list[index],
sourceUrl: val,
sourceName: decodeURIComponent(sourceName)
});
}
if (val && index === list.length - 1) {
append({
sourceName: '',
sourceUrl: '',
externalId: ''
});
}
}
})}
/>
</Td>
<Td>
<Input {...register(`list.${index}.externalId`)} />
</Td>
<Td>
<Input {...register(`list.${index}.sourceName`)} />
</Td>
<Td>
<MyIcon
name={'delete'}
w={'16px'}
cursor={'pointer'}
_hover={{ color: 'red.600' }}
onClick={() => remove(index)}
/>
</Td>
</Tr>
))}
</Tbody>
</Table>
</TableContainer>
<Flex mt={5} justifyContent={'space-between'}>
<Button
variant={'whitePrimary'}
leftIcon={<SmallAddIcon />}
onClick={() => {
append({
sourceName: '',
sourceUrl: '',
externalId: ''
});
}}
>
{commonT('Add new')}
</Button>
<Button
isDisabled={list.length === 0}
onClick={handleSubmit((data) => {
setSources(
data.list
.filter((item) => !!item.sourceUrl)
.map((item) => ({
id: getNanoid(32),
createStatus: 'waiting',
sourceName: item.sourceName || item.sourceUrl,
icon: getFileIcon(item.sourceUrl),
externalId: item.externalId,
sourceUrl: item.sourceUrl
}))
);
goToNext();
})}
>
{t('common.Next Step')}
</Button>
</Flex>
</Box>
);
};
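For orientation (illustrative values, not from the diff): submitting the form above with one filled-in row hands `setSources` an entry of roughly the following shape. The file name, URL and external id below are made up; the helpers are the ones imported by this component.

import { getNanoid } from '@fastgpt/global/common/string/tools';
import { getFileIcon } from '@fastgpt/global/common/file/icon';

// Illustrative only: one external-file row after clicking "Next Step".
const exampleSource = {
  id: getNanoid(32),                                         // random 32-char id
  createStatus: 'waiting' as const,                          // queued for the Upload step
  sourceName: 'report.pdf',                                  // auto-filled from the URL's last path segment on blur
  icon: getFileIcon('https://example.com/files/report.pdf'), // icon chosen from the file extension
  externalId: 'doc-1001',                                    // optional id in the external system
  sourceUrl: 'https://example.com/files/report.pdf'
};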

View File

@ -1,24 +1,25 @@
import React, { useCallback, useEffect } from 'react';
import { ImportDataComponentProps } from '@/web/core/dataset/type.d';
import dynamic from 'next/dynamic';
import { useImportStore } from '../Provider';
import { useTranslation } from 'next-i18next';
import { useForm } from 'react-hook-form';
import { Box, Button, Flex, Input, Textarea } from '@chakra-ui/react';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import Loading from '@fastgpt/web/components/common/MyLoading';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
loading: () => <Loading fixed={false} />
});
const Upload = dynamic(() => import('../commonProgress/Upload'));
const CustomTet = ({ activeStep, goToNext }: ImportDataComponentProps) => {
const CustomTet = () => {
const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
return (
<>
{activeStep === 0 && <CustomTextInput goToNext={goToNext} />}
{activeStep === 1 && <DataProcess showPreviewChunks goToNext={goToNext} />}
{activeStep === 0 && <CustomTextInput />}
{activeStep === 1 && <DataProcess showPreviewChunks />}
{activeStep === 2 && <Upload />}
</>
);
@ -26,9 +27,9 @@ const CustomTet = ({ activeStep, goToNext }: ImportDataComponentProps) => {
export default React.memo(CustomTet);
const CustomTextInput = ({ goToNext }: { goToNext: () => void }) => {
const CustomTextInput = () => {
const { t } = useTranslation();
const { sources, setSources } = useImportStore();
const { sources, goToNext, setSources } = useContextSelector(DatasetImportContext, (v) => v);
const { register, reset, handleSubmit } = useForm({
defaultValues: {
name: '',

View File

@ -1,8 +1,5 @@
import React, { useEffect } from 'react';
import { ImportDataComponentProps } from '@/web/core/dataset/type.d';
import dynamic from 'next/dynamic';
import { useImportStore } from '../Provider';
import { useTranslation } from 'next-i18next';
import { useForm } from 'react-hook-form';
import { Box, Button, Flex, Input, Link, Textarea } from '@chakra-ui/react';
@ -12,17 +9,21 @@ import { LinkCollectionIcon } from '@fastgpt/global/core/dataset/constants';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { getDocPath } from '@/web/common/system/doc';
import Loading from '@fastgpt/web/components/common/MyLoading';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
loading: () => <Loading fixed={false} />
});
const Upload = dynamic(() => import('../commonProgress/Upload'));
const LinkCollection = ({ activeStep, goToNext }: ImportDataComponentProps) => {
const LinkCollection = () => {
const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
return (
<>
{activeStep === 0 && <CustomLinkImport goToNext={goToNext} />}
{activeStep === 1 && <DataProcess showPreviewChunks={false} goToNext={goToNext} />}
{activeStep === 0 && <CustomLinkImport />}
{activeStep === 1 && <DataProcess showPreviewChunks={false} />}
{activeStep === 2 && <Upload />}
</>
);
@ -30,10 +31,13 @@ const LinkCollection = ({ activeStep, goToNext }: ImportDataComponentProps) => {
export default React.memo(LinkCollection);
const CustomLinkImport = ({ goToNext }: { goToNext: () => void }) => {
const CustomLinkImport = () => {
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
const { sources, setSources, processParamsForm } = useImportStore();
const { goToNext, sources, setSources, processParamsForm } = useContextSelector(
DatasetImportContext,
(v) => v
);
const { register, reset, handleSubmit, watch } = useForm({
defaultValues: {
link: ''

View File

@ -1,13 +1,14 @@
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { ImportDataComponentProps, ImportSourceItemType } from '@/web/core/dataset/type.d';
import { ImportSourceItemType } from '@/web/core/dataset/type.d';
import { Box, Button } from '@chakra-ui/react';
import FileSelector from '../components/FileSelector';
import { useTranslation } from 'next-i18next';
import { useImportStore } from '../Provider';
import dynamic from 'next/dynamic';
import Loading from '@fastgpt/web/components/common/MyLoading';
import { RenderUploadFiles } from '../components/RenderFiles';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
loading: () => <Loading fixed={false} />
@ -16,11 +17,13 @@ const Upload = dynamic(() => import('../commonProgress/Upload'));
const fileType = '.txt, .docx, .csv, .xlsx, .pdf, .md, .html, .pptx';
const FileLocal = ({ activeStep, goToNext }: ImportDataComponentProps) => {
const FileLocal = () => {
const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
return (
<>
{activeStep === 0 && <SelectFile goToNext={goToNext} />}
{activeStep === 1 && <DataProcess showPreviewChunks goToNext={goToNext} />}
{activeStep === 0 && <SelectFile />}
{activeStep === 1 && <DataProcess showPreviewChunks />}
{activeStep === 2 && <Upload />}
</>
);
@ -28,9 +31,9 @@ const FileLocal = ({ activeStep, goToNext }: ImportDataComponentProps) => {
export default React.memo(FileLocal);
const SelectFile = React.memo(function SelectFile({ goToNext }: { goToNext: () => void }) {
const SelectFile = React.memo(function SelectFile() {
const { t } = useTranslation();
const { sources, setSources } = useImportStore();
const { goToNext, sources, setSources } = useContextSelector(DatasetImportContext, (v) => v);
const [selectFiles, setSelectFiles] = useState<ImportSourceItemType[]>(
sources.map((source) => ({
isUploading: false,

View File

@ -1,24 +1,27 @@
import React, { useEffect, useMemo, useState } from 'react';
import { ImportDataComponentProps, ImportSourceItemType } from '@/web/core/dataset/type.d';
import { ImportSourceItemType } from '@/web/core/dataset/type.d';
import { Box, Button } from '@chakra-ui/react';
import FileSelector from '../components/FileSelector';
import { useTranslation } from 'next-i18next';
import { useImportStore } from '../Provider';
import dynamic from 'next/dynamic';
import { fileDownload } from '@/web/common/file/utils';
import { RenderUploadFiles } from '../components/RenderFiles';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
const PreviewData = dynamic(() => import('../commonProgress/PreviewData'));
const Upload = dynamic(() => import('../commonProgress/Upload'));
const fileType = '.csv';
const FileLocal = ({ activeStep, goToNext }: ImportDataComponentProps) => {
const FileLocal = () => {
const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
return (
<>
{activeStep === 0 && <SelectFile goToNext={goToNext} />}
{activeStep === 1 && <PreviewData showPreviewChunks goToNext={goToNext} />}
{activeStep === 0 && <SelectFile />}
{activeStep === 1 && <PreviewData showPreviewChunks />}
{activeStep === 2 && <Upload />}
</>
);
@ -32,9 +35,9 @@ const csvTemplate = `"第一列内容","第二列内容"
"结合人工智能的演进历程,AIGC的发展大致可以分为三个阶段即:早期萌芽阶段(20世纪50年代至90年代中期)、沉淀积累阶段(20世纪90年代中期至21世纪10年代中期),以及快速发展展阶段(21世纪10年代中期至今)。",""
"AIGC发展分为几个阶段","早期萌芽阶段(20世纪50年代至90年代中期)、沉淀积累阶段(20世纪90年代中期至21世纪10年代中期)、快速发展展阶段(21世纪10年代中期至今)"`;
const SelectFile = React.memo(function SelectFile({ goToNext }: { goToNext: () => void }) {
const SelectFile = React.memo(function SelectFile() {
const { t } = useTranslation();
const { sources, setSources } = useImportStore();
const { goToNext, sources, setSources } = useContextSelector(DatasetImportContext, (v) => v);
const [selectFiles, setSelectFiles] = useState<ImportSourceItemType[]>(
sources.map((source) => ({
isUploading: false,

View File

@ -1,147 +1,42 @@
import React, { useMemo } from 'react';
import { Box, Button, Flex, IconButton } from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import { useRouter } from 'next/router';
import { TabEnum } from '../../index';
import { useMyStep } from '@fastgpt/web/hooks/useStep';
import { Box, Flex } from '@chakra-ui/react';
import dynamic from 'next/dynamic';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import Provider from './Provider';
import { useContextSelector } from 'use-context-selector';
import DatasetImportContextProvider, { DatasetImportContext } from './Context';
const FileLocal = dynamic(() => import('./diffSource/FileLocal'));
const FileLink = dynamic(() => import('./diffSource/FileLink'));
const FileCustomText = dynamic(() => import('./diffSource/FileCustomText'));
const TableLocal = dynamic(() => import('./diffSource/TableLocal'));
const ExternalFileCollection = dynamic(() => import('./diffSource/ExternalFile'));
const ImportDataset = () => {
const { t } = useTranslation();
const router = useRouter();
const { datasetDetail } = useDatasetStore();
const { source = ImportDataSourceEnum.fileLocal, parentId } = (router.query || {}) as {
source: `${ImportDataSourceEnum}`;
parentId?: string;
};
const modeSteps: Record<`${ImportDataSourceEnum}`, { title: string }[]> = {
[ImportDataSourceEnum.fileLocal]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.fileLink]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.fileCustom]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
],
[ImportDataSourceEnum.csvTable]: [
{
title: t('core.dataset.import.Select file')
},
{
title: t('core.dataset.import.Data Preprocessing')
},
{
title: t('core.dataset.import.Upload data')
}
]
};
const steps = modeSteps[source];
const { activeStep, goToNext, goToPrevious, MyStep } = useMyStep({
defaultStep: 0,
steps
});
const importSource = useContextSelector(DatasetImportContext, (v) => v.importSource);
const ImportComponent = useMemo(() => {
if (source === ImportDataSourceEnum.fileLocal) return FileLocal;
if (source === ImportDataSourceEnum.fileLink) return FileLink;
if (source === ImportDataSourceEnum.fileCustom) return FileCustomText;
if (source === ImportDataSourceEnum.csvTable) return TableLocal;
}, [source]);
if (importSource === ImportDataSourceEnum.fileLocal) return FileLocal;
if (importSource === ImportDataSourceEnum.fileLink) return FileLink;
if (importSource === ImportDataSourceEnum.fileCustom) return FileCustomText;
if (importSource === ImportDataSourceEnum.csvTable) return TableLocal;
if (importSource === ImportDataSourceEnum.externalFile) return ExternalFileCollection;
}, [importSource]);
return ImportComponent ? (
<Flex flexDirection={'column'} bg={'white'} h={'100%'} px={[2, 9]} py={[2, 5]}>
<Flex>
{activeStep === 0 ? (
<Flex alignItems={'center'}>
<IconButton
icon={<MyIcon name={'common/backFill'} w={'14px'} />}
aria-label={''}
size={'smSquare'}
w={'26px'}
h={'26px'}
borderRadius={'50%'}
variant={'whiteBase'}
mr={2}
onClick={() =>
router.replace({
query: {
...router.query,
currentTab: TabEnum.collectionCard
}
})
}
/>
{t('common.Exit')}
</Flex>
) : (
<Button
variant={'whiteBase'}
leftIcon={<MyIcon name={'common/backFill'} w={'14px'} />}
onClick={goToPrevious}
>
{t('common.Last Step')}
</Button>
)}
<Box flex={1} />
</Flex>
{/* step */}
<Box
mt={4}
mb={5}
px={3}
py={[2, 4]}
bg={'myGray.50'}
borderWidth={'1px'}
borderColor={'borderColor.low'}
borderRadius={'md'}
>
<Box maxW={['100%', '900px']} mx={'auto'}>
<MyStep />
</Box>
</Box>
<Provider dataset={datasetDetail} parentId={parentId} importSource={source}>
<Box flex={'1 0 0'} overflow={'auto'} position={'relative'}>
<ImportComponent activeStep={activeStep} goToNext={goToNext} />
</Box>
</Provider>
</Flex>
<Box flex={'1 0 0'} overflow={'auto'} position={'relative'}>
<ImportComponent />
</Box>
) : null;
};
export default React.memo(ImportDataset);
const Render = () => {
return (
<Flex flexDirection={'column'} bg={'white'} h={'100%'} px={[2, 9]} py={[2, 5]}>
<DatasetImportContextProvider>
<ImportDataset />
</DatasetImportContextProvider>
</Flex>
);
};
export default React.memo(Render);

View File

@ -23,13 +23,14 @@ import type { VectorModelItemType } from '@fastgpt/global/core/ai/model.d';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import MyDivider from '@fastgpt/web/components/common/MyDivider/index';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
const Info = ({ datasetId }: { datasetId: string }) => {
const { t } = useTranslation();
const { datasetT } = useI18n();
const { datasetDetail, loadDatasetDetail, loadDatasets, updateDataset } = useDatasetStore();
const rebuildingCount = useContextSelector(DatasetPageContext, (v) => v.rebuildingCount);
const trainingCount = useContextSelector(DatasetPageContext, (v) => v.trainingCount);
const { datasetDetail, loadDatasetDetail, updateDataset, rebuildingCount, trainingCount } =
useContextSelector(DatasetPageContext, (v) => v);
const refetchDatasetTraining = useContextSelector(
DatasetPageContext,
(v) => v.refetchDatasetTraining
@ -82,9 +83,6 @@ const Info = ({ datasetId }: { datasetId: string }) => {
...data
});
},
onSuccess() {
loadDatasets();
},
successToast: t('common.Update Success'),
errorToast: t('common.Update Failed')
});
@ -117,7 +115,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
},
onSuccess() {
refetchDatasetTraining();
loadDatasetDetail(datasetId, true);
loadDatasetDetail(datasetId);
},
successToast: datasetT('Rebuild embedding start tip'),
errorToast: t('common.Update Failed')
@ -128,16 +126,16 @@ const Info = ({ datasetId }: { datasetId: string }) => {
return (
<Box py={5} px={[5, 10]}>
<Flex mt={5} w={'100%'} alignItems={'center'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{t('core.dataset.Dataset ID')}
</Box>
<Box flex={1}>{datasetDetail._id}</Box>
</Flex>
<Flex mt={8} w={'100%'} alignItems={'center'} flexWrap={'wrap'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{t('core.ai.model.Vector Model')}
</Box>
<Box flex={[1, '0 0 300px']}>
<Box flex={[1, '0 0 320px']}>
<AIModelSelector
w={'100%'}
value={vectorModel.model}
@ -162,16 +160,16 @@ const Info = ({ datasetId }: { datasetId: string }) => {
</Box>
</Flex>
<Flex mt={8} w={'100%'} alignItems={'center'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{t('core.Max Token')}
</Box>
<Box flex={[1, '0 0 300px']}>{vectorModel.maxToken}</Box>
<Box flex={[1, '0 0 320px']}>{vectorModel.maxToken}</Box>
</Flex>
<Flex mt={6} alignItems={'center'} flexWrap={'wrap'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{t('core.ai.model.Dataset Agent Model')}
</Box>
<Box flex={[1, '0 0 300px']}>
<Box flex={[1, '0 0 320px']}>
<AIModelSelector
w={'100%'}
value={agentModel.model}
@ -188,13 +186,29 @@ const Info = ({ datasetId }: { datasetId: string }) => {
</Box>
</Flex>
<MyDivider my={4} h={'2px'} maxW={'500px'} />
<MyDivider my={6} h={'2px'} maxW={'500px'} />
{datasetDetail.type === DatasetTypeEnum.externalFile && (
<>
<Flex w={'100%'} alignItems={'center'}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{datasetT('External read url')}
</Box>
<Input
flex={[1, '0 0 320px']}
placeholder="https://test.com/read?fileId={{fileId}}"
{...register('externalReadUrl')}
/>
</Flex>
<MyDivider my={6} h={'2px'} maxW={'500px'} />
</>
)}
<Flex mt={5} w={'100%'} alignItems={'center'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{t('core.dataset.Avatar')}
</Box>
<Box flex={[1, '0 0 300px']}>
<Box flex={[1, '0 0 320px']}>
<MyTooltip label={t('common.avatar.Select Avatar')}>
<Avatar
m={'auto'}
@ -208,18 +222,20 @@ const Info = ({ datasetId }: { datasetId: string }) => {
</Box>
</Flex>
<Flex mt={8} w={'100%'} alignItems={'center'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{t('core.dataset.Name')}
</Box>
<Input flex={[1, '0 0 300px']} maxLength={30} {...register('name')} />
<Input flex={[1, '0 0 320px']} maxLength={30} {...register('name')} />
</Flex>
<Flex mt={8} alignItems={'center'} w={'100%'}>
<Box flex={['0 0 90px', '0 0 160px']}>{t('common.Intro')}</Box>
<Textarea flex={[1, '0 0 300px']} {...register('intro')} placeholder={t('common.Intro')} />
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']}>
{t('common.Intro')}
</Box>
<Textarea flex={[1, '0 0 320px']} {...register('intro')} placeholder={t('common.Intro')} />
</Flex>
{datasetDetail.isOwner && (
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
{t('user.Permission')}
</Box>
<Box>
@ -234,7 +250,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
)}
<Flex mt={5} w={'100%'} alignItems={'flex-end'}>
<Box flex={['0 0 90px', '0 0 160px']} w={0}></Box>
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}></Box>
<Button
isLoading={btnLoading}
mr={4}

View File

@ -5,17 +5,10 @@ import { useUserStore } from '@/web/support/user/useUserStore';
import { Box, Flex, IconButton, useTheme, Progress } from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import Avatar from '@/components/Avatar';
import {
DatasetStatusEnum,
DatasetTypeEnum,
DatasetTypeMap
} from '@fastgpt/global/core/dataset/constants';
import { DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import SideTabs from '@/components/SideTabs';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useRouter } from 'next/router';
import Tabs from '@/components/Tabs';
import { useContextSelector } from 'use-context-selector';
@ -36,12 +29,10 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
const { datasetT } = useI18n();
const router = useRouter();
const query = router.query;
const { datasetDetail, startWebsiteSync } = useDatasetStore();
const { userInfo } = useUserStore();
const { isPc, setLoading } = useSystemStore();
const vectorTrainingMap = useContextSelector(DatasetPageContext, (v) => v.vectorTrainingMap);
const agentTrainingMap = useContextSelector(DatasetPageContext, (v) => v.agentTrainingMap);
const rebuildingCount = useContextSelector(DatasetPageContext, (v) => v.rebuildingCount);
const { isPc } = useSystemStore();
const { datasetDetail, vectorTrainingMap, agentTrainingMap, rebuildingCount } =
useContextSelector(DatasetPageContext, (v) => v);
const tabList = [
{
@ -67,20 +58,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
[query, router]
);
const { ConfirmModal: ConfirmSyncModal, openConfirm: openConfirmSync } = useConfirm({
type: 'common'
});
const { mutate: onUpdateDatasetWebsiteConfig } = useRequest({
mutationFn: () => {
setLoading(true);
return startWebsiteSync();
},
onSettled() {
setLoading(false);
},
errorToast: t('common.Update Failed')
});
return (
<>
{isPc ? (
@ -101,25 +78,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
{DatasetTypeMap[datasetDetail.type] && (
<Flex alignItems={'center'} pl={2} justifyContent={'space-between'}>
<DatasetTypeTag type={datasetDetail.type} />
{datasetDetail.type === DatasetTypeEnum.websiteDataset &&
datasetDetail.status === DatasetStatusEnum.active && (
<MyTooltip label={t('core.dataset.website.Start Sync')}>
<MyIcon
mt={1}
name={'common/refreshLight'}
w={'12px'}
color={'myGray.500'}
cursor={'pointer'}
onClick={() =>
openConfirmSync(
onUpdateDatasetWebsiteConfig,
undefined,
t('core.dataset.website.Confirm Create Tips')
)()
}
/>
</MyTooltip>
)}
</Flex>
)}
</Box>
@ -206,8 +164,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
/>
</Box>
)}
<ConfirmSyncModal />
</>
);
};

View File

@ -25,6 +25,8 @@ import { fileDownload } from '@/web/common/file/utils';
import QuoteItem from '@/components/core/dataset/QuoteItem';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import SearchParamsTip from '@/components/core/dataset/SearchParamsTip';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
@ -48,7 +50,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
const theme = useTheme();
const { toast } = useToast();
const { llmModelList } = useSystemStore();
const { datasetDetail } = useDatasetStore();
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const { pushDatasetTestItem } = useSearchTestStore();
const [inputType, setInputType] = useState<'text' | 'file'>('text');
const [datasetTestItem, setDatasetTestItem] = useState<SearchTestStoreItemType>();

View File

@ -1,4 +1,4 @@
import React, { useCallback, useMemo } from 'react';
import React from 'react';
import { useRouter } from 'next/router';
import { Box } from '@chakra-ui/react';
import { useToast } from '@fastgpt/web/hooks/useToast';
@ -9,15 +9,17 @@ import PageContainer from '@/components/PageContainer';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { useTranslation } from 'next-i18next';
import CollectionCard from './components/CollectionCard';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import Head from 'next/head';
import Slider from './components/Slider';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { DatasetPageContextProvider } from '@/web/core/dataset/context/datasetPageContext';
import {
DatasetPageContext,
DatasetPageContextProvider
} from '@/web/core/dataset/context/datasetPageContext';
import CollectionPageContextProvider from './components/CollectionCard/Context';
import { useContextSelector } from 'use-context-selector';
import NextHead from '@/components/common/NextHead';
const CollectionCard = dynamic(() => import('./components/CollectionCard/index'));
const DataCard = dynamic(() => import('./components/DataCard'));
const Test = dynamic(() => import('./components/Test'));
const Info = dynamic(() => import('./components/Info'));
@ -30,16 +32,14 @@ export enum TabEnum {
info = 'info',
import = 'import'
}
type Props = { datasetId: string; currentTab: TabEnum };
const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: TabEnum }) => {
const Detail = ({ datasetId, currentTab }: Props) => {
const { t } = useTranslation();
const { toast } = useToast();
const router = useRouter();
const { datasetDetail, loadDatasetDetail } = useDatasetStore();
const { ConfirmModal: ConfirmSyncModal, openConfirm: openConfirmSync } = useConfirm({
type: 'common'
});
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const loadDatasetDetail = useContextSelector(DatasetPageContext, (v) => v.loadDatasetDetail);
useQuery([datasetId], () => loadDatasetDetail(datasetId), {
onError(err: any) {
@ -53,36 +53,37 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: TabE
return (
<>
<Head>
<title>{datasetDetail?.name}</title>
</Head>
<DatasetPageContextProvider
value={{
datasetId
}}
>
<PageContainer>
<MyBox display={'flex'} flexDirection={['column', 'row']} h={'100%'} pt={[4, 0]}>
<Slider currentTab={currentTab} />
<NextHead title={datasetDetail?.name} icon={datasetDetail?.avatar} />
<PageContainer>
<MyBox display={'flex'} flexDirection={['column', 'row']} h={'100%'} pt={[4, 0]}>
<Slider currentTab={currentTab} />
{!!datasetDetail._id && (
<Box flex={'1 0 0'} pb={0}>
{currentTab === TabEnum.collectionCard && <CollectionCard />}
{currentTab === TabEnum.dataCard && <DataCard />}
{currentTab === TabEnum.test && <Test datasetId={datasetId} />}
{currentTab === TabEnum.info && <Info datasetId={datasetId} />}
{currentTab === TabEnum.import && <Import />}
</Box>
)}
</MyBox>
</PageContainer>
</DatasetPageContextProvider>
<ConfirmSyncModal />
{!!datasetDetail._id && (
<Box flex={'1 0 0'} pb={0}>
{currentTab === TabEnum.collectionCard && (
<CollectionPageContextProvider>
<CollectionCard />
</CollectionPageContextProvider>
)}
{currentTab === TabEnum.dataCard && <DataCard />}
{currentTab === TabEnum.test && <Test datasetId={datasetId} />}
{currentTab === TabEnum.info && <Info datasetId={datasetId} />}
{currentTab === TabEnum.import && <Import />}
</Box>
)}
</MyBox>
</PageContainer>
</>
);
};
const Render = (data: Props) => (
<DatasetPageContextProvider datasetId={data.datasetId}>
<Detail {...data} />
</DatasetPageContextProvider>
);
export default Render;
export async function getServerSideProps(context: any) {
const currentTab = context?.query?.currentTab || TabEnum.collectionCard;
const datasetId = context?.query?.datasetId;
@ -91,5 +92,3 @@ export async function getServerSideProps(context: any) {
props: { currentTab, datasetId, ...(await serviceSideProps(context, ['dataset', 'file'])) }
};
}
export default React.memo(Detail);
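The detail page is now split into a thin `Render` wrapper that mounts `DatasetPageContextProvider` with the route's `datasetId`, while `Detail` itself reads dataset state from that context instead of the zustand store. A minimal consumer sketch, assuming only the context exports introduced in this commit (the `DatasetTitle` component is illustrative):

```tsx
import React from 'react';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';

const DatasetTitle = () => {
  // Subscribe to a single slice; changes to other context fields won't re-render this component.
  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
  return <span>{datasetDetail.name}</span>;
};

export default DatasetTitle;
```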

View File

@ -20,9 +20,11 @@ import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants'
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import MySelect from '@fastgpt/web/components/common/MySelect';
import AIModelSelector from '@/components/Select/AIModelSelector';
import { useI18n } from '@/web/context/I18n';
const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
const { t } = useTranslation();
const { datasetT } = useI18n();
const [refresh, setRefresh] = useState(false);
const { toast } = useToast();
const router = useRouter();
@ -102,25 +104,31 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
gridTemplateColumns={'repeat(1,1fr)'}
list={[
{
title: t('core.dataset.Common Dataset'),
title: datasetT('Common Dataset'),
value: DatasetTypeEnum.dataset,
icon: 'core/dataset/commonDataset',
desc: t('core.dataset.Common Dataset Desc')
desc: datasetT('Common Dataset Desc')
},
...(feConfigs.isPlus
? [
{
title: t('core.dataset.Website Dataset'),
title: datasetT('Website Dataset'),
value: DatasetTypeEnum.websiteDataset,
icon: 'core/dataset/websiteDataset',
desc: t('core.dataset.Website Dataset Desc')
desc: datasetT('Website Dataset Desc')
}
// {
// title: datasetT('External File'),
// value: DatasetTypeEnum.externalFile,
// icon: 'core/dataset/websiteDataset',
// desc: datasetT('External file Dataset Desc')
// }
]
: [])
]}
value={getValues('type')}
onChange={(e) => {
setValue('type', e as `${DatasetTypeEnum}`);
setValue('type', e as DatasetTypeEnum);
setRefresh(!refresh);
}}
/>
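Labels for the dataset type cards now come from a `datasetT` helper returned by `useI18n`, and the `onChange` cast uses the enum itself. The hook's body is not part of this diff, so the shape below is an assumption: a thin wrapper that prefixes the `dataset` i18n namespace so call sites can write `datasetT('Common Dataset')`.

```ts
import { useTranslation } from 'next-i18next';

// Assumed implementation of the hook imported from '@/web/context/I18n'.
export const useI18n = () => {
  const { t } = useTranslation();
  return {
    // datasetT('Common Dataset') -> t('dataset:Common Dataset')
    datasetT: (key: string, options?: Record<string, any>) => t(`dataset:${key}`, options)
  };
};
```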

View File

@ -35,7 +35,10 @@ const MoveModal = ({
const [parentId, setParentId] = useState<string>('');
const { data } = useQuery(['getDatasets', parentId], () => {
return Promise.all([getDatasets({ parentId, type: 'folder' }), getDatasetPaths(parentId)]);
return Promise.all([
getDatasets({ parentId, type: DatasetTypeEnum.folder }),
getDatasetPaths(parentId)
]);
});
const paths = useMemo(
() => [

View File

@ -1,7 +1,6 @@
import React, { useMemo, useRef } from 'react';
import React, { useMemo, useRef, useState } from 'react';
import { Box, Flex, Grid, useDisclosure, Image, Button } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import PageContainer from '@/components/PageContainer';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { AddIcon } from '@chakra-ui/icons';
@ -35,28 +34,30 @@ import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { xmlDownloadFetch } from '@/web/common/api/xmlFetch';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });
const Kb = () => {
const Dataset = () => {
const { t } = useTranslation();
const { toast } = useToast();
const router = useRouter();
const { parentId } = router.query as { parentId: string };
const { setLoading } = useSystemStore();
const { userInfo } = useUserStore();
const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();
const DeleteTipsMap = useRef({
[DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
[DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm')
[DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
});
const { openConfirm, ConfirmModal } = useConfirm({
type: 'delete'
});
const { myDatasets, loadDatasets, setDatasets, updateDataset } = useDatasetStore();
const { onOpenModal: onOpenTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('Rename')
});
@ -78,7 +79,7 @@ const Kb = () => {
return id;
},
onSuccess(id: string) {
setDatasets(myDatasets.filter((item) => item._id !== id));
setMyDatasets(myDatasets.filter((item) => item._id !== id));
},
onSettled() {
setLoading(false);
@ -112,7 +113,7 @@ const Kb = () => {
const { data, refetch, isFetching } = useQuery(
['loadDataset', parentId],
() => {
return Promise.all([loadDatasets(parentId), getDatasetPaths(parentId)]);
return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
},
{
onError(err) {
@ -139,7 +140,10 @@ const Kb = () => {
);
return (
<PageContainer isLoading={isFetching} insertProps={{ px: [5, '48px'] }}>
<PageContainer
isLoading={myDatasets.length === 0 && isFetching}
insertProps={{ px: [5, '48px'] }}
>
<Flex pt={[4, '30px']} alignItems={'center'} justifyContent={'space-between'}>
{/* url path */}
<ParentPaths
@ -317,7 +321,10 @@ const Kb = () => {
defaultVal: dataset.name,
onSuccess: (val) => {
if (val === dataset.name || !val) return;
updateDataset({ id: dataset._id, name: val });
putDatasetById({
id: dataset._id,
name: val
});
}
})
},
@ -351,7 +358,7 @@ const Kb = () => {
</Flex>
),
onClick: () => {
updateDataset({
putDatasetById({
id: dataset._id,
permission: PermissionTypeEnum.public
});
@ -371,7 +378,7 @@ const Kb = () => {
</Flex>
),
onClick: () => {
updateDataset({
putDatasetById({
id: dataset._id,
permission: PermissionTypeEnum.private
});
@ -476,9 +483,9 @@ const Kb = () => {
export async function getServerSideProps(content: any) {
return {
props: {
...(await serviceSideProps(content))
...(await serviceSideProps(content, ['dataset']))
}
};
}
export default Kb;
export default Dataset;
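With `updateDataset` removed from the store, list-page mutations call `putDatasetById` directly and the cached list is patched through the renamed `setMyDatasets`. A sketch of a rename helper built on those pieces (the hook itself is illustrative; the page relies on react-query refetching rather than the optimistic patch shown here):

```ts
import { putDatasetById } from '@/web/core/dataset/api';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';

export const useRenameDataset = () => {
  const { myDatasets, setMyDatasets } = useDatasetStore();

  return async (id: string, name: string) => {
    await putDatasetById({ id, name });
    // Patch the cached list so the card shows the new name without waiting for a refetch.
    setMyDatasets(myDatasets.map((item) => (item._id === id ? { ...item, name } : item)));
  };
};
```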

View File

@ -66,7 +66,7 @@ export const pushQAUsage = async ({
modelType: ModelTypeEnum.llm,
tokens
});
console.log(tokens, '----');
concatUsage({
billId,
teamId,

View File

@ -1,6 +1,10 @@
import { GET, POST, PUT, DELETE } from '@/web/common/api/request';
import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
import type { DatasetItemType, DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import type {
DatasetItemType,
DatasetListItemType,
DatasetSimpleItemType
} from '@fastgpt/global/core/dataset/type.d';
import type {
GetDatasetCollectionsProps,
GetDatasetDataListProps,
@ -39,13 +43,13 @@ import type { getDatasetTrainingQueueResponse } from '@/pages/api/core/dataset/t
import type { rebuildEmbeddingBody } from '@/pages/api/core/dataset/training/rebuildEmbedding';
/* ======================== dataset ======================= */
export const getDatasets = (data: { parentId?: string; type?: `${DatasetTypeEnum}` }) =>
export const getDatasets = (data: { parentId?: string; type?: DatasetTypeEnum }) =>
GET<DatasetListItemType[]>(`/core/dataset/list`, data);
/**
* get type=dataset list
*/
export const getAllDataset = () => GET<DatasetListItemType[]>(`/core/dataset/allDataset`);
export const getAllDataset = () => GET<DatasetSimpleItemType[]>(`/core/dataset/allDataset`);
export const getDatasetPaths = (parentId?: string) =>
GET<ParentTreePathItemType[]>('/core/dataset/paths', { parentId });
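Usage of the retyped client helpers: the `type` filter now takes the enum member rather than a template-literal string, and `getAllDataset` returns the slimmer `DatasetSimpleItemType`. A short sketch (the `listFolders` function is illustrative):

```ts
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getDatasets, getAllDataset } from '@/web/core/dataset/api';

async function listFolders(parentId?: string) {
  // A bare 'folder' string no longer type-checks; the enum member is required.
  const folders = await getDatasets({ parentId, type: DatasetTypeEnum.folder });
  // DatasetSimpleItemType[]: only the fields dataset pickers need.
  const all = await getAllDataset();
  return { folders, all };
}
```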

View File

@ -11,6 +11,8 @@ import { useQuery } from '@tanstack/react-query';
import React, { useMemo, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { useLoading } from '@fastgpt/web/hooks/useLoading';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '../context/datasetPageContext';
const SelectCollections = ({
datasetId,
@ -37,7 +39,8 @@ const SelectCollections = ({
}) => {
const { t } = useTranslation();
const theme = useTheme();
const { datasetDetail, loadDatasetDetail } = useDatasetStore();
const { loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);
const { Loading } = useLoading();
const [selectedDatasetCollectionIds, setSelectedDatasetCollectionIds] =
useState<string[]>(defaultSelectedId);

View File

@ -1,4 +1,5 @@
import { defaultQAModels, defaultVectorModels } from '@fastgpt/global/core/ai/model';
import { DatasetTypeEnum, TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import type {
DatasetCollectionItemType,
DatasetItemType
@ -11,7 +12,7 @@ export const defaultDatasetDetail: DatasetItemType = {
teamId: '',
tmbId: '',
updateTime: new Date(),
type: 'dataset',
type: DatasetTypeEnum.dataset,
avatar: '/icon/logo.svg',
name: '',
intro: '',
@ -34,7 +35,7 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
teamId: '',
tmbId: '',
updateTime: new Date(),
type: 'dataset',
type: DatasetTypeEnum.dataset,
avatar: '/icon/logo.svg',
name: '',
intro: '',
@ -51,7 +52,7 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
sourceName: '',
sourceId: '',
createTime: new Date(),
trainingType: 'chunk',
trainingType: TrainingModeEnum.chunk,
chunkSize: 0
};
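The defaults switch from raw strings to enum members to match the stricter typings elsewhere in this commit. For string enums the distinction matters, as the standalone sketch below shows (the two-member enum is a simplified local copy, not the real declaration):

```ts
// Simplified local copy for illustration only.
enum TrainingModeEnum {
  chunk = 'chunk',
  qa = 'qa'
}

const a: TrainingModeEnum = TrainingModeEnum.chunk; // ok
// const b: TrainingModeEnum = 'chunk';             // error: a string is not assignable to a string enum
const c: `${TrainingModeEnum}` = 'chunk';           // ok: the template-literal type is 'chunk' | 'qa'
```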

View File

@ -1,11 +1,23 @@
import { useQuery } from '@tanstack/react-query';
import { ReactNode, useMemo } from 'react';
import { ReactNode, useMemo, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { createContext } from 'use-context-selector';
import { getDatasetTrainingQueue, getTrainingQueueLen } from '../api';
import { useDatasetStore } from '../store/dataset';
import {
getDatasetById,
getDatasetTrainingQueue,
getTrainingQueueLen,
putDatasetById
} from '../api';
import { defaultDatasetDetail } from '../constants';
import { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
type DatasetPageContextType = {
datasetId: string;
datasetDetail: DatasetItemType;
loadDatasetDetail: (id: string) => Promise<DatasetItemType>;
updateDataset: (data: DatasetUpdateBody) => Promise<void>;
vectorTrainingMap: {
colorSchema: string;
tip: string;
@ -19,10 +31,6 @@ type DatasetPageContextType = {
refetchDatasetTraining: () => void;
};
type DatasetPageContextValueType = {
datasetId: string;
};
export const DatasetPageContext = createContext<DatasetPageContextType>({
vectorTrainingMap: {
colorSchema: '',
@ -36,19 +44,46 @@ export const DatasetPageContext = createContext<DatasetPageContextType>({
trainingCount: 0,
refetchDatasetTraining: function (): void {
throw new Error('Function not implemented.');
},
datasetId: '',
datasetDetail: defaultDatasetDetail,
loadDatasetDetail: function (id: string): Promise<DatasetItemType> {
throw new Error('Function not implemented.');
},
updateDataset: function (data: DatasetUpdateBody): Promise<void> {
throw new Error('Function not implemented.');
}
});
export const DatasetPageContextProvider = ({
children,
value
datasetId
}: {
children: ReactNode;
value: DatasetPageContextValueType;
datasetId: string;
}) => {
const { t } = useTranslation();
const { datasetId } = value;
const { datasetDetail } = useDatasetStore();
// dataset detail
const [datasetDetail, setDatasetDetail] = useState(defaultDatasetDetail);
const loadDatasetDetail = async (id: string) => {
const data = await getDatasetById(id);
setDatasetDetail(data);
return data;
};
const updateDataset = async (data: DatasetUpdateBody) => {
await putDatasetById(data);
if (datasetId === data.id) {
setDatasetDetail((state) => ({
...state,
...data
}));
}
};
// global queue
const { data: { vectorTrainingCount = 0, agentTrainingCount = 0 } = {} } = useQuery(
@ -108,6 +143,11 @@ export const DatasetPageContextProvider = ({
});
const contextValue: DatasetPageContextType = {
datasetId,
datasetDetail,
loadDatasetDetail,
updateDataset,
vectorTrainingMap,
agentTrainingMap,
rebuildingCount,
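The page context now owns `datasetDetail`, `loadDatasetDetail`, and `updateDataset`, so tab components persist edits through the context rather than the store. A sketch of the mutation path, assuming only the context exports above (the `IntroEditor` component and its form wiring are illustrative):

```tsx
import React from 'react';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';

const IntroEditor = () => {
  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
  const updateDataset = useContextSelector(DatasetPageContext, (v) => v.updateDataset);

  return (
    <textarea
      defaultValue={datasetDetail.intro}
      // updateDataset PUTs the change and merges it into the provider's local datasetDetail state.
      onBlur={(e) => updateDataset({ id: datasetDetail._id, intro: e.target.value })}
    />
  );
};

export default IntroEditor;
```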

View File

@ -0,0 +1,11 @@
import { ReactNode } from 'react';
import { createContext } from 'use-context-selector';
type ContextType = {};
export const Context = createContext<ContextType>({});
export const ContextProvider = ({ children }: { children: ReactNode }) => {
const contextValue: ContextType = {};
return <Context.Provider value={contextValue}>{children}</Context.Provider>;
};
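This new file is only a scaffold for the collection-card page context. A sketch of how it is expected to be filled in: fields go on `ContextType` and are computed inside the provider. The `searchText` state below is a hypothetical example, not part of the commit.

```tsx
import { ReactNode, useState } from 'react';
import { createContext } from 'use-context-selector';

type ContextType = {
  searchText: string;
  setSearchText: (val: string) => void;
};

export const Context = createContext<ContextType>({
  searchText: '',
  setSearchText: () => {}
});

export const ContextProvider = ({ children }: { children: ReactNode }) => {
  const [searchText, setSearchText] = useState('');
  const contextValue: ContextType = { searchText, setSearchText };
  return <Context.Provider value={contextValue}>{children}</Context.Provider>;
};
```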

View File

@ -1,18 +0,0 @@
import { ReactNode } from 'react';
import { createContext } from 'use-context-selector';
type DatasetContextType = {};
type DatasetContextValueType = {};
export const DatasetContext = createContext<DatasetContextType>({});
export const DatasetContextProvider = ({
children,
value
}: {
children: ReactNode;
value: DatasetContextValueType;
}) => {
return <DatasetContext.Provider value={value}>{children}</DatasetContext.Provider>;
};

View File

@ -1,30 +1,18 @@
import { create } from 'zustand';
import { devtools, persist } from 'zustand/middleware';
import { immer } from 'zustand/middleware/immer';
import type { DatasetItemType, DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import {
getAllDataset,
getDatasets,
getDatasetById,
putDatasetById,
postWebsiteSync
} from '@/web/core/dataset/api';
import { defaultDatasetDetail } from '../constants';
import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
import { DatasetStatusEnum } from '@fastgpt/global/core/dataset/constants';
import { postCreateTrainingUsage } from '@/web/support/wallet/usage/api';
import { checkTeamWebSyncLimit } from '@/web/support/user/team/api';
import type {
DatasetListItemType,
DatasetSimpleItemType
} from '@fastgpt/global/core/dataset/type.d';
import { getAllDataset, getDatasets } from '@/web/core/dataset/api';
type State = {
allDatasets: DatasetListItemType[];
loadAllDatasets: () => Promise<DatasetListItemType[]>;
allDatasets: DatasetSimpleItemType[];
loadAllDatasets: () => Promise<DatasetSimpleItemType[]>;
myDatasets: DatasetListItemType[];
loadDatasets: (parentId?: string) => Promise<any>;
setDatasets(val: DatasetListItemType[]): void;
datasetDetail: DatasetItemType;
loadDatasetDetail: (id: string, init?: boolean) => Promise<DatasetItemType>;
updateDataset: (data: DatasetUpdateBody) => Promise<any>;
startWebsiteSync: () => Promise<any>;
loadMyDatasets: (parentId?: string) => Promise<any>;
setMyDatasets(val: DatasetListItemType[]): void;
};
export const useDatasetStore = create<State>()(
@ -40,66 +28,17 @@ export const useDatasetStore = create<State>()(
return res;
},
myDatasets: [],
async loadDatasets(parentId = '') {
async loadMyDatasets(parentId = '') {
const res = await getDatasets({ parentId });
set((state) => {
state.myDatasets = res;
});
return res;
},
setDatasets(val) {
setMyDatasets(val) {
set((state) => {
state.myDatasets = val;
});
},
datasetDetail: defaultDatasetDetail,
async loadDatasetDetail(id: string, init = false) {
if (!id || (id === get().datasetDetail._id && !init)) return get().datasetDetail;
const data = await getDatasetById(id);
set((state) => {
state.datasetDetail = data;
});
return data;
},
async updateDataset(data) {
await putDatasetById(data);
if (get().datasetDetail._id === data.id) {
set((state) => {
state.datasetDetail = {
...get().datasetDetail,
...data
};
});
}
set((state) => {
state.myDatasets = state.myDatasets = state.myDatasets.map((item) =>
item._id === data.id
? {
...item,
...data
}
: item
);
});
},
async startWebsiteSync() {
await checkTeamWebSyncLimit();
const billId = await postCreateTrainingUsage({
name: 'core.dataset.training.Website Sync',
datasetId: get().datasetDetail._id
});
return postWebsiteSync({ datasetId: get().datasetDetail._id, billId }).then(() => {
get().updateDataset({
id: get().datasetDetail._id,
status: DatasetStatusEnum.syncing
});
});
}
})),
{
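The removed `startWebsiteSync` action now has to be driven from the page layer. A sketch of the equivalent flow using the same API helpers the deleted code called, with the status update routed through the new context's `updateDataset` (the free function and its wiring are assumptions):

```ts
import { postWebsiteSync } from '@/web/core/dataset/api';
import { postCreateTrainingUsage } from '@/web/support/wallet/usage/api';
import { checkTeamWebSyncLimit } from '@/web/support/user/team/api';
import { DatasetStatusEnum } from '@fastgpt/global/core/dataset/constants';

export const startWebsiteSync = async (
  datasetId: string,
  updateDataset: (data: { id: string; status: DatasetStatusEnum }) => Promise<void>
) => {
  await checkTeamWebSyncLimit();
  const billId = await postCreateTrainingUsage({
    name: 'core.dataset.training.Website Sync',
    datasetId
  });
  await postWebsiteSync({ datasetId, billId });
  // Mark the dataset as syncing through the page context so the UI reflects it immediately.
  await updateDataset({ id: datasetId, status: DatasetStatusEnum.syncing });
};
```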

View File

@ -1,13 +1,8 @@
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { ImportProcessWayEnum, ImportSourceTypeEnum } from './constants';
import { ImportProcessWayEnum } from './constants';
import { UseFormReturn } from 'react-hook-form';
export type ImportDataComponentProps = {
activeStep: number;
goToNext: () => void;
};
export type ImportSourceItemType = {
id: string;
@ -17,10 +12,10 @@ export type ImportSourceItemType = {
// source
sourceName: string;
sourceSize?: string;
icon: string;
// file
sourceSize?: string;
isUploading?: boolean;
uploadedFileRate?: number;
dbFileId?: string; // File ID stored in the database; this ID is also the relateId in the image and collection metadata
@ -31,6 +26,10 @@ export type ImportSourceItemType = {
// custom text
rawText?: string;
// external file
sourceUrl?: string;
externalId?: string;
};
export type ImportSourceParamsType = UseFormReturn<