FastGPT/packages/service/core/dataset/controller.ts
import { type DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { MongoDatasetCollection } from './collection/schema';
import { MongoDataset } from './schema';
import { delCollectionRelatedSource } from './collection/controller';
import { type ClientSession } from '../../common/mongo';
import { MongoDatasetTraining } from './training/schema';
import { MongoDatasetData } from './data/schema';
import { deleteDatasetDataVector } from '../../common/vectorDB/controller';
import { MongoDatasetDataText } from './data/dataTextSchema';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { retryFn } from '@fastgpt/global/common/system/utils';
import { UserError } from '@fastgpt/global/common/error/utils';
import { getS3DatasetSource } from '../../common/s3/sources/dataset';

/* ============= dataset ========== */

/* Find a dataset and all of its descendant datasets, starting from a top-level datasetId */
export async function findDatasetAndAllChildren({
  teamId,
  datasetId,
  fields
}: {
  teamId: string;
  datasetId: string;
  fields?: string;
}): Promise<DatasetSchemaType[]> {
  // Recursively collect every descendant dataset under the given parent id
  const find = async (id: string) => {
    const children = await MongoDataset.find(
      {
        teamId,
        parentId: id
      },
      fields
    ).lean();

    let datasets = children;
    for (const child of children) {
      const grandChildren = await find(child._id);
      datasets = datasets.concat(grandChildren);
    }

    return datasets;
  };

  const [dataset, childDatasets] = await Promise.all([
    MongoDataset.findById(datasetId).lean(),
    find(datasetId)
  ]);

  if (!dataset) {
    return Promise.reject(new UserError('Dataset not found'));
  }

  return [dataset, ...childDatasets];
}
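
/*
 * Usage sketch (hypothetical caller, not part of this module): fetch a dataset
 * and every descendant, selecting only the fields a tree view needs. `teamId`
 * and `datasetId` below are placeholder values.
 *
 *   const datasets = await findDatasetAndAllChildren({
 *     teamId,
 *     datasetId,
 *     fields: '_id parentId name type'
 *   });
 *   // datasets[0] is the root; descendants follow in depth-first order
 */
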
export async function getCollectionWithDataset(collectionId: string) {
  const data = await MongoDatasetCollection.findById(collectionId)
    .populate<{ dataset: DatasetSchemaType }>('dataset')
    .lean();

  if (!data) {
    return Promise.reject(DatasetErrEnum.unExistCollection);
  }

  return data;
}
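
/*
 * Usage sketch (hypothetical): load a collection with its parent dataset
 * populated in one query, e.g. to read dataset-level settings while handling
 * a collection. `collectionId` is a placeholder.
 *
 *   const collection = await getCollectionWithDataset(collectionId);
 *   const datasetName = collection.dataset.name;
 */
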
/* Delete all data derived from the given datasets */
export async function delDatasetRelevantData({
  datasets,
  session
}: {
  datasets: DatasetSchemaType[];
  session: ClientSession;
}) {
  if (!datasets.length) return;

  const teamId = datasets[0].teamId;
  if (!teamId) {
    return Promise.reject(new UserError('TeamId is required'));
  }

  const datasetIds = datasets.map((item) => item._id);

  // Get _id, teamId, datasetId, fileId and metadata for all collections
  const collections = await MongoDatasetCollection.find(
    {
      teamId,
      datasetId: { $in: datasetIds }
    },
    '_id teamId datasetId fileId metadata'
  ).lean();

  // Delete training data
  await MongoDatasetTraining.deleteMany({
    teamId,
    datasetId: { $in: datasetIds }
  });
  for (const datasetId of datasetIds) {
    // Delete dataset_data_texts in batches by datasetId
    await MongoDatasetDataText.deleteMany({
      teamId,
      datasetId
    }).maxTimeMS(300000); // Cap execution time for a single batch

    // Delete dataset_datas in batches by datasetId
    await MongoDatasetData.deleteMany({
      teamId,
      datasetId
    }).maxTimeMS(300000);
  }

  // Delete source files (kept for compatibility with legacy images)
  await delCollectionRelatedSource({ collections });

  // Delete vector data
  await deleteDatasetDataVector({ teamId, datasetIds });
  // Delete collections
  await MongoDatasetCollection.deleteMany({
    teamId,
    datasetId: { $in: datasetIds }
  }).session(session);

  // Delete all dataset files from object storage
  for (const datasetId of datasetIds) {
    await getS3DatasetSource().deleteDatasetFilesByPrefix({ datasetId });
  }
}
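
/*
 * Usage sketch (hypothetical): remove a dataset tree and all derived data in
 * one pass. `mongoSessionRun` is assumed to be the project's transaction
 * helper; adapt to however the caller obtains a ClientSession.
 *
 *   await mongoSessionRun(async (session) => {
 *     const datasets = await findDatasetAndAllChildren({ teamId, datasetId });
 *     await delDatasetRelevantData({ datasets, session });
 *     await MongoDataset.deleteMany(
 *       { _id: { $in: datasets.map((d) => d._id) } },
 *       { session }
 *     );
 *   });
 */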