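/*
 * Dataset controller helpers: traverse a dataset folder tree and delete all data that
 * belongs to a set of datasets (training queue, data texts, data, vectors, collections,
 * and S3 files).
 */
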
import { type DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { MongoDatasetCollection } from './collection/schema';
import { MongoDataset } from './schema';
import { delCollectionRelatedSource } from './collection/controller';
import { type ClientSession } from '../../common/mongo';
import { MongoDatasetTraining } from './training/schema';
import { MongoDatasetData } from './data/schema';
import { deleteDatasetDataVector } from '../../common/vectorDB/controller';
import { MongoDatasetDataText } from './data/dataTextSchema';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { retryFn } from '@fastgpt/global/common/system/utils';
import { UserError } from '@fastgpt/global/common/error/utils';
import { getS3DatasetSource } from '../../common/s3/sources/dataset';

/* ============= dataset ========== */
/* Find a dataset together with all of its descendant datasets, starting from the top-level datasetId */
export async function findDatasetAndAllChildren({
  teamId,
  datasetId,
  fields
}: {
  teamId: string;
  datasetId: string;
  fields?: string;
}): Promise<DatasetSchemaType[]> {
  const find = async (id: string) => {
    const children = await MongoDataset.find(
      {
        teamId,
        parentId: id
      },
      fields
    ).lean();

    let datasets = children;

    for (const child of children) {
      const grandChildrenIds = await find(child._id);
      datasets = datasets.concat(grandChildrenIds);
    }

    return datasets;
  };
  const [dataset, childDatasets] = await Promise.all([
    MongoDataset.findById(datasetId).lean(),
    find(datasetId)
  ]);

  if (!dataset) {
    return Promise.reject(new UserError('Dataset not found'));
  }

  return [dataset, ...childDatasets];
}
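
/*
 * Usage sketch (illustrative only, not part of this module): resolving every dataset
 * under a folder before a bulk operation. The id values and field projection below are
 * hypothetical placeholders.
 *
 *   const datasets = await findDatasetAndAllChildren({
 *     teamId,
 *     datasetId: folderId,
 *     fields: '_id teamId name'
 *   });
 *   // datasets[0] is the requested dataset itself; the rest are its descendants.
 */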

export async function getCollectionWithDataset(collectionId: string) {
  const data = await MongoDatasetCollection.findById(collectionId)
    .populate<{ dataset: DatasetSchemaType }>('dataset')
    .lean();
  if (!data) {
    return Promise.reject(DatasetErrEnum.unExistCollection);
  }
  return data;
}
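
/*
 * Usage sketch (illustrative only, not part of this module): loading a collection
 * together with its populated parent dataset before processing it. `collectionId`
 * is a hypothetical placeholder.
 *
 *   const collection = await getCollectionWithDataset(collectionId);
 *   // collection.dataset is the populated DatasetSchemaType document.
 */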

/* Delete all data that belongs to the given datasets (training queue, data, vectors, collections, files) */
export async function delDatasetRelevantData({
  datasets,
  session
}: {
  datasets: DatasetSchemaType[];
  session: ClientSession;
}) {
  if (!datasets.length) return;

  const teamId = datasets[0].teamId;

  if (!teamId) {
    return Promise.reject(new UserError('TeamId is required'));
  }

  const datasetIds = datasets.map((item) => item._id);

  // Get _id, teamId, fileId, metadata.relatedImgId for all collections
  const collections = await MongoDatasetCollection.find(
    {
      teamId,
      datasetId: { $in: datasetIds }
    },
    '_id teamId datasetId fileId metadata'
  ).lean();

  // Delete training data
  await MongoDatasetTraining.deleteMany({
    teamId,
    datasetId: { $in: datasetIds }
  });

  for (const datasetId of datasetIds) {
    // Delete dataset_data_texts in batches by datasetId
    await MongoDatasetDataText.deleteMany({
      teamId,
      datasetId
    }).maxTimeMS(300000); // Cap each per-dataset batch at 5 minutes
    // Delete dataset_datas in batches by datasetId
    await MongoDatasetData.deleteMany({
      teamId,
      datasetId
    }).maxTimeMS(300000);
  }

  // Delete source files (also covers legacy images for backward compatibility)
  await delCollectionRelatedSource({ collections });
  // Delete vector data
  await deleteDatasetDataVector({ teamId, datasetIds });

  // Delete collections
  await MongoDatasetCollection.deleteMany({
    teamId,
    datasetId: { $in: datasetIds }
  }).session(session);

  // Delete all dataset files from S3
  for (const datasetId of datasetIds) {
    await getS3DatasetSource().deleteDatasetFilesByPrefix({ datasetId });
  }
}
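
/*
 * Usage sketch (illustrative only, not part of this module): a delete flow that first
 * expands the dataset tree and then removes everything related to it. `mongoSessionRun`
 * stands in for whatever transaction helper the caller uses and is assumed here, as is
 * `rootDatasetId`.
 *
 *   const datasets = await findDatasetAndAllChildren({ teamId, datasetId: rootDatasetId });
 *   await mongoSessionRun(async (session) => {
 *     await delDatasetRelevantData({ datasets, session });
 *     await MongoDataset.deleteMany(
 *       { teamId, _id: { $in: datasets.map((d) => d._id) } }
 *     ).session(session);
 *   });
 */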