Revert "fix: get file buffer and upload file to S3 (#6004)"

This reverts commit f1253a1f5a.
Finley Ge 2025-11-28 17:11:37 +08:00 committed by GitHub
parent f1253a1f5a
commit a1827d5252
2 changed files with 38 additions and 58 deletions


@@ -16,7 +16,7 @@ export type FileType = {
size: number;
};
/*
maxSize: File max size (MB)
*/
export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
@@ -146,43 +146,6 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
        });
      });
    }

    uploaderMemory = multer({
      storage: multer.memoryStorage(),
      limits: {
        fileSize: maxSize
      }
    }).single('file');

    async getFileBuffer(req: NextApiRequest, res: NextApiResponse) {
      return new Promise<{
        buffer: Buffer;
        originalname: string;
        encoding: string;
        mimetype: string;
      }>((resolve, reject) => {
        // @ts-ignore
        this.uploaderMemory(req, res, (error) => {
          if (error) {
            return reject(error);
          }
          // @ts-ignore
          const file = req.file;
          if (!file?.buffer) {
            return reject(new Error('File empty'));
          }
          resolve({
            buffer: file.buffer,
            originalname: decodeURIComponent(file.originalname),
            encoding: file.encoding,
            mimetype: file.mimetype
          });
        });
      });
    }
  }

  return new UploadModel();
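
The removed getFileBuffer wraps a multer memory-storage uploader in a Promise and resolves with the whole upload as an in-memory Buffer, so no temp file touches disk. A minimal usage sketch, assuming a Next.js Pages Router API route (the route itself is illustrative and not part of this commit); multer needs the raw multipart stream, so the route disables Next's body parser:

import type { NextApiRequest, NextApiResponse } from 'next';
import { getUploadModel } from '@fastgpt/service/common/file/multer';

// Hand multer the raw multipart body instead of Next's parsed body.
export const config = { api: { bodyParser: false } };

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  const upload = getUploadModel({ maxSize: 500 }); // 500 mirrors the model's default cap

  // Resolves once multer.memoryStorage() has buffered the single 'file' field.
  const { buffer, originalname, mimetype } = await upload.getFileBuffer(req, res);

  res.json({ name: originalname, type: mimetype, bytes: buffer.length });
}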


@@ -1,26 +1,36 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { type FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { createCollectionAndInsertData } from '@fastgpt/service/core/dataset/collection/controller';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { type CreateCollectionResponse } from '@/global/core/dataset/api';
import { getS3DatasetSource } from '@fastgpt/service/common/s3/sources/dataset';
import { removeS3TTL } from '@fastgpt/service/common/s3/utils';
async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCollectionResponse {
  let filePaths: string[] = [];

  try {
    const upload = getUploadModel({ maxSize: global.feConfigs?.uploadFileMaxSize });
    const { buffer, originalname } = await upload.getFileBuffer(req, res);
    const data = (() => {
      try {
        return JSON.parse(req.body?.data || '{}');
      } catch (error) {
        return {};
      }
    })() as FileCreateDatasetCollectionParams;

    // Create multer uploader
    const upload = getUploadModel({
      maxSize: global.feConfigs?.uploadFileMaxSize
    });
    const { file, data, bucketName } =
      await upload.getUploadFile<FileCreateDatasetCollectionParams>(
        req,
        res,
        BucketNameEnum.dataset
      );
    filePaths = [file.path];

    if (!file || !bucketName) {
      throw new Error('file is empty');
    }

    const { teamId, tmbId, dataset } = await authDataset({
      req,
@@ -30,24 +40,29 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
      datasetId: data.datasetId
    });

    const s3DatasetSource = getS3DatasetSource();
    const { fileMetadata, collectionMetadata, ...collectionData } = data;
    const collectionName = file.originalname;

    const fileId = await s3DatasetSource.uploadDatasetFileByBuffer({
      datasetId: String(dataset._id),
      buffer,
      filename: originalname
    // 1. upload file
    const fileId = await uploadFile({
      teamId,
      uid: tmbId,
      bucketName,
      path: file.path,
      filename: file.originalname,
      contentType: file.mimetype,
      metadata: fileMetadata
    });
    await removeS3TTL({ key: fileId, bucketName: 'private' });
    const { fileMetadata, collectionMetadata, ...collectionData } = data;

    // 2. delete tmp file
    removeFilesByPaths(filePaths);

    // 3. Create collection
    const { collectionId, insertResults } = await createCollectionAndInsertData({
      dataset,
      createCollectionParams: {
        ...collectionData,
        name: originalname,
        name: collectionName,
        teamId,
        tmbId,
        type: DatasetCollectionTypeEnum.file,
@@ -61,6 +76,8 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
    return { collectionId, results: insertResults };
  } catch (error) {
    removeFilesByPaths(filePaths);

    return Promise.reject(error);
  }
}
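
The restored flow is disk-backed: getUploadFile writes the upload to a temporary path, the handler streams that file into GridFS with uploadFile, and removeFilesByPaths runs on both the success and the error path so the temp file never leaks. A condensed sketch of that pattern, with auth and collection creation elided; the literal teamId/uid values and the maxSize are placeholders, not how the real handler obtains them:

import type { NextApiRequest, NextApiResponse } from 'next';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';

export const config = { api: { bodyParser: false } };

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  let filePaths: string[] = [];

  try {
    const upload = getUploadModel({ maxSize: 500 });

    // Multer disk storage: the upload lands at file.path before anything else runs.
    const { file, bucketName } = await upload.getUploadFile(req, res, BucketNameEnum.dataset);
    filePaths = [file.path];

    // Stream the temp file into GridFS...
    const fileId = await uploadFile({
      teamId: 'placeholder-team-id', // the real handler takes these from authDataset
      uid: 'placeholder-tmb-id',
      bucketName,
      path: file.path,
      filename: file.originalname,
      contentType: file.mimetype
    });

    // ...then drop the on-disk copy.
    removeFilesByPaths(filePaths);

    res.json({ fileId });
  } catch (error) {
    // Clean up the temp file when any step above throws.
    removeFilesByPaths(filePaths);
    res.status(500).json({ error: String(error) });
  }
}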