fix: get file buffer and upload file to S3 (#6004)

This commit is contained in:
Roy 2025-11-28 15:02:59 +08:00 committed by GitHub
parent ac1a07d91c
commit f1253a1f5a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 58 additions and 38 deletions

View File

@ -16,7 +16,7 @@ export type FileType = {
size: number;
};
/*
maxSize: File max size (MB)
*/
export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
@ -146,6 +146,43 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
});
});
}
// Memory-storage uploader: multer keeps the upload as a Buffer on req.file
// instead of writing a temp file to disk (used for direct S3 uploads).
uploaderMemory = multer({
  storage: multer.memoryStorage(),
  limits: {
    // maxSize is expressed in MB (see the getUploadModel doc comment);
    // multer's fileSize limit is in BYTES, so convert — passing maxSize
    // raw would cap uploads at ~`maxSize` bytes.
    fileSize: maxSize * 1024 * 1024
  }
}).single('file');
/**
 * Parse a multipart request with the in-memory multer uploader and return
 * the uploaded file as a Buffer (no temp file is written to disk).
 *
 * Resolves with the file's buffer plus its original name (URI-decoded when
 * possible), encoding and mimetype.
 * Rejects with the multer error (e.g. a size-limit violation) or with
 * `Error('File empty')` when no file part / empty buffer is present.
 */
async getFileBuffer(req: NextApiRequest, res: NextApiResponse) {
  return new Promise<{
    buffer: Buffer;
    originalname: string;
    encoding: string;
    mimetype: string;
  }>((resolve, reject) => {
    // @ts-ignore — multer expects Express req/res; Next's types differ but are runtime-compatible
    this.uploaderMemory(req, res, (error) => {
      if (error) {
        return reject(error);
      }
      // @ts-ignore — req.file is attached by multer's single('file') middleware
      const file = req.file;
      if (!file?.buffer) {
        return reject(new Error('File empty'));
      }
      // decodeURIComponent throws URIError when the name contains a literal
      // '%' that is not a valid escape sequence (e.g. "100% done.pdf"),
      // which would reject the whole upload — fall back to the raw name.
      let originalname: string;
      try {
        originalname = decodeURIComponent(file.originalname);
      } catch {
        originalname = file.originalname;
      }
      resolve({
        buffer: file.buffer,
        originalname,
        encoding: file.encoding,
        mimetype: file.mimetype
      });
    });
  });
}
}
return new UploadModel();

View File

@ -1,36 +1,26 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { type FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { createCollectionAndInsertData } from '@fastgpt/service/core/dataset/collection/controller';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { type CreateCollectionResponse } from '@/global/core/dataset/api';
import { getS3DatasetSource } from '@fastgpt/service/common/s3/sources/dataset';
import { removeS3TTL } from '@fastgpt/service/common/s3/utils';
async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCollectionResponse {
let filePaths: string[] = [];
try {
// Create multer uploader
const upload = getUploadModel({
maxSize: global.feConfigs?.uploadFileMaxSize
});
const { file, data, bucketName } =
await upload.getUploadFile<FileCreateDatasetCollectionParams>(
req,
res,
BucketNameEnum.dataset
);
filePaths = [file.path];
if (!file || !bucketName) {
throw new Error('file is empty');
}
const upload = getUploadModel({ maxSize: global.feConfigs?.uploadFileMaxSize });
const { buffer, originalname } = await upload.getFileBuffer(req, res);
const data = (() => {
try {
return JSON.parse(req.body?.data || '{}');
} catch (error) {
return {};
}
})() as FileCreateDatasetCollectionParams;
const { teamId, tmbId, dataset } = await authDataset({
req,
@ -40,29 +30,24 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
datasetId: data.datasetId
});
const { fileMetadata, collectionMetadata, ...collectionData } = data;
const collectionName = file.originalname;
const s3DatasetSource = getS3DatasetSource();
// 1. upload file
const fileId = await uploadFile({
teamId,
uid: tmbId,
bucketName,
path: file.path,
filename: file.originalname,
contentType: file.mimetype,
metadata: fileMetadata
const fileId = await s3DatasetSource.uploadDatasetFileByBuffer({
datasetId: String(dataset._id),
buffer,
filename: originalname
});
// 2. Remove the temporary TTL on the uploaded S3 object so it is persisted
removeFilesByPaths(filePaths);
await removeS3TTL({ key: fileId, bucketName: 'private' });
const { fileMetadata, collectionMetadata, ...collectionData } = data;
// 3. Create collection
const { collectionId, insertResults } = await createCollectionAndInsertData({
dataset,
createCollectionParams: {
...collectionData,
name: collectionName,
name: originalname,
teamId,
tmbId,
type: DatasetCollectionTypeEnum.file,
@ -76,8 +61,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCo
return { collectionId, results: insertResults };
} catch (error) {
removeFilesByPaths(filePaths);
return Promise.reject(error);
}
}