From f1253a1f5af0b58052c7c923bcce161caa31798f Mon Sep 17 00:00:00 2001 From: Roy Date: Fri, 28 Nov 2025 15:02:59 +0800 Subject: [PATCH] fix: get file buffer and upload file to S3 (#6004) --- packages/service/common/file/multer.ts | 39 ++++++++++++- .../dataset/collection/create/localFile.ts | 57 +++++++------------ 2 files changed, 58 insertions(+), 38 deletions(-) diff --git a/packages/service/common/file/multer.ts b/packages/service/common/file/multer.ts index 237407656..b31a1aacc 100644 --- a/packages/service/common/file/multer.ts +++ b/packages/service/common/file/multer.ts @@ -16,7 +16,7 @@ export type FileType = { size: number; }; -/* +/* maxSize: File max size (MB) */ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => { @@ -146,6 +146,43 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => { }); }); } + + uploaderMemory = multer({ + storage: multer.memoryStorage(), + limits: { + fileSize: maxSize + } + }).single('file'); + + async getFileBuffer(req: NextApiRequest, res: NextApiResponse) { + return new Promise<{ + buffer: Buffer; + originalname: string; + encoding: string; + mimetype: string; + }>((resolve, reject) => { + // @ts-ignore + this.uploaderMemory(req, res, (error) => { + if (error) { + return reject(error); + } + + // @ts-ignore + const file = req.file; + + if (!file?.buffer) { + return reject(new Error('File empty')); + } + + resolve({ + buffer: file.buffer, + originalname: decodeURIComponent(file.originalname), + encoding: file.encoding, + mimetype: file.mimetype + }); + }); + }); + } } return new UploadModel(); diff --git a/projects/app/src/pages/api/core/dataset/collection/create/localFile.ts b/projects/app/src/pages/api/core/dataset/collection/create/localFile.ts index 8a5629e65..38e996ccf 100644 --- a/projects/app/src/pages/api/core/dataset/collection/create/localFile.ts +++ b/projects/app/src/pages/api/core/dataset/collection/create/localFile.ts @@ -1,36 +1,26 @@ import type { NextApiRequest, 
NextApiResponse } from 'next'; -import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller'; import { getUploadModel } from '@fastgpt/service/common/file/multer'; import { authDataset } from '@fastgpt/service/support/permission/dataset/auth'; import { type FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api'; -import { removeFilesByPaths } from '@fastgpt/service/common/file/utils'; import { createCollectionAndInsertData } from '@fastgpt/service/core/dataset/collection/controller'; import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants'; -import { getNanoid } from '@fastgpt/global/common/string/tools'; -import { BucketNameEnum } from '@fastgpt/global/common/file/constants'; import { NextAPI } from '@/service/middleware/entry'; import { WritePermissionVal } from '@fastgpt/global/support/permission/constant'; import { type CreateCollectionResponse } from '@/global/core/dataset/api'; +import { getS3DatasetSource } from '@fastgpt/service/common/s3/sources/dataset'; +import { removeS3TTL } from '@fastgpt/service/common/s3/utils'; async function handler(req: NextApiRequest, res: NextApiResponse): CreateCollectionResponse { - let filePaths: string[] = []; - try { - // Create multer uploader - const upload = getUploadModel({ - maxSize: global.feConfigs?.uploadFileMaxSize - }); - const { file, data, bucketName } = - await upload.getUploadFile( - req, - res, - BucketNameEnum.dataset - ); - filePaths = [file.path]; - - if (!file || !bucketName) { - throw new Error('file is empty'); - } + const upload = getUploadModel({ maxSize: global.feConfigs?.uploadFileMaxSize }); + const { buffer, originalname } = await upload.getFileBuffer(req, res); + const data = (() => { + try { + return JSON.parse(req.body?.data || '{}'); + } catch (error) { + return {}; + } + })() as FileCreateDatasetCollectionParams; const { teamId, tmbId, dataset } = await authDataset({ req, @@ -40,29 +30,24 @@ async function handler(req: 
NextApiRequest, res: NextApiResponse): CreateCo datasetId: data.datasetId }); - const { fileMetadata, collectionMetadata, ...collectionData } = data; - const collectionName = file.originalname; + const s3DatasetSource = getS3DatasetSource(); - // 1. upload file - const fileId = await uploadFile({ - teamId, - uid: tmbId, - bucketName, - path: file.path, - filename: file.originalname, - contentType: file.mimetype, - metadata: fileMetadata + const fileId = await s3DatasetSource.uploadDatasetFileByBuffer({ + datasetId: String(dataset._id), + buffer, + filename: originalname }); - // 2. delete tmp file - removeFilesByPaths(filePaths); + await removeS3TTL({ key: fileId, bucketName: 'private' }); + + const { fileMetadata, collectionMetadata, ...collectionData } = data; // 3. Create collection const { collectionId, insertResults } = await createCollectionAndInsertData({ dataset, createCollectionParams: { ...collectionData, - name: collectionName, + name: originalname, teamId, tmbId, type: DatasetCollectionTypeEnum.file, @@ -76,8 +61,3 @@ async function handler(req: NextApiRequest, res: NextApiResponse): CreateCo - return { collectionId, results: insertResults }; - } catch (error) { - removeFilesByPaths(filePaths); - - return Promise.reject(error); - } + return { collectionId, results: insertResults }; }