import jwt from 'jsonwebtoken';
import { isAfter, differenceInSeconds } from 'date-fns';
import { ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
import type { ClientSession } from 'mongoose';
import { MongoS3TTL } from './schema';
import { S3Buckets } from './constants';
import { S3PrivateBucket } from './buckets/private';
import { S3Sources, type UploadImage2S3BucketParams } from './type';
import { S3PublicBucket } from './buckets/public';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import path from 'node:path';
import type { ParsedFileContentS3KeyParams } from './sources/dataset/type';
import { EndpointUrl } from '@fastgpt/global/common/file/constants';
import type { NextApiRequest } from 'next';

// Maximum allowed length for S3 filenames
export const S3_FILENAME_MAX_LENGTH = 50;

/**
 * Truncate a filename so it does not exceed the maximum length, while preserving the extension.
 * @param filename The original filename
 * @param maxLength The maximum length limit
 * @returns The truncated filename
 */
export function truncateFilename(
  filename: string,
  maxLength: number = S3_FILENAME_MAX_LENGTH
): string {
  if (!filename) return filename;

  // If the filename already fits within the limit, return it unchanged
  if (filename.length <= maxLength) {
    return filename;
  }

  const extension = path.extname(filename); // Extension including the dot, e.g. ".pdf"
  const nameWithoutExt = path.basename(filename, extension); // Filename without the extension

  // Maximum length of the name portion (total length minus extension length)
  const maxNameLength = maxLength - extension.length;

  // If the extension is so long that no room is left for the name, truncate the extension instead
  if (maxNameLength <= 0) {
    // Keep the leading part of the extension, preserving at least the dot
    const truncatedExt = extension.substring(0, Math.min(maxLength, extension.length));
    return truncatedExt;
  }

  // Truncate the name portion
  const truncatedName = nameWithoutExt.substring(0, maxNameLength);

  return truncatedName + extension;
}
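
// Illustrative sketch (comment only, not executed): how truncateFilename behaves
// at the boundary. With maxLength 20 and a '.pdf' extension (4 chars), 16 chars
// remain for the name portion:
//
//   truncateFilename('quarterly-report-2024-final-revision.pdf', 20);
//   // => 'quarterly-report.pdf' (16-char name + '.pdf' = 20 chars)
//   truncateFilename('short.pdf', 20);
//   // => 'short.pdf' (already within the limit, returned unchanged)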

/**
 * Sign an S3 object key into a time-limited download URL.
 * @param objectKey The S3 object key to sign
 * @param expiredTime The time at which the signed URL expires
 * @returns A URL under EndpointUrl that serves the object until the token expires
 */
export function jwtSignS3ObjectKey(objectKey: string, expiredTime: Date) {
  const secret = process.env.FILE_TOKEN_KEY as string;
  const expiresIn = differenceInSeconds(expiredTime, new Date());
  const token = jwt.sign({ objectKey }, secret, { expiresIn });

  return `${EndpointUrl}/api/system/file/${token}`;
}

export function jwtVerifyS3ObjectKey(token: string) {
  const secret = process.env.FILE_TOKEN_KEY as string;
  return new Promise<{ objectKey: string }>((resolve, reject) => {
    jwt.verify(token, secret, (err, payload) => {
      if (err || !payload || !(payload as jwt.JwtPayload).objectKey) {
        return reject(ERROR_ENUM.unAuthFile);
      }

      resolve(payload as { objectKey: string });
    });
  });
}
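
// Usage sketch (illustrative; assumes process.env.FILE_TOKEN_KEY is configured,
// and uses date-fns addMinutes for the expiry):
//
//   const url = jwtSignS3ObjectKey('dataset/abc/x7Kp2q-doc.pdf', addMinutes(new Date(), 30));
//   // => `${EndpointUrl}/api/system/file/<jwt>`
//
//   // A route handler can then take the trailing token segment and resolve it:
//   const { objectKey } = await jwtVerifyS3ObjectKey(token);
//   // rejects with ERROR_ENUM.unAuthFile if the token is expired or malformed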

export function removeS3TTL({
  key,
  bucketName,
  session
}: {
  key: string[] | string;
  bucketName: keyof typeof S3Buckets;
  session?: ClientSession;
}) {
  if (!key) return;

  if (Array.isArray(key)) {
    return MongoS3TTL.deleteMany(
      {
        minioKey: { $in: key },
        bucketName: S3Buckets[bucketName]
      },
      { session }
    );
  }

  if (typeof key === 'string') {
    return MongoS3TTL.deleteOne(
      {
        minioKey: key,
        bucketName: S3Buckets[bucketName]
      },
      { session }
    );
  }
}
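
// Usage sketch (illustrative; the intent is inferred from the TTL schema above:
// dropping the record presumably keeps an object from being expired once it is
// confirmed permanent). An optional mongoose session keeps the delete inside a
// wider transaction:
//
//   await removeS3TTL({ key: uploadKey, bucketName: 'private' });
//   await removeS3TTL({ key: [keyA, keyB], bucketName: 'public', session });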

export async function uploadImage2S3Bucket(
  bucketName: keyof typeof S3Buckets,
  params: UploadImage2S3BucketParams
) {
  const { base64Img, filename, mimetype, uploadKey, expiredTime } = params;

  const bucket = bucketName === 'private' ? new S3PrivateBucket() : new S3PublicBucket();

  // Strip the data-URL prefix (e.g. "data:image/png;base64,") if present
  const base64Data = base64Img.split(',')[1] || base64Img;
  const buffer = Buffer.from(base64Data, 'base64');

  await bucket.putObject(uploadKey, buffer, buffer.length, {
    'content-type': mimetype,
    'upload-time': new Date().toISOString(),
    'origin-filename': encodeURIComponent(filename)
  });

  // Record a TTL entry so the object can be cleaned up after it expires
  const now = new Date();
  if (expiredTime && isAfter(expiredTime, now)) {
    await MongoS3TTL.create({
      minioKey: uploadKey,
      bucketName: bucket.name,
      expiredTime: expiredTime
    });
  }

  return uploadKey;
}
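
// Usage sketch (illustrative; the data URL, teamId, and expiry are made-up values):
//
//   const key = await uploadImage2S3Bucket('public', {
//     base64Img: 'data:image/png;base64,iVBORw0KGgo...',
//     filename: 'logo.png',
//     mimetype: 'image/png',
//     uploadKey: getFileS3Key.avatar({ teamId, filename: 'logo.png' }).fileKey,
//     expiredTime: addDays(new Date(), 7) // optional: schedule cleanup in 7 days
//   });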

const getFormatedFilename = (filename?: string) => {
  if (!filename) {
    return {
      formatedFilename: getNanoid(12),
      extension: ''
    };
  }

  const id = getNanoid(6);
  // Truncate the filename first, then format it
  const truncatedFilename = truncateFilename(filename);
  const extension = path.extname(truncatedFilename); // Includes the dot
  const name = path.basename(truncatedFilename, extension);
  return {
    formatedFilename: `${id}-${name}`,
    extension: extension.replace('.', '')
  };
};
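
// Illustrative output (the 6-char id is random via getNanoid; 'x7Kp2q' is made up):
//
//   getFormatedFilename('a-very-long-quarterly-report-name-for-2024-review-final.pdf')
//   // => { formatedFilename: 'x7Kp2q-a-very-long-quarterly-report-name-for-2024-rev',
//   //      extension: 'pdf' }  // name truncated to 46 chars (50 minus '.pdf')
//   getFormatedFilename(undefined)
//   // => { formatedFilename: '<12-char nanoid>', extension: '' }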

export const getFileS3Key = {
  // Temporary file paths (e.g. for evaluation)
  temp: ({ teamId, filename }: { teamId: string; filename?: string }) => {
    const { formatedFilename, extension } = getFormatedFilename(filename);

    return {
      fileKey: [
        S3Sources.temp,
        teamId,
        `${formatedFilename}${extension ? `.${extension}` : ''}`
      ].join('/'),
      fileParsedPrefix: [S3Sources.temp, teamId, `${formatedFilename}-parsed`].join('/')
    };
  },

  avatar: ({ teamId, filename }: { teamId: string; filename?: string }) => {
    const { formatedFilename, extension } = getFormatedFilename(filename);
    return {
      fileKey: [
        S3Sources.avatar,
        teamId,
        `${formatedFilename}${extension ? `.${extension}` : ''}`
      ].join('/')
    };
  },

  // Keys for images produced when parsing files uploaded in a chat
  chat: ({
    appId,
    chatId,
    uId,
    filename
  }: {
    chatId: string;
    uId: string;
    appId: string;
    filename: string;
  }) => {
    const { formatedFilename, extension } = getFormatedFilename(filename);
    const basePrefix = [S3Sources.chat, appId, uId, chatId].filter(Boolean).join('/');

    return {
      fileKey: [basePrefix, `${formatedFilename}${extension ? `.${extension}` : ''}`].join('/'),
      fileParsedPrefix: [basePrefix, `${formatedFilename}-parsed`].join('/')
    };
  },

  // Keys for images produced when parsing files uploaded to a dataset
  dataset: (params: ParsedFileContentS3KeyParams) => {
    const { datasetId, filename } = params;
    const { formatedFilename, extension } = getFormatedFilename(filename);

    return {
      fileKey: [
        S3Sources.dataset,
        datasetId,
        `${formatedFilename}${extension ? `.${extension}` : ''}`
      ].join('/'),
      fileParsedPrefix: [S3Sources.dataset, datasetId, `${formatedFilename}-parsed`].join('/')
    };
  },

  s3Key: (key: string) => {
    const prefix = `${path.dirname(key)}/${path.basename(key, path.extname(key))}-parsed`;
    return {
      fileKey: key,
      fileParsedPrefix: prefix
    };
  },

  rawText: ({ hash, customPdfParse }: { hash: string; customPdfParse?: boolean }) => {
    return [S3Sources.rawText, `${hash}${customPdfParse ? '-true' : ''}`].join('/');
  }
};
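
// Illustrative key shapes (comment only): `<temp>` and `<chat>` stand for the
// runtime values of S3Sources.temp and S3Sources.chat, which are assumed, not
// hard-coded here; 'x7Kp2q' stands for the random 6-char id:
//
//   getFileS3Key.temp({ teamId: 't1', filename: 'report.pdf' })
//   // => { fileKey: '<temp>/t1/x7Kp2q-report.pdf',
//   //      fileParsedPrefix: '<temp>/t1/x7Kp2q-report-parsed' }
//
//   getFileS3Key.chat({ appId: 'a1', uId: 'u1', chatId: 'c1', filename: 'q.docx' })
//   // => { fileKey: '<chat>/a1/u1/c1/x7Kp2q-q.docx',
//   //      fileParsedPrefix: '<chat>/a1/u1/c1/x7Kp2q-q-parsed' }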

/**
 * Check if a key is a valid S3 object key
 * @param key - The key to check
 * @param source - The source of the key
 * @returns True if the key is a valid S3 object key
 */
export function isS3ObjectKey<T extends keyof typeof S3Sources>(
  key: string | undefined | null,
  source: T
): key is `${T}/${string}` {
  return typeof key === 'string' && key.startsWith(`${S3Sources[source]}/`);
}
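
// Usage sketch (illustrative; `user.avatar` is a hypothetical field): the
// template-literal return type lets callers narrow a plain string before
// treating it as an S3 key:
//
//   const avatar: string | undefined = user.avatar;
//   if (isS3ObjectKey(avatar, 'avatar')) {
//     // here `avatar` has type `avatar/${string}`, i.e. it is known to live
//     // under the avatar source prefix
//   }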

// export const multer = {
//   _storage: multer.diskStorage({
//     filename: (_, file, cb) => {
//       if (!file?.originalname) {
//         cb(new Error('File not found'), '');
//       } else {
//         const ext = path.extname(decodeURIComponent(file.originalname));
//         cb(null, `${getNanoid()}${ext}`);
//       }
//     }
//   }),

//   singleStore(maxFileSize: number = 500) {
//     const fileSize = maxFileSize * 1024 * 1024;

//     return multer({
//       limits: {
//         fileSize
//       },
//       preservePath: true,
//       storage: this._storage
//     }).single('file');
//   },

//   multipleStore(maxFileSize: number = 500) {
//     const fileSize = maxFileSize * 1024 * 1024;

//     return multer({
//       limits: {
//         fileSize
//       },
//       preservePath: true,
//       storage: this._storage
//     }).array('file', global.feConfigs?.uploadFileMaxSize);
//   },

//   resolveFormData({ request, maxFileSize }: { request: NextApiRequest; maxFileSize?: number }) {
//     return new Promise<{
//       data: Record<string, any>;
//       fileMetadata: Express.Multer.File;
//       getBuffer: () => Buffer;
//       getReadStream: () => fs.ReadStream;
//     }>((resolve, reject) => {
//       const handler = this.singleStore(maxFileSize);

//       // @ts-expect-error it can accept a NextApiRequest
//       handler(request, null, (error) => {
//         if (error) {
//           return reject(error);
//         }

//         // @ts-expect-error `file` will be injected by multer
//         const file = request.file as Express.Multer.File;

//         if (!file) {
//           return reject(new Error('File not found'));
//         }

//         const data = (() => {
//           if (!request.body?.data) return {};
//           try {
//             return JSON.parse(request.body.data);
//           } catch {
//             return {};
//           }
//         })();

//         resolve({
//           data,
//           fileMetadata: file,
//           getBuffer: () => fs.readFileSync(file.path),
//           getReadStream: () => fs.createReadStream(file.path)
//         });
//       });
//     });
//   },

//   resolveMultipleFormData({
//     request,
//     maxFileSize
//   }: {
//     request: NextApiRequest;
//     maxFileSize?: number;
//   }) {
//     return new Promise<{
//       data: Record<string, any>;
//       fileMetadata: Array<Express.Multer.File>;
//     }>((resolve, reject) => {
//       const handler = this.multipleStore(maxFileSize);

//       // @ts-expect-error it can accept a NextApiRequest
//       handler(request, null, (error) => {
//         if (error) {
//           return reject(error);
//         }

//         // @ts-expect-error `files` will be injected by multer
//         const files = request.files as Array<Express.Multer.File>;

//         if (!files || files.length === 0) {
//           return reject(new Error('File not found'));
//         }

//         const data = (() => {
//           if (!request.body?.data) return {};
//           try {
//             return JSON.parse(request.body.data);
//           } catch {
//             return {};
//           }
//         })();

//         resolve({
//           data,
//           fileMetadata: files
//         });
//       });
//     });
//   },

//   clearDiskTempFiles(filepaths: string[]) {
//     for (const filepath of filepaths) {
//       fs.unlink(filepath, (_) => {});
//     }
//   }
// };