FastGPT/packages/service/common/file/image/controller.ts
Archer 2ccb5b50c6
Some checks are pending
Document deploy / sync-images (push) Waiting to run
Document deploy / generate-timestamp (push) Blocked by required conditions
Document deploy / build-images (map[domain:https://fastgpt.cn suffix:cn]) (push) Blocked by required conditions
Document deploy / build-images (map[domain:https://fastgpt.io suffix:io]) (push) Blocked by required conditions
Document deploy / update-images (map[deployment:fastgpt-docs domain:https://fastgpt.cn kube_config:KUBE_CONFIG_CN suffix:cn]) (push) Blocked by required conditions
Document deploy / update-images (map[deployment:fastgpt-docs domain:https://fastgpt.io kube_config:KUBE_CONFIG_IO suffix:io]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / get-vars (push) Waiting to run
Build FastGPT images in Personal warehouse / build-fastgpt-images (map[arch:amd64 runs-on:ubuntu-24.04]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / build-fastgpt-images (map[arch:arm64 runs-on:ubuntu-24.04-arm]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / release-fastgpt-images (push) Blocked by required conditions
V4.14.4 features (#6036)
* feat: add query optimize and bill (#6021)

* add query optimize and bill

* perf: query extension

* fix: embe model

* remove log

* remove log

* fix: test

---------

Co-authored-by: xxyyh <2289112474@qq.com>
Co-authored-by: archer <545436317@qq.com>

* feat: notice (#6013)

* feat: record user's language

* feat: notice points/dataset indexes; support count limit; update docker-compose.yml

* fix: ts error

* feat: send auth code i18n

* chore: dataset notice limit

* chore: adjust

* fix: ts

* fix: countLimit race condition; i18n en-prefix locale fallback to en

---------

Co-authored-by: archer <545436317@qq.com>

* perf: comment

* perf: send inform code

* fix: type error (#6029)

* feat: add ip region for chat logs (#6010)

* feat: add ip region for chat logs

* refactor: use Geolite2.mmdb

* fix: export chat logs

* fix: return location directly

* test: add unit test

* perf: log show ip data

* adjust commercial plans (#6008)

* plan frontend

* plan limit

* coupon

* discount coupon

* fix

* type

* fix audit

* type

* plan name

* legacy plan

* track

* feat: add discount coupon

* fix

* fix discount coupon

* openapi

* type

* type

* env

* api type

* fix

* fix: simple agent plugin input & agent dashboard card (#6034)

* refactor: remove gridfs (#6031)

* fix: replace gridfs multer operations with s3 compatible ops

* wip: s3 features

* refactor: remove gridfs

* fix

* perf: mock test

* doc

* doc

* doc

* fix: test

* fix: s3

* fix: mock s3

* remove invalid config

* fix: init query extension

* initv4144 (#6037)

* chore: initv4144

* fix

* version

* fix: new plans (#6039)

* fix: new plans

* qr modal tip

* fix: buffer raw text filename (#6040)

* fix: initv4144 (#6041)

* fix: pay refresh (#6042)

* fix: migration shell

* rename collection

* clear timerlock

* clear timerlock

* perf: faq

* perf: bill schema

* fix: openapi

* doc

* fix: share var render

* feat: delete dataset queue

* plan usage display (#6043)

* plan usage display

* text

* fix

* fix: ts

* perf: remove invalid code

* perf: init shell

* doc

* perf: rename field

* perf: avatar presign

* init

* custom plan text (#6045)

* fix plans

* fix

* fixed

* computed

---------

Co-authored-by: archer <545436317@qq.com>

* init shell

* plan text & price page back button (#6046)

* init

* index

* delete dataset

* delete dataset

* perf: delete dataset

* init

---------

Co-authored-by: YeYuheng <57035043+YYH211@users.noreply.github.com>
Co-authored-by: xxyyh <2289112474@qq.com>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: Roy <whoeverimf5@gmail.com>
Co-authored-by: heheer <heheer@sealos.io>
2025-12-08 01:44:15 +08:00

184 lines
4.9 KiB
TypeScript

import { type preUploadImgProps } from '@fastgpt/global/common/file/api';
import { imageBaseUrl } from '@fastgpt/global/common/file/image/constants';
import { MongoImage } from './schema';
import { type ClientSession, Types } from '../../../common/mongo';
import { guessBase64ImageType } from '../utils';
import { readFromSecondary } from '../../mongo/utils';
import { addHours } from 'date-fns';
import { imageFileType } from '@fastgpt/global/common/file/constants';
import { retryFn } from '@fastgpt/global/common/system/utils';
import { UserError } from '@fastgpt/global/common/error/utils';
import { S3Sources } from '../../s3/type';
import { getS3AvatarSource } from '../../s3/sources/avatar';
import { isS3ObjectKey } from '../../s3/utils';
import path from 'path';
import { getNanoid } from '@fastgpt/global/common/string/tools';
// Maximum accepted length of the raw base64 payload (~12 MB of base64 text;
// the decoded image is smaller — NOTE(review): this limits string length, not decoded bytes).
export const maxImgSize = 1024 * 1024 * 12;
// Captures the image subtype from a data-URL prefix, e.g. "data:image/png;base64" -> "png".
const base64MimeRegex = /data:image\/([^\)]+);base64/;
/**
 * Store a base64 data-URL image in MongoDB and return its public URL.
 *
 * @param base64Img - full data URL, e.g. "data:image/png;base64,...."
 * @param teamId - owning team id
 * @param metadata - extra metadata persisted alongside the image (mime is merged in)
 * @param shareId - optional share scope stored on the document
 * @param forever - when false (default) the stored image expires after 1 hour
 * @returns public URL of the stored image: "<base>/<imageBaseUrl>/<id>.<ext>"
 * @throws UserError when the payload is too large, the data URL is malformed,
 *         or the image type is not in the allowed `imageFileType` list
 */
export async function uploadMongoImg({
  base64Img,
  teamId,
  metadata,
  shareId,
  forever = false
}: preUploadImgProps & {
  base64Img: string;
  teamId: string;
  forever?: boolean; // fix: was the boxed wrapper type `Boolean`; primitive `boolean` is correct
}) {
  // NOTE: compares the base64 text length, not the decoded byte size.
  if (base64Img.length > maxImgSize) {
    return Promise.reject(new UserError('Image too large'));
  }

  const [base64Mime, base64Data] = base64Img.split(',');

  // Check if mime type is valid
  if (!base64MimeRegex.test(base64Mime)) {
    return Promise.reject(new UserError('Invalid image base64'));
  }

  // Fix: fallback used to be 'image/jpeg', which produced "image/image/jpeg"
  // when combined with the "image/" template prefix.
  const mime = `image/${base64Mime.match(base64MimeRegex)?.[1] ?? 'jpeg'}`;
  const binary = Buffer.from(base64Data, 'base64');

  // Normalize the extension ("x-png" -> "png") and validate it against the allow-list.
  let extension = mime.split('/')[1];
  if (extension.startsWith('x-')) {
    extension = extension.substring(2); // Remove 'x-' prefix
  }
  if (!extension || !imageFileType.includes(`.${extension}`)) {
    return Promise.reject(new UserError(`Invalid image file type: ${mime}`));
  }

  // Temporary images get a 1-hour TTL; permanent ones carry no expiredTime.
  const { _id } = await retryFn(() =>
    MongoImage.create({
      teamId,
      binary,
      metadata: Object.assign({ mime }, metadata),
      shareId,
      expiredTime: forever ? undefined : addHours(new Date(), 1)
    })
  );

  return `${process.env.NEXT_PUBLIC_BASE_URL || ''}${imageBaseUrl}${String(_id)}.${extension}`;
}
/**
 * Duplicate an avatar image so the copy is owned by `teamId`.
 *
 * Handles both storage backends:
 * - S3 avatar object key: copies the object under a fresh random filename.
 * - Mongo-backed image URL ("<...>/<ObjectId>.<ext>"): clones the document.
 *
 * @param imageUrl - S3 key or Mongo image URL; returned as-is when it matches
 *                   neither backend (e.g. an external URL) or the source doc is gone
 * @param temporary - forwarded to the S3 copy (presumably controls TTL — confirm in source)
 * @param session - optional Mongo transaction session (Mongo branch only)
 * @returns the new S3 key / Mongo image URL, the original `imageUrl` as a
 *          passthrough, or undefined when `imageUrl` is empty
 */
export const copyAvatarImage = async ({
  teamId,
  imageUrl,
  temporary,
  session
}: {
  teamId: string;
  imageUrl: string;
  temporary: boolean;
  session?: ClientSession;
}) => {
  if (!imageUrl) return;

  const avatarSource = getS3AvatarSource();

  // S3 branch: strip the source prefix and check the remainder is an avatar key.
  // (Fix: drop redundant `imageUrl?.` — imageUrl is known truthy here.)
  if (isS3ObjectKey(imageUrl.slice(avatarSource.prefix.length), 'avatar')) {
    // New filename: "<nanoid>-<original name without its old nanoid prefix>".
    const filename = (() => {
      const last = imageUrl.split('/').pop();
      if (!last) return getNanoid(6).concat(path.extname(imageUrl));
      const firstDashIndex = last.indexOf('-');
      return `${getNanoid(6)}-${firstDashIndex === -1 ? last : last.slice(firstDashIndex + 1)}`;
    })();
    // Fix: reuse the cached `avatarSource` instead of calling getS3AvatarSource() again.
    const key = await avatarSource.copyAvatar({
      key: imageUrl,
      teamId,
      filename,
      temporary
    });
    return key;
  }

  // Mongo branch: the URL's last segment is "<ObjectId>.<ext>".
  const paths = imageUrl.split('/');
  const name = paths[paths.length - 1];
  const id = name.split('.')[0];
  if (id && Types.ObjectId.isValid(id)) {
    const image = await MongoImage.findOne(
      {
        _id: id,
        teamId
      },
      undefined,
      {
        session
      }
    );
    // Source document missing — return the original URL unchanged.
    if (!image) return imageUrl;

    const [newImage] = await MongoImage.create(
      [
        {
          teamId,
          binary: image.binary,
          metadata: image.metadata
        }
      ],
      {
        session,
        ordered: true
      }
    );
    return `${process.env.NEXT_PUBLIC_BASE_URL || ''}${imageBaseUrl}${String(newImage._id)}.${image.metadata?.mime?.split('/')[1]}`;
  }

  // Neither backend matched — pass the URL through untouched.
  return imageUrl;
};
export const removeImageByPath = (path?: string, session?: ClientSession) => {
if (!path) return;
const paths = path.split('/');
const name = paths[paths.length - 1];
if (!name) return;
const id = name.split('.')[0];
if (!id) return;
if (Types.ObjectId.isValid(id)) {
return MongoImage.deleteOne({ _id: id }, { session });
} else if (isS3ObjectKey(path?.slice(getS3AvatarSource().prefix.length), 'avatar')) {
return getS3AvatarSource().deleteAvatar(path, session);
}
};
/**
 * Load a Mongo-stored image by id (an optional ".ext" suffix on the id is
 * ignored). Reads from a secondary replica.
 *
 * @returns the raw binary plus its mime type; when no mime was stored, the
 *          type is guessed from the base64-encoded content
 * @throws UserError when no image document exists for the id
 */
export async function readMongoImg({ id }: { id: string }) {
  // URLs embed the file extension after the ObjectId — strip it off.
  const imageId = id.replace(/\.[^/.]+$/, '');

  const image = await MongoImage.findById(imageId, undefined, {
    ...readFromSecondary
  });
  if (!image) {
    return Promise.reject(new UserError('Image not found'));
  }

  const { binary, metadata } = image;
  return {
    binary,
    mime: metadata?.mime ?? guessBase64ImageType(binary.toString('base64'))
  };
}
/**
 * Bulk-delete a team's images whose `metadata.relatedId` matches any of the
 * given ids. Resolves to undefined (no-op) when the id list is empty.
 *
 * @param session - optional Mongo transaction session
 */
export async function delImgByRelatedId({
  teamId,
  relateIds,
  session
}: {
  teamId: string;
  relateIds: string[];
  session?: ClientSession;
}) {
  if (relateIds.length === 0) return;

  // Coerce ids to strings defensively so they match the stored metadata values.
  const relatedIdList = relateIds.map((relatedId) => String(relatedId));

  return MongoImage.deleteMany(
    {
      teamId,
      'metadata.relatedId': { $in: relatedIdList }
    },
    { session }
  );
}