Mirror of https://github.com/labring/FastGPT.git (synced 2025-12-25 20:02:47 +00:00)
* perf: faq
* index
* delete dataset
* delete dataset
* perf: delete dataset
* init
* fix: faq
* refresh
* empty tip
* perf: delete type
* fix: some bugs (#6071)
* fix: publish channel doc link
* fix: checkbox disable hover style
* fix: huggingface.svg missing; update doc
* chore: update doc
* fix: typo
* fix: export log dateend; feat: file selector render (#6072)
* fix: export log dateend
* feat: file selector render
* perf: s3 controller
* team qpm limit & plan tracks (#6066)
* team qpm limit & plan tracks
* api entry qpm
* perf: computed days
* Revert "api entry qpm"
This reverts commit 1210c07217.
* perf: code
* system qpm limit
* system qpm limit
---------
Co-authored-by: archer <545436317@qq.com>
* perf: track
* remove export chat test
* doc
* feat: global agent (#6057)
* feat: global agent
* fix: agent
* fix: order display
* chore
* feat: error page log
* fix: var update
---------
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: Roy <whoeverimf5@gmail.com>
import { getGlobalRedisConnection } from './index';
import { addLog } from '../system/log';
import { retryFn } from '@fastgpt/global/common/system/utils';

// All cache entries share this prefix so they are easy to identify in Redis.
const redisPrefix = 'cache:';
const getCacheKey = (key: string) => `${redisPrefix}${key}`;

export enum CacheKeyEnum {
  team_vector_count = 'team_vector_count',
  team_point_surplus = 'team_point_surplus',
  team_point_total = 'team_point_total',
  team_qpm_limit = 'team_qpm_limit'
}

// TTL for each cache key, in seconds
export enum CacheKeyEnumTime {
  team_vector_count = 30 * 60,
  team_point_surplus = 1 * 60,
  team_point_total = 1 * 60,
  team_qpm_limit = 60 * 60
}

// Write a value to the cache, optionally with an expiry; retried on failure.
export const setRedisCache = async (
  key: string,
  data: string | Buffer | number,
  expireSeconds?: number
) => {
  return await retryFn(async () => {
    try {
      const redis = getGlobalRedisConnection();
      if (expireSeconds) {
        await redis.set(getCacheKey(key), data, 'EX', expireSeconds);
      } else {
        await redis.set(getCacheKey(key), data);
      }
    } catch (error) {
      addLog.error('Set cache error:', error);
      return Promise.reject(error);
    }
  });
};

export const getRedisCache = async (key: string) => {
  const redis = getGlobalRedisConnection();
  return retryFn(() => redis.get(getCacheKey(key)));
};

// Add a numeric delta to a cached value (INCRBYFLOAT; missing keys start at 0)
export const incrValueToCache = async (key: string, increment: number) => {
  if (typeof increment !== 'number' || increment === 0) return;
  const redis = getGlobalRedisConnection();
  try {
    await retryFn(() => redis.incrbyfloat(getCacheKey(key), increment));
  } catch (error) {
    // Best-effort update: errors are swallowed after retries.
  }
};

export const delRedisCache = async (key: string) => {
  const redis = getGlobalRedisConnection();
  await retryFn(() => redis.del(getCacheKey(key)));
};

// Append a value to an existing string key, optionally refreshing its expiry.
export const appendRedisCache = async (
  key: string,
  value: string | Buffer | number,
  expireSeconds?: number
) => {
  try {
    const redis = getGlobalRedisConnection();
    await retryFn(() => redis.append(getCacheKey(key), value));
    if (expireSeconds) {
      await redis.expire(getCacheKey(key), expireSeconds);
    }
  } catch (error) {
    addLog.error('Append cache error:', error);
    return Promise.reject(error);
  }
};
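The file above only exposes cache primitives; below is a minimal usage sketch (not part of the repository) of how a caller might combine them for the team QPM limit mentioned in the commit message. The './cache' import path, the readTeamQpmLimitFromDb loader, and the getCachedTeamQpmLimit wrapper are assumptions for illustration; FastGPT's actual callers may differ.

import {
  CacheKeyEnum,
  CacheKeyEnumTime,
  getRedisCache,
  setRedisCache
} from './cache'; // assumed path to the file above

// Hypothetical loader for the real per-team limit, shown only for illustration.
declare function readTeamQpmLimitFromDb(teamId: string): Promise<number>;

// Read-through cache: return the cached limit when present, otherwise load it
// from the source of truth and write it back with the TTL that matches the key.
export const getCachedTeamQpmLimit = async (teamId: string) => {
  const key = `${CacheKeyEnum.team_qpm_limit}:${teamId}`;

  const cached = await getRedisCache(key);
  if (cached !== null) return Number(cached);

  const limit = await readTeamQpmLimitFromDb(teamId);
  await setRedisCache(key, limit, CacheKeyEnumTime.team_qpm_limit);
  return limit;
};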