FastGPT/packages/service/common/middle/tracks/processor.ts
Archer 2ccb5b50c6
Some checks are pending
Document deploy / sync-images (push) Waiting to run
Document deploy / generate-timestamp (push) Blocked by required conditions
Document deploy / build-images (map[domain:https://fastgpt.cn suffix:cn]) (push) Blocked by required conditions
Document deploy / build-images (map[domain:https://fastgpt.io suffix:io]) (push) Blocked by required conditions
Document deploy / update-images (map[deployment:fastgpt-docs domain:https://fastgpt.cn kube_config:KUBE_CONFIG_CN suffix:cn]) (push) Blocked by required conditions
Document deploy / update-images (map[deployment:fastgpt-docs domain:https://fastgpt.io kube_config:KUBE_CONFIG_IO suffix:io]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / get-vars (push) Waiting to run
Build FastGPT images in Personal warehouse / build-fastgpt-images (map[arch:amd64 runs-on:ubuntu-24.04]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / build-fastgpt-images (map[arch:arm64 runs-on:ubuntu-24.04-arm]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / release-fastgpt-images (push) Blocked by required conditions
V4.14.4 features (#6036)
* feat: add query optimize and bill (#6021)

* add query optimize and bill

* perf: query extension

* fix: embe model

* remove log

* remove log

* fix: test

---------

Co-authored-by: xxyyh <2289112474@qq.com>
Co-authored-by: archer <545436317@qq.com>

* feat: notice (#6013)

* feat: record user's language

* feat: notice points/dataset indexes; support count limit; update docker-compose.yml

* fix: ts error

* feat: send auth code i18n

* chore: dataset notice limit

* chore: adjust

* fix: ts

* fix: countLimit race condition; i18n en-prefix locale fallback to en

---------

Co-authored-by: archer <545436317@qq.com>

* perf: comment

* perf: send inform code

* fix: type error (#6029)

* feat: add ip region for chat logs (#6010)

* feat: add ip region for chat logs

* refactor: use Geolite2.mmdb

* fix: export chat logs

* fix: return location directly

* test: add unit test

* perf: log show ip data

* adjust commercial plans (#6008)

* plan frontend

* plan limit

* coupon

* discount coupon

* fix

* type

* fix audit

* type

* plan name

* legacy plan

* track

* feat: add discount coupon

* fix

* fix discount coupon

* openapi

* type

* type

* env

* api type

* fix

* fix: simple agent plugin input & agent dashboard card (#6034)

* refactor: remove gridfs (#6031)

* fix: replace gridfs multer operations with s3 compatible ops

* wip: s3 features

* refactor: remove gridfs

* fix

* perf: mock test

* doc

* doc

* doc

* fix: test

* fix: s3

* fix: mock s3

* remove invalid config

* fix: init query extension

* initv4144 (#6037)

* chore: initv4144

* fix

* version

* fix: new plans (#6039)

* fix: new plans

* qr modal tip

* fix: buffer raw text filename (#6040)

* fix: initv4144 (#6041)

* fix: pay refresh (#6042)

* fix: migration shell

* rename collection

* clear timerlock

* clear timerlock

* perf: faq

* perf: bill schema

* fix: openapi

* doc

* fix: share var render

* feat: delete dataset queue

* plan usage display (#6043)

* plan usage display

* text

* fix

* fix: ts

* perf: remove invalid code

* perf: init shell

* doc

* perf: rename field

* perf: avatar presign

* init

* custom plan text (#6045)

* fix plans

* fix

* fixed

* computed

---------

Co-authored-by: archer <545436317@qq.com>

* init shell

* plan text & price page back button (#6046)

* init

* index

* delete dataset

* delete dataset

* perf: delete dataset

* init

---------

Co-authored-by: YeYuheng <57035043+YYH211@users.noreply.github.com>
Co-authored-by: xxyyh <2289112474@qq.com>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: Roy <whoeverimf5@gmail.com>
Co-authored-by: heheer <heheer@sealos.io>
2025-12-08 01:44:15 +08:00

118 lines
3.2 KiB
TypeScript

import { delay } from '@fastgpt/global/common/system/utils';
import { addLog } from '../../system/log';
import { TrackModel } from './schema';
import { TrackEnum } from '@fastgpt/global/common/middle/tracks/constants';
const batchUpdateTime = Number(process.env.TRACK_BATCH_UPDATE_TIME || 10000);
const getCurrentTenMinuteBoundary = () => {
const now = new Date();
const minutes = now.getMinutes();
const tenMinuteBoundary = Math.floor(minutes / 10) * 10;
const boundary = new Date(now);
boundary.setMinutes(tenMinuteBoundary, 0, 0);
return boundary;
};
const getCurrentMinuteBoundary = () => {
const now = new Date();
const boundary = new Date(now);
boundary.setSeconds(0, 0);
return boundary;
};
export const trackTimerProcess = async () => {
while (true) {
await countTrackTimer();
await delay(batchUpdateTime);
}
};
export const countTrackTimer = async () => {
if (!global.countTrackQueue || global.countTrackQueue.size === 0) {
return;
}
const queuedItems = Array.from(global.countTrackQueue.values());
global.countTrackQueue = new Map();
try {
const currentTenMinuteBoundary = getCurrentTenMinuteBoundary();
const currentMinuteBoundary = getCurrentMinuteBoundary();
const bulkOps = queuedItems
.map(({ event, count, data }) => {
if (event === TrackEnum.datasetSearch) {
const { teamId, datasetId } = data;
return [
{
updateOne: {
filter: {
event,
teamId,
createTime: currentTenMinuteBoundary,
'data.datasetId': datasetId
},
update: [
{
$set: {
event,
teamId,
createTime: { $ifNull: ['$createTime', currentTenMinuteBoundary] },
data: {
datasetId,
count: { $add: [{ $ifNull: ['$data.count', 0] }, count] }
}
}
}
],
upsert: true
}
}
];
}
if (event === TrackEnum.teamChatQPM) {
const { teamId } = data;
return [
{
updateOne: {
filter: {
event,
teamId,
createTime: currentMinuteBoundary
},
update: [
{
$set: {
event,
teamId,
createTime: { $ifNull: ['$createTime', currentMinuteBoundary] },
data: {
requestCount: { $add: [{ $ifNull: ['$data.requestCount', 0] }, count] }
}
}
}
],
upsert: true
}
}
];
}
return [];
})
.flat();
if (bulkOps.length > 0) {
await TrackModel.bulkWrite(bulkOps);
addLog.info('Track timer processing success');
}
} catch (error) {
addLog.error('Track timer processing error', error);
}
};