FastGPT/test/cases/service/common/vectorDB/controller.test.ts
Archer 2ccb5b50c6
Some checks are pending
Document deploy / sync-images (push) Waiting to run
Document deploy / generate-timestamp (push) Blocked by required conditions
Document deploy / build-images (map[domain:https://fastgpt.cn suffix:cn]) (push) Blocked by required conditions
Document deploy / build-images (map[domain:https://fastgpt.io suffix:io]) (push) Blocked by required conditions
Document deploy / update-images (map[deployment:fastgpt-docs domain:https://fastgpt.cn kube_config:KUBE_CONFIG_CN suffix:cn]) (push) Blocked by required conditions
Document deploy / update-images (map[deployment:fastgpt-docs domain:https://fastgpt.io kube_config:KUBE_CONFIG_IO suffix:io]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / get-vars (push) Waiting to run
Build FastGPT images in Personal warehouse / build-fastgpt-images (map[arch:amd64 runs-on:ubuntu-24.04]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / build-fastgpt-images (map[arch:arm64 runs-on:ubuntu-24.04-arm]) (push) Blocked by required conditions
Build FastGPT images in Personal warehouse / release-fastgpt-images (push) Blocked by required conditions
V4.14.4 features (#6036)
* feat: add query optimize and bill (#6021)

* add query optimize and bill

* perf: query extension

* fix: embe model

* remove log

* remove log

* fix: test

---------

Co-authored-by: xxyyh <2289112474@qq.com>
Co-authored-by: archer <545436317@qq.com>

* feat: notice (#6013)

* feat: record user's language

* feat: notice points/dataset indexes; support count limit; update docker-compose.yml

* fix: ts error

* feat: send auth code i18n

* chore: dataset notice limit

* chore: adjust

* fix: ts

* fix: countLimit race condition; i18n en-prefix locale fallback to en

---------

Co-authored-by: archer <545436317@qq.com>

* perf: comment

* perf: send inform code

* fix: type error (#6029)

* feat: add ip region for chat logs (#6010)

* feat: add ip region for chat logs

* refactor: use Geolite2.mmdb

* fix: export chat logs

* fix: return location directly

* test: add unit test

* perf: log show ip data

* adjust commercial plans (#6008)

* plan frontend

* plan limit

* coupon

* discount coupon

* fix

* type

* fix audit

* type

* plan name

* legacy plan

* track

* feat: add discount coupon

* fix

* fix discount coupon

* openapi

* type

* type

* env

* api type

* fix

* fix: simple agent plugin input & agent dashboard card (#6034)

* refactor: remove gridfs (#6031)

* fix: replace gridfs multer operations with s3 compatible ops

* wip: s3 features

* refactor: remove gridfs

* fix

* perf: mock test

* doc

* doc

* doc

* fix: test

* fix: s3

* fix: mock s3

* remove invalid config

* fix: init query extension

* initv4144 (#6037)

* chore: initv4144

* fix

* version

* fix: new plans (#6039)

* fix: new plans

* qr modal tip

* fix: buffer raw text filename (#6040)

* fix: initv4144 (#6041)

* fix: pay refresh (#6042)

* fix: migration shell

* rename collection

* clear timerlock

* clear timerlock

* perf: faq

* perf: bill schema

* fix: openapi

* doc

* fix: share var render

* feat: delete dataset queue

* plan usage display (#6043)

* plan usage display

* text

* fix

* fix: ts

* perf: remove invalid code

* perf: init shell

* doc

* perf: rename field

* perf: avatar presign

* init

* custom plan text (#6045)

* fix plans

* fix

* fixed

* computed

---------

Co-authored-by: archer <545436317@qq.com>

* init shell

* plan text & price page back button (#6046)

* init

* index

* delete dataset

* delete dataset

* perf: delete dataset

* init

---------

Co-authored-by: YeYuheng <57035043+YYH211@users.noreply.github.com>
Co-authored-by: xxyyh <2289112474@qq.com>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: Roy <whoeverimf5@gmail.com>
Co-authored-by: heheer <heheer@sealos.io>
2025-12-08 01:44:15 +08:00

325 lines
9.1 KiB
TypeScript

import { describe, expect, it, vi, beforeEach, afterEach } from 'vitest';
import {
mockVectorInsert,
mockVectorDelete,
mockVectorEmbRecall,
mockVectorInit,
mockGetVectorDataByTime,
mockGetVectorCountByTeamId,
mockGetVectorCountByDatasetId,
mockGetVectorCountByCollectionId,
resetVectorMocks
} from '@test/mocks/common/vector';
import { mockGetVectorsByText } from '@test/mocks/core/ai/embedding';
// Import controller functions after mocks are set up
import {
initVectorStore,
recallFromVectorStore,
getVectorDataByTime,
getVectorCountByTeamId,
getVectorCountByDatasetId,
getVectorCountByCollectionId,
insertDatasetDataVector,
deleteDatasetDataVector
} from '@fastgpt/service/common/vectorDB/controller';
// Mock redis cache functions.
// NOTE: vi.mock calls are hoisted above the imports, but the factory body is
// only invoked lazily when the mocked module is first imported, so it is safe
// for it to reference these module-level vi.fn() spies.
const mockGetRedisCache = vi.fn();
const mockSetRedisCache = vi.fn();
const mockDelRedisCache = vi.fn();
const mockIncrValueToCache = vi.fn();
vi.mock('@fastgpt/service/common/redis/cache', () => ({
  // Delegate through rest-arg wrappers (rather than exposing the spies
  // directly) so individual tests can re-stub the spies at any point.
  setRedisCache: (...args: any[]) => mockSetRedisCache(...args),
  getRedisCache: (...args: any[]) => mockGetRedisCache(...args),
  delRedisCache: (...args: any[]) => mockDelRedisCache(...args),
  incrValueToCache: (...args: any[]) => mockIncrValueToCache(...args),
  // Stand-ins for the enums the controller reads from this module.
  CacheKeyEnum: {
    team_vector_count: 'team_vector_count',
    team_point_surplus: 'team_point_surplus',
    team_point_total: 'team_point_total'
  },
  // Cache TTLs (presumably seconds) — assumed to mirror the real module;
  // TODO(review): confirm against @fastgpt/service/common/redis/cache.
  CacheKeyEnumTime: {
    team_vector_count: 1800,
    team_point_surplus: 60,
    team_point_total: 60
  }
}));
describe('VectorDB Controller', () => {
  beforeEach(() => {
    // Start every case from a clean slate so stubbed implementations and
    // call counts never bleed between tests.
    resetVectorMocks();
    mockGetRedisCache.mockReset();
    mockSetRedisCache.mockReset();
    mockDelRedisCache.mockReset();
    mockIncrValueToCache.mockReset();
    mockGetVectorsByText.mockClear();
  });

  describe('initVectorStore', () => {
    it('should call Vector.init', async () => {
      await initVectorStore();

      expect(mockVectorInit).toHaveBeenCalled();
    });
  });

  describe('recallFromVectorStore', () => {
    it('should call Vector.embRecall with correct props', async () => {
      const recallProps = {
        teamId: 'team_123',
        datasetIds: ['dataset_1', 'dataset_2'],
        vector: [0.1, 0.2, 0.3],
        limit: 10,
        forbidCollectionIdList: ['col_forbidden']
      };

      const recallResult = await recallFromVectorStore(recallProps);

      // Props are forwarded untouched and the mock's canned hits come back.
      expect(mockVectorEmbRecall).toHaveBeenCalledWith(recallProps);
      expect(recallResult).toEqual({
        results: [
          { id: '1', collectionId: 'col_1', score: 0.95 },
          { id: '2', collectionId: 'col_2', score: 0.85 }
        ]
      });
    });

    it('should handle filterCollectionIdList', async () => {
      const filteredProps = {
        teamId: 'team_123',
        datasetIds: ['dataset_1'],
        vector: [0.1, 0.2],
        limit: 5,
        forbidCollectionIdList: [],
        filterCollectionIdList: ['col_1', 'col_2']
      };

      await recallFromVectorStore(filteredProps);

      expect(mockVectorEmbRecall).toHaveBeenCalledWith(filteredProps);
    });
  });

  describe('getVectorDataByTime', () => {
    it('should call Vector.getVectorDataByTime with correct date range', async () => {
      const start = new Date('2024-01-01');
      const end = new Date('2024-01-31');

      const rows = await getVectorDataByTime(start, end);

      expect(mockGetVectorDataByTime).toHaveBeenCalledWith(start, end);
      expect(rows).toEqual([
        { id: '1', teamId: 'team_1', datasetId: 'dataset_1' },
        { id: '2', teamId: 'team_1', datasetId: 'dataset_2' }
      ]);
    });
  });

  describe('getVectorCountByTeamId', () => {
    it('should return cached count if available', async () => {
      mockGetRedisCache.mockResolvedValue('150');

      const count = await getVectorCountByTeamId('team_123');

      // A cache hit short-circuits the vector store entirely.
      expect(count).toBe(150);
      expect(mockGetVectorCountByTeamId).not.toHaveBeenCalled();
    });

    it('should fetch from Vector and cache if no cache exists', async () => {
      mockGetRedisCache.mockResolvedValue(null);
      mockGetVectorCountByTeamId.mockResolvedValue(200);

      const count = await getVectorCountByTeamId('team_456');

      expect(count).toBe(200);
      expect(mockGetVectorCountByTeamId).toHaveBeenCalledWith('team_456');
    });

    it('should handle undefined cache value', async () => {
      // undefined (as opposed to null) must also be treated as a cache miss.
      mockGetRedisCache.mockResolvedValue(undefined);
      mockGetVectorCountByTeamId.mockResolvedValue(50);

      const count = await getVectorCountByTeamId('team_789');

      expect(count).toBe(50);
      expect(mockGetVectorCountByTeamId).toHaveBeenCalled();
    });
  });

  describe('getVectorCountByDatasetId', () => {
    it('should call Vector.getVectorCountByDatasetId', async () => {
      const count = await getVectorCountByDatasetId('team_1', 'dataset_1');

      expect(mockGetVectorCountByDatasetId).toHaveBeenCalledWith('team_1', 'dataset_1');
      expect(count).toBe(50);
    });
  });

  describe('getVectorCountByCollectionId', () => {
    it('should call Vector.getVectorCountByCollectionId', async () => {
      const count = await getVectorCountByCollectionId('team_1', 'dataset_1', 'col_1');

      expect(mockGetVectorCountByCollectionId).toHaveBeenCalledWith('team_1', 'dataset_1', 'col_1');
      expect(count).toBe(25);
    });
  });

  describe('insertDatasetDataVector', () => {
    // Minimal embedding-model fixture; presumably only a subset of these
    // fields is read by the controller.
    const mockModel = {
      model: 'text-embedding-ada-002',
      name: 'text-embedding-ada-002',
      charsPointsPrice: 0,
      maxToken: 8192,
      weight: 100,
      defaultToken: 512,
      dbConfig: {},
      queryExtensionModel: ''
    };

    it('should generate embeddings and insert vectors', async () => {
      const embeddings = [
        [0.1, 0.2],
        [0.3, 0.4]
      ];
      mockGetVectorsByText.mockResolvedValue({ tokens: 100, vectors: embeddings });
      mockVectorInsert.mockResolvedValue({ insertIds: ['id_1', 'id_2'] });

      const inserted = await insertDatasetDataVector({
        teamId: 'team_123',
        datasetId: 'dataset_456',
        collectionId: 'col_789',
        inputs: ['hello world', 'test text'],
        model: mockModel as any
      });

      // Embedding request carries the model, raw inputs and the 'db' type.
      expect(mockGetVectorsByText).toHaveBeenCalledWith({
        model: mockModel,
        input: ['hello world', 'test text'],
        type: 'db'
      });
      // The produced vectors are what gets written to the store.
      expect(mockVectorInsert).toHaveBeenCalledWith({
        teamId: 'team_123',
        datasetId: 'dataset_456',
        collectionId: 'col_789',
        vectors: embeddings
      });
      expect(inserted).toEqual({ tokens: 100, insertIds: ['id_1', 'id_2'] });
    });

    it('should increment team vector cache', async () => {
      mockGetVectorsByText.mockResolvedValue({ tokens: 50, vectors: [[0.1]] });
      mockVectorInsert.mockResolvedValue({ insertIds: ['id_1'] });

      await insertDatasetDataVector({
        teamId: 'team_abc',
        datasetId: 'dataset_def',
        collectionId: 'col_ghi',
        inputs: ['single input'],
        model: mockModel as any
      });

      // The cache bump is fire-and-forget, so yield to the timer queue
      // before asserting it happened.
      await new Promise((resolve) => setTimeout(resolve, 10));
      expect(mockIncrValueToCache).toHaveBeenCalled();
    });

    it('should handle empty inputs', async () => {
      mockGetVectorsByText.mockResolvedValue({ tokens: 0, vectors: [] });
      mockVectorInsert.mockResolvedValue({ insertIds: [] });

      const inserted = await insertDatasetDataVector({
        teamId: 'team_123',
        datasetId: 'dataset_456',
        collectionId: 'col_789',
        inputs: [],
        model: mockModel as any
      });

      expect(inserted).toEqual({ tokens: 0, insertIds: [] });
    });
  });

  describe('deleteDatasetDataVector', () => {
    it('should delete by single id', async () => {
      const deleteProps = { teamId: 'team_123', id: 'vector_id_1' };

      await deleteDatasetDataVector(deleteProps);

      expect(mockVectorDelete).toHaveBeenCalledWith(deleteProps);
    });

    it('should delete by datasetIds', async () => {
      const deleteProps = { teamId: 'team_123', datasetIds: ['dataset_1', 'dataset_2'] };

      await deleteDatasetDataVector(deleteProps);

      expect(mockVectorDelete).toHaveBeenCalledWith(deleteProps);
    });

    it('should delete by datasetIds and collectionIds', async () => {
      const deleteProps = {
        teamId: 'team_123',
        datasetIds: ['dataset_1'],
        collectionIds: ['col_1', 'col_2']
      };

      await deleteDatasetDataVector(deleteProps);

      expect(mockVectorDelete).toHaveBeenCalledWith(deleteProps);
    });

    it('should delete by idList', async () => {
      const deleteProps = { teamId: 'team_123', idList: ['id_1', 'id_2', 'id_3'] };

      await deleteDatasetDataVector(deleteProps);

      expect(mockVectorDelete).toHaveBeenCalledWith(deleteProps);
    });

    it('should call delete and return result', async () => {
      mockVectorDelete.mockResolvedValue({ deletedCount: 5 });
      const deleteProps = { teamId: 'team_cache_test', id: 'some_id' };

      const deleted = await deleteDatasetDataVector(deleteProps);

      expect(mockVectorDelete).toHaveBeenCalledWith(deleteProps);
      expect(deleted).toEqual({ deletedCount: 5 });
    });
  });
});