/* pg vector crud */
import { DatasetVectorTableName } from '../constants';
import { delay, retryFn } from '@fastgpt/global/common/system/utils';
import { PgClient, connectPg } from './controller';
import { type PgSearchRawType } from '@fastgpt/global/core/dataset/api';
import type {
  DelDatasetVectorCtrlProps,
  EmbeddingRecallCtrlProps,
  EmbeddingRecallResponse,
  InsertVectorControllerProps
} from '../controller.d';
import dayjs from 'dayjs';
import { addLog } from '../../system/log';
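
/**
 * Vector CRUD controller backed by PostgreSQL + pgvector.
 * Stores 1536-dimension embeddings keyed by (team, dataset, collection)
 * and performs approximate nearest-neighbor recall over an HNSW index.
 */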
export class PgVectorCtrl {
  constructor() {}
  init = async () => {
    try {
      await connectPg();
      await PgClient.query(`
        CREATE EXTENSION IF NOT EXISTS vector;
        CREATE TABLE IF NOT EXISTS ${DatasetVectorTableName} (
            id BIGSERIAL PRIMARY KEY,
            vector VECTOR(1536) NOT NULL,
            team_id VARCHAR(50) NOT NULL,
            dataset_id VARCHAR(50) NOT NULL,
            collection_id VARCHAR(50) NOT NULL,
            createtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );
      `);

      // HNSW index for inner-product ANN search (paired with the <#> operator in embRecall)
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS vector_index ON ${DatasetVectorTableName} USING hnsw (vector vector_ip_ops) WITH (m = 32, ef_construction = 128);`
      );
      // B-tree indexes for tenant filtering and time-range scans
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS team_dataset_collection_index ON ${DatasetVectorTableName} USING btree(team_id, dataset_id, collection_id);`
      );
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS create_time_index ON ${DatasetVectorTableName} USING btree(createtime);`
      );
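
      // Each CREATE INDEX CONCURRENTLY runs as its own query on purpose:
      // Postgres does not allow CONCURRENTLY inside a transaction block, and a
      // multi-statement query string is executed in one implicit transaction.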

      // Suggested autovacuum tuning at ~100k rows (10w):
      // await PgClient.query(`
      //   ALTER TABLE modeldata SET (
      //     autovacuum_vacuum_scale_factor = 0.1,
      //     autovacuum_analyze_scale_factor = 0.05,
      //     autovacuum_vacuum_threshold = 50,
      //     autovacuum_analyze_threshold = 50,
      //     autovacuum_vacuum_cost_delay = 20,
      //     autovacuum_vacuum_cost_limit = 200
      // );`);

      // Suggested autovacuum tuning at ~1M rows (100w):
      // await PgClient.query(`
      //   ALTER TABLE modeldata SET (
      //     autovacuum_vacuum_scale_factor = 0.01,
      //     autovacuum_analyze_scale_factor = 0.02,
      //     autovacuum_vacuum_threshold = 1000,
      //     autovacuum_analyze_threshold = 1000,
      //     autovacuum_vacuum_cost_delay = 10,
      //     autovacuum_vacuum_cost_limit = 2000
      // );`)

      addLog.info('init pg successful');
    } catch (error) {
      addLog.error('init pg error', error);
    }
  };

  insert = async (props: InsertVectorControllerProps): Promise<{ insertIds: string[] }> => {
    const { teamId, datasetId, collectionId, vectors } = props;

    // Format each embedding as a pgvector literal: '[v1,v2,...]'
    const values = vectors.map((vector) => [
      { key: 'vector', value: `[${vector}]` },
      { key: 'team_id', value: String(teamId) },
      { key: 'dataset_id', value: String(datasetId) },
      { key: 'collection_id', value: String(collectionId) }
    ]);

    const { rowCount, rows } = await PgClient.insert(DatasetVectorTableName, {
      values
    });

    if (rowCount === 0) {
      return Promise.reject('insertDatasetData: no insert');
    }

    return {
      insertIds: rows.map((row) => row.id)
    };
  };

  delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
    const { teamId } = props;

    const teamIdWhere = `team_id='${String(teamId)}' AND`;

    // Build the WHERE clause from whichever selector the caller provided
    const where = await (() => {
      if ('id' in props && props.id) return `${teamIdWhere} id=${props.id}`;

      if ('datasetIds' in props && props.datasetIds) {
        const datasetIdWhere = `dataset_id IN (${props.datasetIds
          .map((id) => `'${String(id)}'`)
          .join(',')})`;

        if ('collectionIds' in props && props.collectionIds) {
          return `${teamIdWhere} ${datasetIdWhere} AND collection_id IN (${props.collectionIds
            .map((id) => `'${String(id)}'`)
            .join(',')})`;
        }

        return `${teamIdWhere} ${datasetIdWhere}`;
      }

      if ('idList' in props && Array.isArray(props.idList)) {
        if (props.idList.length === 0) return;
        return `${teamIdWhere} id IN (${props.idList.map((id) => String(id)).join(',')})`;
      }

      return Promise.reject('deleteDatasetData: no where');
    })();

    if (!where) return;

    await PgClient.delete(DatasetVectorTableName, {
      where: [where]
    });
  };
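
  // NOTE: ids and tenant keys are interpolated directly into SQL throughout this
  // controller. The assumed invariant is that they are server-generated
  // ObjectId-style strings, never raw user input, which is why they are not
  // passed as bound parameters.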

  embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
    const { teamId, datasetIds, vector, limit, forbidCollectionIdList, filterCollectionIdList } =
      props;

    // Get forbid collection: drop ids that also appear in the filter list
    const formatForbidCollectionIdList = (() => {
      if (!filterCollectionIdList) return forbidCollectionIdList;
      const list = forbidCollectionIdList
        .map((id) => String(id))
        .filter((id) => !filterCollectionIdList.includes(id));
      return list;
    })();
    const forbidCollectionSql =
      formatForbidCollectionIdList.length > 0
        ? `AND collection_id NOT IN (${formatForbidCollectionIdList.map((id) => `'${id}'`).join(',')})`
        : '';

    // Filter by collectionId
    const formatFilterCollectionId = (() => {
      if (!filterCollectionIdList) return;

      return filterCollectionIdList
        .map((id) => String(id))
        .filter((id) => !forbidCollectionIdList.includes(id));
    })();
    const filterCollectionIdSql = formatFilterCollectionId
      ? `AND collection_id IN (${formatFilterCollectionId.map((id) => `'${id}'`).join(',')})`
      : '';
    // Empty data
    if (formatFilterCollectionId && formatFilterCollectionId.length === 0) {
      return { results: [] };
    }

    // hnsw.iterative_scan / hnsw.max_scan_tuples assume pgvector >= 0.8.0
    const results: any = await PgClient.query(
      `BEGIN;
        SET LOCAL hnsw.ef_search = ${global.systemEnv?.hnswEfSearch || 100};
        SET LOCAL hnsw.max_scan_tuples = ${global.systemEnv?.hnswMaxScanTuples || 100000};
        SET LOCAL hnsw.iterative_scan = relaxed_order;
        WITH relaxed_results AS MATERIALIZED (
          SELECT id, collection_id, vector <#> '[${vector}]' AS score
          FROM ${DatasetVectorTableName}
          WHERE dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
            ${filterCollectionIdSql}
            ${forbidCollectionSql}
          ORDER BY score LIMIT ${limit}
        ) SELECT id, collection_id, score FROM relaxed_results ORDER BY score;
      COMMIT;`
    );
    // Multi-statement query: the driver returns one result per statement,
    // so the SELECT's rows sit second-to-last, just before the COMMIT result.
    const rows = results?.[results.length - 2]?.rows as PgSearchRawType[];

    if (!Array.isArray(rows)) {
      return {
        results: []
      };
    }

    return {
      results: rows.map((item) => ({
        id: String(item.id),
        collectionId: item.collection_id,
        // <#> yields the negative inner product; negate to get similarity
        score: item.score * -1
      }))
    };
  };
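
  // Score semantics: with vector_ip_ops, `a <#> b` evaluates to -(a · b),
  // negated because Postgres index scans only support ascending order.
  // For unit-normalized embeddings the negated score equals cosine similarity,
  // so the value returned above falls in [-1, 1] with higher meaning closer.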

  getVectorDataByTime = async (start: Date, end: Date) => {
    const { rows } = await PgClient.query<{
      id: string;
      team_id: string;
      dataset_id: string;
    }>(`SELECT id, team_id, dataset_id
      FROM ${DatasetVectorTableName}
      WHERE createtime BETWEEN '${dayjs(start).format('YYYY-MM-DD HH:mm:ss')}' AND '${dayjs(
        end
      ).format('YYYY-MM-DD HH:mm:ss')}';
    `);

    return rows.map((item) => ({
      id: String(item.id),
      teamId: item.team_id,
      datasetId: item.dataset_id
    }));
  };

  getVectorCount = async (props: {
    teamId?: string;
    datasetId?: string;
    collectionId?: string;
  }) => {
    const { teamId, datasetId, collectionId } = props;

    // Build where conditions dynamically
    const whereConditions: any[] = [];

    if (teamId) {
      whereConditions.push(['team_id', String(teamId)]);
    }

    if (datasetId) {
      if (whereConditions.length > 0) whereConditions.push('and');
      whereConditions.push(['dataset_id', String(datasetId)]);
    }

    if (collectionId) {
      if (whereConditions.length > 0) whereConditions.push('and');
      whereConditions.push(['collection_id', String(collectionId)]);
    }

    // If no conditions provided, count all
    const total = await PgClient.count(DatasetVectorTableName, {
      where: whereConditions.length > 0 ? whereConditions : undefined
    });

    return total;
  };
}
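
/* Usage sketch (illustrative only, not part of the module). Assumes a
 * configured Postgres connection behind `connectPg` and a 1536-dimension
 * embedding `embedding: number[]` produced elsewhere; field names follow the
 * controller's prop types.
 *
 *   const ctrl = new PgVectorCtrl();
 *   await ctrl.init();
 *   const { insertIds } = await ctrl.insert({
 *     teamId,
 *     datasetId,
 *     collectionId,
 *     vectors: [embedding]
 *   });
 *   const { results } = await ctrl.embRecall({
 *     teamId,
 *     datasetIds: [datasetId],
 *     vector: embedding,
 *     limit: 10,
 *     forbidCollectionIdList: []
 *   });
 */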