perf: 输入超长提示

This commit is contained in:
archer 2023-03-28 00:36:26 +08:00
parent 7a6d0ea650
commit c3ccbcb7f6
No known key found for this signature in database
GPG Key ID: 166CA6BF2383B2BB
5 changed files with 45 additions and 12 deletions

View File

@ -12,6 +12,7 @@ export type ModelConstantsData = {
model: `${ChatModelNameEnum}`; model: `${ChatModelNameEnum}`;
trainName: string; // 空字符串代表不能训练 trainName: string; // 空字符串代表不能训练
maxToken: number; maxToken: number;
contextMaxToken: number;
maxTemperature: number; maxTemperature: number;
trainedMaxToken: number; // 训练后最大多少tokens trainedMaxToken: number; // 训练后最大多少tokens
price: number; // 多少钱 / 1token单位: 0.00001元 price: number; // 多少钱 / 1token单位: 0.00001元
@ -24,6 +25,7 @@ export const modelList: ModelConstantsData[] = [
model: ChatModelNameEnum.GPT35, model: ChatModelNameEnum.GPT35,
trainName: '', trainName: '',
maxToken: 4000, maxToken: 4000,
contextMaxToken: 7500,
trainedMaxToken: 2000, trainedMaxToken: 2000,
maxTemperature: 2, maxTemperature: 2,
price: 3 price: 3
@ -34,6 +36,7 @@ export const modelList: ModelConstantsData[] = [
// model: ChatModelNameEnum.GPT3, // model: ChatModelNameEnum.GPT3,
// trainName: 'davinci', // trainName: 'davinci',
// maxToken: 4000, // maxToken: 4000,
// contextMaxToken: 7500,
// trainedMaxToken: 2000, // trainedMaxToken: 2000,
// maxTemperature: 2, // maxTemperature: 2,
// price: 30 // price: 30

View File

@ -44,6 +44,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { chat, userApiKey, systemKey, userId } = await authChat(chatId, authorization); const { chat, userApiKey, systemKey, userId } = await authChat(chatId, authorization);
const model: ModelSchema = chat.modelId; const model: ModelSchema = chat.modelId;
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型异常,请用 chatgpt 模型');
}
// 读取对话内容 // 读取对话内容
const prompts = [...chat.content, prompt]; const prompts = [...chat.content, prompt];
@ -57,7 +61,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
} }
// 控制在 tokens 数量内,防止超出 // 控制在 tokens 数量内,防止超出
const filterPrompts = openaiChatFilter(prompts, 7500); const filterPrompts = openaiChatFilter(prompts, modelConstantsData.contextMaxToken);
// 格式化文本内容成 chatgpt 格式 // 格式化文本内容成 chatgpt 格式
const map = { const map = {
@ -73,10 +77,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
); );
// console.log(formatPrompts); // console.log(formatPrompts);
// 计算温度 // 计算温度
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型异常');
}
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10); const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
// 获取 chatAPI // 获取 chatAPI

View File

@ -9,13 +9,31 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
if (!chatId || !index) { if (!chatId || !index) {
throw new Error('缺少参数'); throw new Error('缺少参数');
} }
console.log(index);
await connectToDatabase(); await connectToDatabase();
const chatRecord = await Chat.findById(chatId);
if (!chatRecord) {
throw new Error('找不到对话');
}
// 重新计算 index,跳过已经被删除的内容
let unDeleteIndex = +index;
let deletedIndex = 0;
for (deletedIndex = 0; deletedIndex < chatRecord.content.length; deletedIndex++) {
if (!chatRecord.content[deletedIndex].deleted) {
unDeleteIndex--;
if (unDeleteIndex < 0) {
break;
}
}
}
// 删除最后一条数据库记录, 也就是预发送的那一条 // 删除最后一条数据库记录, 也就是预发送的那一条
await Chat.findByIdAndUpdate(chatId, { await Chat.findByIdAndUpdate(chatId, {
$set: { $set: {
[`content.${index}.deleted`]: true, [`content.${deletedIndex}.deleted`]: true,
updateTime: Date.now() updateTime: Date.now()
} }
}); });

View File

@ -62,7 +62,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
// 计算温度 // 计算温度
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName); const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) { if (!modelConstantsData) {
throw new Error('模型异常'); throw new Error('模型异常,请用 chatgpt 模型');
} }
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10); const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);

View File

@ -37,6 +37,7 @@ import SlideBar from './components/SlideBar';
import Empty from './components/Empty'; import Empty from './components/Empty';
import Icon from '@/components/Icon'; import Icon from '@/components/Icon';
import { encode } from 'gpt-token-utils'; import { encode } from 'gpt-token-utils';
import { modelList } from '@/constants/model';
const Markdown = dynamic(() => import('@/components/Markdown')); const Markdown = dynamic(() => import('@/components/Markdown'));
@ -200,6 +201,18 @@ const Chat = ({ chatId }: { chatId: string }) => {
return; return;
} }
// 长度校验
const tokens = encode(val).length;
const model = modelList.find((item) => item.model === chatData.modelName);
if (model && tokens >= model.maxToken) {
toast({
title: '单次输入超出 4000 tokens',
status: 'warning'
});
return;
}
const newChatList: ChatSiteItemType[] = [ const newChatList: ChatSiteItemType[] = [
...chatData.history, ...chatData.history,
{ {
@ -252,15 +265,14 @@ const Chat = ({ chatId }: { chatId: string }) => {
} }
}, [ }, [
inputVal, inputVal,
chatData?.modelId, chatData,
chatData.history,
isChatting, isChatting,
resetInputVal, resetInputVal,
scrollToBottom, scrollToBottom,
toast,
gptChatPrompt, gptChatPrompt,
pushChatHistory, pushChatHistory,
chatId, chatId
toast
]); ]);
// 删除一句话 // 删除一句话