perf: token params

This commit is contained in:
archer
2023-05-12 23:24:25 +08:00
parent 651eb1bf6b
commit 9a0c92629b
3 changed files with 9 additions and 4 deletions

View File

@@ -73,7 +73,7 @@ export const chatResponse = async ({
const filterMessages = ChatContextFilter({
model,
prompts: messages,
-      maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.9)
+      maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.85)
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages });
@@ -90,7 +90,7 @@ export const chatResponse = async ({
stop: ['.!?。']
},
{
-      timeout: stream ? 40000 : 240000,
+      timeout: stream ? 60000 : 240000,
responseType: stream ? 'stream' : 'json',
...axiosConfig()
}