perf: logger (#186)

* feat: finish response

* perf: logger

* docs

* perf: log

* docs
Archer authored 2023-08-17 23:19:19 +08:00, committed by GitHub
parent 324e4a0e75
commit 40168c56ea
26 changed files with 500 additions and 93 deletions


@@ -17,6 +17,7 @@ import { ChatModelItemType } from '@/types/model';
 import { UserModelSchema } from '@/types/mongoSchema';
 import { textCensor } from '@/service/api/plugins';
 import { ChatCompletionRequestMessageRoleEnum } from 'openai';
+import { AppModuleItemType } from '@/types/app';
 
 export type ChatProps = {
   res: NextApiResponse;
@@ -31,6 +32,7 @@ export type ChatProps = {
   systemPrompt?: string;
   limitPrompt?: string;
   userOpenaiAccount: UserModelSchema['openaiAccount'];
+  outputs: AppModuleItemType['outputs'];
 };
 export type ChatResponse = {
   [TaskResponseKeyEnum.answerText]: string;
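
For orientation, a minimal sketch of the shape AppModuleItemType['outputs'] appears to have, inferred only from how this diff consumes it (output.key and output.targets further down); the fields of a target are assumptions, not the repo's actual definitions:

// Hypothetical types, inferred from usage in this diff only.
type ModuleOutputTarget = {
  moduleId: string; // assumed: id of the downstream module
  key: string; // assumed: input key on that module
};
type ModuleOutputItem = {
  key: string; // e.g. TaskResponseKeyEnum.answerText
  targets: ModuleOutputTarget[]; // downstream modules wired to this output
};
// AppModuleItemType['outputs'] would then be roughly ModuleOutputItem[].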
@@ -52,8 +54,12 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
     userChatInput,
     systemPrompt = '',
     limitPrompt = '',
-    userOpenaiAccount
+    userOpenaiAccount,
+    outputs
   } = props as ChatProps;
+  if (!userChatInput) {
+    return Promise.reject('Question is empty');
+  }
 
   // temperature adapt
   const modelConstantsData = getChatModel(model);
@@ -142,6 +148,8 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
     messages: completeMessages
   });
 
+  targetResponse({ res, detail, outputs });
+
   return {
     answerText: answer,
     totalTokens,
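
The call above runs once the completion has finished, so the separator is emitted before the dispatcher returns. The helpers targetResponse leans on, sseResponse and textAdaptGptResponse, are not part of this diff; a hedged sketch of what they plausibly do, with every shape assumed rather than taken from the repo:

import type { NextApiResponse } from 'next';

// Sketches only: the real helpers live elsewhere in the codebase and may differ.
function sseResponse({ res, event, data }: { res: NextApiResponse; event?: string; data: string }) {
  // Standard server-sent-events framing: an optional event name, then a data line.
  if (event) res.write(`event: ${event}\n`);
  res.write(`data: ${data}\n\n`);
}

function textAdaptGptResponse({ text }: { text: string }) {
  // Wraps plain text in an OpenAI-style streaming delta chunk (assumed shape).
  return JSON.stringify({
    choices: [{ delta: { content: text }, index: 0, finish_reason: null }]
  });
}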
@@ -304,6 +312,28 @@ function getMaxTokens({
   };
 }
 
+function targetResponse({
+  res,
+  outputs,
+  detail
+}: {
+  res: NextApiResponse;
+  outputs: AppModuleItemType['outputs'];
+  detail: boolean;
+}) {
+  const targets =
+    outputs.find((output) => output.key === TaskResponseKeyEnum.answerText)?.targets || [];
+
+  if (targets.length === 0) return;
+  sseResponse({
+    res,
+    event: detail ? sseResponseEventEnum.answer : undefined,
+    data: textAdaptGptResponse({
+      text: '\n'
+    })
+  });
+}
+
 async function streamResponse({
   res,
   detail,
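
Taken together: when the answerText output is wired to at least one downstream module, targetResponse pushes a single "\n" chunk over SSE, presumably so the next module's streamed output starts cleanly after this answer; with no targets it is a no-op. An illustrative call, with the outputs literal invented for the example:

targetResponse({
  res,
  detail: true,
  outputs: [
    {
      key: 'answerText', // TaskResponseKeyEnum.answerText
      targets: [{ moduleId: 'next-module', key: 'userChatInput' }] // hypothetical wiring
    }
  ]
});
// With detail: true, the chunk is sent under the sseResponseEventEnum.answer event;
// with detail: false, it goes out as a plain data frame.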