add model test log (#4272)
* sync collection
* remove lock
* add model test log
* update ui
* update log
* fix: channel test
* preview chunk ui
* test model ux
* test model log
* perf: dataset selector
* fix: system plugin auth
* update nextjs
@@ -35,11 +35,17 @@ async function handler(
   if (!modelData) return Promise.reject('Model not found');
 
+  if (channelId) {
+    delete modelData.requestUrl;
+    delete modelData.requestAuth;
+  }
+
   const headers: Record<string, string> = channelId
     ? {
         'Aiproxy-Channel': String(channelId)
       }
     : {};
 
+  addLog.debug(`Test model`, modelData);
 
   if (modelData.type === 'llm') {
     return testLLMModel(modelData, headers);
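For context on the channel-test fix: when a channelId is supplied, the model's stored endpoint and credentials are dropped so the request is forced through the proxy, which selects the upstream channel from the Aiproxy-Channel header. A minimal sketch of that routine, with an illustrative config type (not the repository's exact model item type):

// Sketch: force a test request through a specific aiproxy channel.
// The config shape and field names here are illustrative assumptions.
type TestModelConfig = {
  model: string;
  requestUrl?: string;
  requestAuth?: string;
};

const buildTestTarget = (modelData: TestModelConfig, channelId?: number) => {
  if (channelId) {
    // A direct URL/auth would bypass the proxy, so drop them and
    // let the Aiproxy-Channel header pick the upstream channel.
    delete modelData.requestUrl;
    delete modelData.requestAuth;
  }

  const headers: Record<string, string> = channelId
    ? { 'Aiproxy-Channel': String(channelId) }
    : {};

  return { modelData, headers };
};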
@@ -63,10 +69,6 @@ async function handler(
 export default NextAPI(handler);
 
 const testLLMModel = async (model: LLMModelItemType, headers: Record<string, string>) => {
-  const ai = getAIApi({
-    timeout: 10000
-  });
-
   const requestBody = llmCompletionsBodyFormat(
     {
       model: model.model,
@@ -75,6 +77,7 @@ const testLLMModel = async (model: LLMModelItemType, headers: Record<string, str
     },
     model
   );
 
   const { response, isStreamResponse } = await createChatCompletion({
     body: requestBody,
     options: {
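The test request goes through createChatCompletion, which may hand back either a plain completion or a stream depending on the model settings; the isStreamResponse flag tells the caller which one it got. A hedged sketch of draining both shapes, using the OpenAI SDK types (the surrounding helper names are assumptions, not this file's exact code):

// Sketch: read either response shape until some content arrives.
// Chunk/completion types come from the OpenAI SDK.
import type OpenAI from 'openai';
import type { Stream } from 'openai/streaming';

const readAnswer = async (
  response: OpenAI.ChatCompletion | Stream<OpenAI.ChatCompletionChunk>,
  isStreamResponse: boolean
): Promise<string> => {
  if (isStreamResponse) {
    let text = '';
    // Streamed responses arrive as delta chunks; concatenate them.
    for await (const chunk of response as Stream<OpenAI.ChatCompletionChunk>) {
      text += chunk.choices?.[0]?.delta?.content ?? '';
    }
    return text;
  }
  // Non-streamed responses carry the full message in one object.
  return (response as OpenAI.ChatCompletion).choices?.[0]?.message?.content ?? '';
};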
@@ -144,7 +147,7 @@ const testTTSModel = async (model: TTSModelType, headers: Record<string, string>
 const testSTTModel = async (model: STTModelType, headers: Record<string, string>) => {
   const path = isProduction ? '/app/data/test.mp3' : 'data/test.mp3';
   const { text } = await aiTranscriptions({
-    model: model.model,
+    model,
     fileStream: fs.createReadStream(path),
     headers
   });
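Worth noting: aiTranscriptions now receives the whole model config rather than just the name string, and the same signature change is applied again in the voice-input handler in the final hunk below. A plausible reading, sketched here with illustrative types (the field names are assumptions, not the repository's actual STTModelType), is that the helper can then resolve per-model endpoint and auth settings itself:

// Sketch: the full config travels with the call, so the helper can
// honor a custom endpoint/auth. All names here are illustrative.
type STTModelConfig = {
  model: string;
  requestUrl?: string;
  requestAuth?: string;
};

const transcribe = async (model: STTModelConfig) => {
  // Fall back to a default endpoint when the model has no override.
  const url = model.requestUrl ?? 'https://api.openai.com/v1/audio/transcriptions';
  const auth = model.requestAuth ?? process.env.OPENAI_API_KEY;
  // ...POST the audio stream to `url`, authenticated with `auth`...
  return { url, auth };
};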
@@ -43,9 +43,12 @@ export type PostPreviewFilesChunksProps = {
   externalFileId?: string;
 };
 export type PreviewChunksResponse = {
-  q: string;
-  a: string;
-}[];
+  chunks: {
+    q: string;
+    a: string;
+  }[];
+  total: number;
+};
 
 async function handler(
   req: ApiRequestProps<PostPreviewFilesChunksProps>
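For API consumers this is a breaking change: the endpoint previously returned a bare { q, a } array and now wraps it as { chunks, total }. A sketch of reading the new shape from the client (the request path and the import are assumptions for illustration):

// Sketch: consuming the new PreviewChunksResponse shape.
import type { PreviewChunksResponse } from './api'; // illustrative path

const previewChunks = async (body: unknown): Promise<void> => {
  // The route below is an assumed path, shown only for illustration.
  const res = await fetch('/api/core/dataset/file/getPreviewChunks', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body)
  });
  const { chunks, total } = (await res.json()) as PreviewChunksResponse;

  // chunks holds at most the first 10 entries; total is the full count.
  console.log(`showing ${chunks.length} of ${total} chunks`);
  chunks.forEach(({ q, a }) => console.log(q, a));
};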
@@ -123,13 +126,17 @@ async function handler(
     customPdfParse
   });
 
-  return rawText2Chunks({
+  const chunks = rawText2Chunks({
     rawText,
     chunkSize,
     maxSize: getLLMMaxChunkSize(getLLMModel(dataset.agentModel)),
     overlapRatio,
     customReg: chunkSplitter ? [chunkSplitter] : [],
     isQAImport: isQAImport
-  }).slice(0, 10);
+  });
+  return {
+    chunks: chunks.slice(0, 10),
+    total: chunks.length
+  };
 }
 export default NextAPI(handler);
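The motivation for splitting the call: slicing inside the return discarded the real chunk count, so the UI could not say how many chunks the file would produce; computing the full list first lets the handler report total while still previewing only the first ten. The pattern in isolation (helper name assumed):

// Sketch: preview the head of a list without losing the true count.
const previewWithTotal = <T>(items: T[], previewCount = 10) => ({
  chunks: items.slice(0, previewCount), // what the UI renders
  total: items.length // what the UI reports
});

// e.g. previewWithTotal(rawText2Chunks(...)) -> first 10 chunks plus the full count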
@@ -66,7 +66,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   // }
 
   const result = await aiTranscriptions({
-    model: getDefaultSTTModel().model,
+    model: getDefaultSTTModel(),
     fileStream: fs.createReadStream(file.path)
   });