fix: model test channel id; fix: quote reader (#4123)

* fix: model test channel id

* fix: quote reader
This commit is contained in:
Archer
2025-03-12 16:51:00 +08:00
committed by archer
parent d1ce3e2936
commit d052d0de53
11 changed files with 151 additions and 65 deletions

View File

@@ -1,7 +1,7 @@
 import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
 import { NextAPI } from '@/service/middleware/entry';
 import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
-import { findModelFromAlldata, getReRankModel } from '@fastgpt/service/core/ai/model';
+import { findModelFromAlldata } from '@fastgpt/service/core/ai/model';
 import {
   EmbeddingModelItemType,
   LLMModelItemType,
@@ -9,7 +9,7 @@ import {
   STTModelType,
   TTSModelType
 } from '@fastgpt/global/core/ai/model.d';
-import { getAIApi } from '@fastgpt/service/core/ai/config';
+import { createChatCompletion, getAIApi } from '@fastgpt/service/core/ai/config';
 import { addLog } from '@fastgpt/service/common/system/log';
 import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
 import { reRankRecall } from '@fastgpt/service/core/ai/rerank';
@@ -18,7 +18,7 @@ import { isProduction } from '@fastgpt/global/common/system/constants';
import * as fs from 'fs';
import { llmCompletionsBodyFormat } from '@fastgpt/service/core/ai/utils';
-export type testQuery = { model: string; channelId?: string };
+export type testQuery = { model: string; channelId?: number };
 export type testBody = {};
@@ -37,7 +37,7 @@ async function handler(
   const headers: Record<string, string> = channelId
     ? {
-        'Aiproxy-Channel': channelId
+        'Aiproxy-Channel': String(channelId)
       }
     : {};
@@ -75,26 +75,33 @@ const testLLMModel = async (model: LLMModelItemType, headers: Record<string, str
     },
     model
   );
-  const response = await ai.chat.completions.create(requestBody, {
-    ...(model.requestUrl ? { path: model.requestUrl } : {}),
-    headers: model.requestAuth
-      ? {
-          Authorization: `Bearer ${model.requestAuth}`,
-          ...headers
-        }
-      : headers
+  const { response, isStreamResponse } = await createChatCompletion({
+    body: requestBody,
+    options: {
+      headers: {
+        Accept: 'application/json, text/plain, */*',
+        ...headers
+      }
+    }
   });
-  for await (const part of response) {
-    const content = part.choices?.[0]?.delta?.content || '';
-    // @ts-ignore
-    const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || '';
-    if (content || reasoningContent) {
-      response?.controller?.abort();
-      return;
+  if (isStreamResponse) {
+    for await (const part of response) {
+      const content = part.choices?.[0]?.delta?.content || '';
+      // @ts-ignore
+      const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || '';
+      if (content || reasoningContent) {
+        response?.controller?.abort();
+        return;
+      }
+    }
+  } else {
+    addLog.info(`Model not stream response`);
+    const answer = response.choices?.[0]?.message?.content || '';
+    if (answer) {
+      return answer;
     }
   }
-  addLog.info(`Model not stream response`);
   return Promise.reject('Model response empty');
 };

View File

@@ -52,7 +52,7 @@ async function handler(
   const limitedPageSize = Math.min(pageSize, 30);
-  const [{ chat }, { chatItem }] = await Promise.all([
+  const [{ chat, showRawSource }, { chatItem }] = await Promise.all([
     authChatCrud({
       req,
       authToken: true,
@@ -65,6 +65,9 @@ async function handler(
     }),
     authCollectionInChat({ appId, chatId, chatItemDataId, collectionIds: [collectionId] })
   ]);
+  if (!showRawSource) {
+    return Promise.reject(ChatErrEnum.unAuthChat);
+  }
   if (!chat) return Promise.reject(ChatErrEnum.unAuthChat);
   const baseMatch: BaseMatchType = {

View File

@@ -245,16 +245,15 @@ const OutLink = (props: Props) => {
         desc={props.appIntro || data?.app?.intro}
         icon={props.appAvatar || data?.app?.avatar}
       />
-      <Flex h={'full'}>
+      <Flex
+        h={'full'}
+        gap={4}
+        {...(isEmbed
+          ? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
+          : { p: [0, 5] })}
+      >
         {(!quoteData || isPc) && (
-          <PageContainer
-            flex={'1 0 0'}
-            w={0}
-            isLoading={loading}
-            {...(isEmbed
-              ? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
-              : { p: [0, 5] })}
-          >
+          <PageContainer flex={'1 0 0'} w={0} isLoading={loading} p={'0 !important'}>
             <Flex h={'100%'} flexDirection={['column', 'row']}>
               {RenderHistoryList}
@@ -302,7 +301,7 @@ const OutLink = (props: Props) => {
         )}
         {quoteData && (
-          <PageContainer flex={'1 0 0'} w={0} maxW={'560px'}>
+          <PageContainer flex={'1 0 0'} w={0} maxW={'560px'} p={'0 !important'}>
             <ChatQuoteList
               rawSearch={quoteData.rawSearch}
               metadata={quoteData.metadata}