fix: plugin select files and ai response check (#3104)

* fix: plugin select files and ai response check

* perf: text editor selector; tool call tip; remove invalid image url

* perf: select file

* perf: drop files
Archer
2024-11-09 14:43:15 +08:00
committed by archer
parent 8bd0749afe
commit 72836402be
27 changed files with 162 additions and 80 deletions

View File

@@ -118,34 +118,47 @@ export const loadRequestMessages = async ({
return item.image_url.url;
})();
// If imgUrl is a local path, load image from local, and set url to base64
if (imgUrl.startsWith('/')) {
addLog.debug('Load image from local server', {
baseUrl: serverRequestBaseUrl,
requestUrl: imgUrl
});
const response = await axios.get(imgUrl, {
baseURL: serverRequestBaseUrl,
responseType: 'arraybuffer',
proxy: false
});
const base64 = Buffer.from(response.data, 'binary').toString('base64');
const imageType =
getFileContentTypeFromHeader(response.headers['content-type']) ||
guessBase64ImageType(base64);
try {
// If imgUrl is a local path, load image from local, and set url to base64
if (imgUrl.startsWith('/')) {
addLog.debug('Load image from local server', {
baseUrl: serverRequestBaseUrl,
requestUrl: imgUrl
});
const response = await axios.get(imgUrl, {
baseURL: serverRequestBaseUrl,
responseType: 'arraybuffer',
proxy: false
});
const base64 = Buffer.from(response.data, 'binary').toString('base64');
const imageType =
getFileContentTypeFromHeader(response.headers['content-type']) ||
guessBase64ImageType(base64);
return {
...item,
image_url: {
...item.image_url,
url: `data:${imageType};base64,${base64}`
}
};
return {
...item,
image_url: {
...item.image_url,
url: `data:${imageType};base64,${base64}`
}
};
}
// Check whether this image is accessible; if not, filter it out
const response = await axios.head(imgUrl, {
timeout: 10000
});
if (response.status < 200 || response.status >= 400) {
addLog.info(`Filter invalid image: ${imgUrl}`);
return;
}
} catch (error) {
return;
}
}
return item;
})
);
).then((res) => res.filter(Boolean) as ChatCompletionContentPart[]);
};
// Split question text and image
const parseStringWithImages = (input: string): ChatCompletionContentPart[] => {
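The change above wraps the image handling in a try/catch: local paths are still inlined as base64, remote urls are now probed with a HEAD request, and anything unreachable resolves to undefined so the trailing .filter(Boolean) drops it. A minimal sketch of that reachability check, assuming only axios (the helper name checkImageReachable is illustrative, not part of the diff):

import axios from 'axios';

const checkImageReachable = async (url: string): Promise<boolean> => {
  try {
    // HEAD avoids downloading the image body; the 10s cap mirrors the diff's timeout
    const response = await axios.head(url, { timeout: 10000 });
    return response.status >= 200 && response.status < 400;
  } catch (error) {
    // axios rejects non-2xx responses by default, so DNS failures, timeouts
    // and HTTP errors all land here and mark the image as invalid
    return false;
  }
};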

View File

@@ -28,6 +28,7 @@ import { computedMaxToken, llmCompletionsBodyFormat } from '../../../../ai/utils
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { i18nT } from '../../../../../../web/i18n/utils';
type FunctionRunResponseType = {
toolRunResponse: DispatchFlowResponse;
@@ -549,7 +550,7 @@ async function streamResponse({
}
if (!textAnswer && functionCalls.length === 0) {
return Promise.reject('LLM api response empty');
return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}
return { answer: textAnswer, functionCalls };
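This hunk, and the matching ones in the other tool-call variants below, swap a hard-coded rejection string for an i18n key. A sketch of the pattern, assuming i18nT resolves a namespace:key string (here the chat namespace) into a translatable message:

import { i18nT } from '../../web/i18n/utils'; // the diff imports this via deeper relative paths

if (!textAnswer && functionCalls.length === 0) {
  // Reject with a locale-aware key instead of a fixed English sentence
  return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}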

View File

@@ -96,7 +96,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
const globalFiles = chatValue2RuntimePrompt(query).files;
const { documentQuoteText, userFiles } = await getMultiInput({
histories,
histories: chatHistories,
requestOrigin,
maxFiles: chatConfig?.fileSelectConfig?.maxFiles || 20,
teamId,
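The one-line fix here passes the resolved chatHistories rather than the raw histories input. Judging from the chat node below, chatHistories comes from getHistories(history, histories), which applies the node's history setting before use; a sketch of the intended wiring (parameter list abbreviated):

const chatHistories = getHistories(history, histories);

const { documentQuoteText, userFiles } = await getMultiInput({
  histories: chatHistories, // the resolved list, not the raw dispatch input
  requestOrigin,
  maxFiles: chatConfig?.fileSelectConfig?.maxFiles || 20,
  teamId
});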

View File

@@ -29,6 +29,7 @@ import { WorkflowResponseType } from '../../type';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { i18nT } from '../../../../../../web/i18n/utils';
type FunctionCallCompletion = {
id: string;
@@ -537,7 +538,7 @@ async function streamResponse({
}
if (!textAnswer) {
return Promise.reject('LLM api response empty');
return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}
return { answer: textAnswer.trim() };
}

View File

@@ -28,6 +28,7 @@ import { addLog } from '../../../../../common/system/log';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { i18nT } from '../../../../../../web/i18n/utils';
type ToolRunResponseType = {
toolRunResponse: DispatchFlowResponse;
@@ -656,7 +657,7 @@ async function streamResponse({
}
if (!textAnswer && toolCalls.length === 0) {
return Promise.reject('LLM api response empty');
return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}
return { answer: textAnswer, toolCalls };

View File

@@ -5,11 +5,7 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
import { getAIApi } from '../../../ai/config';
import type {
ChatCompletion,
ChatCompletionMessageParam,
StreamChatType
} from '@fastgpt/global/core/ai/type.d';
import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '../../../../common/api/requestPlusApi';
@@ -48,6 +44,7 @@ import { AiChatQuoteRoleType } from '@fastgpt/global/core/workflow/template/syst
import { getErrText } from '@fastgpt/global/common/error/utils';
import { getFileContentFromLinks, getHistoryFileLinks } from '../tools/readFiles';
import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';
import { i18nT } from '../../../../../web/i18n/utils';
export type ChatProps = ModuleDispatchProps<
AIChatNodeProps & {
@@ -93,9 +90,6 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
} = props;
const { files: inputFiles } = chatValue2RuntimePrompt(query); // Chat box input files
if (!userChatInput && inputFiles.length === 0) {
return Promise.reject('Question is empty');
}
stream = stream && isResponseAnswerText;
const chatHistories = getHistories(history, histories);
@@ -112,7 +106,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
quoteTemplate
}),
getMultiInput({
histories,
histories: chatHistories,
inputFiles,
fileLinks,
stringQuoteText,
@@ -122,6 +116,10 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
})
]);
if (!userChatInput && !documentQuoteText && userFiles.length === 0) {
return Promise.reject(i18nT('chat:AI_input_is_empty'));
}
const [{ filterMessages }] = await Promise.all([
getChatMessages({
model: modelConstantsData,
@@ -198,7 +196,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
});
if (!answer) {
throw new Error('LLM model response empty');
return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}
return {
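Taken together, this file's hunks move the empty-input guard from before to after getMultiInput, so a message that carries only files or a document quote is no longer rejected as empty. Roughly, under the names used in the diff:

// Old guard, ran before files were resolved:
// if (!userChatInput && inputFiles.length === 0) return Promise.reject('Question is empty');

// New guard, runs once files and quotes are known:
if (!userChatInput && !documentQuoteText && userFiles.length === 0) {
  return Promise.reject(i18nT('chat:AI_input_is_empty'));
}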

View File

@@ -1,17 +1,21 @@
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
import type {
DispatchNodeResultType,
ModuleDispatchProps
} from '@fastgpt/global/core/workflow/runtime/type';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { datasetSearchResultConcat } from '@fastgpt/global/core/dataset/search/utils';
import { filterSearchResultsByMaxChars } from '../../utils';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
type DatasetConcatProps = ModuleDispatchProps<
{
[NodeInputKeyEnum.datasetMaxTokens]: number;
} & { [key: string]: SearchDataResponseItemType[] }
>;
type DatasetConcatResponse = {
type DatasetConcatResponse = DispatchNodeResultType<{
[NodeOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
};
}>;
export async function dispatchDatasetConcat(
props: DatasetConcatProps
@@ -30,6 +34,12 @@ export async function dispatchDatasetConcat(
);
return {
[NodeOutputKeyEnum.datasetQuoteQA]: await filterSearchResultsByMaxChars(rrfConcatResults, limit)
[NodeOutputKeyEnum.datasetQuoteQA]: await filterSearchResultsByMaxChars(
rrfConcatResults,
limit
),
[DispatchNodeResponseKeyEnum.nodeResponse]: {
concatLength: rrfConcatResults.length
}
};
}
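Retyping the result as DispatchNodeResultType lets the concat node attach a nodeResponse entry alongside its output; a sketch of the returned shape, assuming nodeResponse is what surfaces in the workflow run detail:

return {
  [NodeOutputKeyEnum.datasetQuoteQA]: await filterSearchResultsByMaxChars(rrfConcatResults, limit),
  [DispatchNodeResponseKeyEnum.nodeResponse]: {
    // how many quotes the RRF concat produced before max-token filtering
    concatLength: rrfConcatResults.length
  }
};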

View File

@@ -16,6 +16,7 @@ import { datasetSearchQueryExtension } from '../../../dataset/search/utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { checkTeamReRankPermission } from '../../../../support/permission/teamLimit';
import { MongoDataset } from '../../../dataset/schema';
import { i18nT } from '../../../../../web/i18n/utils';
type DatasetSearchProps = ModuleDispatchProps<{
[NodeInputKeyEnum.datasetSelectList]: SelectedDatasetType;
@@ -56,15 +57,15 @@ export async function dispatchDatasetSearch(
} = props as DatasetSearchProps;
if (!Array.isArray(datasets)) {
return Promise.reject('Quote type error');
return Promise.reject(i18nT('chat:dataset_quote_type error'));
}
if (datasets.length === 0) {
return Promise.reject('core.chat.error.Select dataset empty');
return Promise.reject(i18nT('common:core.chat.error.Select dataset empty'));
}
if (!userChatInput) {
return Promise.reject('core.chat.error.User input empty');
return Promise.reject(i18nT('common:core.chat.error.User input empty'));
}
// query extension

View File

@@ -112,15 +112,24 @@ export const getFileContentFromLinks = async ({
teamId: string;
}) => {
const parseUrlList = urls
// Remove invalid urls
.filter((url) => {
if (typeof url !== 'string') return false;
// Check relative paths
const validPrefixList = ['/', 'http', 'ws'];
if (validPrefixList.some((prefix) => url.startsWith(prefix))) {
return true;
}
return false;
})
// Just get the document type file
.filter((url) => parseUrlToFileType(url)?.type === 'file')
.map((url) => {
try {
// Check whether the url is a system upload file
if (url.startsWith('/') || (requestOrigin && url.startsWith(requestOrigin))) {
// Parse the url to get the filename; keep only documents that can be parsed
if (parseUrlToFileType(url)?.type !== 'file') {
return '';
}
// Remove the origin (make intranet requests directly)
if (requestOrigin && url.startsWith(requestOrigin)) {
url = url.replace(requestOrigin, '');
@@ -152,7 +161,7 @@ export const getFileContentFromLinks = async ({
}
try {
// Get file buffer
// Get file buffer data
const response = await axios.get(url, {
baseURL: serverRequestBaseUrl,
responseType: 'arraybuffer'
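The url pre-filter added at the top of getFileContentFromLinks reads naturally as a standalone helper; a sketch assuming parseUrlToFileType from @fastgpt/global/common/file/tools (the name filterParseableUrls is illustrative):

import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';

const filterParseableUrls = (urls: unknown[]): string[] =>
  urls
    // Drop non-strings and anything that is not a relative path or an http/ws url
    .filter(
      (url): url is string =>
        typeof url === 'string' && ['/', 'http', 'ws'].some((prefix) => url.startsWith(prefix))
    )
    // Keep only links whose filename parses to a readable document type
    .filter((url) => parseUrlToFileType(url)?.type === 'file');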