New file upload (#3058)
* feat: toolNode aiNode readFileNode adapt new version
* update docker-compose
* update tip
* feat: adapt new file version
* perf: file input
* fix: ts
@@ -109,7 +109,7 @@ export const loadRequestMessages = async ({
   }
   return Promise.all(
     messages.map(async (item) => {
-      if (item.type === 'image_url') {
+      if (item.type === 'image_url' && process.env.MULTIPLE_DATA_TO_BASE64 === 'true') {
         // Remove url origin
         const imgUrl = (() => {
           if (origin && item.image_url.url.startsWith(origin)) {
@@ -149,7 +149,7 @@ export const loadRequestMessages = async ({
   };
   // Split question text and image
   const parseStringWithImages = (input: string): ChatCompletionContentPart[] => {
-    if (!useVision) {
+    if (!useVision || input.length > 500) {
       return [{ type: 'text', text: input || '' }];
     }

@@ -170,8 +170,8 @@ export const loadRequestMessages = async ({
       });
     });

-    // Too many images or too long text, return text
-    if (httpsImages.length > 4 || input.length > 1000) {
+    // Too many images return text
+    if (httpsImages.length > 4) {
       return [{ type: 'text', text: input || '' }];
     }

@@ -179,7 +179,7 @@ export const loadRequestMessages = async ({
     result.push({ type: 'text', text: input });
     return result;
   };
-  // Parse user content(text and img)
+  // Parse user content(text and img) Store history => api messages
   const parseUserContent = async (content: string | ChatCompletionContentPart[]) => {
     if (typeof content === 'string') {
       return loadImageToBase64(parseStringWithImages(content));
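In short, `loadRequestMessages` now inlines images to base64 only when the `MULTIPLE_DATA_TO_BASE64` env flag is set, and image extraction from a text chunk stops once the text exceeds 500 characters or more than 4 images are found. A minimal sketch of the new gating, using a simplified content-part type and a hypothetical image-URL regex (the real parser in this file may match differently):

type ContentPart =
  | { type: 'text'; text: string }
  | { type: 'image_url'; image_url: { url: string } };

// Hypothetical matcher; stands in for the file's real image-link detection.
const IMG_URL_REGEX = /https?:\/\/\S+\.(?:png|jpe?g|gif|webp)/gi;

const parseStringWithImagesSketch = (input: string, useVision: boolean): ContentPart[] => {
  // New behavior: long inputs skip image extraction entirely (vision must also be on).
  if (!useVision || input.length > 500) {
    return [{ type: 'text', text: input || '' }];
  }

  const urls = input.match(IMG_URL_REGEX) ?? [];

  // New behavior: only the image count is limited; the 1000-char text cap is gone.
  if (urls.length > 4) {
    return [{ type: 'text', text: input || '' }];
  }

  return [
    ...urls.map((url): ContentPart => ({ type: 'image_url', image_url: { url } })),
    { type: 'text', text: input }
  ];
};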
@@ -25,45 +25,16 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools';
 import { getMultiplePrompt, Prompt_Tool_Call } from './constants';
 import { filterToolResponseToPreview } from './utils';
 import { InteractiveNodeResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
+import { getFileContentFromLinks, getHistoryFileLinks } from '../../tools/readFiles';
+import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';
+import { Prompt_DocumentQuote } from '@fastgpt/global/core/ai/prompt/AIChat';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';

 type Response = DispatchNodeResultType<{
   [NodeOutputKeyEnum.answerText]: string;
   [DispatchNodeResponseKeyEnum.interactive]?: InteractiveNodeResponseType;
 }>;

-/*
-  Tool call, auth add file prompt to question。
-  Guide the LLM to call tool.
-*/
-export const toolCallMessagesAdapt = ({
-  userInput
-}: {
-  userInput: UserChatItemValueItemType[];
-}) => {
-  const files = userInput.filter((item) => item.type === 'file');
-
-  if (files.length > 0) {
-    return userInput.map((item) => {
-      if (item.type === 'text') {
-        const filesCount = files.filter((file) => file.file?.type === 'file').length;
-        const imgCount = files.filter((file) => file.file?.type === 'image').length;
-        const text = item.text?.content || '';
-
-        return {
-          ...item,
-          text: {
-            content: getMultiplePrompt({ fileCount: filesCount, imgCount, question: text })
-          }
-        };
-      }
-
-      return item;
-    });
-  }
-
-  return userInput;
-};

 export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<Response> => {
   const {
     node: { nodeId, name, isEntry },
@@ -71,11 +42,21 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
     runtimeEdges,
     histories,
     query,
-    params: { model, systemPrompt, userChatInput, history = 6 }
+    requestOrigin,
+    chatConfig,
+    runningAppInfo: { teamId },
+    params: {
+      model,
+      systemPrompt,
+      userChatInput,
+      history = 6,
+      fileUrlList: fileLinks,
+      aiChatVision
+    }
   } = props;

   const toolModel = getLLMModel(model);
+  const useVision = aiChatVision && toolModel.vision;
   const chatHistories = getHistories(history, histories);

   const toolNodeIds = filterToolNodeIdByEdges({ nodeId, edges: runtimeEdges });
@@ -109,18 +90,43 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
     }
   })();
   props.node.isEntry = false;
+  const hasReadFilesTool = toolNodes.some(
+    (item) => item.flowNodeType === FlowNodeTypeEnum.readFiles
+  );
+
+  const globalFiles = chatValue2RuntimePrompt(query).files;
+  const { documentQuoteText, userFiles } = await getMultiInput({
+    histories,
+    requestOrigin,
+    maxFiles: chatConfig?.fileSelectConfig?.maxFiles || 20,
+    teamId,
+    fileLinks,
+    inputFiles: globalFiles
+  });
+
+  const concatenateSystemPrompt = [
+    toolModel.defaultSystemChatPrompt,
+    systemPrompt,
+    documentQuoteText
+      ? replaceVariable(Prompt_DocumentQuote, {
+          quote: documentQuoteText
+        })
+      : ''
+  ]
+    .filter(Boolean)
+    .join('\n\n===---===---===\n\n');

   const messages: ChatItemType[] = (() => {
     const value: ChatItemType[] = [
-      ...getSystemPrompt_ChatItemType(toolModel.defaultSystemChatPrompt),
-      ...getSystemPrompt_ChatItemType(systemPrompt),
+      ...getSystemPrompt_ChatItemType(concatenateSystemPrompt),
+      // Add file input prompt to histories
       ...chatHistories.map((item) => {
         if (item.obj === ChatRoleEnum.Human) {
           return {
             ...item,
             value: toolCallMessagesAdapt({
-              userInput: item.value
+              userInput: item.value,
+              skip: !hasReadFilesTool
             })
           };
         }
@@ -129,9 +135,10 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
       {
         obj: ChatRoleEnum.Human,
         value: toolCallMessagesAdapt({
+          skip: !hasReadFilesTool,
           userInput: runtimePrompt2ChatsValue({
             text: userChatInput,
-            files: chatValue2RuntimePrompt(query).files
+            files: userFiles
           })
         })
       }
@@ -237,7 +244,11 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
       childTotalPoints: flatUsages.reduce((sum, item) => sum + item.totalPoints, 0),
       model: modelName,
       query: userChatInput,
-      historyPreview: getHistoryPreview(GPTMessages2Chats(completeMessages, false), 10000),
+      historyPreview: getHistoryPreview(
+        GPTMessages2Chats(completeMessages, false),
+        10000,
+        useVision
+      ),
       toolDetail: childToolResponse,
       mergeSignId: nodeId
     },
@@ -253,3 +264,88 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
     [DispatchNodeResponseKeyEnum.interactive]: toolWorkflowInteractiveResponse
   };
 };
+
+const getMultiInput = async ({
+  histories,
+  fileLinks,
+  requestOrigin,
+  maxFiles,
+  teamId,
+  inputFiles
+}: {
+  histories: ChatItemType[];
+  fileLinks?: string[];
+  requestOrigin?: string;
+  maxFiles: number;
+  teamId: string;
+  inputFiles: UserChatItemValueItemType['file'][];
+}) => {
+  // No file quote
+  if (!fileLinks) {
+    return {
+      documentQuoteText: '',
+      userFiles: inputFiles
+    };
+  }
+
+  const filesFromHistories = getHistoryFileLinks(histories);
+  const urls = [...fileLinks, ...filesFromHistories];
+
+  if (urls.length === 0) {
+    return {
+      documentQuoteText: '',
+      userFiles: []
+    };
+  }
+
+  // Get files from histories
+  const { text } = await getFileContentFromLinks({
+    // Concat fileUrlList and filesFromHistories; remove not supported files
+    urls,
+    requestOrigin,
+    maxFiles,
+    teamId
+  });
+
+  return {
+    documentQuoteText: text,
+    userFiles: fileLinks.map((url) => parseUrlToFileType(url))
+  };
+};
+
+/*
+  Tool call, auto add file prompt to question.
+  Guide the LLM to call tool.
+*/
+const toolCallMessagesAdapt = ({
+  userInput,
+  skip
+}: {
+  userInput: UserChatItemValueItemType[];
+  skip?: boolean;
+}) => {
+  if (skip) return userInput;
+
+  const files = userInput.filter((item) => item.type === 'file');
+
+  if (files.length > 0) {
+    return userInput.map((item) => {
+      if (item.type === 'text') {
+        const filesCount = files.filter((file) => file.file?.type === 'file').length;
+        const imgCount = files.filter((file) => file.file?.type === 'image').length;
+        const text = item.text?.content || '';
+
+        return {
+          ...item,
+          text: {
+            content: getMultiplePrompt({ fileCount: filesCount, imgCount, question: text })
+          }
+        };
+      }
+
+      return item;
+    });
+  }
+
+  return userInput;
+};
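The refactor also demotes `toolCallMessagesAdapt` to a private helper with a `skip` flag: the file-count prompt is injected only when a readFiles tool node is actually connected (`skip: !hasReadFilesTool`), since the prompt's whole job is to steer the LLM toward that tool. A rough sketch of the behavior with simplified types (the stand-in prompt string is an assumption; the real format comes from `getMultiplePrompt` in `./constants`):

type UserValue =
  | { type: 'text'; text: { content: string } }
  | { type: 'file'; file: { type: 'file' | 'image'; url: string } };

const adaptSketch = (userInput: UserValue[], hasReadFilesTool: boolean): UserValue[] => {
  // skip = !hasReadFilesTool: without a readFiles tool the prompt is useless,
  // so the input passes through unchanged.
  if (!hasReadFilesTool) return userInput;

  const files = userInput.filter((v) => v.type === 'file');
  if (files.length === 0) return userInput;

  return userInput.map((v) =>
    v.type === 'text'
      ? {
          ...v,
          // Stand-in for getMultiplePrompt({ fileCount, imgCount, question }).
          text: { content: `The user uploaded ${files.length} file(s). Question: ${v.text.content}` }
        }
      : v
  );
};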
@@ -21,6 +21,7 @@ export type DispatchToolModuleProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.aiChatTemperature]: number;
   [NodeInputKeyEnum.aiChatMaxToken]: number;
   [NodeInputKeyEnum.aiChatVision]?: boolean;
+  [NodeInputKeyEnum.fileUrlList]?: string[];
 }> & {
   messages: ChatCompletionMessageParam[];
   toolNodes: ToolNodeItemType[];
@@ -46,6 +46,8 @@ import { WorkflowResponseType } from '../type';
 import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
 import { AiChatQuoteRoleType } from '@fastgpt/global/core/workflow/template/system/aiChat/type';
 import { getErrText } from '@fastgpt/global/common/error/utils';
+import { getFileContentFromLinks, getHistoryFileLinks } from '../tools/readFiles';
+import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';

 export type ChatProps = ModuleDispatchProps<
   AIChatNodeProps & {
@@ -69,7 +71,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     histories,
     node: { name },
     query,
+    runningAppInfo: { teamId },
     workflowStreamResponse,
+    chatConfig,
     params: {
       model,
       temperature = 0,
@@ -83,10 +87,11 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
       quoteTemplate,
       quotePrompt,
       aiChatVision,
-      stringQuoteText
+      fileUrlList: fileLinks, // node quote file links
+      stringQuoteText //abandon
     }
   } = props;
-  const { files: inputFiles } = chatValue2RuntimePrompt(query);
+  const { files: inputFiles } = chatValue2RuntimePrompt(query); // Chat box input files

   if (!userChatInput && inputFiles.length === 0) {
     return Promise.reject('Question is empty');
@@ -100,11 +105,22 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     return Promise.reject('The chat model is undefined, you need to select a chat model.');
   }

-  const { datasetQuoteText } = await filterDatasetQuote({
-    quoteQA,
-    model: modelConstantsData,
-    quoteTemplate
-  });
+  const [{ datasetQuoteText }, { documentQuoteText, userFiles }] = await Promise.all([
+    filterDatasetQuote({
+      quoteQA,
+      model: modelConstantsData,
+      quoteTemplate
+    }),
+    getMultiInput({
+      histories,
+      inputFiles,
+      fileLinks,
+      stringQuoteText,
+      requestOrigin,
+      maxFiles: chatConfig?.fileSelectConfig?.maxFiles || 20,
+      teamId
+    })
+  ]);

   const [{ filterMessages }] = await Promise.all([
     getChatMessages({
@@ -115,9 +131,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
       aiChatQuoteRole,
       datasetQuotePrompt: quotePrompt,
       userChatInput,
-      inputFiles,
       systemPrompt,
-      stringQuoteText
+      userFiles,
+      documentQuoteText
     }),
     (() => {
       // censor model and system key
@@ -132,22 +148,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     })()
   ]);

-  // Get the request messages
-  const concatMessages = [
-    ...(modelConstantsData.defaultSystemChatPrompt
-      ? [
-          {
-            role: ChatCompletionRequestMessageRoleEnum.System,
-            content: modelConstantsData.defaultSystemChatPrompt
-          }
-        ]
-      : []),
-    ...filterMessages
-  ] as ChatCompletionMessageParam[];
-
   const [requestMessages, max_tokens] = await Promise.all([
     loadRequestMessages({
-      messages: concatMessages,
+      messages: filterMessages,
       useVision: modelConstantsData.vision && aiChatVision,
       origin: requestOrigin
     }),
@@ -242,7 +245,11 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
         tokens,
         query: `${userChatInput}`,
         maxToken: max_tokens,
-        historyPreview: getHistoryPreview(chatCompleteMessages, 10000),
+        historyPreview: getHistoryPreview(
+          chatCompleteMessages,
+          10000,
+          modelConstantsData.vision && aiChatVision
+        ),
         contextTotalLen: completeMessages.length
       },
       [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
@@ -302,7 +309,70 @@ async function filterDatasetQuote({
     datasetQuoteText
   };
 }

+async function getMultiInput({
+  histories,
+  inputFiles,
+  fileLinks,
+  stringQuoteText,
+  requestOrigin,
+  maxFiles,
+  teamId
+}: {
+  histories: ChatItemType[];
+  inputFiles: UserChatItemValueItemType['file'][];
+  fileLinks?: string[];
+  stringQuoteText?: string; // file quote
+  requestOrigin?: string;
+  maxFiles: number;
+  teamId: string;
+}) {
+  // Legacy version adaptation ====>
+  if (stringQuoteText) {
+    return {
+      documentQuoteText: stringQuoteText,
+      userFiles: inputFiles
+    };
+  }
+
+  // No file quote provided, but image recognition may still be in use
+  if (!fileLinks) {
+    return {
+      documentQuoteText: '',
+      userFiles: inputFiles
+    };
+  }
+  // Legacy version adaptation <====
+
+  // If the fileLinks param is not empty, this is the new version; do not read the global files.
+
+  // Get files from histories
+  const filesFromHistories = getHistoryFileLinks(histories);
+  const urls = [...fileLinks, ...filesFromHistories];
+
+  if (urls.length === 0) {
+    return {
+      documentQuoteText: '',
+      userFiles: []
+    };
+  }
+
+  const { text } = await getFileContentFromLinks({
+    // Concat fileUrlList and filesFromHistories; remove not supported files
+    urls,
+    requestOrigin,
+    maxFiles,
+    teamId
+  });
+
+  return {
+    documentQuoteText: text,
+    userFiles: fileLinks.map((url) => parseUrlToFileType(url))
+  };
+}

 async function getChatMessages({
+  model,
   aiChatQuoteRole,
   datasetQuotePrompt = '',
   datasetQuoteText,
@@ -310,10 +380,10 @@ async function getChatMessages({
   histories = [],
   systemPrompt,
   userChatInput,
-  inputFiles,
-  model,
-  stringQuoteText
+  userFiles,
+  documentQuoteText
 }: {
+  model: LLMModelItemType;
   // dataset quote
   aiChatQuoteRole: AiChatQuoteRoleType; // user: replace user prompt; system: replace system prompt
   datasetQuotePrompt?: string;
@@ -323,10 +393,11 @@ async function getChatMessages({
   histories: ChatItemType[];
   systemPrompt: string;
   userChatInput: string;
-  inputFiles: UserChatItemValueItemType['file'][];
-  model: LLMModelItemType;
-  stringQuoteText?: string; // file quote
-
+  userFiles: UserChatItemValueItemType['file'][];
+  documentQuoteText?: string; // document quote
 }) {
+  // Dataset prompt ====>
   // User role or prompt include question
   const quoteRole =
     aiChatQuoteRole === 'user' || datasetQuotePrompt.includes('{{question}}') ? 'user' : 'system';
@@ -337,6 +408,7 @@ async function getChatMessages({
       ? Prompt_userQuotePromptList[0].value
       : Prompt_systemQuotePromptList[0].value;

+  // Reset user input, add dataset quote to user input
   const replaceInputValue =
     useDatasetQuote && quoteRole === 'user'
       ? replaceVariable(datasetQuotePromptTemplate, {
@@ -344,31 +416,33 @@ async function getChatMessages({
           question: userChatInput
         })
       : userChatInput;
+  // Dataset prompt <====

-  const replaceSystemPrompt =
+  // Concat system prompt
+  const concatenateSystemPrompt = [
+    model.defaultSystemChatPrompt,
+    systemPrompt,
     useDatasetQuote && quoteRole === 'system'
-      ? `${systemPrompt ? systemPrompt + '\n\n------\n\n' : ''}${replaceVariable(
-          datasetQuotePromptTemplate,
-          {
-            quote: datasetQuoteText
-          }
-        )}`
-      : systemPrompt;
+      ? replaceVariable(datasetQuotePromptTemplate, {
+          quote: datasetQuoteText
+        })
+      : '',
+    documentQuoteText
+      ? replaceVariable(Prompt_DocumentQuote, {
+          quote: documentQuoteText
+        })
+      : ''
+  ]
+    .filter(Boolean)
+    .join('\n\n===---===---===\n\n');

   const messages: ChatItemType[] = [
-    ...getSystemPrompt_ChatItemType(replaceSystemPrompt),
-    ...(stringQuoteText // file quote
-      ? getSystemPrompt_ChatItemType(
-          replaceVariable(Prompt_DocumentQuote, {
-            quote: stringQuoteText
-          })
-        )
-      : []),
+    ...getSystemPrompt_ChatItemType(concatenateSystemPrompt),
     ...histories,
     {
       obj: ChatRoleEnum.Human,
       value: runtimePrompt2ChatsValue({
-        files: inputFiles,
+        files: userFiles,
         text: replaceInputValue
       })
     }
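Both the tool node and the AI chat node now build their system prompt the same way: model default prompt, user system prompt, dataset quote, and document quote are collected into one array, empty segments are dropped, and the survivors are joined with a fixed delimiter. A minimal sketch of that merge:

const concatSystemPromptSketch = (segments: (string | undefined)[]): string =>
  segments.filter(Boolean).join('\n\n===---===---===\n\n');

// Only non-empty segments survive; here the empty user prompt is dropped:
concatSystemPromptSketch(['You are a helpful assistant.', '', 'Quote:\n<document text>']);
// => two blocks separated by '\n\n===---===---===\n\n'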
@@ -17,12 +17,14 @@ import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/ty
 import { authAppByTmbId } from '../../../../support/permission/app/auth';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
 import { getAppVersionById } from '../../../app/version/controller';
+import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';

 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.userChatInput]: string;
   [NodeInputKeyEnum.history]?: ChatItemType[] | number;
+  [NodeInputKeyEnum.fileUrlList]?: string[];
   [NodeInputKeyEnum.forbidStream]?: boolean;
 }>;
 type Response = DispatchNodeResultType<{
   [NodeOutputKeyEnum.answerText]: string;
@@ -40,8 +42,24 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
     variables
   } = props;

-  const { system_forbid_stream = false, userChatInput, history, ...childrenAppVariables } = params;
-  if (!userChatInput) {
+  const {
+    system_forbid_stream = false,
+    userChatInput,
+    history,
+    fileUrlList,
+    ...childrenAppVariables
+  } = params;
+  const { files } = chatValue2RuntimePrompt(query);
+
+  const userInputFiles = (() => {
+    if (fileUrlList) {
+      return fileUrlList.map((url) => parseUrlToFileType(url));
+    }
+    // Adapt version 4.8.13 upgrade
+    return files;
+  })();
+
+  if (!userChatInput && !userInputFiles) {
     return Promise.reject('Input is empty');
   }
   if (!appId) {
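The child-app node resolves its input files in two steps: prefer the explicit `fileUrlList` param (new apps), otherwise fall back to the files embedded in the chat query (apps created before 4.8.13). A compact sketch of that precedence, with simplified types (`parseUrlToFileType` here is a stand-in for the real helper, which may return undefined for unknown urls):

type RuntimeFileLike = { type: 'file' | 'image'; url: string };

const resolveUserInputFiles = (
  fileUrlList: string[] | undefined,
  legacyQueryFiles: RuntimeFileLike[],
  parseUrl: (url: string) => RuntimeFileLike // stand-in for parseUrlToFileType
): RuntimeFileLike[] => {
  // New version: explicit links are authoritative (even an empty array).
  if (fileUrlList) return fileUrlList.map(parseUrl);
  // Pre-4.8.13: files still travel inside the chat query.
  return legacyQueryFiles;
};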
@@ -72,7 +90,6 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
   }

   const chatHistories = getHistories(history, histories);
-  const { files } = chatValue2RuntimePrompt(query);

   // Rewrite children app variables
   const systemVariables = filterSystemVariables(variables);
@@ -102,7 +119,7 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
     histories: chatHistories,
     variables: childrenRunVariables,
     query: runtimePrompt2ChatsValue({
-      files,
+      files: userInputFiles,
       text: userChatInput
     }),
     chatConfig
@@ -1,4 +1,5 @@
 import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
+import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
 import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
@@ -11,6 +12,24 @@ export const dispatchPluginInput = (props: PluginInputProps) => {
   const { params, query } = props;
   const { files } = chatValue2RuntimePrompt(query);

+  /*
+    Normalize file-type values in params:
+    * When the plugin runs standalone, this value is a special (file object) array
+    * When the plugin is invoked by another node, the param is a string[] and is left untouched
+    * Hard requirement: when calling a plugin directly via the API, avoid this special type conflict
+  */
+  for (const key in params) {
+    const val = params[key];
+    if (
+      Array.isArray(val) &&
+      val.every(
+        (item) => item.type === ChatFileTypeEnum.file || item.type === ChatFileTypeEnum.image
+      )
+    ) {
+      params[key] = val.map((item) => item.url);
+    }
+  }
+
   return {
     ...params,
     [DispatchNodeResponseKeyEnum.nodeResponse]: {},
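The loop above collapses the special file-object arrays (produced when a plugin runs standalone) into plain url lists, so downstream nodes always see `string[]`. A self-contained sketch of the same normalization, assuming the file-item shape described in the comment (the extra length guard is an addition for clarity, not in the original):

type FileItemLike = { type: 'file' | 'image'; url: string };

const normalizeFileParams = (params: Record<string, unknown>): Record<string, unknown> => {
  for (const key in params) {
    const val = params[key];
    if (
      Array.isArray(val) &&
      val.length > 0 && // guard added here; the original also maps empty arrays (harmlessly)
      val.every((item: FileItemLike) => item.type === 'file' || item.type === 'image')
    ) {
      // Collapse file objects to their urls, matching the string[] shape
      // the plugin receives when invoked from another node.
      params[key] = val.map((item: FileItemLike) => item.url);
    }
  }
  return params;
};

// normalizeFileParams({ doc: [{ type: 'file', url: '/f/a.docx' }] }) => { doc: ['/f/a.docx'] }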
@@ -2,16 +2,15 @@ import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runti
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
 import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/type';
 import { documentFileType } from '@fastgpt/global/common/file/constants';
 import axios from 'axios';
 import { serverRequestBaseUrl } from '../../../../common/api/serverRequest';
 import { MongoRawTextBuffer } from '../../../../common/buffer/rawText/schema';
 import { readFromSecondary } from '../../../../common/mongo/utils';
 import { getErrText } from '@fastgpt/global/common/error/utils';
-import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
+import { detectFileEncoding, parseUrlToFileType } from '@fastgpt/global/common/file/tools';
 import { readRawContentByFileBuffer } from '../../../../common/file/read/utils';
 import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
-import { UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
+import { ChatItemType, UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
 import { parseFileExtensionFromUrl } from '@fastgpt/global/common/string/tools';

 type Props = ModuleDispatchProps<{
@@ -48,12 +47,41 @@ export const dispatchReadFiles = async (props: Props): Promise<Response> => {
     runningAppInfo: { teamId },
     histories,
     chatConfig,
+    node: { version },
     params: { fileUrlList = [] }
   } = props;
   const maxFiles = chatConfig?.fileSelectConfig?.maxFiles || 20;

   // Get files from histories
-  const filesFromHistories = histories
+  const filesFromHistories = version !== '489' ? [] : getHistoryFileLinks(histories);
+
+  const { text, readFilesResult } = await getFileContentFromLinks({
+    // Concat fileUrlList and filesFromHistories; remove not supported files
+    urls: [...fileUrlList, ...filesFromHistories],
+    requestOrigin,
+    maxFiles,
+    teamId
+  });
+
+  return {
+    [NodeOutputKeyEnum.text]: text,
+    [DispatchNodeResponseKeyEnum.nodeResponse]: {
+      readFiles: readFilesResult.map((item) => ({
+        name: item?.filename || '',
+        url: item?.url || ''
+      })),
+      readFilesResult: readFilesResult
+        .map((item) => item?.nodeResponsePreviewText ?? '')
+        .join('\n******\n')
+    },
+    [DispatchNodeResponseKeyEnum.toolResponses]: {
+      fileContent: text
+    }
+  };
+};
+
+export const getHistoryFileLinks = (histories: ChatItemType[]) => {
+  return histories
     .filter((item) => {
       if (item.obj === ChatRoleEnum.Human) {
         return item.value.filter((value) => value.type === 'file');
@@ -70,26 +98,27 @@ export const dispatchReadFiles = async (props: Props): Promise<Response> => {
       return files;
     })
     .flat();
+};

-  // Concat fileUrlList and filesFromHistories; remove not supported files
-  const parseUrlList = [...fileUrlList, ...filesFromHistories]
+export const getFileContentFromLinks = async ({
+  urls,
+  requestOrigin,
+  maxFiles,
+  teamId
+}: {
+  urls: string[];
+  requestOrigin?: string;
+  maxFiles: number;
+  teamId: string;
+}) => {
+  const parseUrlList = urls
     .map((url) => {
       try {
         // Avoid "/api/xxx" file error.
         const origin = requestOrigin ?? 'http://localhost:3000';

         // Check is system upload file
         if (url.startsWith('/') || (requestOrigin && url.startsWith(requestOrigin))) {
           // Parse url, get filename query. Keep only documents that can be parsed
           const parseUrl = new URL(url, origin);
           const filenameQuery = parseUrl.searchParams.get('filename');

           // Not document
           if (filenameQuery) {
-            const extensionQuery = filenameQuery.split('.').pop()?.toLowerCase() || '';
-            if (!documentFileType.includes(extensionQuery)) {
-              return '';
-            }
+            if (parseUrlToFileType(url)?.type !== 'file') {
+              return '';
+            }
           }

           // Remove the origin(Make intranet requests directly)
@@ -197,18 +226,7 @@ export const dispatchReadFiles = async (props: Props): Promise<Response> => {
   const text = readFilesResult.map((item) => item?.text ?? '').join('\n******\n');

   return {
-    [NodeOutputKeyEnum.text]: text,
-    [DispatchNodeResponseKeyEnum.nodeResponse]: {
-      readFiles: readFilesResult.map((item) => ({
-        name: item?.filename || '',
-        url: item?.url || ''
-      })),
-      readFilesResult: readFilesResult
-        .map((item) => item?.nodeResponsePreviewText ?? '')
-        .join('\n******\n')
-    },
-    [DispatchNodeResponseKeyEnum.toolResponses]: {
-      fileContent: text
-    }
+    text,
+    readFilesResult
   };
 };
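With the read logic extracted into `getFileContentFromLinks`, the tool node and AI chat node above can reuse it directly. A hypothetical call (the url and id values are illustrative only, not from the diff):

const demo = async () => {
  const { text, readFilesResult } = await getFileContentFromLinks({
    urls: ['/api/system/file?filename=report.docx'], // illustrative system-upload url
    requestOrigin: 'https://fastgpt.example.com',
    maxFiles: 20,
    teamId: 'team-id'
  });
  // `text` joins each file's raw text with '\n******\n';
  // `readFilesResult` keeps per-file metadata (filename, url, preview text).
  return { text, fileCount: readFilesResult.length };
};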
@@ -1,14 +1,5 @@
 import { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import { countPromptTokens } from '../../common/string/tiktoken/index';
-import { getNanoid } from '@fastgpt/global/common/string/tools';
-import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
-import {
-  getPluginInputsFromStoreNodes,
-  getPluginRunContent
-} from '@fastgpt/global/core/app/plugin/utils';
-import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
-import { RuntimeUserPromptType, UserChatItemType } from '@fastgpt/global/core/chat/type';
-import { runtimePrompt2ChatsValue } from '@fastgpt/global/core/chat/adapt';

 /* filter search result */
 export const filterSearchResultsByMaxChars = async (