diff --git a/packages/global/common/file/tools.ts b/packages/global/common/file/tools.ts
index cf26762e7..8cf1d3e95 100644
--- a/packages/global/common/file/tools.ts
+++ b/packages/global/common/file/tools.ts
@@ -19,6 +19,7 @@ export const detectFileEncoding = (buffer: Buffer) => {
// Url => user upload file type
export const parseUrlToFileType = (url: string): UserChatItemValueItemType['file'] | undefined => {
+ if (typeof url !== 'string') return;
const parseUrl = new URL(url, 'https://locaohost:3000');
const filename = (() => {
diff --git a/packages/global/core/ai/prompt/AIChat.ts b/packages/global/core/ai/prompt/AIChat.ts
index b0c47cfe9..36abbaeca 100644
--- a/packages/global/core/ai/prompt/AIChat.ts
+++ b/packages/global/core/ai/prompt/AIChat.ts
@@ -207,8 +207,8 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
];
// Document quote prompt
-export const Prompt_DocumentQuote = `将 中的内容作为本次对话的参考:
-
+export const Prompt_DocumentQuote = `将 中的内容作为本次对话的参考:
+
{{quote}}
-
+
`;
diff --git a/packages/global/core/workflow/runtime/type.d.ts b/packages/global/core/workflow/runtime/type.d.ts
index 29088a77e..ae0d29bda 100644
--- a/packages/global/core/workflow/runtime/type.d.ts
+++ b/packages/global/core/workflow/runtime/type.d.ts
@@ -135,6 +135,9 @@ export type DispatchNodeResponseType = {
extensionResult?: string;
extensionTokens?: number;
+ // dataset concat
+ concatLength?: number;
+
// cq
cqList?: ClassifyQuestionAgentItemType[];
cqResult?: string;
diff --git a/packages/global/core/workflow/runtime/utils.ts b/packages/global/core/workflow/runtime/utils.ts
index 43a43ce1c..8ce3c8bb9 100644
--- a/packages/global/core/workflow/runtime/utils.ts
+++ b/packages/global/core/workflow/runtime/utils.ts
@@ -335,7 +335,12 @@ export function replaceEditorVariable({
for (const key in allVariables) {
const variable = allVariables[key];
const val = variable.value;
- const formatVal = typeof val === 'object' ? JSON.stringify(val) : String(val);
+ const formatVal = (() => {
+ if (val === undefined) return '';
+ if (val === null) return 'null';
+
+ return typeof val === 'object' ? JSON.stringify(val) : String(val);
+ })();
const regex = new RegExp(`\\{\\{\\$(${variable.nodeId}\\.${variable.id})\\$\\}\\}`, 'g');
text = text.replace(regex, formatVal);
diff --git a/packages/global/core/workflow/template/system/aiChat/index.ts b/packages/global/core/workflow/template/system/aiChat/index.ts
index 249210dc6..3ac36e6bb 100644
--- a/packages/global/core/workflow/template/system/aiChat/index.ts
+++ b/packages/global/core/workflow/template/system/aiChat/index.ts
@@ -90,7 +90,7 @@ export const AiChatModule: FlowNodeTemplateType = {
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
- value: false
+ value: true
},
// settings modal ---
{
diff --git a/packages/global/core/workflow/template/system/datasetConcat.ts b/packages/global/core/workflow/template/system/datasetConcat.ts
index 185293b81..904033a09 100644
--- a/packages/global/core/workflow/template/system/datasetConcat.ts
+++ b/packages/global/core/workflow/template/system/datasetConcat.ts
@@ -25,7 +25,7 @@ export const getOneQuoteInputTemplate = ({
}): FlowNodeInputItemType => ({
key,
renderTypeList: [FlowNodeInputTypeEnum.reference],
- label: `${i18nT('workflow:quote_num')}`,
+ label: `${i18nT('workflow:quote_num')}-${index}`,
debugLabel: i18nT('workflow:knowledge_base_reference'),
canEdit: true,
valueType: WorkflowIOValueTypeEnum.datasetQuote
diff --git a/packages/global/core/workflow/template/system/textEditor.ts b/packages/global/core/workflow/template/system/textEditor.ts
index c7f97d2e9..bd1c84afc 100644
--- a/packages/global/core/workflow/template/system/textEditor.ts
+++ b/packages/global/core/workflow/template/system/textEditor.ts
@@ -24,17 +24,8 @@ export const TextEditorNode: FlowNodeTemplateType = {
name: i18nT('workflow:text_concatenation'),
intro: i18nT('workflow:intro_text_concatenation'),
courseUrl: '/docs/workflow/modules/text_editor/',
- version: '486',
+ version: '4813',
inputs: [
- {
- ...Input_Template_DynamicInput,
- description: i18nT('workflow:dynamic_input_description_concat'),
- customInputConfig: {
- selectValueTypeList: Object.values(WorkflowIOValueTypeEnum),
- showDescription: false,
- showDefaultValue: false
- }
- },
{
key: NodeInputKeyEnum.textareaInput,
renderTypeList: [FlowNodeInputTypeEnum.textarea],
diff --git a/packages/global/core/workflow/template/system/tools.ts b/packages/global/core/workflow/template/system/tools.ts
index 7526214f8..a5ed862ce 100644
--- a/packages/global/core/workflow/template/system/tools.ts
+++ b/packages/global/core/workflow/template/system/tools.ts
@@ -58,7 +58,7 @@ export const ToolModule: FlowNodeTemplateType = {
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
- value: false
+ value: true
},
{
diff --git a/packages/service/core/chat/utils.ts b/packages/service/core/chat/utils.ts
index d6097d55d..c68d9227b 100644
--- a/packages/service/core/chat/utils.ts
+++ b/packages/service/core/chat/utils.ts
@@ -118,34 +118,47 @@ export const loadRequestMessages = async ({
return item.image_url.url;
})();
- // If imgUrl is a local path, load image from local, and set url to base64
- if (imgUrl.startsWith('/')) {
- addLog.debug('Load image from local server', {
- baseUrl: serverRequestBaseUrl,
- requestUrl: imgUrl
- });
- const response = await axios.get(imgUrl, {
- baseURL: serverRequestBaseUrl,
- responseType: 'arraybuffer',
- proxy: false
- });
- const base64 = Buffer.from(response.data, 'binary').toString('base64');
- const imageType =
- getFileContentTypeFromHeader(response.headers['content-type']) ||
- guessBase64ImageType(base64);
+ try {
+ // If imgUrl is a local path, load image from local, and set url to base64
+ if (imgUrl.startsWith('/')) {
+ addLog.debug('Load image from local server', {
+ baseUrl: serverRequestBaseUrl,
+ requestUrl: imgUrl
+ });
+ const response = await axios.get(imgUrl, {
+ baseURL: serverRequestBaseUrl,
+ responseType: 'arraybuffer',
+ proxy: false
+ });
+ const base64 = Buffer.from(response.data, 'binary').toString('base64');
+ const imageType =
+ getFileContentTypeFromHeader(response.headers['content-type']) ||
+ guessBase64ImageType(base64);
- return {
- ...item,
- image_url: {
- ...item.image_url,
- url: `data:${imageType};base64,${base64}`
- }
- };
+ return {
+ ...item,
+ image_url: {
+ ...item.image_url,
+ url: `data:${imageType};base64,${base64}`
+ }
+ };
+ }
+
+ // Check whether the image URL is accessible; if not, filter it out
+ const response = await axios.head(imgUrl, {
+ timeout: 10000
+ });
+ if (response.status < 200 || response.status >= 400) {
+ addLog.info(`Filter invalid image: ${imgUrl}`);
+ return;
+ }
+ } catch (error) {
+ return;
}
}
return item;
})
- );
+ ).then((res) => res.filter(Boolean) as ChatCompletionContentPart[]);
};
// Split question text and image
const parseStringWithImages = (input: string): ChatCompletionContentPart[] => {
diff --git a/packages/service/core/workflow/dispatch/agent/runTool/functionCall.ts b/packages/service/core/workflow/dispatch/agent/runTool/functionCall.ts
index 9ebcb8ddb..30d33e52b 100644
--- a/packages/service/core/workflow/dispatch/agent/runTool/functionCall.ts
+++ b/packages/service/core/workflow/dispatch/agent/runTool/functionCall.ts
@@ -28,6 +28,7 @@ import { computedMaxToken, llmCompletionsBodyFormat } from '../../../../ai/utils
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
+import { i18nT } from '../../../../../../web/i18n/utils';
type FunctionRunResponseType = {
toolRunResponse: DispatchFlowResponse;
@@ -549,7 +550,7 @@ async function streamResponse({
}
if (!textAnswer && functionCalls.length === 0) {
- return Promise.reject('LLM api response empty');
+ return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}
return { answer: textAnswer, functionCalls };
diff --git a/packages/service/core/workflow/dispatch/agent/runTool/index.ts b/packages/service/core/workflow/dispatch/agent/runTool/index.ts
index 7463ba583..ef6e7aca2 100644
--- a/packages/service/core/workflow/dispatch/agent/runTool/index.ts
+++ b/packages/service/core/workflow/dispatch/agent/runTool/index.ts
@@ -96,7 +96,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
const globalFiles = chatValue2RuntimePrompt(query).files;
const { documentQuoteText, userFiles } = await getMultiInput({
- histories,
+ histories: chatHistories,
requestOrigin,
maxFiles: chatConfig?.fileSelectConfig?.maxFiles || 20,
teamId,
diff --git a/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts b/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts
index 9c176ecc9..fa580c95d 100644
--- a/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts
+++ b/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts
@@ -29,6 +29,7 @@ import { WorkflowResponseType } from '../../type';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
+import { i18nT } from '../../../../../../web/i18n/utils';
type FunctionCallCompletion = {
id: string;
@@ -537,7 +538,7 @@ async function streamResponse({
}
if (!textAnswer) {
- return Promise.reject('LLM api response empty');
+ return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}
return { answer: textAnswer.trim() };
}
diff --git a/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts b/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts
index b730ae66a..10fe57150 100644
--- a/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts
+++ b/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts
@@ -28,6 +28,7 @@ import { addLog } from '../../../../../common/system/log';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
+import { i18nT } from '../../../../../../web/i18n/utils';
type ToolRunResponseType = {
toolRunResponse: DispatchFlowResponse;
@@ -656,7 +657,7 @@ async function streamResponse({
}
if (!textAnswer && toolCalls.length === 0) {
- return Promise.reject('LLM api response empty');
+ return Promise.reject(i18nT('chat:LLM_model_response_empty'));
}
return { answer: textAnswer, toolCalls };
diff --git a/packages/service/core/workflow/dispatch/chat/oneapi.ts b/packages/service/core/workflow/dispatch/chat/oneapi.ts
index 3605f1a29..14d47ecef 100644
--- a/packages/service/core/workflow/dispatch/chat/oneapi.ts
+++ b/packages/service/core/workflow/dispatch/chat/oneapi.ts
@@ -5,11 +5,7 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
import { getAIApi } from '../../../ai/config';
-import type {
- ChatCompletion,
- ChatCompletionMessageParam,
- StreamChatType
-} from '@fastgpt/global/core/ai/type.d';
+import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '../../../../common/api/requestPlusApi';
@@ -48,6 +44,7 @@ import { AiChatQuoteRoleType } from '@fastgpt/global/core/workflow/template/syst
import { getErrText } from '@fastgpt/global/common/error/utils';
import { getFileContentFromLinks, getHistoryFileLinks } from '../tools/readFiles';
import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';
+import { i18nT } from '../../../../../web/i18n/utils';
export type ChatProps = ModuleDispatchProps<
AIChatNodeProps & {
@@ -93,9 +90,6 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise;
-type DatasetConcatResponse = {
+type DatasetConcatResponse = DispatchNodeResultType<{
[NodeOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
-};
+}>;
export async function dispatchDatasetConcat(
props: DatasetConcatProps
@@ -30,6 +34,12 @@ export async function dispatchDatasetConcat(
);
return {
- [NodeOutputKeyEnum.datasetQuoteQA]: await filterSearchResultsByMaxChars(rrfConcatResults, limit)
+ [NodeOutputKeyEnum.datasetQuoteQA]: await filterSearchResultsByMaxChars(
+ rrfConcatResults,
+ limit
+ ),
+ [DispatchNodeResponseKeyEnum.nodeResponse]: {
+ concatLength: rrfConcatResults.length
+ }
};
}
diff --git a/packages/service/core/workflow/dispatch/dataset/search.ts b/packages/service/core/workflow/dispatch/dataset/search.ts
index c355f4c9f..dce339587 100644
--- a/packages/service/core/workflow/dispatch/dataset/search.ts
+++ b/packages/service/core/workflow/dispatch/dataset/search.ts
@@ -16,6 +16,7 @@ import { datasetSearchQueryExtension } from '../../../dataset/search/utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { checkTeamReRankPermission } from '../../../../support/permission/teamLimit';
import { MongoDataset } from '../../../dataset/schema';
+import { i18nT } from '../../../../../web/i18n/utils';
type DatasetSearchProps = ModuleDispatchProps<{
[NodeInputKeyEnum.datasetSelectList]: SelectedDatasetType;
@@ -56,15 +57,15 @@ export async function dispatchDatasetSearch(
} = props as DatasetSearchProps;
if (!Array.isArray(datasets)) {
- return Promise.reject('Quote type error');
+ return Promise.reject(i18nT('chat:dataset_quote_type error'));
}
if (datasets.length === 0) {
- return Promise.reject('core.chat.error.Select dataset empty');
+ return Promise.reject(i18nT('common:core.chat.error.Select dataset empty'));
}
if (!userChatInput) {
- return Promise.reject('core.chat.error.User input empty');
+ return Promise.reject(i18nT('common:core.chat.error.User input empty'));
}
// query extension
diff --git a/packages/service/core/workflow/dispatch/tools/readFiles.ts b/packages/service/core/workflow/dispatch/tools/readFiles.ts
index 777d267ce..702dfda93 100644
--- a/packages/service/core/workflow/dispatch/tools/readFiles.ts
+++ b/packages/service/core/workflow/dispatch/tools/readFiles.ts
@@ -112,15 +112,24 @@ export const getFileContentFromLinks = async ({
teamId: string;
}) => {
const parseUrlList = urls
+ // Remove invalid urls
+ .filter((url) => {
+ if (typeof url !== 'string') return false;
+
+ // Keep only relative paths and http/ws URLs
+ const validPrefixList = ['/', 'http', 'ws'];
+ if (validPrefixList.some((prefix) => url.startsWith(prefix))) {
+ return true;
+ }
+
+ return false;
+ })
+ // Just get the document type file
+ .filter((url) => parseUrlToFileType(url)?.type === 'file')
.map((url) => {
try {
// Check is system upload file
if (url.startsWith('/') || (requestOrigin && url.startsWith(requestOrigin))) {
- // Parse url, get filename query. Keep only documents that can be parsed
- if (parseUrlToFileType(url)?.type !== 'file') {
- return '';
- }
-
// Remove the origin(Make intranet requests directly)
if (requestOrigin && url.startsWith(requestOrigin)) {
url = url.replace(requestOrigin, '');
@@ -152,7 +161,7 @@ export const getFileContentFromLinks = async ({
}
try {
- // Get file buffer
+ // Get file buffer data
const response = await axios.get(url, {
baseURL: serverRequestBaseUrl,
responseType: 'arraybuffer'
diff --git a/packages/web/components/common/MySelect/MultipleRowSelect.tsx b/packages/web/components/common/MySelect/MultipleRowSelect.tsx
index a2a6ae97c..6ab271058 100644
--- a/packages/web/components/common/MySelect/MultipleRowSelect.tsx
+++ b/packages/web/components/common/MySelect/MultipleRowSelect.tsx
@@ -1,4 +1,4 @@
-import React, { useRef, useCallback, useState } from 'react';
+import React, { useRef, useCallback, useState, useMemo } from 'react';
import { Button, useDisclosure, Box, Flex, useOutsideClick, Checkbox } from '@chakra-ui/react';
import { ListItemType, MultipleArraySelectProps, MultipleSelectProps } from './type';
import EmptyTip from '../EmptyTip';
@@ -177,6 +177,10 @@ export const MultipleRowArraySelect = ({
const [navigationPath, setNavigationPath] = useState([]);
+ const formatValue = useMemo(() => {
+ return Array.isArray(value) ? value : [];
+ }, [value]);
+
// Close when clicking outside
useOutsideClick({
ref: ref,
@@ -198,7 +202,7 @@ export const MultipleRowArraySelect = ({
setNavigationPath(newPath);
} else {
const parentValue = navigationPath[0];
- const newValues = [...value];
+ const newValues = [...formatValue];
const newValue = [parentValue, item.value];
if (newValues.some((v) => v[0] === parentValue && v[1] === item.value)) {
@@ -225,7 +229,7 @@ export const MultipleRowArraySelect = ({
const showCheckbox = !hasChildren;
const isChecked =
showCheckbox &&
- value.some((v) => v[1] === item.value && v[0] === navigationPath[0]);
+ formatValue.some((v) => v[1] === item.value && v[0] === navigationPath[0]);
return (
);
},
- [navigationPath, value, onSelect]
+ [navigationPath, formatValue, onSelect]
);
const onOpenSelect = useCallback(() => {
setNavigationPath([]);
onOpen();
- }, [value, onOpen]);
+ }, []);
return (
diff --git a/packages/web/i18n/en/chat.json b/packages/web/i18n/en/chat.json
index 7a419671d..27d940150 100644
--- a/packages/web/i18n/en/chat.json
+++ b/packages/web/i18n/en/chat.json
@@ -1,5 +1,7 @@
{
+ "AI_input_is_empty": "The content passed to the AI node is empty",
"Delete_all": "Clear All Lexicon",
+ "LLM_model_response_empty": "The model flow response is empty, please check whether the model flow output is normal.",
"chat_history": "Conversation History",
"chat_input_guide_lexicon_is_empty": "Lexicon not configured yet",
"citations": "{{num}} References",
@@ -12,6 +14,7 @@
"contextual_preview": "Contextual Preview {{num}} Items",
"csv_input_lexicon_tip": "Only CSV batch import is supported, click to download the template",
"custom_input_guide_url": "Custom Lexicon URL",
+ "dataset_quote_type error": "Knowledge base reference type is wrong, correct type: { datasetId: string }[]",
"delete_all_input_guide_confirm": "Are you sure you want to clear the input guide lexicon?",
"empty_directory": "This directory is empty~",
"file_amount_over": "Exceeded maximum file quantity {{max}}",
@@ -32,12 +35,14 @@
"not_select_file": "No file selected",
"plugins_output": "Plugin Output",
"question_tip": "From top to bottom, the response order of each module",
+ "response.dataset_concat_length": "Combined total",
"response.node_inputs": "Node Inputs",
"select": "Select",
"select_file": "Upload File",
"select_file_img": "Upload file / image",
"select_img": "Upload Image",
"stream_output": "Stream Output",
+ "unsupported_file_type": "Unsupported file types",
"upload": "Upload",
"view_citations": "View References",
"web_site_sync": "Web Site Sync"
diff --git a/packages/web/i18n/en/workflow.json b/packages/web/i18n/en/workflow.json
index 9112f3a26..2f250675f 100644
--- a/packages/web/i18n/en/workflow.json
+++ b/packages/web/i18n/en/workflow.json
@@ -131,7 +131,7 @@
"question_optimization": "Question Optimization",
"quote_content_placeholder": "The structure of the reference content can be customized to better suit different scenarios. \nSome variables can be used for template configuration\n\n{{q}} - main content\n\n{{a}} - auxiliary data\n\n{{source}} - source name\n\n{{sourceId}} - source ID\n\n{{index}} - nth reference",
"quote_content_tip": "The structure of the reference content can be customized to better suit different scenarios. Some variables can be used for template configuration:\n\n{{q}} - main content\n{{a}} - auxiliary data\n{{source}} - source name\n{{sourceId}} - source ID\n{{index}} - nth reference\nThey are all optional and the following are the default values:\n\n{{default}}",
- "quote_num": "Quote {{num}}",
+ "quote_num": "Dataset",
"quote_prompt_tip": "You can use {{quote}} to insert a quote content template and {{question}} to insert a question (Role=user).\n\nThe following are the default values:\n\n{{default}}",
"quote_role_system_tip": "Please note that the {{question}} variable is removed from the \"Quote Template Prompt Words\"",
"quote_role_user_tip": "Please pay attention to adding the {{question}} variable in the \"Quote Template Prompt Word\"",
diff --git a/packages/web/i18n/zh/chat.json b/packages/web/i18n/zh/chat.json
index 7dea64c4e..28ebfb909 100644
--- a/packages/web/i18n/zh/chat.json
+++ b/packages/web/i18n/zh/chat.json
@@ -1,5 +1,7 @@
{
+ "AI_input_is_empty": "传入AI 节点的内容为空",
"Delete_all": "清空词库",
+ "LLM_model_response_empty": "模型流响应为空,请检查模型流输出是否正常",
"chat_history": "聊天记录",
"chat_input_guide_lexicon_is_empty": "还没有配置词库",
"citations": "{{num}}条引用",
@@ -12,6 +14,7 @@
"contextual_preview": "上下文预览 {{num}} 条",
"csv_input_lexicon_tip": "仅支持 CSV 批量导入,点击下载模板",
"custom_input_guide_url": "自定义词库地址",
+ "dataset_quote_type error": "知识库引用类型错误,正确类型:{ datasetId: string }[]",
"delete_all_input_guide_confirm": "确定要清空输入引导词库吗?",
"empty_directory": "这个目录已经没东西可选了~",
"file_amount_over": "超出最大文件数量 {{max}}",
@@ -33,12 +36,14 @@
"plugins_output": "插件输出",
"question_tip": "从上到下,为各个模块的响应顺序",
"response.child total points": "子工作流积分消耗",
+ "response.dataset_concat_length": "合并后总数",
"response.node_inputs": "节点输入",
"select": "选择",
"select_file": "上传文件",
"select_file_img": "上传文件/图片",
"select_img": "上传图片",
"stream_output": "流输出",
+ "unsupported_file_type": "不支持的文件类型",
"upload": "上传",
"view_citations": "查看引用",
"web_site_sync": "Web站点同步"
diff --git a/packages/web/i18n/zh/workflow.json b/packages/web/i18n/zh/workflow.json
index 1c54c3d8e..74310d681 100644
--- a/packages/web/i18n/zh/workflow.json
+++ b/packages/web/i18n/zh/workflow.json
@@ -132,7 +132,7 @@
"question_optimization": "问题优化",
"quote_content_placeholder": "可以自定义引用内容的结构,以更好的适配不同场景。可以使用一些变量来进行模板配置\n{{q}} - 主要内容\n{{a}} - 辅助数据\n{{source}} - 来源名\n{{sourceId}} - 来源ID\n{{index}} - 第 n 个引用",
"quote_content_tip": "可以自定义引用内容的结构,以更好的适配不同场景。可以使用一些变量来进行模板配置\n{{q}} - 主要内容\n{{a}} - 辅助数据\n{{source}} - 来源名\n{{sourceId}} - 来源ID\n{{index}} - 第 n 个引用\n他们都是可选的,下面是默认值:\n{{default}}",
- "quote_num": "引用{{num}}",
+ "quote_num": "引用",
"quote_prompt_tip": "可以用 {{quote}} 来插入引用内容模板,使用 {{question}} 来插入问题(Role=user)。\n下面是默认值:\n{{default}}",
"quote_role_system_tip": "请注意从“引用模板提示词”中移除 {{question}} 变量",
"quote_role_user_tip": "请注意在“引用模板提示词”中添加 {{question}} 变量",
diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx b/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx
index bbc295dc5..35154bcf4 100644
--- a/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx
+++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx
@@ -17,6 +17,7 @@ import { documentFileType } from '@fastgpt/global/common/file/constants';
import FilePreview from '../../components/FilePreview';
import { useFileUpload } from '../hooks/useFileUpload';
import ComplianceTip from '@/components/common/ComplianceTip/index';
+import { useToast } from '@fastgpt/web/hooks/useToast';
const InputGuideBox = dynamic(() => import('./InputGuideBox'));
@@ -44,6 +45,7 @@ const ChatInput = ({
}) => {
const { isPc } = useSystem();
const { t } = useTranslation();
+ const { toast } = useToast();
const { setValue, watch, control } = chatForm;
const inputValue = watch('input');
@@ -285,6 +287,7 @@ const ChatInput = ({
onSelectFile({ files });
if (files.length > 0) {
+ e.preventDefault();
e.stopPropagation();
}
}
@@ -435,7 +438,36 @@ const ChatInput = ({
);
return (
-
+ e.preventDefault()}
+ onDrop={(e) => {
+ e.preventDefault();
+
+ if (!(showSelectFile || showSelectImg)) return;
+ const files = Array.from(e.dataTransfer.files);
+
+ const droppedFiles = files.filter((file) => fileTypeFilter(file));
+ if (droppedFiles.length > 0) {
+ onSelectFile({ files: droppedFiles });
+ }
+
+ const invalidFileName = files
+ .filter((file) => !fileTypeFilter(file))
+ .map((file) => file.name)
+ .join(', ');
+ if (invalidFileName) {
+ toast({
+ status: 'warning',
+ title: t('chat:unsupported_file_type'),
+ description: invalidFileName
+ });
+ }
+ }}
+ >
0 ? '0' : ['14px', '18px']}
pb={['14px', '18px']}
diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx b/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx
index f54a42531..b4472de21 100644
--- a/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx
+++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx
@@ -527,7 +527,7 @@ const ChatBox = (
});
if (responseData?.[responseData.length - 1]?.error) {
toast({
- title: responseData[responseData.length - 1].error?.message,
+ title: t(responseData[responseData.length - 1].error?.message),
status: 'error'
});
}
diff --git a/projects/app/src/components/core/chat/components/WholeResponseModal.tsx b/projects/app/src/components/core/chat/components/WholeResponseModal.tsx
index 6757ae20d..cd07de624 100644
--- a/projects/app/src/components/core/chat/components/WholeResponseModal.tsx
+++ b/projects/app/src/components/core/chat/components/WholeResponseModal.tsx
@@ -237,6 +237,10 @@ export const WholeResponseContent = ({
/>
)}
>
+ {/* dataset concat */}
+ <>
+
+ >
{/* classify question */}
<>
}
- my={3}
color={'primary.600'}
>
{t('common:core.module.input.add')}