V4.7-alpha (#985)
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
@@ -7,7 +7,7 @@
|
||||
},
|
||||
"llmModels": [
|
||||
{
|
||||
"model": "gpt-3.5-turbo-1106",
|
||||
"model": "gpt-3.5-turbo",
|
||||
"name": "gpt-3.5-turbo",
|
||||
"maxContext": 16000,
|
||||
"maxResponse": 4000,
|
||||
@@ -17,6 +17,10 @@
|
||||
"censor": false,
|
||||
"vision": false,
|
||||
"datasetProcess": false,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"useInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
@@ -35,6 +39,10 @@
|
||||
"censor": false,
|
||||
"vision": false,
|
||||
"datasetProcess": true,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"useInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
@@ -53,6 +61,10 @@
|
||||
"censor": false,
|
||||
"vision": false,
|
||||
"datasetProcess": false,
|
||||
"usedInClassify": true,
|
||||
"usedInExtractFields": true,
|
||||
"useInToolCall": true,
|
||||
"usedInQueryExtension": true,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
@@ -71,6 +83,10 @@
|
||||
"censor": false,
|
||||
"vision": true,
|
||||
"datasetProcess": false,
|
||||
"usedInClassify": false,
|
||||
"usedInExtractFields": false,
|
||||
"useInToolCall": false,
|
||||
"usedInQueryExtension": false,
|
||||
"toolChoice": true,
|
||||
"functionCall": false,
|
||||
"customCQPrompt": "",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "app",
|
||||
"version": "4.6.9",
|
||||
"version": "4.7",
|
||||
"private": false,
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
|
||||
@@ -1,11 +1,7 @@
|
||||
### Fast GPT V4.6.9
|
||||
### FastGPT V4.7
|
||||
|
||||
1. 新增 - 知识库新增“增强处理”训练模式,可生成更多类型索引。
|
||||
2. 新增 - 完善了HTTP模块的变量提示。
|
||||
3. 新增 - HTTP模块支持OpenAI单接口导入。
|
||||
4. 新增 - 全局变量支持增加外部变量。可通过分享链接的Query或 API 的 variables 参数传入。
|
||||
5. 新增 - 内容提取模块增加默认值。
|
||||
6. 优化 - 问题补全。增加英文类型。同时可以设置为单独模块,方便复用。
|
||||
7. [点击查看高级编排介绍文档](https://doc.fastgpt.in/docs/workflow/intro)
|
||||
8. [使用文档](https://doc.fastgpt.in/docs/intro/)
|
||||
9. [点击查看商业版](https://doc.fastgpt.in/docs/commercial/)
|
||||
1. 新增 - 工具调用模块,可以让LLM模型根据用户意图,动态的选择其他模型或插件执行。
|
||||
2. 优化 - 高级编排性能
|
||||
3. [点击查看高级编排介绍文档](https://doc.fastgpt.in/docs/workflow/intro)
|
||||
4. [使用文档](https://doc.fastgpt.in/docs/intro/)
|
||||
5. [点击查看商业版](https://doc.fastgpt.in/docs/commercial/)
|
||||
12
projects/app/public/imgs/module/tool.svg
Normal file
12
projects/app/public/imgs/module/tool.svg
Normal file
@@ -0,0 +1,12 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16" fill="none">
|
||||
<path
|
||||
d="M7.16966 2.66659H8.98677C9.57588 2.66659 10.0534 3.14415 10.0534 3.73325V3.9589H11.3332C11.3512 3.9589 11.3691 3.9589 11.3868 3.9589V3.73325C11.3868 2.40777 10.3123 1.33325 8.98677 1.33325H7.16966C5.84418 1.33325 4.76966 2.40777 4.76966 3.73325V3.9589H6.10299V3.73325C6.10299 3.14415 6.58056 2.66659 7.16966 2.66659Z"
|
||||
fill="#6F5DD7" />
|
||||
<path
|
||||
d="M14.9573 6.83844H1.04263C1.05186 6.04556 1.09409 5.57531 1.28709 5.19652C1.5044 4.77002 1.85116 4.42327 2.27766 4.20596C2.76253 3.9589 3.39725 3.9589 4.66671 3.9589H11.3332C12.6027 3.9589 13.2374 3.9589 13.7223 4.20596C14.1488 4.42327 14.4955 4.77002 14.7129 5.19652C14.9059 5.57531 14.9481 6.04556 14.9573 6.83844Z"
|
||||
fill="#6F5DD7" />
|
||||
<path
|
||||
d="M9.75703 8.02511H14.9599V11.04C14.9599 12.3094 14.9599 12.9442 14.7129 13.429C14.4955 13.8555 14.1488 14.2023 13.7223 14.4196C13.2374 14.6666 12.6027 14.6666 11.3332 14.6666H4.6667C3.39725 14.6666 2.76253 14.6666 2.27766 14.4196C1.85116 14.2023 1.5044 13.8555 1.28709 13.429C1.04004 12.9442 1.04004 12.3094 1.04004 11.04V8.02511H6.24291V10.2151C6.24291 10.436 6.422 10.6151 6.64291 10.6151H9.35703C9.57794 10.6151 9.75703 10.436 9.75703 10.2151V8.02511Z"
|
||||
fill="#6F5DD7" />
|
||||
<path d="M7.30958 8.02511H8.69036V9.54843H7.30958V8.02511Z" fill="#6F5DD7" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.4 KiB |
@@ -89,6 +89,7 @@
|
||||
"Edit": "Edit",
|
||||
"Exit": "Exit",
|
||||
"Expired Time": "Expired",
|
||||
"Field": "Field",
|
||||
"File": "File",
|
||||
"Filed is repeat": "Filed is repeated",
|
||||
"Filed is repeated": "",
|
||||
@@ -409,7 +410,7 @@
|
||||
"Speaking": "I'm listening...",
|
||||
"Start Chat": "Start Chat",
|
||||
"Stop Speak": "Stop Speak",
|
||||
"Type a message": "Input problem",
|
||||
"Type a message": "Enter your question here",
|
||||
"Unpin": "Unpin",
|
||||
"You need to a chat app": "You don't have apps available",
|
||||
"error": {
|
||||
@@ -444,9 +445,11 @@
|
||||
"response": {
|
||||
"Complete Response": "Complete Response",
|
||||
"Extension model": "Extension model",
|
||||
"Plugin Resonse Detail": "Plugin Detail",
|
||||
"Plugin response detail": "Plugin Detail",
|
||||
"Read complete response": "Read Detail",
|
||||
"Read complete response tips": "Click to see the detailed process",
|
||||
"Tool call response detail": "Tool call detail",
|
||||
"Tool call tokens": "Tool call tokens",
|
||||
"context total length": "Context Length",
|
||||
"module cq": "Question classification list",
|
||||
"module cq result": "Classification Result",
|
||||
@@ -776,6 +779,7 @@
|
||||
},
|
||||
"Default value": "Default ",
|
||||
"Default value placeholder": "Null characters are returned by default",
|
||||
"Edit intro": "Edit",
|
||||
"Field Description": "Description",
|
||||
"Field Name": "Name",
|
||||
"Field Type": "Type",
|
||||
@@ -933,6 +937,9 @@
|
||||
"textEditor": {
|
||||
"Text Edit": "Text Edit"
|
||||
},
|
||||
"tool": {
|
||||
"Tool input": "Tool input"
|
||||
},
|
||||
"valueType": {
|
||||
"any": "Any",
|
||||
"boolean": "Boolean",
|
||||
@@ -942,7 +949,8 @@
|
||||
"number": "Number",
|
||||
"selectApp": "Select App",
|
||||
"selectDataset": "Select Dataset",
|
||||
"string": "String"
|
||||
"string": "String",
|
||||
"tools": "tools"
|
||||
},
|
||||
"variable": {
|
||||
"External type": "External",
|
||||
|
||||
@@ -89,6 +89,7 @@
|
||||
"Edit": "编辑",
|
||||
"Exit": "退出",
|
||||
"Expired Time": "过期时间",
|
||||
"Field": "字段",
|
||||
"File": "文件",
|
||||
"Filed is repeat": "",
|
||||
"Filed is repeated": "字段重复了",
|
||||
@@ -444,9 +445,11 @@
|
||||
"response": {
|
||||
"Complete Response": "完整响应",
|
||||
"Extension model": "问题优化模型",
|
||||
"Plugin Resonse Detail": "插件详情",
|
||||
"Plugin response detail": "插件详情",
|
||||
"Read complete response": "查看详情",
|
||||
"Read complete response tips": "点击查看详细流程",
|
||||
"Tool call response detail": "工具运行详情",
|
||||
"Tool call tokens": "工具调用Tokens消耗",
|
||||
"context total length": "上下文总长度",
|
||||
"module cq": "问题分类列表",
|
||||
"module cq result": "分类结果",
|
||||
@@ -778,10 +781,11 @@
|
||||
},
|
||||
"Default value": "默认值",
|
||||
"Default value placeholder": "不填则默认返回空字符",
|
||||
"Edit intro": "编辑描述",
|
||||
"Field Description": "字段描述",
|
||||
"Field Name": "字段名",
|
||||
"Field Type": "字段类型",
|
||||
"Field key": "字段 Key",
|
||||
"Field key": "字段Key",
|
||||
"Http request props": "请求参数",
|
||||
"Http request settings": "请求配置",
|
||||
"Input Type": "输入类型",
|
||||
@@ -935,6 +939,9 @@
|
||||
"textEditor": {
|
||||
"Text Edit": "文本加工"
|
||||
},
|
||||
"tool": {
|
||||
"Tool input": "工具输入"
|
||||
},
|
||||
"valueType": {
|
||||
"any": "任意",
|
||||
"boolean": "布尔",
|
||||
@@ -944,7 +951,8 @@
|
||||
"number": "数字",
|
||||
"selectApp": "应用选择",
|
||||
"selectDataset": "知识库选择",
|
||||
"string": "字符串"
|
||||
"string": "字符串",
|
||||
"tools": "工具调用"
|
||||
},
|
||||
"variable": {
|
||||
"External type": "外部传入",
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import React from 'react';
|
||||
import { ModalBody, Box, useTheme, Flex, Image } from '@chakra-ui/react';
|
||||
import { ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { ModalBody, Box, useTheme } from '@chakra-ui/react';
|
||||
import MyModal from '../MyModal';
|
||||
import { DispatchNodeResponseType } from '@fastgpt/global/core/module/runtime/type.d';
|
||||
|
||||
const ContextModal = ({
|
||||
context = [],
|
||||
onClose
|
||||
}: {
|
||||
context: ChatItemType[];
|
||||
context: DispatchNodeResponseType['historyPreview'];
|
||||
onClose: () => void;
|
||||
}) => {
|
||||
const theme = useTheme();
|
||||
@@ -17,7 +17,7 @@ const ContextModal = ({
|
||||
isOpen={true}
|
||||
onClose={onClose}
|
||||
iconSrc="/imgs/modal/chatHistory.svg"
|
||||
title={`完整对话记录(${context.length}条)`}
|
||||
title={`上下文预览(${context.length}条)`}
|
||||
h={['90vh', '80vh']}
|
||||
minW={['90vw', '600px']}
|
||||
isCentered
|
||||
|
||||
@@ -1,36 +1,24 @@
|
||||
import { useSpeech } from '@/web/common/hooks/useSpeech';
|
||||
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
||||
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
|
||||
import React, { useRef, useEffect, useCallback, useState, useTransition } from 'react';
|
||||
import React, { useRef, useEffect, useCallback, useMemo } from 'react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import MyTooltip from '../MyTooltip';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import { useRouter } from 'next/router';
|
||||
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
|
||||
import { compressImgFileAndUpload } from '@/web/common/file/controller';
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import { IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
|
||||
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { addDays } from 'date-fns';
|
||||
import { useRequest } from '@/web/common/hooks/useRequest';
|
||||
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
|
||||
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
|
||||
import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from './type';
|
||||
import { textareaMinH } from './constants';
|
||||
import { UseFormReturn, useFieldArray } from 'react-hook-form';
|
||||
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
|
||||
|
||||
enum FileTypeEnum {
|
||||
image = 'image',
|
||||
file = 'file'
|
||||
}
|
||||
type FileItemType = {
|
||||
id: string;
|
||||
rawFile: File;
|
||||
type: `${FileTypeEnum}`;
|
||||
name: string;
|
||||
icon: string; // img is base64
|
||||
src?: string;
|
||||
};
|
||||
|
||||
const MessageInput = ({
|
||||
onChange,
|
||||
onSendMessage,
|
||||
onStop,
|
||||
isChatting,
|
||||
@@ -40,17 +28,29 @@ const MessageInput = ({
|
||||
shareId,
|
||||
outLinkUid,
|
||||
teamId,
|
||||
teamToken
|
||||
teamToken,
|
||||
chatForm
|
||||
}: OutLinkChatAuthProps & {
|
||||
onChange?: (e: string) => void;
|
||||
onSendMessage: (e: string) => void;
|
||||
onSendMessage: (val: ChatBoxInputType) => void;
|
||||
onStop: () => void;
|
||||
isChatting: boolean;
|
||||
showFileSelector?: boolean;
|
||||
TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
|
||||
resetInputVal: (val: string) => void;
|
||||
resetInputVal: (val: ChatBoxInputType) => void;
|
||||
chatForm: UseFormReturn<ChatBoxInputFormType>;
|
||||
}) => {
|
||||
const [, startSts] = useTransition();
|
||||
const { setValue, watch, control } = chatForm;
|
||||
const inputValue = watch('input');
|
||||
const {
|
||||
update: updateFile,
|
||||
remove: removeFile,
|
||||
fields: fileList,
|
||||
append: appendFile,
|
||||
replace: replaceFile
|
||||
} = useFieldArray({
|
||||
control,
|
||||
name: 'files'
|
||||
});
|
||||
|
||||
const {
|
||||
isSpeaking,
|
||||
@@ -64,45 +64,38 @@ const MessageInput = ({
|
||||
const { isPc } = useSystemStore();
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
const { t } = useTranslation();
|
||||
const textareaMinH = '22px';
|
||||
const [fileList, setFileList] = useState<FileItemType[]>([]);
|
||||
const havInput = !!TextareaDom.current?.value || fileList.length > 0;
|
||||
|
||||
const havInput = !!inputValue || fileList.length > 0;
|
||||
|
||||
/* file selector and upload */
|
||||
const { File, onOpen: onOpenSelectFile } = useSelectFile({
|
||||
fileType: 'image/*',
|
||||
multiple: true,
|
||||
maxCount: 10
|
||||
});
|
||||
|
||||
const { mutate: uploadFile } = useRequest({
|
||||
mutationFn: async (file: FileItemType) => {
|
||||
if (file.type === FileTypeEnum.image) {
|
||||
mutationFn: async ({ file, fileIndex }: { file: UserInputFileItemType; fileIndex: number }) => {
|
||||
if (file.type === ChatFileTypeEnum.image && file.rawFile) {
|
||||
try {
|
||||
const src = await compressImgFileAndUpload({
|
||||
const url = await compressImgFileAndUpload({
|
||||
type: MongoImageTypeEnum.chatImage,
|
||||
file: file.rawFile,
|
||||
maxW: 4329,
|
||||
maxH: 4329,
|
||||
maxSize: 1024 * 1024 * 5,
|
||||
// 30 day expired.
|
||||
// 7 day expired.
|
||||
expiredTime: addDays(new Date(), 7),
|
||||
shareId,
|
||||
outLinkUid,
|
||||
teamId,
|
||||
teamToken
|
||||
});
|
||||
setFileList((state) =>
|
||||
state.map((item) =>
|
||||
item.id === file.id
|
||||
? {
|
||||
...item,
|
||||
src: `${location.origin}${src}`
|
||||
}
|
||||
: item
|
||||
)
|
||||
);
|
||||
updateFile(fileIndex, {
|
||||
...file,
|
||||
url: `${location.origin}${url}`
|
||||
});
|
||||
} catch (error) {
|
||||
setFileList((state) => state.filter((item) => item.id !== file.id));
|
||||
removeFile(fileIndex);
|
||||
console.log(error);
|
||||
return Promise.reject(error);
|
||||
}
|
||||
@@ -110,7 +103,6 @@ const MessageInput = ({
|
||||
},
|
||||
errorToast: t('common.Upload File Failed')
|
||||
});
|
||||
|
||||
const onSelectFile = useCallback(
|
||||
async (files: File[]) => {
|
||||
if (!files || files.length === 0) {
|
||||
@@ -119,7 +111,7 @@ const MessageInput = ({
|
||||
const loadFiles = await Promise.all(
|
||||
files.map(
|
||||
(file) =>
|
||||
new Promise<FileItemType>((resolve, reject) => {
|
||||
new Promise<UserInputFileItemType>((resolve, reject) => {
|
||||
if (file.type.includes('image')) {
|
||||
const reader = new FileReader();
|
||||
reader.readAsDataURL(file);
|
||||
@@ -127,11 +119,10 @@ const MessageInput = ({
|
||||
const item = {
|
||||
id: nanoid(),
|
||||
rawFile: file,
|
||||
type: FileTypeEnum.image,
|
||||
type: ChatFileTypeEnum.image,
|
||||
name: file.name,
|
||||
icon: reader.result as string
|
||||
};
|
||||
uploadFile(item);
|
||||
resolve(item);
|
||||
};
|
||||
reader.onerror = () => {
|
||||
@@ -141,7 +132,7 @@ const MessageInput = ({
|
||||
resolve({
|
||||
id: nanoid(),
|
||||
rawFile: file,
|
||||
type: FileTypeEnum.file,
|
||||
type: ChatFileTypeEnum.file,
|
||||
name: file.name,
|
||||
icon: 'file/pdf'
|
||||
});
|
||||
@@ -149,29 +140,28 @@ const MessageInput = ({
|
||||
})
|
||||
)
|
||||
);
|
||||
appendFile(loadFiles);
|
||||
|
||||
setFileList((state) => [...state, ...loadFiles]);
|
||||
loadFiles.forEach((file, i) =>
|
||||
uploadFile({
|
||||
file,
|
||||
fileIndex: i + fileList.length
|
||||
})
|
||||
);
|
||||
},
|
||||
[uploadFile]
|
||||
[appendFile, fileList.length, uploadFile]
|
||||
);
|
||||
|
||||
/* on send */
|
||||
const handleSend = useCallback(async () => {
|
||||
const textareaValue = TextareaDom.current?.value || '';
|
||||
|
||||
const images = fileList.filter((item) => item.type === FileTypeEnum.image);
|
||||
const imagesText =
|
||||
images.length === 0
|
||||
? ''
|
||||
: `\`\`\`${IMG_BLOCK_KEY}
|
||||
${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
|
||||
\`\`\`
|
||||
`;
|
||||
|
||||
const inputMessage = `${imagesText}${textareaValue}`;
|
||||
|
||||
onSendMessage(inputMessage);
|
||||
setFileList([]);
|
||||
}, [TextareaDom, fileList, onSendMessage]);
|
||||
onSendMessage({
|
||||
text: textareaValue.trim(),
|
||||
files: fileList
|
||||
});
|
||||
replaceFile([]);
|
||||
}, [TextareaDom, fileList, onSendMessage, replaceFile]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!stream) {
|
||||
@@ -231,7 +221,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
|
||||
|
||||
{/* file preview */}
|
||||
<Flex wrap={'wrap'} px={[2, 4]} userSelect={'none'}>
|
||||
{fileList.map((item) => (
|
||||
{fileList.map((item, index) => (
|
||||
<Box
|
||||
key={item.id}
|
||||
border={'1px solid rgba(0,0,0,0.12)'}
|
||||
@@ -240,11 +230,11 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
|
||||
rounded={'md'}
|
||||
position={'relative'}
|
||||
_hover={{
|
||||
'.close-icon': { display: item.src ? 'block' : 'none' }
|
||||
'.close-icon': { display: item.url ? 'block' : 'none' }
|
||||
}}
|
||||
>
|
||||
{/* uploading */}
|
||||
{!item.src && (
|
||||
{!item.url && (
|
||||
<Flex
|
||||
position={'absolute'}
|
||||
alignItems={'center'}
|
||||
@@ -272,12 +262,12 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
|
||||
right={'-8px'}
|
||||
top={'-8px'}
|
||||
onClick={() => {
|
||||
setFileList((state) => state.filter((file) => file.id !== item.id));
|
||||
removeFile(index);
|
||||
}}
|
||||
className="close-icon"
|
||||
display={['', 'none']}
|
||||
/>
|
||||
{item.type === FileTypeEnum.image && (
|
||||
{item.type === ChatFileTypeEnum.image && (
|
||||
<Image
|
||||
alt={'img'}
|
||||
src={item.icon}
|
||||
@@ -335,14 +325,12 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
|
||||
boxShadow={'none !important'}
|
||||
color={'myGray.900'}
|
||||
isDisabled={isSpeaking}
|
||||
value={inputValue}
|
||||
onChange={(e) => {
|
||||
const textarea = e.target;
|
||||
textarea.style.height = textareaMinH;
|
||||
textarea.style.height = `${textarea.scrollHeight}px`;
|
||||
|
||||
startSts(() => {
|
||||
onChange?.(textarea.value);
|
||||
});
|
||||
setValue('input', textarea.value);
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
// enter send.(pc or iframe && enter and unPress shift)
|
||||
@@ -406,7 +394,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
|
||||
if (isSpeaking) {
|
||||
return stopSpeak();
|
||||
}
|
||||
startSpeak(resetInputVal);
|
||||
startSpeak((text) => resetInputVal({ text }));
|
||||
}}
|
||||
>
|
||||
<MyTooltip label={isSpeaking ? t('core.chat.Stop Speak') : t('core.chat.Record')}>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import React, { useMemo, useState } from 'react';
|
||||
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { type ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { DispatchNodeResponseType } from '@fastgpt/global/core/module/runtime/type.d';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { Flex, BoxProps, useDisclosure, useTheme, Box } from '@chakra-ui/react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
@@ -14,15 +15,18 @@ import ChatBoxDivider from '@/components/core/chat/Divider';
|
||||
import { strIsLink } from '@fastgpt/global/common/string/tools';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
|
||||
const QuoteModal = dynamic(() => import('./QuoteModal'), { ssr: false });
|
||||
const ContextModal = dynamic(() => import('./ContextModal'), { ssr: false });
|
||||
const WholeResponseModal = dynamic(() => import('./WholeResponseModal'), { ssr: false });
|
||||
const QuoteModal = dynamic(() => import('./QuoteModal'));
|
||||
const ContextModal = dynamic(() => import('./ContextModal'));
|
||||
const WholeResponseModal = dynamic(() => import('./WholeResponseModal'));
|
||||
|
||||
const isLLMNode = (item: ChatHistoryItemResType) =>
|
||||
item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;
|
||||
|
||||
const ResponseTags = ({
|
||||
responseData = [],
|
||||
flowResponses = [],
|
||||
showDetail
|
||||
}: {
|
||||
responseData?: ChatHistoryItemResType[];
|
||||
flowResponses?: ChatHistoryItemResType[];
|
||||
showDetail: boolean;
|
||||
}) => {
|
||||
const theme = useTheme();
|
||||
@@ -36,7 +40,8 @@ const ResponseTags = ({
|
||||
sourceName: string;
|
||||
};
|
||||
}>();
|
||||
const [contextModalData, setContextModalData] = useState<ChatItemType[]>();
|
||||
const [contextModalData, setContextModalData] =
|
||||
useState<DispatchNodeResponseType['historyPreview']>();
|
||||
const {
|
||||
isOpen: isOpenWholeModal,
|
||||
onOpen: onOpenWholeModal,
|
||||
@@ -44,18 +49,29 @@ const ResponseTags = ({
|
||||
} = useDisclosure();
|
||||
|
||||
const {
|
||||
chatAccount,
|
||||
llmModuleAccount,
|
||||
quoteList = [],
|
||||
sourceList = [],
|
||||
historyPreview = [],
|
||||
runningTime = 0
|
||||
} = useMemo(() => {
|
||||
const chatData = responseData.find((item) => item.moduleType === FlowNodeTypeEnum.chatNode);
|
||||
const quoteList = responseData
|
||||
.filter((item) => item.moduleType === FlowNodeTypeEnum.chatNode)
|
||||
const flatResponse = flowResponses
|
||||
.map((item) => {
|
||||
if (item.pluginDetail || item.toolDetail) {
|
||||
return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
|
||||
}
|
||||
return item;
|
||||
})
|
||||
.flat();
|
||||
|
||||
const chatData = flatResponse.find(isLLMNode);
|
||||
|
||||
const quoteList = flatResponse
|
||||
.filter((item) => item.moduleType === FlowNodeTypeEnum.datasetSearchNode)
|
||||
.map((item) => item.quoteList)
|
||||
.flat()
|
||||
.filter(Boolean) as SearchDataResponseItemType[];
|
||||
|
||||
const sourceList = quoteList.reduce(
|
||||
(acc: Record<string, SearchDataResponseItemType[]>, cur) => {
|
||||
if (!acc[cur.collectionId]) {
|
||||
@@ -67,8 +83,7 @@ const ResponseTags = ({
|
||||
);
|
||||
|
||||
return {
|
||||
chatAccount: responseData.filter((item) => item.moduleType === FlowNodeTypeEnum.chatNode)
|
||||
.length,
|
||||
llmModuleAccount: flatResponse.filter(isLLMNode).length,
|
||||
quoteList,
|
||||
sourceList: Object.values(sourceList)
|
||||
.flat()
|
||||
@@ -80,16 +95,16 @@ const ResponseTags = ({
|
||||
collectionId: item.collectionId
|
||||
})),
|
||||
historyPreview: chatData?.historyPreview,
|
||||
runningTime: +responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
|
||||
runningTime: +flowResponses.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
|
||||
};
|
||||
}, [showDetail, responseData]);
|
||||
}, [showDetail, flowResponses]);
|
||||
|
||||
const TagStyles: BoxProps = {
|
||||
mr: 2,
|
||||
bg: 'transparent'
|
||||
};
|
||||
|
||||
return responseData.length === 0 ? null : (
|
||||
return flowResponses.length === 0 ? null : (
|
||||
<>
|
||||
{sourceList.length > 0 && (
|
||||
<>
|
||||
@@ -148,10 +163,10 @@ const ResponseTags = ({
|
||||
</Tag>
|
||||
</MyTooltip>
|
||||
)}
|
||||
{chatAccount === 1 && (
|
||||
{llmModuleAccount === 1 && (
|
||||
<>
|
||||
{historyPreview.length > 0 && (
|
||||
<MyTooltip label={'点击查看完整对话记录'}>
|
||||
<MyTooltip label={'点击查看上下文预览'}>
|
||||
<Tag
|
||||
colorSchema="green"
|
||||
cursor={'pointer'}
|
||||
@@ -164,7 +179,7 @@ const ResponseTags = ({
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{chatAccount > 1 && (
|
||||
{llmModuleAccount > 1 && (
|
||||
<Tag colorSchema="blue" {...TagStyles}>
|
||||
多组 AI 对话
|
||||
</Tag>
|
||||
@@ -196,7 +211,7 @@ const ResponseTags = ({
|
||||
)}
|
||||
{isOpenWholeModal && (
|
||||
<WholeResponseModal
|
||||
response={responseData}
|
||||
response={flowResponses}
|
||||
showDetail={showDetail}
|
||||
onClose={onCloseWholeModal}
|
||||
/>
|
||||
|
||||
@@ -2,7 +2,7 @@ import React, { useMemo, useState } from 'react';
|
||||
import { Box, useTheme, Flex, Image } from '@chakra-ui/react';
|
||||
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { moduleTemplatesFlat } from '@/web/core/modules/template/system';
|
||||
import { moduleTemplatesFlat } from '@fastgpt/global/core/module/template/constants';
|
||||
|
||||
import Tabs from '../Tabs';
|
||||
import MyModal from '../MyModal';
|
||||
@@ -143,6 +143,11 @@ const ResponseBox = React.memo(function ResponseBox({
|
||||
/>
|
||||
<Row label={t('core.chat.response.module model')} value={activeModule?.model} />
|
||||
<Row label={t('core.chat.response.module tokens')} value={`${activeModule?.tokens}`} />
|
||||
<Row
|
||||
label={t('core.chat.response.Tool call tokens')}
|
||||
value={`${activeModule?.toolCallTokens}`}
|
||||
/>
|
||||
|
||||
<Row label={t('core.chat.response.module query')} value={activeModule?.query} />
|
||||
<Row
|
||||
label={t('core.chat.response.context total length')}
|
||||
@@ -182,12 +187,6 @@ const ResponseBox = React.memo(function ResponseBox({
|
||||
)
|
||||
}
|
||||
/>
|
||||
{activeModule.quoteList && activeModule.quoteList.length > 0 && (
|
||||
<Row
|
||||
label={t('core.chat.response.module quoteList')}
|
||||
rawDom={<QuoteList showDetail={showDetail} rawSearch={activeModule.quoteList} />}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
|
||||
{/* dataset search */}
|
||||
@@ -213,6 +212,12 @@ const ResponseBox = React.memo(function ResponseBox({
|
||||
label={t('support.wallet.usage.Extension result')}
|
||||
value={`${activeModule?.extensionResult}`}
|
||||
/>
|
||||
{activeModule.quoteList && activeModule.quoteList.length > 0 && (
|
||||
<Row
|
||||
label={t('core.chat.response.module quoteList')}
|
||||
rawDom={<QuoteList showDetail={showDetail} rawSearch={activeModule.quoteList} />}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
|
||||
{/* classify question */}
|
||||
@@ -276,7 +281,7 @@ const ResponseBox = React.memo(function ResponseBox({
|
||||
)}
|
||||
{activeModule?.pluginDetail && activeModule?.pluginDetail.length > 0 && (
|
||||
<Row
|
||||
label={t('core.chat.response.Plugin Resonse Detail')}
|
||||
label={t('core.chat.response.Plugin response detail')}
|
||||
rawDom={<ResponseBox response={activeModule.pluginDetail} showDetail={showDetail} />}
|
||||
/>
|
||||
)}
|
||||
@@ -284,6 +289,14 @@ const ResponseBox = React.memo(function ResponseBox({
|
||||
|
||||
{/* text output */}
|
||||
<Row label={t('core.chat.response.text output')} value={activeModule?.textOutput} />
|
||||
|
||||
{/* tool call */}
|
||||
{activeModule?.toolDetail && activeModule?.toolDetail.length > 0 && (
|
||||
<Row
|
||||
label={t('core.chat.response.Tool call response detail')}
|
||||
rawDom={<ResponseBox response={activeModule.toolDetail} showDetail={showDetail} />}
|
||||
/>
|
||||
)}
|
||||
</Box>
|
||||
</>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
import Avatar from '@/components/Avatar';
|
||||
import { Box } from '@chakra-ui/react';
|
||||
import { useTheme } from '@chakra-ui/system';
|
||||
import React from 'react';
|
||||
|
||||
const ChatAvatar = ({ src, type }: { src?: string; type: 'Human' | 'AI' }) => {
|
||||
const theme = useTheme();
|
||||
return (
|
||||
<Box
|
||||
w={['28px', '34px']}
|
||||
h={['28px', '34px']}
|
||||
p={'2px'}
|
||||
borderRadius={'sm'}
|
||||
border={theme.borders.base}
|
||||
boxShadow={'0 0 5px rgba(0,0,0,0.1)'}
|
||||
bg={type === 'Human' ? 'white' : 'primary.50'}
|
||||
>
|
||||
<Avatar src={src} w={'100%'} h={'100%'} />
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(ChatAvatar);
|
||||
@@ -0,0 +1,236 @@
|
||||
import { useCopyData } from '@/web/common/hooks/useCopyData';
|
||||
import { useAudioPlay } from '@/web/common/utils/voice';
|
||||
import { Flex, FlexProps, Image, css, useTheme } from '@chakra-ui/react';
|
||||
import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { AppTTSConfigType } from '@fastgpt/global/core/module/type';
|
||||
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
|
||||
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
|
||||
import React from 'react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import { formatChatValue2InputType } from '../utils';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
|
||||
export type ChatControllerProps = {
|
||||
isChatting: boolean;
|
||||
chat: ChatSiteItemType;
|
||||
setChatHistories?: React.Dispatch<React.SetStateAction<ChatSiteItemType[]>>;
|
||||
showVoiceIcon?: boolean;
|
||||
ttsConfig?: AppTTSConfigType;
|
||||
onRetry?: () => void;
|
||||
onDelete?: () => void;
|
||||
onMark?: () => void;
|
||||
onReadUserDislike?: () => void;
|
||||
onCloseUserLike?: () => void;
|
||||
onAddUserLike?: () => void;
|
||||
onAddUserDislike?: () => void;
|
||||
};
|
||||
|
||||
const ChatController = ({
|
||||
isChatting,
|
||||
chat,
|
||||
setChatHistories,
|
||||
showVoiceIcon,
|
||||
ttsConfig,
|
||||
onReadUserDislike,
|
||||
onCloseUserLike,
|
||||
onMark,
|
||||
onRetry,
|
||||
onDelete,
|
||||
onAddUserDislike,
|
||||
onAddUserLike,
|
||||
shareId,
|
||||
outLinkUid,
|
||||
teamId,
|
||||
teamToken
|
||||
}: OutLinkChatAuthProps & ChatControllerProps & FlexProps) => {
|
||||
const theme = useTheme();
|
||||
const { t } = useTranslation();
|
||||
const { copyData } = useCopyData();
|
||||
const { audioLoading, audioPlaying, hasAudio, playAudio, cancelAudio } = useAudioPlay({
|
||||
ttsConfig,
|
||||
shareId,
|
||||
outLinkUid,
|
||||
teamId,
|
||||
teamToken
|
||||
});
|
||||
const controlIconStyle = {
|
||||
w: '14px',
|
||||
cursor: 'pointer',
|
||||
p: '5px',
|
||||
bg: 'white',
|
||||
borderRight: theme.borders.base
|
||||
};
|
||||
const controlContainerStyle = {
|
||||
className: 'control',
|
||||
color: 'myGray.400',
|
||||
display: 'flex'
|
||||
};
|
||||
|
||||
return (
|
||||
<Flex
|
||||
{...controlContainerStyle}
|
||||
borderRadius={'sm'}
|
||||
overflow={'hidden'}
|
||||
border={theme.borders.base}
|
||||
// 最后一个子元素,没有border
|
||||
css={css({
|
||||
'& > *:last-child, & > *:last-child svg': {
|
||||
borderRight: 'none',
|
||||
borderRadius: 'md'
|
||||
}
|
||||
})}
|
||||
>
|
||||
<MyTooltip label={t('common.Copy')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
name={'copy'}
|
||||
_hover={{ color: 'primary.600' }}
|
||||
onClick={() => copyData(formatChatValue2InputType(chat.value).text || '')}
|
||||
/>
|
||||
</MyTooltip>
|
||||
{!!onDelete && !isChatting && (
|
||||
<>
|
||||
{onRetry && (
|
||||
<MyTooltip label={t('core.chat.retry')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
name={'common/retryLight'}
|
||||
_hover={{ color: 'green.500' }}
|
||||
onClick={onRetry}
|
||||
/>
|
||||
</MyTooltip>
|
||||
)}
|
||||
<MyTooltip label={t('common.Delete')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
name={'delete'}
|
||||
_hover={{ color: 'red.600' }}
|
||||
onClick={onDelete}
|
||||
/>
|
||||
</MyTooltip>
|
||||
</>
|
||||
)}
|
||||
{showVoiceIcon &&
|
||||
hasAudio &&
|
||||
(audioLoading ? (
|
||||
<MyTooltip label={t('common.Loading')}>
|
||||
<MyIcon {...controlIconStyle} name={'common/loading'} />
|
||||
</MyTooltip>
|
||||
) : audioPlaying ? (
|
||||
<Flex alignItems={'center'}>
|
||||
<MyTooltip label={t('core.chat.tts.Stop Speech')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
borderRight={'none'}
|
||||
name={'core/chat/stopSpeech'}
|
||||
color={'#E74694'}
|
||||
onClick={() => cancelAudio()}
|
||||
/>
|
||||
</MyTooltip>
|
||||
<Image src="/icon/speaking.gif" w={'23px'} alt={''} borderRight={theme.borders.base} />
|
||||
</Flex>
|
||||
) : (
|
||||
<MyTooltip label={t('core.app.TTS')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
name={'common/voiceLight'}
|
||||
_hover={{ color: '#E74694' }}
|
||||
onClick={async () => {
|
||||
const response = await playAudio({
|
||||
buffer: chat.ttsBuffer,
|
||||
chatItemId: chat.dataId,
|
||||
text: formatChatValue2InputType(chat.value).text || ''
|
||||
});
|
||||
|
||||
if (!setChatHistories || !response.buffer) return;
|
||||
setChatHistories((state) =>
|
||||
state.map((item) =>
|
||||
item.dataId === chat.dataId
|
||||
? {
|
||||
...item,
|
||||
ttsBuffer: response.buffer
|
||||
}
|
||||
: item
|
||||
)
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</MyTooltip>
|
||||
))}
|
||||
{!!onMark && (
|
||||
<MyTooltip label={t('core.chat.Mark')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
name={'core/app/markLight'}
|
||||
_hover={{ color: '#67c13b' }}
|
||||
onClick={onMark}
|
||||
/>
|
||||
</MyTooltip>
|
||||
)}
|
||||
{chat.obj === ChatRoleEnum.AI && (
|
||||
<>
|
||||
{!!onCloseUserLike && chat.userGoodFeedback && (
|
||||
<MyTooltip label={t('core.chat.feedback.Close User Like')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
color={'white'}
|
||||
bg={'green.500'}
|
||||
fontWeight={'bold'}
|
||||
name={'core/chat/feedback/goodLight'}
|
||||
onClick={onCloseUserLike}
|
||||
/>
|
||||
</MyTooltip>
|
||||
)}
|
||||
{!!onReadUserDislike && chat.userBadFeedback && (
|
||||
<MyTooltip label={t('core.chat.feedback.Read User dislike')}>
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
color={'white'}
|
||||
bg={'#FC9663'}
|
||||
fontWeight={'bold'}
|
||||
name={'core/chat/feedback/badLight'}
|
||||
onClick={onReadUserDislike}
|
||||
/>
|
||||
</MyTooltip>
|
||||
)}
|
||||
{!!onAddUserLike && (
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
{...(!!chat.userGoodFeedback
|
||||
? {
|
||||
color: 'white',
|
||||
bg: 'green.500',
|
||||
fontWeight: 'bold'
|
||||
}
|
||||
: {
|
||||
_hover: { color: 'green.600' }
|
||||
})}
|
||||
name={'core/chat/feedback/goodLight'}
|
||||
onClick={onAddUserLike}
|
||||
/>
|
||||
)}
|
||||
{!!onAddUserDislike && (
|
||||
<MyIcon
|
||||
{...controlIconStyle}
|
||||
{...(!!chat.userBadFeedback
|
||||
? {
|
||||
color: 'white',
|
||||
bg: '#FC9663',
|
||||
fontWeight: 'bold',
|
||||
onClick: onAddUserDislike
|
||||
}
|
||||
: {
|
||||
_hover: { color: '#FB7C3C' },
|
||||
onClick: onAddUserDislike
|
||||
})}
|
||||
name={'core/chat/feedback/badLight'}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</Flex>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(ChatController);
|
||||
236
projects/app/src/components/ChatBox/components/ChatItem.tsx
Normal file
236
projects/app/src/components/ChatBox/components/ChatItem.tsx
Normal file
@@ -0,0 +1,236 @@
|
||||
import {
|
||||
Box,
|
||||
BoxProps,
|
||||
Card,
|
||||
Flex,
|
||||
useTheme,
|
||||
Accordion,
|
||||
AccordionItem,
|
||||
AccordionButton,
|
||||
AccordionPanel,
|
||||
AccordionIcon,
|
||||
Button,
|
||||
Image,
|
||||
Grid
|
||||
} from '@chakra-ui/react';
|
||||
import React, { useMemo } from 'react';
|
||||
import ChatController, { type ChatControllerProps } from './ChatController';
|
||||
import ChatAvatar from './ChatAvatar';
|
||||
import { MessageCardStyle } from '../constants';
|
||||
import { formatChatValue2InputType } from '../utils';
|
||||
import Markdown, { CodeClassName } from '@/components/Markdown';
|
||||
import styles from '../index.module.scss';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import {
|
||||
ChatItemValueTypeEnum,
|
||||
ChatRoleEnum,
|
||||
ChatStatusEnum
|
||||
} from '@fastgpt/global/core/chat/constants';
|
||||
import FilesBlock from './FilesBox';
|
||||
|
||||
// Maps a chat generation status to the colors of the inline status badge
// (pulsing dot + background) rendered next to the avatar while the last
// AI message is being produced.
const colorMap = {
  [ChatStatusEnum.loading]: {
    bg: 'myGray.100',
    color: 'myGray.600'
  },
  // `running` and `finish` intentionally share the same green styling.
  [ChatStatusEnum.running]: {
    bg: 'green.50',
    color: 'green.700'
  },
  [ChatStatusEnum.finish]: {
    bg: 'green.50',
    color: 'green.700'
  }
};
|
||||
|
||||
/**
 * Renders one chat message row: the control bar (copy / retry / feedback…),
 * the avatar, an optional live-status badge, and the message bubble itself.
 * `type` decides Human vs AI alignment and colors; all remaining props are
 * forwarded to <ChatController />.
 */
const ChatItem = ({
  type,
  avatar,
  statusBoxData,
  children,
  isLastChild,
  questionGuides = [],
  ...chatControllerProps
}: {
  type: ChatRoleEnum.Human | ChatRoleEnum.AI;
  avatar?: string;
  // Live status of the response being generated; only shown on the last AI item.
  statusBoxData?: {
    status: `${ChatStatusEnum}`;
    name: string;
  };
  isLastChild?: boolean;
  // Suggested follow-up questions appended to the last finished AI message.
  questionGuides?: string[];
  children?: React.ReactNode;
} & ChatControllerProps) => {
  // NOTE(review): `theme` does not appear to be referenced in this component —
  // confirm before removing.
  const theme = useTheme();
  // Mirror-image layout: human messages align right, AI messages align left.
  const styleMap: BoxProps =
    type === ChatRoleEnum.Human
      ? {
          order: 0,
          borderRadius: '8px 0 8px 8px',
          justifyContent: 'flex-end',
          textAlign: 'right',
          bg: 'primary.100'
        }
      : {
          order: 1,
          borderRadius: '0 8px 8px 8px',
          justifyContent: 'flex-start',
          textAlign: 'left',
          bg: 'myGray.50'
        };
  const { chat, isChatting } = chatControllerProps;

  // Memoized bubble content; rebuilt only when the message data or the
  // streaming state changes.
  const ContentCard = useMemo(() => {
    if (type === 'Human') {
      const { text, files = [] } = formatChatValue2InputType(chat.value);

      return (
        <>
          {files.length > 0 && <FilesBlock files={files} />}
          <Markdown source={text} isChatting={false} />
        </>
      );
    }
    /* AI */
    return (
      <Flex flexDirection={'column'} gap={2}>
        {chat.value.map((value, i) => {
          const key = `${chat.dataId}-ai-${i}`;
          if (value.text) {
            let source = value.text?.content || '';

            // Append the question guides as a fenced code block so the
            // Markdown renderer shows them via the questionGuide plugin.
            // NOTE(review): the fence is never closed — presumably the
            // renderer tolerates an unterminated fence; confirm.
            if (isLastChild && !isChatting && questionGuides.length > 0) {
              source = `${source}
\`\`\`${CodeClassName.questionGuide}
${JSON.stringify(questionGuides)}`;
            }

            return <Markdown key={key} source={source} isChatting={isLastChild && isChatting} />;
          }
          if (value.type === ChatItemValueTypeEnum.tool && value.tools) {
            return (
              <Box key={key}>
                {value.tools.map((tool) => {
                  // Pretty-print params/response when they are valid JSON;
                  // otherwise fall back to the raw string.
                  const toolParams = (() => {
                    try {
                      return JSON.stringify(JSON.parse(tool.params), null, 2);
                    } catch (error) {
                      return tool.params;
                    }
                  })();
                  const toolResponse = (() => {
                    try {
                      return JSON.stringify(JSON.parse(tool.response), null, 2);
                    } catch (error) {
                      return tool.response;
                    }
                  })();
                  return (
                    <Box key={tool.id}>
                      <Accordion allowToggle>
                        <AccordionItem borderTop={'none'} borderBottom={'none'}>
                          <AccordionButton
                            w={'auto'}
                            bg={'white'}
                            borderRadius={'md'}
                            borderWidth={'1px'}
                            borderColor={'myGray.200'}
                            boxShadow={'1'}
                            _hover={{
                              bg: 'auto',
                              color: 'primary.600'
                            }}
                          >
                            <Image src={tool.toolAvatar} alt={''} w={'14px'} mr={2} />
                            <Box mr={1}>{tool.toolName}</Box>
                            {/* spinner while the tool call is still running */}
                            {isChatting && !tool.response && (
                              <MyIcon name={'common/loading'} w={'14px'} />
                            )}
                            <AccordionIcon color={'myGray.600'} ml={5} />
                          </AccordionButton>
                          <AccordionPanel
                            py={0}
                            px={0}
                            mt={0}
                            borderRadius={'md'}
                            overflow={'hidden'}
                            maxH={'500px'}
                            overflowY={'auto'}
                          >
                            {toolParams && (
                              <Markdown
                                source={`~~~json#Input
${toolParams}`}
                              />
                            )}
                            {toolResponse && (
                              <Markdown
                                source={`~~~json#Response
${toolResponse}`}
                              />
                            )}
                          </AccordionPanel>
                        </AccordionItem>
                      </Accordion>
                    </Box>
                  );
                })}
              </Box>
            );
          }
          // NOTE(review): other value types fall through and render nothing
          // (the callback implicitly returns undefined).
        })}
      </Flex>
    );
  }, [chat.dataId, chat.value, isChatting, isLastChild, questionGuides, type]);

  // Colors for the status badge; undefined when no status is supplied.
  const chatStatusMap = useMemo(() => {
    if (!statusBoxData?.status) return;
    return colorMap[statusBoxData.status];
  }, [statusBoxData?.status]);

  return (
    <>
      {/* control icon */}
      <Flex w={'100%'} alignItems={'center'} gap={2} justifyContent={styleMap.justifyContent}>
        {/* hide the controls on the AI message that is still streaming */}
        {isChatting && type === ChatRoleEnum.AI && isLastChild ? null : (
          <Box order={styleMap.order} ml={styleMap.ml}>
            <ChatController {...chatControllerProps} />
          </Box>
        )}
        <ChatAvatar src={avatar} type={type} />

        {!!chatStatusMap && statusBoxData && isLastChild && (
          <Flex alignItems={'center'} px={3} py={'1.5px'} borderRadius="md" bg={chatStatusMap.bg}>
            <Box
              className={styles.statusAnimation}
              bg={chatStatusMap.color}
              w="8px"
              h="8px"
              borderRadius={'50%'}
              mt={'1px'}
            />
            <Box ml={2} color={'myGray.600'}>
              {statusBoxData.name}
            </Box>
          </Flex>
        )}
      </Flex>
      {/* content */}
      <Box mt={['6px', 2]} textAlign={styleMap.textAlign}>
        <Card
          className="markdown"
          {...MessageCardStyle}
          bg={styleMap.bg}
          borderRadius={styleMap.borderRadius}
          textAlign={'left'}
        >
          {ContentCard}
          {children}
        </Card>
      </Box>
    </>
  );
};

export default ChatItem;
|
||||
23
projects/app/src/components/ChatBox/components/Empty.tsx
Normal file
23
projects/app/src/components/ChatBox/components/Empty.tsx
Normal file
@@ -0,0 +1,23 @@
|
||||
import Markdown from '@/components/Markdown';
|
||||
import { useMarkdown } from '@/web/common/hooks/useMarkdown';
|
||||
import { Box, Card } from '@chakra-ui/react';
|
||||
import React from 'react';
|
||||
|
||||
const Empty = () => {
|
||||
const { data: chatProblem } = useMarkdown({ url: '/chatProblem.md' });
|
||||
const { data: versionIntro } = useMarkdown({ url: '/versionIntro.md' });
|
||||
|
||||
return (
|
||||
<Box pt={6} w={'85%'} maxW={'600px'} m={'auto'} alignItems={'center'} justifyContent={'center'}>
|
||||
{/* version intro */}
|
||||
<Card p={4} mb={10} minH={'200px'}>
|
||||
<Markdown source={versionIntro} />
|
||||
</Card>
|
||||
<Card p={4} minH={'600px'}>
|
||||
<Markdown source={chatProblem} />
|
||||
</Card>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(Empty);
|
||||
22
projects/app/src/components/ChatBox/components/FilesBox.tsx
Normal file
22
projects/app/src/components/ChatBox/components/FilesBox.tsx
Normal file
@@ -0,0 +1,22 @@
|
||||
import { Box, Flex, Grid } from '@chakra-ui/react';
|
||||
import MdImage from '@/components/Markdown/img/Image';
|
||||
import { UserInputFileItemType } from '@/components/ChatBox/type';
|
||||
|
||||
/**
 * Renders the attachments of a user message in a responsive two-column
 * grid. Only image files are previewable for now; other types render
 * nothing.
 */
const FilesBlock = ({ files }: { files: UserInputFileItemType[] }) => {
  return (
    <Grid gridTemplateColumns={['1fr', '1fr 1fr']} gap={4}>
      {files.map(({ id, type, url }) => {
        if (type !== 'image') {
          // Non-image types are intentionally skipped.
          return null;
        }
        return (
          // Use the stable file id as the React key instead of the array
          // index, so removal/reordering does not remap DOM nodes.
          <Box key={id} rounded={'md'} flex={'1 0 0'} minW={'120px'}>
            <MdImage src={url} />
          </Box>
        );
      })}
    </Grid>
  );
};

export default FilesBlock;
|
||||
119
projects/app/src/components/ChatBox/components/VariableInput.tsx
Normal file
119
projects/app/src/components/ChatBox/components/VariableInput.tsx
Normal file
@@ -0,0 +1,119 @@
|
||||
import { VariableItemType } from '@fastgpt/global/core/module/type';
|
||||
import React, { useState } from 'react';
|
||||
import { UseFormReturn } from 'react-hook-form';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { Box, Button, Card, Input, Textarea } from '@chakra-ui/react';
|
||||
import ChatAvatar from './ChatAvatar';
|
||||
import { MessageCardStyle } from '../constants';
|
||||
import { VariableInputEnum } from '@fastgpt/global/core/module/constants';
|
||||
import MySelect from '@fastgpt/web/components/common/MySelect';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import { ChatBoxInputFormType } from '../type.d';
|
||||
|
||||
/**
 * Pre-chat form that collects the app's configured variables before the
 * first message can be sent. Values live in the shared chat form
 * (`chatForm`) under the `variables.*` namespace and are submitted as a
 * whole via `onSubmitVariables`.
 */
const VariableInput = ({
  appAvatar,
  variableModules,
  variableIsFinish,
  chatForm,
  onSubmitVariables
}: {
  appAvatar?: string;
  // Variable definitions configured on the app (label, key, type, enums…).
  variableModules: VariableItemType[];
  // True once the variables were submitted; all inputs become read-only.
  variableIsFinish: boolean;
  onSubmitVariables: (e: Record<string, any>) => void;
  chatForm: UseFormReturn<ChatBoxInputFormType>;
}) => {
  const { t } = useTranslation();
  // Force-rerender toggle: setValue alone does not re-render this
  // component, so the select below flips this flag after each change.
  const [refresh, setRefresh] = useState(false);
  const { register, setValue, handleSubmit: handleSubmitChat, watch } = chatForm;
  const variables = watch('variables');

  return (
    <Box py={3}>
      {/* avatar */}
      <ChatAvatar src={appAvatar} type={'AI'} />
      {/* message */}
      <Box textAlign={'left'}>
        <Card
          order={2}
          mt={2}
          w={'400px'}
          {...MessageCardStyle}
          bg={'white'}
          boxShadow={'0 0 8px rgba(0,0,0,0.15)'}
        >
          {variableModules.map((item) => (
            <Box key={item.id} mb={4}>
              {/* label with a red asterisk for required variables */}
              <Box as={'label'} display={'inline-block'} position={'relative'} mb={1}>
                {item.label}
                {item.required && (
                  <Box
                    position={'absolute'}
                    top={'-2px'}
                    right={'-10px'}
                    color={'red.500'}
                    fontWeight={'bold'}
                  >
                    *
                  </Box>
                )}
              </Box>
              {/* single-line text variable */}
              {item.type === VariableInputEnum.input && (
                <Input
                  isDisabled={variableIsFinish}
                  bg={'myWhite.400'}
                  {...register(`variables.${item.key}`, {
                    required: item.required
                  })}
                />
              )}
              {/* multi-line text variable */}
              {item.type === VariableInputEnum.textarea && (
                <Textarea
                  isDisabled={variableIsFinish}
                  bg={'myWhite.400'}
                  {...register(`variables.${item.key}`, {
                    required: item.required
                  })}
                  rows={5}
                  maxLength={4000}
                />
              )}
              {/* enum variable rendered as a select */}
              {item.type === VariableInputEnum.select && (
                <MySelect
                  width={'100%'}
                  isDisabled={variableIsFinish}
                  list={(item.enums || []).map((item) => ({
                    label: item.value,
                    value: item.value
                  }))}
                  {...register(`variables.${item.key}`, {
                    required: item.required
                  })}
                  value={variables[item.key]}
                  // NOTE(review): lowercase `onchange` — presumably MySelect's
                  // own prop name rather than the DOM event; confirm against
                  // the component's API.
                  onchange={(e) => {
                    setValue(`variables.${item.key}`, e);
                    // trigger a re-render so `value` above reflects the change
                    setRefresh((state) => !state);
                  }}
                />
              )}
            </Box>
          ))}
          {/* submit button, hidden once the variables are locked in */}
          {!variableIsFinish && (
            <Button
              leftIcon={<MyIcon name={'core/chat/chatFill'} w={'16px'} />}
              size={'sm'}
              maxW={'100px'}
              onClick={handleSubmitChat((data) => {
                onSubmitVariables(data);
              })}
            >
              {t('core.chat.Start Chat')}
            </Button>
          )}
        </Card>
      </Box>
    </Box>
  );
};

export default React.memo(VariableInput);
|
||||
@@ -0,0 +1,28 @@
|
||||
import { Box, Card } from '@chakra-ui/react';
|
||||
import React from 'react';
|
||||
import { MessageCardStyle } from '../constants';
|
||||
import Markdown from '@/components/Markdown';
|
||||
import ChatAvatar from './ChatAvatar';
|
||||
|
||||
const WelcomeBox = ({ appAvatar, welcomeText }: { appAvatar?: string; welcomeText: string }) => {
|
||||
return (
|
||||
<Box py={3}>
|
||||
{/* avatar */}
|
||||
<ChatAvatar src={appAvatar} type={'AI'} />
|
||||
{/* message */}
|
||||
<Box textAlign={'left'}>
|
||||
<Card
|
||||
order={2}
|
||||
mt={2}
|
||||
{...MessageCardStyle}
|
||||
bg={'white'}
|
||||
boxShadow={'0 0 8px rgba(0,0,0,0.15)'}
|
||||
>
|
||||
<Markdown source={`~~~guide \n${welcomeText}`} isChatting={false} />
|
||||
</Card>
|
||||
</Box>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export default WelcomeBox;
|
||||
13
projects/app/src/components/ChatBox/constants.ts
Normal file
13
projects/app/src/components/ChatBox/constants.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { BoxProps } from '@chakra-ui/react';
|
||||
|
||||
// Minimum height of the chat input textarea before auto-grow kicks in.
export const textareaMinH = '22px';

// Shared bubble styling applied to every message card in the chat list.
export const MessageCardStyle: BoxProps = {
  px: 4,
  py: 3,
  borderRadius: '0 8px 8px 8px',
  boxShadow: 'none',
  display: 'inline-block',
  // Leave room for the avatar column (mobile / desktop widths).
  maxW: ['calc(100% - 25px)', 'calc(100% - 40px)'],
  color: 'myGray.900'
};
|
||||
78
projects/app/src/components/ChatBox/hooks/useChatBox.tsx
Normal file
78
projects/app/src/components/ChatBox/hooks/useChatBox.tsx
Normal file
@@ -0,0 +1,78 @@
|
||||
import { ExportChatType } from '@/types/chat';
|
||||
import { ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { useCallback } from 'react';
|
||||
import { htmlTemplate } from '@/constants/common';
|
||||
import { fileDownload } from '@/web/common/file/utils';
|
||||
|
||||
export const useChatBox = () => {
|
||||
const onExportChat = useCallback(
|
||||
({ type, history }: { type: ExportChatType; history: ChatItemType[] }) => {
|
||||
const getHistoryHtml = () => {
|
||||
const historyDom = document.getElementById('history');
|
||||
if (!historyDom) return;
|
||||
const dom = Array.from(historyDom.children).map((child, i) => {
|
||||
const avatar = `<img src="${
|
||||
child.querySelector<HTMLImageElement>('.avatar')?.src
|
||||
}" alt="" />`;
|
||||
|
||||
const chatContent = child.querySelector<HTMLDivElement>('.markdown');
|
||||
|
||||
if (!chatContent) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const chatContentClone = chatContent.cloneNode(true) as HTMLDivElement;
|
||||
|
||||
const codeHeader = chatContentClone.querySelectorAll('.code-header');
|
||||
codeHeader.forEach((childElement: any) => {
|
||||
childElement.remove();
|
||||
});
|
||||
|
||||
return `<div class="chat-item">
|
||||
${avatar}
|
||||
${chatContentClone.outerHTML}
|
||||
</div>`;
|
||||
});
|
||||
|
||||
const html = htmlTemplate.replace('{{CHAT_CONTENT}}', dom.join('\n'));
|
||||
return html;
|
||||
};
|
||||
|
||||
const map: Record<ExportChatType, () => void> = {
|
||||
md: () => {
|
||||
fileDownload({
|
||||
text: history.map((item) => item.value).join('\n\n'),
|
||||
type: 'text/markdown',
|
||||
filename: 'chat.md'
|
||||
});
|
||||
},
|
||||
html: () => {
|
||||
const html = getHistoryHtml();
|
||||
html &&
|
||||
fileDownload({
|
||||
text: html,
|
||||
type: 'text/html',
|
||||
filename: '聊天记录.html'
|
||||
});
|
||||
},
|
||||
pdf: () => {
|
||||
const html = getHistoryHtml();
|
||||
|
||||
html &&
|
||||
// @ts-ignore
|
||||
html2pdf(html, {
|
||||
margin: 0,
|
||||
filename: `聊天记录.pdf`
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
map[type]();
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
return {
|
||||
onExportChat
|
||||
};
|
||||
};
|
||||
File diff suppressed because it is too large
Load Diff
53
projects/app/src/components/ChatBox/type.d.ts
vendored
Normal file
53
projects/app/src/components/ChatBox/type.d.ts
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
|
||||
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import {
|
||||
ChatItemValueItemType,
|
||||
ChatSiteItemType,
|
||||
ToolModuleResponseItemType
|
||||
} from '@fastgpt/global/core/chat/type';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
|
||||
// Payload streamed from the server while a response is being generated.
export type generatingMessageProps = {
  event: `${SseResponseEventEnum}`;
  text?: string;
  name?: string;
  status?: 'running' | 'finish';
  tool?: ToolModuleResponseItemType;
};

// A file attached to a user message.
export type UserInputFileItemType = {
  id: string;
  rawFile?: File;
  type: `${ChatFileTypeEnum}`;
  name: string;
  icon: string; // img is base64
  url?: string;
};

// Shape of the react-hook-form state backing the chat input box.
export type ChatBoxInputFormType = {
  input: string;
  files: UserInputFileItemType[];
  variables: Record<string, any>;
  chatStarted: boolean;
};

// What a submitted input resolves to: text plus optional attachments.
export type ChatBoxInputType = {
  text?: string;
  files?: UserInputFileItemType[];
};

// Arguments handed to the caller-provided function that starts a chat request.
export type StartChatFnProps = {
  chatList: ChatSiteItemType[];
  messages: ChatCompletionMessageParam[];
  controller: AbortController;
  variables: Record<string, any>;
  generatingMessage: (e: generatingMessageProps) => void;
};

// Imperative handle exposed by <ChatBox /> via ref.
export type ComponentRef = {
  getChatHistories: () => ChatSiteItemType[];
  resetVariables: (data?: Record<string, any>) => void;
  resetHistory: (history: ChatSiteItemType[]) => void;
  scrollToBottom: (behavior?: 'smooth' | 'auto') => void;
  sendPrompt: (question: string) => void;
};
|
||||
33
projects/app/src/components/ChatBox/utils.ts
Normal file
33
projects/app/src/components/ChatBox/utils.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { ChatBoxInputType, UserInputFileItemType } from './type';
|
||||
import { getNanoid } from '@fastgpt/global/common/string/tools';
|
||||
|
||||
export const formatChatValue2InputType = (value: ChatItemValueItemType[]): ChatBoxInputType => {
|
||||
if (!Array.isArray(value)) {
|
||||
console.error('value is error', value);
|
||||
return { text: '', files: [] };
|
||||
}
|
||||
const text = value
|
||||
.filter((item) => item.text?.content)
|
||||
.map((item) => item.text?.content || '')
|
||||
.join('');
|
||||
const files =
|
||||
(value
|
||||
.map((item) =>
|
||||
item.type === 'file' && item.file
|
||||
? {
|
||||
id: getNanoid(),
|
||||
type: item.file.type,
|
||||
name: item.file.name,
|
||||
icon: '',
|
||||
url: item.file.url
|
||||
}
|
||||
: undefined
|
||||
)
|
||||
.filter(Boolean) as UserInputFileItemType[]) || [];
|
||||
|
||||
return {
|
||||
text,
|
||||
files
|
||||
};
|
||||
};
|
||||
@@ -1,8 +1,9 @@
|
||||
import React from 'react';
|
||||
import React, { useMemo } from 'react';
|
||||
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
|
||||
import { Box, Flex, useColorModeValue } from '@chakra-ui/react';
|
||||
import { Box, Flex } from '@chakra-ui/react';
|
||||
import Icon from '@fastgpt/web/components/common/Icon';
|
||||
import { useCopyData } from '@/web/common/hooks/useCopyData';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
|
||||
const codeLight: { [key: string]: React.CSSProperties } = {
|
||||
'code[class*=language-]': {
|
||||
@@ -294,24 +295,41 @@ const CodeLight = ({
|
||||
inline?: boolean;
|
||||
match: RegExpExecArray | null;
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const { copyData } = useCopyData();
|
||||
|
||||
if (!inline) {
|
||||
const codeBoxName = useMemo(() => {
|
||||
const input = match?.['input'] || '';
|
||||
if (!input) return match?.[1];
|
||||
|
||||
const splitInput = input.split('#');
|
||||
return splitInput[1] || match?.[1];
|
||||
}, [match]);
|
||||
|
||||
return (
|
||||
<Box my={3} borderRadius={'md'} overflow={'overlay'} backgroundColor={'#222'}>
|
||||
<Box
|
||||
my={3}
|
||||
borderRadius={'md'}
|
||||
overflow={'overlay'}
|
||||
bg={'myGray.900'}
|
||||
boxShadow={
|
||||
'0px 0px 1px 0px rgba(19, 51, 107, 0.08), 0px 1px 2px 0px rgba(19, 51, 107, 0.05)'
|
||||
}
|
||||
>
|
||||
<Flex
|
||||
className="code-header"
|
||||
py={2}
|
||||
px={5}
|
||||
backgroundColor={useColorModeValue('#323641', 'gray.600')}
|
||||
color={'#fff'}
|
||||
bg={'myGray.600'}
|
||||
color={'white'}
|
||||
fontSize={'sm'}
|
||||
userSelect={'none'}
|
||||
>
|
||||
<Box flex={1}>{match?.[1]}</Box>
|
||||
<Box flex={1}>{codeBoxName}</Box>
|
||||
<Flex cursor={'pointer'} onClick={() => copyData(String(children))} alignItems={'center'}>
|
||||
<Icon name={'copy'} width={15} height={15} fill={'#fff'}></Icon>
|
||||
<Box ml={1}>复制</Box>
|
||||
<Icon name={'copy'} width={15} height={15}></Icon>
|
||||
<Box ml={1}>{t('common.Copy')}</Box>
|
||||
</Flex>
|
||||
</Flex>
|
||||
<SyntaxHighlighter style={codeLight as any} language={match?.[1]} PreTag="pre">
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
import { Box, Flex, Grid } from '@chakra-ui/react';
|
||||
import MdImage from '../img/Image';
|
||||
import { useMemo } from 'react';
|
||||
|
||||
/**
 * Renders the `img` markdown code block: each non-empty line of `images`
 * is a JSON object of the form {"src": "..."}. Invalid lines degrade to
 * an empty src instead of throwing.
 */
const ImageBlock = ({ images }: { images: string }) => {
  const formatData = useMemo(() => {
    const lines = images.split('\n').filter((line) => line);
    return lines.map((line) => {
      try {
        return JSON.parse(line) as { src: string };
      } catch (error) {
        return { src: '' };
      }
    });
  }, [images]);

  return (
    <Grid gridTemplateColumns={['1fr', '1fr 1fr']} gap={4}>
      {formatData.map(({ src }) => (
        <Box key={src} rounded={'md'} flex={'1 0 0'} minW={'120px'}>
          <MdImage src={src} />
        </Box>
      ))}
    </Grid>
  );
};

export default ImageBlock;
|
||||
@@ -24,7 +24,6 @@ const EChartsCodeBlock = dynamic(() => import('./img/EChartsCodeBlock'));
|
||||
|
||||
const ChatGuide = dynamic(() => import('./chat/Guide'));
|
||||
const QuestionGuide = dynamic(() => import('./chat/QuestionGuide'));
|
||||
const ImageBlock = dynamic(() => import('./chat/Image'));
|
||||
|
||||
export enum CodeClassName {
|
||||
guide = 'guide',
|
||||
@@ -32,10 +31,16 @@ export enum CodeClassName {
|
||||
mermaid = 'mermaid',
|
||||
echarts = 'echarts',
|
||||
quote = 'quote',
|
||||
img = 'img'
|
||||
files = 'files'
|
||||
}
|
||||
|
||||
const Markdown = ({ source, isChatting = false }: { source: string; isChatting?: boolean }) => {
|
||||
const Markdown = ({
|
||||
source = '',
|
||||
isChatting = false
|
||||
}: {
|
||||
source?: string;
|
||||
isChatting?: boolean;
|
||||
}) => {
|
||||
const components = useMemo<any>(
|
||||
() => ({
|
||||
img: Image,
|
||||
@@ -91,9 +96,7 @@ const Code = React.memo(function Code(e: any) {
|
||||
if (codeType === CodeClassName.echarts) {
|
||||
return <EChartsCodeBlock code={strChildren} />;
|
||||
}
|
||||
if (codeType === CodeClassName.img) {
|
||||
return <ImageBlock images={strChildren} />;
|
||||
}
|
||||
|
||||
return (
|
||||
<CodeLight className={className} inline={inline} match={match}>
|
||||
{children}
|
||||
|
||||
@@ -103,7 +103,7 @@ const QuoteItem = ({
|
||||
fontSize={'sm'}
|
||||
whiteSpace={'pre-wrap'}
|
||||
wordBreak={'break-all'}
|
||||
_hover={{ '& .hover-data': { display: 'flex' } }}
|
||||
_hover={{ '& .hover-data': { visibility: 'visible' } }}
|
||||
h={'100%'}
|
||||
display={'flex'}
|
||||
flexDirection={'column'}
|
||||
@@ -218,7 +218,8 @@ const QuoteItem = ({
|
||||
<MyTooltip label={t('core.dataset.data.Edit')}>
|
||||
<Box
|
||||
className="hover-data"
|
||||
display={['flex', 'none']}
|
||||
visibility={'hidden'}
|
||||
display={'flex'}
|
||||
alignItems={'center'}
|
||||
justifyContent={'center'}
|
||||
>
|
||||
@@ -245,7 +246,7 @@ const QuoteItem = ({
|
||||
<Link
|
||||
as={NextLink}
|
||||
className="hover-data"
|
||||
display={'none'}
|
||||
visibility={'hidden'}
|
||||
alignItems={'center'}
|
||||
color={'primary.500'}
|
||||
href={`/dataset/detail?datasetId=${quoteItem.datasetId}¤tTab=dataCard&collectionId=${quoteItem.collectionId}`}
|
||||
|
||||
@@ -86,10 +86,12 @@ const DatasetParamsModal = ({
|
||||
const cfbBgDesc = watch('datasetSearchExtensionBg');
|
||||
|
||||
const chatModelSelectList = (() =>
|
||||
llmModelList.map((item) => ({
|
||||
value: item.model,
|
||||
label: item.name
|
||||
})))();
|
||||
llmModelList
|
||||
.filter((model) => model.usedInQueryExtension)
|
||||
.map((item) => ({
|
||||
value: item.model,
|
||||
label: item.name
|
||||
})))();
|
||||
|
||||
const searchModeList = useMemo(() => {
|
||||
const list = Object.values(DatasetSearchModeMap);
|
||||
|
||||
@@ -13,7 +13,8 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import { streamFetch } from '@/web/common/api/fetch';
|
||||
import MyTooltip from '@/components/MyTooltip';
|
||||
import { useUserStore } from '@/web/support/user/useUserStore';
|
||||
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
|
||||
import ChatBox from '@/components/ChatBox';
|
||||
import type { ComponentRef, StartChatFnProps } from '@/components/ChatBox/type.d';
|
||||
import { getGuideModule } from '@fastgpt/global/core/module/utils';
|
||||
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
|
||||
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
|
||||
@@ -12,7 +12,10 @@ import type {
|
||||
FlowModuleItemType,
|
||||
FlowModuleTemplateType
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { FlowNodeChangeProps } from '@fastgpt/global/core/module/node/type';
|
||||
import type {
|
||||
FlowNodeChangeProps,
|
||||
FlowNodeInputItemType
|
||||
} from '@fastgpt/global/core/module/node/type';
|
||||
import React, {
|
||||
type SetStateAction,
|
||||
type Dispatch,
|
||||
@@ -20,13 +23,18 @@ import React, {
|
||||
useCallback,
|
||||
createContext,
|
||||
useRef,
|
||||
useEffect
|
||||
useEffect,
|
||||
useMemo
|
||||
} from 'react';
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import { appModule2FlowEdge, appModule2FlowNode } from '@/utils/adapt';
|
||||
import { useToast } from '@fastgpt/web/hooks/useToast';
|
||||
import { EDGE_TYPE, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { ModuleIOValueTypeEnum } from '@fastgpt/global/core/module/constants';
|
||||
import {
|
||||
ModuleIOValueTypeEnum,
|
||||
ModuleInputKeyEnum,
|
||||
ModuleOutputKeyEnum
|
||||
} from '@fastgpt/global/core/module/constants';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import { EventNameEnum, eventBus } from '@/web/common/utils/eventbus';
|
||||
@@ -34,6 +42,14 @@ import { EventNameEnum, eventBus } from '@/web/common/utils/eventbus';
|
||||
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
|
||||
|
||||
type OnChange<ChangesType> = (changes: ChangesType[]) => void;
|
||||
type requestEventType =
|
||||
| 'onChangeNode'
|
||||
| 'onCopyNode'
|
||||
| 'onResetNode'
|
||||
| 'onDelNode'
|
||||
| 'onDelConnect'
|
||||
| 'setNodes';
|
||||
|
||||
export type useFlowProviderStoreType = {
|
||||
reactFlowWrapper: null | React.RefObject<HTMLDivElement>;
|
||||
mode: 'app' | 'plugin';
|
||||
@@ -57,14 +73,16 @@ export type useFlowProviderStoreType = {
|
||||
onDelConnect: (id: string) => void;
|
||||
onConnect: ({ connect }: { connect: Connection }) => any;
|
||||
initData: (modules: ModuleItemType[]) => void;
|
||||
splitToolInputs: (
|
||||
inputs: FlowNodeInputItemType[],
|
||||
moduleId: string
|
||||
) => {
|
||||
isTool: boolean;
|
||||
toolInputs: FlowNodeInputItemType[];
|
||||
commonInputs: FlowNodeInputItemType[];
|
||||
};
|
||||
hasToolNode: boolean;
|
||||
};
|
||||
type requestEventType =
|
||||
| 'onChangeNode'
|
||||
| 'onCopyNode'
|
||||
| 'onResetNode'
|
||||
| 'onDelNode'
|
||||
| 'onDelConnect'
|
||||
| 'setNodes';
|
||||
|
||||
const StateContext = createContext<useFlowProviderStoreType>({
|
||||
reactFlowWrapper: null,
|
||||
@@ -116,7 +134,18 @@ const StateContext = createContext<useFlowProviderStoreType>({
|
||||
},
|
||||
onResetNode: function (e): void {
|
||||
throw new Error('Function not implemented.');
|
||||
}
|
||||
},
|
||||
splitToolInputs: function (
|
||||
inputs: FlowNodeInputItemType[],
|
||||
moduleId: string
|
||||
): {
|
||||
isTool: boolean;
|
||||
toolInputs: FlowNodeInputItemType[];
|
||||
commonInputs: FlowNodeInputItemType[];
|
||||
} {
|
||||
throw new Error('Function not implemented.');
|
||||
},
|
||||
hasToolNode: false
|
||||
});
|
||||
export const useFlowProviderStore = () => useContext(StateContext);
|
||||
|
||||
@@ -135,6 +164,10 @@ export const FlowProvider = ({
|
||||
const [nodes = [], setNodes, onNodesChange] = useNodesState<FlowModuleItemType>([]);
|
||||
const [edges, setEdges, onEdgesChange] = useEdgesState([]);
|
||||
|
||||
const hasToolNode = useMemo(() => {
|
||||
return !!nodes.find((node) => node.data.flowType === FlowNodeTypeEnum.tools);
|
||||
}, [nodes]);
|
||||
|
||||
const onFixView = useCallback(() => {
|
||||
const btn = document.querySelector('.react-flow__controls-fitview') as HTMLButtonElement;
|
||||
|
||||
@@ -180,10 +213,13 @@ export const FlowProvider = ({
|
||||
const type = source?.outputs.find(
|
||||
(output) => output.key === connect.sourceHandle
|
||||
)?.valueType;
|
||||
console.log(type);
|
||||
|
||||
if (source?.flowType === FlowNodeTypeEnum.classifyQuestion && !type) {
|
||||
return ModuleIOValueTypeEnum.boolean;
|
||||
}
|
||||
if (source?.flowType === FlowNodeTypeEnum.tools) {
|
||||
return ModuleIOValueTypeEnum.tools;
|
||||
}
|
||||
if (source?.flowType === FlowNodeTypeEnum.pluginInput) {
|
||||
return source?.inputs.find((input) => input.key === connect.sourceHandle)?.valueType;
|
||||
}
|
||||
@@ -193,14 +229,17 @@ export const FlowProvider = ({
|
||||
const targetType = nodes
|
||||
.find((node) => node.id === connect.target)
|
||||
?.data?.inputs.find((input) => input.key === connect.targetHandle)?.valueType;
|
||||
console.log(source, targetType);
|
||||
if (!sourceType || !targetType) {
|
||||
|
||||
if (
|
||||
connect.sourceHandle === ModuleOutputKeyEnum.selectedTools &&
|
||||
connect.targetHandle === ModuleOutputKeyEnum.selectedTools
|
||||
) {
|
||||
} else if (!sourceType || !targetType) {
|
||||
return toast({
|
||||
status: 'warning',
|
||||
title: t('app.Connection is invalid')
|
||||
});
|
||||
}
|
||||
if (
|
||||
} else if (
|
||||
sourceType !== ModuleIOValueTypeEnum.any &&
|
||||
targetType !== ModuleIOValueTypeEnum.any &&
|
||||
sourceType !== targetType
|
||||
@@ -215,16 +254,13 @@ export const FlowProvider = ({
|
||||
addEdge(
|
||||
{
|
||||
...connect,
|
||||
type: EDGE_TYPE,
|
||||
data: {
|
||||
onDelete: onDelConnect
|
||||
}
|
||||
type: EDGE_TYPE
|
||||
},
|
||||
state
|
||||
)
|
||||
);
|
||||
},
|
||||
[nodes, onDelConnect, setEdges, t, toast]
|
||||
[nodes, setEdges, t, toast]
|
||||
);
|
||||
|
||||
const onDelNode = useCallback(
|
||||
@@ -359,6 +395,26 @@ export const FlowProvider = ({
|
||||
[setNodes]
|
||||
);
|
||||
|
||||
/* If the module is connected by a tool, the tool input and the normal input are separated */
|
||||
const splitToolInputs = useCallback(
|
||||
(inputs: FlowNodeInputItemType[], moduleId: string) => {
|
||||
const isTool = !!edges.find(
|
||||
(edge) =>
|
||||
edge.targetHandle === ModuleOutputKeyEnum.selectedTools && edge.target === moduleId
|
||||
);
|
||||
|
||||
return {
|
||||
isTool,
|
||||
toolInputs: inputs.filter((item) => isTool && item.toolDescription),
|
||||
commonInputs: inputs.filter((item) => {
|
||||
if (!isTool) return true;
|
||||
return !item.toolDescription && item.key !== ModuleInputKeyEnum.switch;
|
||||
})
|
||||
};
|
||||
},
|
||||
[edges]
|
||||
);
|
||||
|
||||
// reset a node data. delete edge and replace it
|
||||
const onResetNode = useCallback(
|
||||
({ id, module }: { id: string; module: FlowModuleTemplateType }) => {
|
||||
@@ -465,7 +521,9 @@ export const FlowProvider = ({
|
||||
onDelEdge,
|
||||
onDelConnect,
|
||||
onConnect,
|
||||
initData
|
||||
initData,
|
||||
splitToolInputs,
|
||||
hasToolNode
|
||||
};
|
||||
|
||||
return <StateContext.Provider value={value}>{children}</StateContext.Provider>;
|
||||
|
||||
@@ -17,7 +17,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { getPreviewPluginModule } from '@/web/core/plugin/api';
|
||||
import { useToast } from '@fastgpt/web/hooks/useToast';
|
||||
import { getErrText } from '@fastgpt/global/common/error/utils';
|
||||
import { moduleTemplatesList } from '@/web/core/modules/template/system';
|
||||
import { moduleTemplatesList } from '@fastgpt/global/core/module/template/constants';
|
||||
|
||||
export type ModuleTemplateProps = {
|
||||
templates: FlowModuleTemplateType[];
|
||||
|
||||
@@ -3,6 +3,7 @@ import { BezierEdge, getBezierPath, EdgeLabelRenderer, EdgeProps } from 'reactfl
|
||||
import { onDelConnect, useFlowProviderStore } from '../../FlowProvider';
|
||||
import { Flex } from '@chakra-ui/react';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
|
||||
const ButtonEdge = (props: EdgeProps) => {
|
||||
const { nodes } = useFlowProviderStore();
|
||||
@@ -15,6 +16,8 @@ const ButtonEdge = (props: EdgeProps) => {
|
||||
sourcePosition,
|
||||
targetPosition,
|
||||
selected,
|
||||
sourceHandleId,
|
||||
animated,
|
||||
style = {}
|
||||
} = props;
|
||||
|
||||
@@ -34,6 +37,8 @@ const ButtonEdge = (props: EdgeProps) => {
|
||||
targetPosition
|
||||
});
|
||||
|
||||
const isToolEdge = sourceHandleId === ModuleOutputKeyEnum.selectedTools;
|
||||
|
||||
const memoEdgeLabel = useMemo(() => {
|
||||
return (
|
||||
<EdgeLabelRenderer>
|
||||
@@ -60,29 +65,31 @@ const ButtonEdge = (props: EdgeProps) => {
|
||||
<MyIcon
|
||||
name="closeSolid"
|
||||
w={'100%'}
|
||||
color={active ? 'primary.800' : 'myGray.400'}
|
||||
></MyIcon>
|
||||
</Flex>
|
||||
<Flex
|
||||
alignItems={'center'}
|
||||
justifyContent={'center'}
|
||||
position={'absolute'}
|
||||
transform={`translate(-78%, -50%) translate(${targetX}px,${targetY}px)`}
|
||||
pointerEvents={'all'}
|
||||
w={'16px'}
|
||||
h={'16px'}
|
||||
bg={'white'}
|
||||
zIndex={active ? 1000 : 0}
|
||||
>
|
||||
<MyIcon
|
||||
name={'common/rightArrowLight'}
|
||||
w={'100%'}
|
||||
color={active ? 'primary.800' : 'myGray.400'}
|
||||
color={active ? 'primary.700' : 'myGray.400'}
|
||||
></MyIcon>
|
||||
</Flex>
|
||||
{!isToolEdge && (
|
||||
<Flex
|
||||
alignItems={'center'}
|
||||
justifyContent={'center'}
|
||||
position={'absolute'}
|
||||
transform={`translate(-78%, -50%) translate(${targetX}px,${targetY}px)`}
|
||||
pointerEvents={'all'}
|
||||
w={'16px'}
|
||||
h={'16px'}
|
||||
bg={'white'}
|
||||
zIndex={active ? 1000 : 0}
|
||||
>
|
||||
<MyIcon
|
||||
name={'common/rightArrowLight'}
|
||||
w={'100%'}
|
||||
color={active ? 'primary.700' : 'myGray.400'}
|
||||
></MyIcon>
|
||||
</Flex>
|
||||
)}
|
||||
</EdgeLabelRenderer>
|
||||
);
|
||||
}, [id, labelX, labelY, active, targetX, targetY]);
|
||||
}, [labelX, labelY, active, isToolEdge, targetX, targetY, id]);
|
||||
|
||||
const memoBezierEdge = useMemo(() => {
|
||||
const edgeStyle: React.CSSProperties = {
|
||||
@@ -96,7 +103,7 @@ const ButtonEdge = (props: EdgeProps) => {
|
||||
};
|
||||
|
||||
return <BezierEdge {...props} style={edgeStyle} />;
|
||||
}, [props, active, style]);
|
||||
}, [style, active, props]);
|
||||
|
||||
return (
|
||||
<>
|
||||
|
||||
@@ -4,7 +4,7 @@ import { BoxProps } from '@chakra-ui/react';
|
||||
|
||||
const Container = ({ children, ...props }: BoxProps) => {
|
||||
return (
|
||||
<Box px={4} py={3} position={'relative'} {...props}>
|
||||
<Box px={'16px'} py={'10px'} position={'relative'} {...props}>
|
||||
{children}
|
||||
</Box>
|
||||
);
|
||||
|
||||
@@ -2,7 +2,13 @@ import React from 'react';
|
||||
import { Box, useTheme } from '@chakra-ui/react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
|
||||
const Divider = ({ text }: { text?: 'Input' | 'Output' | string }) => {
|
||||
const Divider = ({
|
||||
text,
|
||||
showBorderBottom = true
|
||||
}: {
|
||||
text?: 'Input' | 'Output' | string;
|
||||
showBorderBottom?: boolean;
|
||||
}) => {
|
||||
const theme = useTheme();
|
||||
const { t } = useTranslation();
|
||||
|
||||
@@ -14,10 +20,10 @@ const Divider = ({ text }: { text?: 'Input' | 'Output' | string }) => {
|
||||
bg={'#f8f8f8'}
|
||||
py={isDivider ? '0' : 2}
|
||||
borderTop={theme.borders.base}
|
||||
borderBottom={theme.borders.base}
|
||||
borderBottom={showBorderBottom ? theme.borders.base : 0}
|
||||
fontSize={'lg'}
|
||||
>
|
||||
{text ? t(`common.${text}`) : ''}
|
||||
{text}
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -5,14 +5,29 @@ import { FlowModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import Container from '../modules/Container';
|
||||
import RenderInput from '../render/RenderInput';
|
||||
import RenderOutput from '../render/RenderOutput';
|
||||
import { useFlowProviderStore } from '../../FlowProvider';
|
||||
import Divider from '../modules/Divider';
|
||||
import RenderToolInput from '../render/RenderToolInput';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
|
||||
const NodeAnswer = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
const { t } = useTranslation();
|
||||
const { moduleId, inputs, outputs } = data;
|
||||
const { splitToolInputs } = useFlowProviderStore();
|
||||
const { toolInputs, commonInputs } = splitToolInputs(inputs, moduleId);
|
||||
|
||||
return (
|
||||
<NodeCard minW={'400px'} selected={selected} {...data}>
|
||||
<Container borderTop={'2px solid'} borderTopColor={'myGray.200'}>
|
||||
<RenderInput moduleId={moduleId} flowInputList={inputs} />
|
||||
{toolInputs.length > 0 && (
|
||||
<>
|
||||
<Divider text={t('core.module.tool.Tool input')} />
|
||||
<Container>
|
||||
<RenderToolInput moduleId={moduleId} inputs={toolInputs} />
|
||||
</Container>
|
||||
</>
|
||||
)}
|
||||
<RenderInput moduleId={moduleId} flowInputList={commonInputs} />
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
</NodeCard>
|
||||
|
||||
@@ -23,7 +23,7 @@ const NodeCQNode = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
|
||||
return (
|
||||
<NodeCard minW={'400px'} selected={selected} {...data}>
|
||||
<Divider text="Input" />
|
||||
<Divider text={t('common.Input')} />
|
||||
<Container>
|
||||
<RenderInput
|
||||
moduleId={moduleId}
|
||||
|
||||
@@ -26,117 +26,132 @@ import ExtractFieldModal, { defaultField } from './ExtractFieldModal';
|
||||
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { FlowNodeOutputTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { ModuleIOValueTypeEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { onChangeNode } from '../../../FlowProvider';
|
||||
import { onChangeNode, useFlowProviderStore } from '../../../FlowProvider';
|
||||
import RenderToolInput from '../../render/RenderToolInput';
|
||||
|
||||
const NodeExtract = ({ data }: NodeProps<FlowModuleItemType>) => {
|
||||
const { inputs, outputs, moduleId } = data;
|
||||
const { splitToolInputs } = useFlowProviderStore();
|
||||
const { toolInputs, commonInputs } = splitToolInputs(inputs, moduleId);
|
||||
const { t } = useTranslation();
|
||||
const [editExtractFiled, setEditExtractField] = useState<ContextExtractAgentItemType>();
|
||||
|
||||
return (
|
||||
<NodeCard minW={'400px'} {...data}>
|
||||
<Divider text="Input" />
|
||||
<Container>
|
||||
<RenderInput
|
||||
moduleId={moduleId}
|
||||
flowInputList={inputs}
|
||||
CustomComponent={{
|
||||
[ModuleInputKeyEnum.extractKeys]: ({
|
||||
value: extractKeys = [],
|
||||
...props
|
||||
}: {
|
||||
value?: ContextExtractAgentItemType[];
|
||||
}) => (
|
||||
<Box>
|
||||
<Flex alignItems={'center'}>
|
||||
<Box flex={'1 0 0'}>{t('core.module.extract.Target field')}</Box>
|
||||
<Button
|
||||
size={'sm'}
|
||||
variant={'whitePrimary'}
|
||||
leftIcon={<AddIcon fontSize={'10px'} />}
|
||||
onClick={() => setEditExtractField(defaultField)}
|
||||
{toolInputs.length > 0 && (
|
||||
<>
|
||||
<Divider text={t('core.module.tool.Tool input')} />
|
||||
<Container>
|
||||
<RenderToolInput moduleId={moduleId} inputs={toolInputs} />
|
||||
</Container>
|
||||
</>
|
||||
)}
|
||||
<>
|
||||
<Divider text={t('common.Input')} />
|
||||
<Container>
|
||||
<RenderInput
|
||||
moduleId={moduleId}
|
||||
flowInputList={commonInputs}
|
||||
CustomComponent={{
|
||||
[ModuleInputKeyEnum.extractKeys]: ({
|
||||
value: extractKeys = [],
|
||||
...props
|
||||
}: {
|
||||
value?: ContextExtractAgentItemType[];
|
||||
}) => (
|
||||
<Box>
|
||||
<Flex alignItems={'center'}>
|
||||
<Box flex={'1 0 0'}>{t('core.module.extract.Target field')}</Box>
|
||||
<Button
|
||||
size={'sm'}
|
||||
variant={'whitePrimary'}
|
||||
leftIcon={<AddIcon fontSize={'10px'} />}
|
||||
onClick={() => setEditExtractField(defaultField)}
|
||||
>
|
||||
{t('core.module.extract.Add field')}
|
||||
</Button>
|
||||
</Flex>
|
||||
<Box
|
||||
mt={2}
|
||||
borderRadius={'md'}
|
||||
overflow={'hidden'}
|
||||
borderWidth={'1px'}
|
||||
borderBottom="none"
|
||||
>
|
||||
{t('core.module.extract.Add field')}
|
||||
</Button>
|
||||
</Flex>
|
||||
<Box
|
||||
mt={2}
|
||||
borderRadius={'md'}
|
||||
overflow={'hidden'}
|
||||
borderWidth={'1px'}
|
||||
borderBottom="none"
|
||||
>
|
||||
<TableContainer>
|
||||
<Table bg={'white'}>
|
||||
<Thead>
|
||||
<Tr>
|
||||
<Th bg={'myGray.50'}>字段 key</Th>
|
||||
<Th bg={'myGray.50'}>字段描述</Th>
|
||||
<Th bg={'myGray.50'}>必须</Th>
|
||||
<Th bg={'myGray.50'}></Th>
|
||||
</Tr>
|
||||
</Thead>
|
||||
<Tbody>
|
||||
{extractKeys.map((item, index) => (
|
||||
<Tr
|
||||
key={index}
|
||||
position={'relative'}
|
||||
whiteSpace={'pre-wrap'}
|
||||
wordBreak={'break-all'}
|
||||
>
|
||||
<Td>{item.key}</Td>
|
||||
<Td>{item.desc}</Td>
|
||||
<Td>{item.required ? '✔' : ''}</Td>
|
||||
<Td whiteSpace={'nowrap'}>
|
||||
<MyIcon
|
||||
mr={3}
|
||||
name={'common/settingLight'}
|
||||
w={'16px'}
|
||||
cursor={'pointer'}
|
||||
onClick={() => {
|
||||
setEditExtractField(item);
|
||||
}}
|
||||
/>
|
||||
<MyIcon
|
||||
name={'delete'}
|
||||
w={'16px'}
|
||||
cursor={'pointer'}
|
||||
onClick={() => {
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'updateInput',
|
||||
key: ModuleInputKeyEnum.extractKeys,
|
||||
value: {
|
||||
...props,
|
||||
value: extractKeys.filter(
|
||||
(extract) => item.key !== extract.key
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'delOutput',
|
||||
key: item.key
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</Td>
|
||||
<TableContainer>
|
||||
<Table bg={'white'}>
|
||||
<Thead>
|
||||
<Tr>
|
||||
<Th bg={'myGray.50'}>字段 key</Th>
|
||||
<Th bg={'myGray.50'}>字段描述</Th>
|
||||
<Th bg={'myGray.50'}>必须</Th>
|
||||
<Th bg={'myGray.50'}></Th>
|
||||
</Tr>
|
||||
))}
|
||||
</Tbody>
|
||||
</Table>
|
||||
</TableContainer>
|
||||
</Thead>
|
||||
<Tbody>
|
||||
{extractKeys.map((item, index) => (
|
||||
<Tr
|
||||
key={index}
|
||||
position={'relative'}
|
||||
whiteSpace={'pre-wrap'}
|
||||
wordBreak={'break-all'}
|
||||
>
|
||||
<Td>{item.key}</Td>
|
||||
<Td>{item.desc}</Td>
|
||||
<Td>{item.required ? '✔' : ''}</Td>
|
||||
<Td whiteSpace={'nowrap'}>
|
||||
<MyIcon
|
||||
mr={3}
|
||||
name={'common/settingLight'}
|
||||
w={'16px'}
|
||||
cursor={'pointer'}
|
||||
onClick={() => {
|
||||
setEditExtractField(item);
|
||||
}}
|
||||
/>
|
||||
<MyIcon
|
||||
name={'delete'}
|
||||
w={'16px'}
|
||||
cursor={'pointer'}
|
||||
onClick={() => {
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'updateInput',
|
||||
key: ModuleInputKeyEnum.extractKeys,
|
||||
value: {
|
||||
...props,
|
||||
value: extractKeys.filter(
|
||||
(extract) => item.key !== extract.key
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'delOutput',
|
||||
key: item.key
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</Td>
|
||||
</Tr>
|
||||
))}
|
||||
</Tbody>
|
||||
</Table>
|
||||
</TableContainer>
|
||||
</Box>
|
||||
</Box>
|
||||
</Box>
|
||||
)
|
||||
}}
|
||||
/>
|
||||
</Container>
|
||||
<Divider text="Output" />
|
||||
<Container>
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
)
|
||||
}}
|
||||
/>
|
||||
</Container>
|
||||
</>
|
||||
<>
|
||||
<Divider text={t('common.Output')} />
|
||||
<Container>
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
</>
|
||||
|
||||
{!!editExtractFiled && (
|
||||
<ExtractFieldModal
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import React from 'react';
|
||||
import MyModal from '@/components/MyModal';
|
||||
import { ModalBody, Button, ModalFooter, useDisclosure, Textarea, Box } from '@chakra-ui/react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { onChangeNode } from '../../../FlowProvider';
|
||||
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { FlowNodeInputItemType } from '@fastgpt/global/core/module/node/type';
|
||||
|
||||
@@ -38,6 +38,7 @@ import { EditorVariablePickerType } from '@fastgpt/web/components/common/Textare
|
||||
import HttpInput from '@fastgpt/web/components/common/Input/HttpInput';
|
||||
import dynamic from 'next/dynamic';
|
||||
import MySelect from '@fastgpt/web/components/common/MySelect';
|
||||
import RenderToolInput from '../../render/RenderToolInput';
|
||||
const OpenApiImportModal = dynamic(() => import('./OpenApiImportModal'));
|
||||
|
||||
enum TabEnum {
|
||||
@@ -137,12 +138,12 @@ const RenderHttpMethodAndUrl = React.memo(function RenderHttpMethodAndUrl({
|
||||
return (
|
||||
<Box>
|
||||
<Box mb={2} display={'flex'} justifyContent={'space-between'}>
|
||||
<span>{t('core.module.Http request settings')}</span>
|
||||
<span>
|
||||
<Box>{t('core.module.Http request settings')}</Box>
|
||||
<Box>
|
||||
<OpenApiImportModal moduleId={moduleId} inputs={inputs}>
|
||||
<Button variant={'link'}>{t('core.module.http.OpenAPI import')}</Button>
|
||||
</OpenApiImportModal>
|
||||
</span>
|
||||
</Box>
|
||||
</Box>
|
||||
<Flex alignItems={'center'} className="nodrag">
|
||||
<MySelect
|
||||
@@ -252,7 +253,7 @@ function RenderHttpProps({
|
||||
];
|
||||
const moduleVariables = formatEditorVariablePickerIcon(
|
||||
inputs
|
||||
.filter((input) => input.edit)
|
||||
.filter((input) => input.edit || input.toolDescription)
|
||||
.map((item) => ({
|
||||
key: item.key,
|
||||
label: item.label
|
||||
@@ -593,6 +594,8 @@ const RenderPropsItem = ({ text, num }: { text: string; num: number }) => {
|
||||
const NodeHttp = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
const { t } = useTranslation();
|
||||
const { moduleId, inputs, outputs } = data;
|
||||
const { splitToolInputs, hasToolNode } = useFlowProviderStore();
|
||||
const { toolInputs, commonInputs } = splitToolInputs(inputs, moduleId);
|
||||
|
||||
const CustomComponents = useMemo(
|
||||
() => ({
|
||||
@@ -613,18 +616,30 @@ const NodeHttp = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
|
||||
return (
|
||||
<NodeCard minW={'350px'} selected={selected} {...data}>
|
||||
<Divider text="Input" />
|
||||
<Container>
|
||||
<RenderInput
|
||||
moduleId={moduleId}
|
||||
flowInputList={inputs}
|
||||
CustomComponent={CustomComponents}
|
||||
/>
|
||||
</Container>
|
||||
<Divider text="Output" />
|
||||
<Container>
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
{hasToolNode && (
|
||||
<>
|
||||
<Divider text={t('core.module.tool.Tool input')} />
|
||||
<Container>
|
||||
<RenderToolInput moduleId={moduleId} inputs={toolInputs} canEdit />
|
||||
</Container>
|
||||
</>
|
||||
)}
|
||||
<>
|
||||
<Divider text={t('common.Input')} />
|
||||
<Container>
|
||||
<RenderInput
|
||||
moduleId={moduleId}
|
||||
flowInputList={commonInputs}
|
||||
CustomComponent={CustomComponents}
|
||||
/>
|
||||
</Container>
|
||||
</>
|
||||
<>
|
||||
<Divider text={t('common.Output')} />
|
||||
<Container>
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
</>
|
||||
</NodeCard>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -15,6 +15,7 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import MyTooltip from '@/components/MyTooltip';
|
||||
import SourceHandle from '../render/SourceHandle';
|
||||
import type {
|
||||
EditInputFieldMap,
|
||||
EditNodeFieldType,
|
||||
FlowNodeInputItemType,
|
||||
FlowNodeOutputItemType
|
||||
@@ -32,13 +33,14 @@ const defaultCreateField: EditNodeFieldType = {
|
||||
valueType: ModuleIOValueTypeEnum.string,
|
||||
required: true
|
||||
};
|
||||
const createEditField = {
|
||||
const createEditField: EditInputFieldMap = {
|
||||
key: true,
|
||||
name: true,
|
||||
description: true,
|
||||
required: true,
|
||||
dataType: true,
|
||||
inputType: true
|
||||
inputType: true,
|
||||
isToolInput: true
|
||||
};
|
||||
|
||||
const NodePluginInput = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
@@ -73,7 +75,8 @@ const NodePluginInput = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
key: item.key,
|
||||
label: item.label,
|
||||
description: item.description,
|
||||
required: item.required
|
||||
required: item.required,
|
||||
isToolInput: !!item.toolDescription
|
||||
})
|
||||
}
|
||||
/>
|
||||
@@ -148,6 +151,7 @@ const NodePluginInput = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
type: data.inputType,
|
||||
required: data.required,
|
||||
description: data.description,
|
||||
toolDescription: data.isToolInput ? data.description : undefined,
|
||||
edit: true,
|
||||
editField: createEditField
|
||||
}
|
||||
@@ -191,6 +195,7 @@ const NodePluginInput = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
required: data.required,
|
||||
label: data.label,
|
||||
description: data.description,
|
||||
toolDescription: data.isToolInput ? data.description : undefined,
|
||||
...(data.inputType === FlowNodeInputTypeEnum.addInputParam
|
||||
? {
|
||||
editField: {
|
||||
@@ -218,7 +223,7 @@ const NodePluginInput = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
key: data.key,
|
||||
label: data.label
|
||||
};
|
||||
|
||||
console.log(data);
|
||||
if (changeKey) {
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
|
||||
@@ -6,8 +6,10 @@ import Divider from '../modules/Divider';
|
||||
import Container from '../modules/Container';
|
||||
import RenderInput from '../render/RenderInput';
|
||||
import RenderOutput from '../render/RenderOutput';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
|
||||
const NodeRunAPP = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
const { t } = useTranslation();
|
||||
const { moduleId, inputs, outputs } = data;
|
||||
|
||||
return (
|
||||
@@ -15,7 +17,7 @@ const NodeRunAPP = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
<Container borderTop={'2px solid'} borderTopColor={'myGray.200'}>
|
||||
<RenderInput moduleId={moduleId} flowInputList={inputs} />
|
||||
</Container>
|
||||
<Divider text="Output" />
|
||||
<Divider text={t('common.Output')} />
|
||||
<Container>
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React from 'react';
|
||||
import React, { useMemo } from 'react';
|
||||
import { NodeProps } from 'reactflow';
|
||||
import NodeCard from '../render/NodeCard';
|
||||
import { FlowModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
@@ -6,23 +6,42 @@ import Divider from '../modules/Divider';
|
||||
import Container from '../modules/Container';
|
||||
import RenderInput from '../render/RenderInput';
|
||||
import RenderOutput from '../render/RenderOutput';
|
||||
import RenderToolInput from '../render/RenderToolInput';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { useFlowProviderStore } from '../../FlowProvider';
|
||||
|
||||
const NodeSimple = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
const { t } = useTranslation();
|
||||
const { splitToolInputs } = useFlowProviderStore();
|
||||
const { moduleId, inputs, outputs } = data;
|
||||
const { toolInputs, commonInputs } = splitToolInputs(inputs, moduleId);
|
||||
|
||||
const filterHiddenInputs = useMemo(
|
||||
() => commonInputs.filter((item) => item.type !== 'hidden'),
|
||||
[commonInputs]
|
||||
);
|
||||
|
||||
return (
|
||||
<NodeCard minW={'350px'} selected={selected} {...data}>
|
||||
{inputs.length > 0 && (
|
||||
{toolInputs.length > 0 && (
|
||||
<>
|
||||
<Divider text="Input" />
|
||||
<Divider text={t('core.module.tool.Tool input')} />
|
||||
<Container>
|
||||
<RenderInput moduleId={moduleId} flowInputList={inputs} />
|
||||
<RenderToolInput moduleId={moduleId} inputs={toolInputs} />
|
||||
</Container>
|
||||
</>
|
||||
)}
|
||||
{filterHiddenInputs.length > 0 && (
|
||||
<>
|
||||
<Divider text={t('common.Input')} />
|
||||
<Container>
|
||||
<RenderInput moduleId={moduleId} flowInputList={commonInputs} />
|
||||
</Container>
|
||||
</>
|
||||
)}
|
||||
{outputs.length > 0 && (
|
||||
<>
|
||||
<Divider text="Output" />
|
||||
<Divider text={t('common.Output')} />
|
||||
<Container>
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
import React from 'react';
|
||||
import { NodeProps } from 'reactflow';
|
||||
import NodeCard from '../render/NodeCard';
|
||||
import { FlowModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import Divider from '../modules/Divider';
|
||||
import Container from '../modules/Container';
|
||||
import RenderInput from '../render/RenderInput';
|
||||
import RenderOutput from '../render/RenderOutput';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { ToolSourceHandle } from '../render/ToolHandle';
|
||||
import { Box } from '@chakra-ui/react';
|
||||
|
||||
const NodeTools = ({ data, selected }: NodeProps<FlowModuleItemType>) => {
|
||||
const { t } = useTranslation();
|
||||
const { moduleId, inputs, outputs } = data;
|
||||
|
||||
return (
|
||||
<NodeCard minW={'350px'} selected={selected} {...data}>
|
||||
<Divider text={t('common.Input')} />
|
||||
<Container>
|
||||
<RenderInput moduleId={moduleId} flowInputList={inputs} />
|
||||
</Container>
|
||||
|
||||
<Divider text={t('common.Output')} />
|
||||
<Container>
|
||||
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
|
||||
</Container>
|
||||
<Box position={'relative'}>
|
||||
<Box borderBottomLeftRadius={'md'} borderBottomRadius={'md'} overflow={'hidden'}>
|
||||
<Divider showBorderBottom={false} text={t('core.module.template.Tool module')} />
|
||||
</Box>
|
||||
<ToolSourceHandle moduleId={moduleId} />
|
||||
</Box>
|
||||
</NodeCard>
|
||||
);
|
||||
};
|
||||
export default React.memo(NodeTools);
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useEffect, useMemo, useState } from 'react';
|
||||
import React, { useCallback, useMemo, useState } from 'react';
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
@@ -95,11 +95,12 @@ const FieldEditModal = ({
|
||||
});
|
||||
const inputType = watch('inputType');
|
||||
const outputType = watch('outputType');
|
||||
const valueType = watch('valueType');
|
||||
const required = watch('required');
|
||||
const [refresh, setRefresh] = useState(false);
|
||||
|
||||
const showDataTypeSelect = useMemo(() => {
|
||||
if (!editField.dataType) return false;
|
||||
if (inputType === undefined) return true;
|
||||
if (inputType === FlowNodeInputTypeEnum.target) return true;
|
||||
if (outputType === FlowNodeOutputTypeEnum.source) return true;
|
||||
|
||||
@@ -109,8 +110,8 @@ const FieldEditModal = ({
|
||||
const showRequired = useMemo(() => {
|
||||
if (inputType === FlowNodeInputTypeEnum.addInputParam) return false;
|
||||
|
||||
return editField.required;
|
||||
}, [editField.required, inputType]);
|
||||
return editField.required || editField.defaultValue;
|
||||
}, [editField.defaultValue, editField.required, inputType]);
|
||||
|
||||
const showNameInput = useMemo(() => {
|
||||
return editField.name;
|
||||
@@ -126,6 +127,37 @@ const FieldEditModal = ({
|
||||
return editField.description;
|
||||
}, [editField.description]);
|
||||
|
||||
const onSubmitSuccess = useCallback(
|
||||
(data: EditNodeFieldType) => {
|
||||
if (!data.key) return;
|
||||
if (isCreate && keys.includes(data.key)) {
|
||||
return toast({
|
||||
status: 'warning',
|
||||
title: t('core.module.edit.Field Already Exist')
|
||||
});
|
||||
}
|
||||
onSubmit({
|
||||
data,
|
||||
changeKey: !keys.includes(data.key)
|
||||
});
|
||||
},
|
||||
[isCreate, keys, onSubmit, t, toast]
|
||||
);
|
||||
const onSubmitError = useCallback(
|
||||
(e: Object) => {
|
||||
for (const item of Object.values(e)) {
|
||||
if (item.message) {
|
||||
toast({
|
||||
status: 'warning',
|
||||
title: item.message
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
[toast]
|
||||
);
|
||||
|
||||
return (
|
||||
<MyModal
|
||||
isOpen={true}
|
||||
@@ -164,7 +196,31 @@ const FieldEditModal = ({
|
||||
{showRequired && (
|
||||
<Flex alignItems={'center'} mb={5}>
|
||||
<Box flex={'0 0 70px'}>{t('common.Require Input')}</Box>
|
||||
<Switch {...register('required')} />
|
||||
<Switch
|
||||
{...register('required', {
|
||||
onChange(e) {
|
||||
if (!e.target.checked) {
|
||||
setValue('defaultValue', '');
|
||||
}
|
||||
}
|
||||
})}
|
||||
/>
|
||||
</Flex>
|
||||
)}
|
||||
{showRequired && required && editField.defaultValue && (
|
||||
<Flex alignItems={'center'} mb={5}>
|
||||
<Box flex={['0 0 70px']}>{t('core.module.Default value')}</Box>
|
||||
<Input
|
||||
bg={'myGray.50'}
|
||||
placeholder={t('core.module.Default value placeholder')}
|
||||
{...register('defaultValue')}
|
||||
/>
|
||||
</Flex>
|
||||
)}
|
||||
{editField.isToolInput && (
|
||||
<Flex alignItems={'center'} mb={5}>
|
||||
<Box flex={'0 0 70px'}>工具参数</Box>
|
||||
<Switch {...register('isToolInput')} />
|
||||
</Flex>
|
||||
)}
|
||||
{showDataTypeSelect && (
|
||||
@@ -194,18 +250,28 @@ const FieldEditModal = ({
|
||||
{showNameInput && (
|
||||
<Flex mb={5} alignItems={'center'}>
|
||||
<Box flex={'0 0 70px'}>{t('core.module.Field Name')}</Box>
|
||||
<Input placeholder="预约字段/sql语句……" {...register('label', { required: true })} />
|
||||
<Input
|
||||
bg={'myGray.50'}
|
||||
placeholder="预约字段/sql语句……"
|
||||
{...register('label', { required: true })}
|
||||
/>
|
||||
</Flex>
|
||||
)}
|
||||
{showKeyInput && (
|
||||
<Flex mb={5} alignItems={'center'}>
|
||||
<Box flex={'0 0 70px'}>{t('core.module.Field key')}</Box>
|
||||
<Input
|
||||
bg={'myGray.50'}
|
||||
placeholder="appointment/sql"
|
||||
{...register('key', {
|
||||
required: true,
|
||||
pattern: {
|
||||
value: /^[a-zA-Z]+[0-9]*$/,
|
||||
message: '字段key必须是纯英文字母或数字,并且不能以数字开头。'
|
||||
},
|
||||
onChange: (e) => {
|
||||
const value = e.target.value;
|
||||
// auto fill label
|
||||
if (!showNameInput) {
|
||||
setValue('label', value);
|
||||
}
|
||||
@@ -215,10 +281,17 @@ const FieldEditModal = ({
|
||||
</Flex>
|
||||
)}
|
||||
{showDescriptionInput && (
|
||||
<Flex mb={5} alignItems={'flex-start'}>
|
||||
<Box flex={'0 0 70px'}>{t('core.module.Field Description')}</Box>
|
||||
<Textarea placeholder={t('common.choosable')} rows={3} {...register('description')} />
|
||||
</Flex>
|
||||
<Box mb={5} alignItems={'flex-start'}>
|
||||
<Box flex={'0 0 70px'} mb={'1px'}>
|
||||
{t('core.module.Field Description')}
|
||||
</Box>
|
||||
<Textarea
|
||||
bg={'myGray.50'}
|
||||
placeholder={t('common.choosable')}
|
||||
rows={5}
|
||||
{...register('description')}
|
||||
/>
|
||||
</Box>
|
||||
)}
|
||||
</ModalBody>
|
||||
|
||||
@@ -226,21 +299,7 @@ const FieldEditModal = ({
|
||||
<Button variant={'whiteBase'} mr={3} onClick={onClose}>
|
||||
{t('common.Close')}
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleSubmit((data) => {
|
||||
if (!data.key) return;
|
||||
if (isCreate && keys.includes(data.key)) {
|
||||
return toast({
|
||||
status: 'warning',
|
||||
title: t('core.module.edit.Field Already Exist')
|
||||
});
|
||||
}
|
||||
onSubmit({
|
||||
data,
|
||||
changeKey: !keys.includes(data.key)
|
||||
});
|
||||
})}
|
||||
>
|
||||
<Button onClick={handleSubmit(onSubmitSuccess, onSubmitError)}>
|
||||
{t('common.Confirm')}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import { Box, Flex, useTheme, MenuButton } from '@chakra-ui/react';
|
||||
import { Box, Button, Flex } from '@chakra-ui/react';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import Avatar from '@/components/Avatar';
|
||||
import type { FlowModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import MyTooltip from '@/components/MyTooltip';
|
||||
import { QuestionOutlineIcon } from '@chakra-ui/icons';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
|
||||
import { useToast } from '@fastgpt/web/hooks/useToast';
|
||||
import { onChangeNode, onCopyNode, onResetNode, onDelNode } from '../../FlowProvider';
|
||||
import {
|
||||
onChangeNode,
|
||||
onCopyNode,
|
||||
onResetNode,
|
||||
onDelNode,
|
||||
useFlowProviderStore
|
||||
} from '../../FlowProvider';
|
||||
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
||||
@@ -16,7 +20,8 @@ import { getPreviewPluginModule } from '@/web/core/plugin/api';
|
||||
import { getErrText } from '@fastgpt/global/common/error/utils';
|
||||
import { useConfirm } from '@/web/common/hooks/useConfirm';
|
||||
import { LOGO_ICON } from '@fastgpt/global/common/system/constants';
|
||||
import MyMenu from '@/components/MyMenu';
|
||||
import { ToolTargetHandle } from './ToolHandle';
|
||||
import { useEditTextarea } from '@fastgpt/web/hooks/useEditTextarea';
|
||||
|
||||
type Props = FlowModuleItemType & {
|
||||
children?: React.ReactNode | React.ReactNode[] | string;
|
||||
@@ -37,12 +42,18 @@ const NodeCard = (props: Props) => {
|
||||
flowType,
|
||||
inputs,
|
||||
selected,
|
||||
forbidMenu
|
||||
forbidMenu,
|
||||
isTool = false
|
||||
} = props;
|
||||
|
||||
const theme = useTheme();
|
||||
const { toast } = useToast();
|
||||
const { setLoading } = useSystemStore();
|
||||
const { nodes, splitToolInputs } = useFlowProviderStore();
|
||||
const { onOpenModal: onOpenIntroModal, EditModal: EditIntroModal } = useEditTextarea({
|
||||
title: t('core.module.Edit intro'),
|
||||
tip: '调整该模块会对工具调用时机有影响。\n你可以通过精确的描述该模块功能,引导模型进行工具调用。',
|
||||
canEmpty: false
|
||||
});
|
||||
|
||||
// custom title edit
|
||||
const { onOpenModal, EditModal: EditTitleModal } = useEditTitle({
|
||||
@@ -53,13 +64,23 @@ const NodeCard = (props: Props) => {
|
||||
content: t('module.Confirm Sync Plugin')
|
||||
});
|
||||
|
||||
const menuList = useMemo(
|
||||
() => [
|
||||
const showToolHandle = useMemo(
|
||||
() => isTool && !!nodes.find((item) => item.data?.flowType === FlowNodeTypeEnum.tools),
|
||||
[isTool, nodes]
|
||||
);
|
||||
const moduleIsTool = useMemo(() => {
|
||||
const { isTool } = splitToolInputs([], moduleId);
|
||||
return isTool;
|
||||
}, [moduleId, splitToolInputs]);
|
||||
|
||||
const Header = useMemo(() => {
|
||||
const menuList = [
|
||||
...(flowType === FlowNodeTypeEnum.pluginModule
|
||||
? [
|
||||
{
|
||||
icon: 'common/refreshLight',
|
||||
label: t('plugin.Synchronous version'),
|
||||
variant: 'whiteBase',
|
||||
onClick: () => {
|
||||
const pluginId = inputs.find(
|
||||
(item) => item.key === ModuleInputKeyEnum.pluginId
|
||||
@@ -88,6 +109,7 @@ const NodeCard = (props: Props) => {
|
||||
{
|
||||
icon: 'edit',
|
||||
label: t('common.Rename'),
|
||||
variant: 'whiteBase',
|
||||
onClick: () =>
|
||||
onOpenModal({
|
||||
defaultVal: name,
|
||||
@@ -111,65 +133,130 @@ const NodeCard = (props: Props) => {
|
||||
{
|
||||
icon: 'copy',
|
||||
label: t('common.Copy'),
|
||||
variant: 'whiteBase',
|
||||
onClick: () => onCopyNode(moduleId)
|
||||
},
|
||||
{
|
||||
icon: 'delete',
|
||||
label: t('common.Delete'),
|
||||
variant: 'whiteDanger',
|
||||
onClick: () => onDelNode(moduleId)
|
||||
}
|
||||
],
|
||||
[flowType, inputs, moduleId, name, onOpenModal, openConfirm, setLoading, t, toast]
|
||||
);
|
||||
];
|
||||
|
||||
return (
|
||||
<Box className="custom-drag-handle" px={4} py={3} position={'relative'}>
|
||||
{showToolHandle && <ToolTargetHandle moduleId={moduleId} />}
|
||||
<Flex alignItems={'center'}>
|
||||
<Avatar src={avatar} borderRadius={'0'} objectFit={'contain'} w={'30px'} h={'30px'} />
|
||||
<Box ml={3} fontSize={'lg'}>
|
||||
{t(name)}
|
||||
</Box>
|
||||
</Flex>
|
||||
{!forbidMenu && (
|
||||
<Box
|
||||
className="nodrag controller-menu"
|
||||
display={'none'}
|
||||
flexDirection={'column'}
|
||||
gap={3}
|
||||
position={'absolute'}
|
||||
top={'-20px'}
|
||||
right={0}
|
||||
transform={'translateX(90%)'}
|
||||
pl={'17px'}
|
||||
pr={'10px'}
|
||||
pb={'20px'}
|
||||
pt={'20px'}
|
||||
>
|
||||
{menuList.map((item) => (
|
||||
<Box key={item.icon}>
|
||||
<Button
|
||||
size={'xs'}
|
||||
variant={item.variant}
|
||||
leftIcon={<MyIcon name={item.icon as any} w={'12px'} />}
|
||||
onClick={item.onClick}
|
||||
>
|
||||
{item.label}
|
||||
</Button>
|
||||
</Box>
|
||||
))}
|
||||
</Box>
|
||||
)}
|
||||
<Flex alignItems={'flex-end'} py={1}>
|
||||
<Box fontSize={'xs'} color={'myGray.600'} flex={'1 0 0'}>
|
||||
{t(intro)}
|
||||
</Box>
|
||||
{moduleIsTool && (
|
||||
<Button
|
||||
size={'xs'}
|
||||
variant={'whiteBase'}
|
||||
onClick={() => {
|
||||
onOpenIntroModal({
|
||||
defaultVal: intro,
|
||||
onSuccess(e) {
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'attr',
|
||||
key: 'intro',
|
||||
value: e
|
||||
});
|
||||
}
|
||||
});
|
||||
}}
|
||||
>
|
||||
{t('core.module.Edit intro')}
|
||||
</Button>
|
||||
)}
|
||||
</Flex>
|
||||
</Box>
|
||||
);
|
||||
}, [
|
||||
avatar,
|
||||
flowType,
|
||||
forbidMenu,
|
||||
inputs,
|
||||
intro,
|
||||
moduleId,
|
||||
moduleIsTool,
|
||||
name,
|
||||
onOpenIntroModal,
|
||||
onOpenModal,
|
||||
openConfirm,
|
||||
setLoading,
|
||||
showToolHandle,
|
||||
t,
|
||||
toast
|
||||
]);
|
||||
|
||||
const RenderModal = useMemo(() => {
|
||||
return (
|
||||
<>
|
||||
<EditTitleModal maxLength={20} />
|
||||
{moduleIsTool && <EditIntroModal maxLength={500} />}
|
||||
<ConfirmModal />
|
||||
</>
|
||||
);
|
||||
}, [ConfirmModal, EditIntroModal, EditTitleModal, moduleIsTool]);
|
||||
|
||||
return (
|
||||
<Box
|
||||
minW={minW}
|
||||
maxW={'500px'}
|
||||
maxW={'600px'}
|
||||
bg={'white'}
|
||||
borderWidth={'1px'}
|
||||
borderColor={selected ? 'primary.600' : 'borderColor.base'}
|
||||
borderRadius={'md'}
|
||||
boxShadow={'1'}
|
||||
_hover={{
|
||||
boxShadow: '4'
|
||||
boxShadow: '4',
|
||||
'& .controller-menu': {
|
||||
display: 'flex'
|
||||
}
|
||||
}}
|
||||
>
|
||||
<Box className="custom-drag-handle" px={4} py={3}>
|
||||
<Flex alignItems={'center'}>
|
||||
<Avatar src={avatar} borderRadius={'0'} objectFit={'contain'} w={'30px'} h={'30px'} />
|
||||
<Box ml={3} fontSize={'lg'}>
|
||||
{t(name)}
|
||||
</Box>
|
||||
<Box flex={1} />
|
||||
{!forbidMenu && (
|
||||
<MyMenu
|
||||
offset={[-60, 5]}
|
||||
width={120}
|
||||
Button={
|
||||
<MenuButton
|
||||
className={'nodrag'}
|
||||
_hover={{ bg: 'myWhite.600' }}
|
||||
cursor={'pointer'}
|
||||
borderRadius={'md'}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
}}
|
||||
>
|
||||
<MyIcon name={'more'} w={'14px'} p={2} />
|
||||
</MenuButton>
|
||||
}
|
||||
menuList={menuList}
|
||||
/>
|
||||
)}
|
||||
</Flex>
|
||||
<Box fontSize={'xs'} color={'myGray.600'}>
|
||||
{t(intro)}
|
||||
</Box>
|
||||
</Box>
|
||||
{Header}
|
||||
{children}
|
||||
<EditTitleModal />
|
||||
<ConfirmModal />
|
||||
{RenderModal}
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -78,32 +78,33 @@ const RenderInput = ({ flowInputList, moduleId, CustomComponent }: Props) => {
|
||||
|
||||
const sortInputs = useMemo(
|
||||
() =>
|
||||
flowInputList.sort((a, b) => {
|
||||
if (a.type === FlowNodeInputTypeEnum.addInputParam) {
|
||||
return 1;
|
||||
}
|
||||
if (b.type === FlowNodeInputTypeEnum.addInputParam) {
|
||||
return -1;
|
||||
}
|
||||
JSON.stringify(
|
||||
[...flowInputList].sort((a, b) => {
|
||||
if (a.type === FlowNodeInputTypeEnum.addInputParam) {
|
||||
return 1;
|
||||
}
|
||||
if (b.type === FlowNodeInputTypeEnum.addInputParam) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a.type === FlowNodeInputTypeEnum.switch) {
|
||||
return -1;
|
||||
}
|
||||
if (a.type === FlowNodeInputTypeEnum.switch) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}),
|
||||
return 0;
|
||||
})
|
||||
),
|
||||
[flowInputList]
|
||||
);
|
||||
const filterInputs = useMemo(
|
||||
() =>
|
||||
sortInputs.filter((input) => {
|
||||
if (mode === 'app' && input.hideInApp) return false;
|
||||
if (mode === 'plugin' && input.hideInPlugin) return false;
|
||||
const filterInputs = useMemo(() => {
|
||||
const parseSortInputs = JSON.parse(sortInputs) as FlowNodeInputItemType[];
|
||||
return parseSortInputs.filter((input) => {
|
||||
if (mode === 'app' && input.hideInApp) return false;
|
||||
if (mode === 'plugin' && input.hideInPlugin) return false;
|
||||
|
||||
return true;
|
||||
}),
|
||||
[mode, sortInputs]
|
||||
);
|
||||
return true;
|
||||
});
|
||||
}, [mode, sortInputs]);
|
||||
|
||||
const memoCustomComponent = useMemo(() => CustomComponent || {}, [CustomComponent]);
|
||||
|
||||
@@ -135,7 +136,7 @@ const RenderInput = ({ flowInputList, moduleId, CustomComponent }: Props) => {
|
||||
</Box>
|
||||
) : null;
|
||||
});
|
||||
}, [memoCustomComponent, filterInputs, mode, moduleId]);
|
||||
}, [filterInputs, memoCustomComponent, mode, moduleId]);
|
||||
|
||||
return <>{Render}</>;
|
||||
};
|
||||
|
||||
@@ -3,14 +3,24 @@ import type { RenderInputProps } from '../type';
|
||||
import { onChangeNode } from '../../../../FlowProvider';
|
||||
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
||||
import SelectAiModel from '@/components/Select/SelectAiModel';
|
||||
import { llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
|
||||
|
||||
const SelectAiModelRender = ({ item, inputs = [], moduleId }: RenderInputProps) => {
|
||||
const { llmModelList } = useSystemStore();
|
||||
const modelList = llmModelList.map((item) => ({
|
||||
model: item.model,
|
||||
name: item.name,
|
||||
maxResponse: item.maxResponse
|
||||
}));
|
||||
|
||||
const modelList = llmModelList
|
||||
.filter((model) => {
|
||||
if (!item.llmModelType) return true;
|
||||
const filterField = llmModelTypeFilterMap[item.llmModelType];
|
||||
if (!filterField) return true;
|
||||
//@ts-ignore
|
||||
return !!model[filterField];
|
||||
})
|
||||
.map((item) => ({
|
||||
model: item.model,
|
||||
name: item.name,
|
||||
maxResponse: item.maxResponse
|
||||
}));
|
||||
|
||||
const onChangeModel = useCallback(
|
||||
(e: string) => {
|
||||
|
||||
@@ -48,7 +48,9 @@ const OutputLabel = ({
|
||||
label: item.label,
|
||||
description: item.description,
|
||||
valueType: item.valueType,
|
||||
outputType: item.type
|
||||
outputType: item.type,
|
||||
required: item.required,
|
||||
defaultValue: item.defaultValue
|
||||
})
|
||||
}
|
||||
/>
|
||||
@@ -74,7 +76,20 @@ const OutputLabel = ({
|
||||
<QuestionOutlineIcon display={['none', 'inline']} mr={1} />
|
||||
</MyTooltip>
|
||||
)}
|
||||
<Box>{t(label)}</Box>
|
||||
<Box position={'relative'}>
|
||||
{item.required && (
|
||||
<Box
|
||||
position={'absolute'}
|
||||
top={'-2px'}
|
||||
left={'-5px'}
|
||||
color={'red.500'}
|
||||
fontWeight={'bold'}
|
||||
>
|
||||
*
|
||||
</Box>
|
||||
)}
|
||||
{t(label)}
|
||||
</Box>
|
||||
|
||||
{item.type === FlowNodeOutputTypeEnum.source && (
|
||||
<SourceHandle handleKey={outputKey} valueType={item.valueType} />
|
||||
@@ -95,7 +110,9 @@ const OutputLabel = ({
|
||||
valueType: data.valueType,
|
||||
key: data.key,
|
||||
label: data.label,
|
||||
description: data.description
|
||||
description: data.description,
|
||||
required: data.required,
|
||||
defaultValue: data.defaultValue
|
||||
};
|
||||
|
||||
if (changeKey) {
|
||||
|
||||
@@ -17,7 +17,7 @@ const RenderList: {
|
||||
}
|
||||
];
|
||||
|
||||
const RenderOutput = ({
|
||||
const RenderToolOutput = ({
|
||||
moduleId,
|
||||
flowOutputList
|
||||
}: {
|
||||
@@ -77,4 +77,4 @@ const RenderOutput = ({
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(RenderOutput);
|
||||
export default React.memo(RenderToolOutput);
|
||||
|
||||
@@ -43,6 +43,7 @@ const AddOutputParam = ({ outputs = [], item, moduleId }: RenderOutputProps) =>
|
||||
label: data.label,
|
||||
description: data.description,
|
||||
required: data.required,
|
||||
defaultValue: data.defaultValue,
|
||||
edit: true,
|
||||
editField: item.editField,
|
||||
targets: []
|
||||
|
||||
@@ -0,0 +1,141 @@
|
||||
import React, { useCallback, useRef } from 'react';
|
||||
import MyModal from '@/components/MyModal';
|
||||
import type { EditFieldModalProps } from './type.d';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
Flex,
|
||||
Input,
|
||||
ModalBody,
|
||||
ModalFooter,
|
||||
Switch,
|
||||
Textarea
|
||||
} from '@chakra-ui/react';
|
||||
import { useForm } from 'react-hook-form';
|
||||
import { defaultEditFormData } from './constants';
|
||||
import MySelect from '@fastgpt/web/components/common/MySelect';
|
||||
import { useRequest } from '@/web/common/hooks/useRequest';
|
||||
import { useToast } from '@fastgpt/web/hooks/useToast';
|
||||
import { onChangeNode } from '../../../FlowProvider';
|
||||
import { FlowNodeInputItemType } from '@fastgpt/global/core/module/node/type';
|
||||
|
||||
const EditFieldModal = ({
|
||||
defaultValue = defaultEditFormData,
|
||||
moduleId,
|
||||
onClose
|
||||
}: EditFieldModalProps) => {
|
||||
const { t } = useTranslation();
|
||||
const { toast } = useToast();
|
||||
|
||||
const { register, getValues, setValue, handleSubmit, watch } = useForm<FlowNodeInputItemType>({
|
||||
defaultValues: defaultValue
|
||||
});
|
||||
|
||||
const selectTypeList = useRef([
|
||||
{
|
||||
label: '字符串',
|
||||
value: 'string'
|
||||
}
|
||||
]);
|
||||
|
||||
const { mutate: onclickSubmit } = useRequest({
|
||||
mutationFn: async (e: FlowNodeInputItemType) => {
|
||||
const inputConfig = {
|
||||
...e,
|
||||
label: e.key
|
||||
};
|
||||
if (defaultValue.key) {
|
||||
// edit
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'replaceInput',
|
||||
key: defaultValue.key,
|
||||
value: inputConfig
|
||||
});
|
||||
} else {
|
||||
// create
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'addInput',
|
||||
key: e.key,
|
||||
value: {
|
||||
...e,
|
||||
label: e.key
|
||||
}
|
||||
});
|
||||
}
|
||||
onClose();
|
||||
}
|
||||
});
|
||||
const onclickSubmitError = useCallback(
|
||||
(e: Object) => {
|
||||
for (const item of Object.values(e)) {
|
||||
if (item.message) {
|
||||
toast({
|
||||
status: 'warning',
|
||||
title: item.message
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
[toast]
|
||||
);
|
||||
|
||||
return (
|
||||
<MyModal isOpen iconSrc="modal/edit" title={'工具字段参数配置'} onClose={onClose}>
|
||||
<ModalBody>
|
||||
<Flex alignItems={'center'} mb={5}>
|
||||
<Box flex={'0 0 80px'}>{t('common.Require Input')}</Box>
|
||||
<Switch {...register('required')} />
|
||||
</Flex>
|
||||
<Flex alignItems={'center'} mb={5}>
|
||||
<Box flex={'0 0 80px'}>{t('core.module.Field key')}</Box>
|
||||
<Box flex={'1 0 0'}>
|
||||
<MySelect
|
||||
list={selectTypeList.current}
|
||||
value={getValues('valueType')}
|
||||
onchange={(e: any) => {
|
||||
setValue('valueType', e);
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
</Flex>
|
||||
<Flex alignItems={'center'} mb={5}>
|
||||
<Box flex={'0 0 80px'}>{'字段key'}</Box>
|
||||
<Input
|
||||
bg={'myGray.50'}
|
||||
{...register('key', {
|
||||
required: true,
|
||||
pattern: {
|
||||
value: /^[a-zA-Z]+[0-9]*$/,
|
||||
message: '字段key必须是纯英文字母或数字,并且不能以数字开头。'
|
||||
}
|
||||
})}
|
||||
/>
|
||||
</Flex>
|
||||
<Box mb={5}>
|
||||
<Box flex={'0 0 80px'}>{t('core.module.Field Description')}</Box>
|
||||
<Textarea
|
||||
bg={'myGray.50'}
|
||||
rows={5}
|
||||
{...register('toolDescription', {
|
||||
required: true
|
||||
})}
|
||||
/>
|
||||
</Box>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant={'whiteBase'} mr={2} onClick={onClose}>
|
||||
{t('common.Close')}
|
||||
</Button>
|
||||
<Button onClick={handleSubmit((data) => onclickSubmit(data), onclickSubmitError)}>
|
||||
{t('common.Confirm')}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</MyModal>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(EditFieldModal);
|
||||
@@ -0,0 +1,11 @@
|
||||
import { FlowNodeInputItemType } from '@fastgpt/global/core/module/node/type';
|
||||
import { FlowNodeInputTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
|
||||
export const defaultEditFormData: FlowNodeInputItemType = {
|
||||
valueType: 'string',
|
||||
type: FlowNodeInputTypeEnum.hidden,
|
||||
key: '',
|
||||
label: '',
|
||||
toolDescription: '',
|
||||
required: true
|
||||
};
|
||||
@@ -0,0 +1,120 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import type {
|
||||
FlowNodeInputItemType,
|
||||
FlowNodeOutputItemType
|
||||
} from '@fastgpt/global/core/module/node/type';
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
Table,
|
||||
Thead,
|
||||
Tbody,
|
||||
Tr,
|
||||
Th,
|
||||
Td,
|
||||
TableContainer,
|
||||
Flex
|
||||
} from '@chakra-ui/react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import dynamic from 'next/dynamic';
|
||||
import { defaultEditFormData } from './constants';
|
||||
import { onChangeNode } from '../../../FlowProvider';
|
||||
const EditFieldModal = dynamic(() => import('./EditFieldModal'));
|
||||
|
||||
const RenderToolInput = ({
|
||||
moduleId,
|
||||
inputs,
|
||||
canEdit = false
|
||||
}: {
|
||||
moduleId: string;
|
||||
inputs: FlowNodeInputItemType[];
|
||||
canEdit?: boolean;
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const [editField, setEditField] = React.useState<FlowNodeInputItemType>();
|
||||
|
||||
return (
|
||||
<>
|
||||
{canEdit && (
|
||||
<Flex mb={2} alignItems={'center'}>
|
||||
<Box flex={'1 0 0'}>{t('common.Field')}</Box>
|
||||
<Button
|
||||
variant={'unstyled'}
|
||||
leftIcon={<MyIcon name={'common/addLight'} w={'14px'} />}
|
||||
size={'sm'}
|
||||
px={3}
|
||||
_hover={{
|
||||
bg: 'myGray.150'
|
||||
}}
|
||||
onClick={() => setEditField(defaultEditFormData)}
|
||||
>
|
||||
{t('core.module.extract.Add field')}
|
||||
</Button>
|
||||
</Flex>
|
||||
)}
|
||||
<Box borderRadius={'md'} overflow={'hidden'} borderWidth={'1px'} borderBottom="none">
|
||||
<TableContainer>
|
||||
<Table bg={'white'}>
|
||||
<Thead>
|
||||
<Tr>
|
||||
<Th bg={'myGray.50'}>字段名</Th>
|
||||
<Th bg={'myGray.50'}>字段描述</Th>
|
||||
<Th bg={'myGray.50'}>必须</Th>
|
||||
{canEdit && <Th bg={'myGray.50'}></Th>}
|
||||
</Tr>
|
||||
</Thead>
|
||||
<Tbody>
|
||||
{inputs.map((item, index) => (
|
||||
<Tr
|
||||
key={index}
|
||||
position={'relative'}
|
||||
whiteSpace={'pre-wrap'}
|
||||
wordBreak={'break-all'}
|
||||
>
|
||||
<Td>{item.key}</Td>
|
||||
<Td>{item.toolDescription}</Td>
|
||||
<Td>{item.required ? '✔' : ''}</Td>
|
||||
{canEdit && (
|
||||
<Td whiteSpace={'nowrap'}>
|
||||
<MyIcon
|
||||
mr={3}
|
||||
name={'common/settingLight'}
|
||||
w={'16px'}
|
||||
cursor={'pointer'}
|
||||
onClick={() => setEditField(item)}
|
||||
/>
|
||||
<MyIcon
|
||||
name={'delete'}
|
||||
w={'16px'}
|
||||
cursor={'pointer'}
|
||||
onClick={() => {
|
||||
onChangeNode({
|
||||
moduleId,
|
||||
type: 'delInput',
|
||||
key: item.key,
|
||||
value: ''
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</Td>
|
||||
)}
|
||||
</Tr>
|
||||
))}
|
||||
</Tbody>
|
||||
</Table>
|
||||
</TableContainer>
|
||||
</Box>
|
||||
|
||||
{!!editField && (
|
||||
<EditFieldModal
|
||||
defaultValue={editField}
|
||||
moduleId={moduleId}
|
||||
onClose={() => setEditField(undefined)}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default React.memo(RenderToolInput);
|
||||
5
projects/app/src/components/core/module/Flow/components/render/RenderToolInput/type.d.ts
vendored
Normal file
5
projects/app/src/components/core/module/Flow/components/render/RenderToolInput/type.d.ts
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
export type EditFieldModalProps = {
|
||||
defaultValue?: EditFieldFormProps;
|
||||
moduleId: string;
|
||||
onClose: () => void;
|
||||
};
|
||||
@@ -28,7 +28,7 @@ const SourceHandle = ({ handleKey, valueType, ...props }: Props) => {
|
||||
<Box
|
||||
position={'absolute'}
|
||||
top={'50%'}
|
||||
right={'-18px'}
|
||||
right={'-20px'}
|
||||
transform={'translate(50%,-50%)'}
|
||||
{...props}
|
||||
>
|
||||
@@ -42,6 +42,8 @@ const SourceHandle = ({ handleKey, valueType, ...props }: Props) => {
|
||||
style={{
|
||||
width: '14px',
|
||||
height: '14px',
|
||||
borderWidth: '3.5px',
|
||||
backgroundColor: 'white',
|
||||
...valueStyle
|
||||
}}
|
||||
type="source"
|
||||
|
||||
@@ -27,7 +27,7 @@ const TargetHandle = ({ handleKey, valueType, ...props }: Props) => {
|
||||
<Box
|
||||
position={'absolute'}
|
||||
top={'50%'}
|
||||
left={'-18px'}
|
||||
left={'-20px'}
|
||||
transform={'translate(50%,-50%)'}
|
||||
{...props}
|
||||
>
|
||||
@@ -41,6 +41,8 @@ const TargetHandle = ({ handleKey, valueType, ...props }: Props) => {
|
||||
style={{
|
||||
width: '14px',
|
||||
height: '14px',
|
||||
borderWidth: '3.5px',
|
||||
backgroundColor: 'white',
|
||||
...valueStyle
|
||||
}}
|
||||
type="target"
|
||||
|
||||
@@ -0,0 +1,103 @@
|
||||
import MyTooltip from '@/components/MyTooltip';
|
||||
import { FlowValueTypeMap } from '@/web/core/modules/constants/dataType';
|
||||
import { Box, BoxProps } from '@chakra-ui/react';
|
||||
import {
|
||||
ModuleIOValueTypeEnum,
|
||||
ModuleInputKeyEnum,
|
||||
ModuleOutputKeyEnum
|
||||
} from '@fastgpt/global/core/module/constants';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { Connection, Handle, Position } from 'reactflow';
|
||||
import { useFlowProviderStore } from '../../FlowProvider';
|
||||
import { useCallback } from 'react';
|
||||
|
||||
type ToolHandleProps = BoxProps & {
|
||||
moduleId: string;
|
||||
};
|
||||
export const ToolTargetHandle = ({ moduleId, ...props }: ToolHandleProps) => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
const valueTypeMap = FlowValueTypeMap[ModuleIOValueTypeEnum.tools];
|
||||
|
||||
return (
|
||||
<Box position={'absolute'} left={'50%'} transform={'translate(-17px,-10px)'} {...props}>
|
||||
<MyTooltip
|
||||
label={t('app.module.type', {
|
||||
type: t(valueTypeMap?.label),
|
||||
description: valueTypeMap?.description
|
||||
})}
|
||||
>
|
||||
<Handle
|
||||
style={{
|
||||
width: '14px',
|
||||
height: '14px',
|
||||
border: '4px solid #5E8FFF',
|
||||
borderRadius: '0',
|
||||
backgroundColor: 'transparent',
|
||||
transformOrigin: 'center',
|
||||
transform: 'rotate(45deg)'
|
||||
}}
|
||||
type="target"
|
||||
id={ModuleOutputKeyEnum.selectedTools}
|
||||
position={Position.Top}
|
||||
/>
|
||||
</MyTooltip>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
|
||||
export const ToolSourceHandle = ({ moduleId, ...props }: ToolHandleProps) => {
|
||||
const { t } = useTranslation();
|
||||
const { setEdges, nodes } = useFlowProviderStore();
|
||||
|
||||
const valueTypeMap = FlowValueTypeMap[ModuleIOValueTypeEnum.tools];
|
||||
|
||||
/* onConnect edge, delete tool input and switch */
|
||||
const onConnect = useCallback(
|
||||
(e: Connection) => {
|
||||
const node = nodes.find((node) => node.id === e.target);
|
||||
if (!node) return;
|
||||
const inputs = node.data.inputs;
|
||||
setEdges((edges) =>
|
||||
edges.filter((edge) => {
|
||||
const input = inputs.find((input) => input.key === edge.targetHandle);
|
||||
if (
|
||||
edge.target === node.id &&
|
||||
(!!input?.toolDescription || input?.key === ModuleInputKeyEnum.switch)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
})
|
||||
);
|
||||
},
|
||||
[nodes, setEdges]
|
||||
);
|
||||
|
||||
return (
|
||||
<Box position={'absolute'} left={'50%'} transform={'translate(-16px,-14px)'} {...props}>
|
||||
<MyTooltip
|
||||
label={t('app.module.type', {
|
||||
type: t(valueTypeMap?.label),
|
||||
description: valueTypeMap?.description
|
||||
})}
|
||||
>
|
||||
<Handle
|
||||
style={{
|
||||
width: '14px',
|
||||
height: '14px',
|
||||
border: '4px solid #5E8FFF',
|
||||
borderRadius: '0',
|
||||
backgroundColor: 'transparent',
|
||||
transformOrigin: 'center',
|
||||
transform: 'rotate(45deg)'
|
||||
}}
|
||||
type="source"
|
||||
id={ModuleOutputKeyEnum.selectedTools}
|
||||
position={Position.Bottom}
|
||||
onConnect={onConnect}
|
||||
/>
|
||||
</MyTooltip>
|
||||
</Box>
|
||||
);
|
||||
};
|
||||
@@ -33,7 +33,8 @@ const nodeTypes: Record<`${FlowNodeTypeEnum}`, any> = {
|
||||
[FlowNodeTypeEnum.pluginInput]: dynamic(() => import('./components/nodes/NodePluginInput')),
|
||||
[FlowNodeTypeEnum.pluginOutput]: dynamic(() => import('./components/nodes/NodePluginOutput')),
|
||||
[FlowNodeTypeEnum.pluginModule]: NodeSimple,
|
||||
[FlowNodeTypeEnum.queryExtension]: NodeSimple
|
||||
[FlowNodeTypeEnum.queryExtension]: NodeSimple,
|
||||
[FlowNodeTypeEnum.tools]: dynamic(() => import('./components/nodes/NodeTools'))
|
||||
};
|
||||
const edgeTypes = {
|
||||
[EDGE_TYPE]: ButtonEdge
|
||||
|
||||
@@ -14,6 +14,7 @@ export const flowNode2Modules = ({
|
||||
const modules: ModuleItemType[] = nodes.map((item) => ({
|
||||
moduleId: item.data.moduleId,
|
||||
name: item.data.name,
|
||||
intro: item.data.intro,
|
||||
avatar: item.data.avatar,
|
||||
flowType: item.data.flowType,
|
||||
showStatus: item.data.showStatus,
|
||||
@@ -38,10 +39,15 @@ export const flowNode2Modules = ({
|
||||
|
||||
module.outputs.forEach((output) => {
|
||||
output.targets = edges
|
||||
.filter(
|
||||
(edge) =>
|
||||
edge.source === module.moduleId && edge.sourceHandle === output.key && edge.targetHandle
|
||||
)
|
||||
.filter((edge) => {
|
||||
if (
|
||||
edge.source === module.moduleId &&
|
||||
edge.sourceHandle === output.key &&
|
||||
edge.targetHandle
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
})
|
||||
.map((edge) => ({
|
||||
moduleId: edge.target,
|
||||
key: edge.targetHandle || ''
|
||||
|
||||
4
projects/app/src/global/core/ai/api.d.ts
vendored
4
projects/app/src/global/core/ai/api.d.ts
vendored
@@ -1,6 +1,6 @@
|
||||
import { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
|
||||
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
|
||||
import type { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat.d';
|
||||
|
||||
export type CreateQuestionGuideParams = OutLinkChatAuthProps & {
|
||||
messages: ChatMessageItemType[];
|
||||
messages: ChatCompletionMessageParam[];
|
||||
};
|
||||
|
||||
2
projects/app/src/global/core/chat/api.d.ts
vendored
2
projects/app/src/global/core/chat/api.d.ts
vendored
@@ -1,6 +1,6 @@
|
||||
import type { AppTTSConfigType } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleItemType } from '../module/type';
|
||||
import { AdminFbkType, ChatItemType, moduleDispatchResType } from '@fastgpt/global/core/chat/type';
|
||||
import { AdminFbkType, ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import type { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat.d';
|
||||
|
||||
export type GetChatSpeechProps = {
|
||||
|
||||
@@ -1,20 +1,22 @@
|
||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
||||
import { connectToDatabase } from '@/service/mongo';
|
||||
import { sseErrRes } from '@fastgpt/service/common/response';
|
||||
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { responseWrite } from '@fastgpt/service/common/response';
|
||||
import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import { pushChatUsage } from '@/service/support/wallet/usage/push';
|
||||
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import type { ChatItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { authApp } from '@fastgpt/service/support/permission/auth/app';
|
||||
import { dispatchModules } from '@/service/moduleDispatch';
|
||||
import { dispatchWorkFlow } from '@/service/moduleDispatch';
|
||||
import { authCert } from '@fastgpt/service/support/permission/auth/common';
|
||||
import { getUserChatInfoAndAuthTeamPoints } from '@/service/support/permission/auth/team';
|
||||
import { setEntryEntries } from '@/service/moduleDispatch/utils';
|
||||
import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
|
||||
|
||||
export type Props = {
|
||||
history: ChatItemType[];
|
||||
prompt: string;
|
||||
prompt: ChatItemValueItemType[];
|
||||
modules: ModuleItemType[];
|
||||
variables: Record<string, any>;
|
||||
appId: string;
|
||||
@@ -33,7 +35,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
let { modules = [], history = [], prompt, variables = {}, appName, appId } = req.body as Props;
|
||||
try {
|
||||
await connectToDatabase();
|
||||
if (!history || !modules || !prompt) {
|
||||
if (!history || !modules || !prompt || prompt.length === 0) {
|
||||
throw new Error('Prams Error');
|
||||
}
|
||||
if (!Array.isArray(modules)) {
|
||||
@@ -52,19 +54,22 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
// auth balance
|
||||
const { user } = await getUserChatInfoAndAuthTeamPoints(tmbId);
|
||||
|
||||
const { text, files } = chatValue2RuntimePrompt(prompt);
|
||||
|
||||
/* start process */
|
||||
const { responseData, moduleDispatchBills } = await dispatchModules({
|
||||
const { flowResponses, flowUsages } = await dispatchWorkFlow({
|
||||
res,
|
||||
mode: 'test',
|
||||
teamId,
|
||||
tmbId,
|
||||
user,
|
||||
appId,
|
||||
modules,
|
||||
modules: setEntryEntries(modules),
|
||||
variables,
|
||||
inputFiles: files,
|
||||
histories: history,
|
||||
startParams: {
|
||||
userChatInput: prompt
|
||||
userChatInput: text
|
||||
},
|
||||
stream: true,
|
||||
detail: true
|
||||
@@ -72,13 +77,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
|
||||
responseWrite({
|
||||
res,
|
||||
event: sseResponseEventEnum.answer,
|
||||
event: SseResponseEventEnum.answer,
|
||||
data: '[DONE]'
|
||||
});
|
||||
responseWrite({
|
||||
res,
|
||||
event: sseResponseEventEnum.appStreamResponse,
|
||||
data: JSON.stringify(responseData)
|
||||
event: SseResponseEventEnum.flowResponses,
|
||||
data: JSON.stringify(flowResponses)
|
||||
});
|
||||
res.end();
|
||||
|
||||
@@ -88,7 +93,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
teamId,
|
||||
tmbId,
|
||||
source: UsageSourceEnum.fastgpt,
|
||||
moduleDispatchBills
|
||||
flowUsages
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500);
|
||||
|
||||
@@ -4,11 +4,11 @@ import { connectToDatabase } from '@/service/mongo';
|
||||
import { authApp } from '@fastgpt/service/support/permission/auth/app';
|
||||
import { getGuideModule } from '@fastgpt/global/core/module/utils';
|
||||
import { getChatModelNameListByModules } from '@/service/core/app/module';
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import type { InitChatProps, InitChatResponse } from '@/global/core/chat/api.d';
|
||||
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
|
||||
import { getChatItems } from '@fastgpt/service/core/chat/controller';
|
||||
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
try {
|
||||
@@ -45,7 +45,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
chatId,
|
||||
limit: 30,
|
||||
field: `dataId obj value adminFeedback userBadFeedback userGoodFeedback ${
|
||||
ModuleOutputKeyEnum.responseData
|
||||
DispatchNodeResponseKeyEnum.nodeResponse
|
||||
} ${loadCustomFeedbacks ? 'customFeedbacks' : ''}`
|
||||
});
|
||||
|
||||
|
||||
@@ -4,15 +4,16 @@ import { connectToDatabase } from '@/service/mongo';
|
||||
import type { InitChatResponse, InitOutLinkChatProps } from '@/global/core/chat/api.d';
|
||||
import { getGuideModule } from '@fastgpt/global/core/module/utils';
|
||||
import { getChatModelNameListByModules } from '@/service/core/app/module';
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { getChatItems } from '@fastgpt/service/core/chat/controller';
|
||||
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
|
||||
import { authOutLink } from '@/service/support/permission/auth/outLink';
|
||||
import { MongoApp } from '@fastgpt/service/core/app/schema';
|
||||
import { selectSimpleChatResponse } from '@/utils/service/core/chat';
|
||||
import { filterPublicNodeResponseData } from '@fastgpt/global/core/chat/utils';
|
||||
import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
|
||||
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
|
||||
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
try {
|
||||
@@ -44,13 +45,15 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
chatId,
|
||||
limit: 30,
|
||||
field: `dataId obj value userGoodFeedback userBadFeedback ${
|
||||
shareChat.responseDetail ? `adminFeedback ${ModuleOutputKeyEnum.responseData}` : ''
|
||||
shareChat.responseDetail ? `adminFeedback ${DispatchNodeResponseKeyEnum.nodeResponse}` : ''
|
||||
} `
|
||||
});
|
||||
|
||||
// pick share response field
|
||||
history.forEach((item) => {
|
||||
item.responseData = selectSimpleChatResponse({ responseData: item.responseData });
|
||||
if (item.obj === ChatRoleEnum.AI) {
|
||||
item.responseData = filterPublicNodeResponseData({ flowResponses: item.responseData });
|
||||
}
|
||||
});
|
||||
|
||||
jsonRes<InitChatResponse>(res, {
|
||||
|
||||
@@ -3,7 +3,7 @@ import { jsonRes } from '@fastgpt/service/common/response';
|
||||
import { connectToDatabase } from '@/service/mongo';
|
||||
import { getGuideModule } from '@fastgpt/global/core/module/utils';
|
||||
import { getChatModelNameListByModules } from '@/service/core/app/module';
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import type { InitChatResponse, InitTeamChatProps } from '@/global/core/chat/api.d';
|
||||
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
|
||||
import { MongoApp } from '@fastgpt/service/core/app/schema';
|
||||
@@ -12,7 +12,8 @@ import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
|
||||
import { authTeamSpaceToken } from '@/service/support/permission/auth/team';
|
||||
import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
|
||||
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
|
||||
import { selectSimpleChatResponse } from '@/utils/service/core/chat';
|
||||
import { filterPublicNodeResponseData } from '@fastgpt/global/core/chat/utils';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
try {
|
||||
@@ -49,12 +50,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
appId,
|
||||
chatId,
|
||||
limit: 30,
|
||||
field: `dataId obj value userGoodFeedback userBadFeedback adminFeedback ${ModuleOutputKeyEnum.responseData}`
|
||||
field: `dataId obj value userGoodFeedback userBadFeedback adminFeedback ${DispatchNodeResponseKeyEnum.nodeResponse}`
|
||||
});
|
||||
|
||||
// pick share response field
|
||||
history.forEach((item) => {
|
||||
item.responseData = selectSimpleChatResponse({ responseData: item.responseData });
|
||||
if (item.obj === ChatRoleEnum.AI) {
|
||||
item.responseData = filterPublicNodeResponseData({ flowResponses: item.responseData });
|
||||
}
|
||||
});
|
||||
|
||||
jsonRes<InitChatResponse>(res, {
|
||||
|
||||
@@ -9,6 +9,7 @@ import type { InitChatProps, InitChatResponse } from '@/global/core/chat/api.d';
|
||||
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
|
||||
import { getChatItems } from '@fastgpt/service/core/chat/controller';
|
||||
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
try {
|
||||
@@ -45,7 +46,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
|
||||
chatId,
|
||||
limit: 30,
|
||||
field: `dataId obj value adminFeedback userBadFeedback userGoodFeedback ${
|
||||
ModuleOutputKeyEnum.responseData
|
||||
DispatchNodeResponseKeyEnum.nodeResponse
|
||||
} ${loadCustomFeedbacks ? 'customFeedbacks' : ''}`
|
||||
});
|
||||
|
||||
|
||||
@@ -5,11 +5,12 @@ import { sseErrRes, jsonRes } from '@fastgpt/service/common/response';
|
||||
import { addLog } from '@fastgpt/service/common/system/log';
|
||||
import { withNextCors } from '@fastgpt/service/common/middle/cors';
|
||||
import { ChatRoleEnum, ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
|
||||
import { dispatchModules } from '@/service/moduleDispatch';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { dispatchWorkFlow } from '@/service/moduleDispatch';
|
||||
import type { ChatCompletionCreateParams } from '@fastgpt/global/core/ai/type.d';
|
||||
import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
|
||||
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
|
||||
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
|
||||
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
|
||||
import { GPTMessages2Chats, chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
|
||||
import { getChatItems } from '@fastgpt/service/core/chat/controller';
|
||||
import { saveChat } from '@/service/utils/chat/saveChat';
|
||||
import { responseWrite } from '@fastgpt/service/common/response';
|
||||
@@ -19,7 +20,7 @@ import { pushResult2Remote, addOutLinkUsage } from '@fastgpt/service/support/out
|
||||
import requestIp from 'request-ip';
|
||||
import { getUsageSourceByAuthType } from '@fastgpt/global/support/wallet/usage/tools';
|
||||
import { authTeamSpaceToken } from '@/service/support/permission/auth/team';
|
||||
import { selectSimpleChatResponse } from '@/utils/service/core/chat';
|
||||
import { filterPublicNodeResponseData } from '@fastgpt/global/core/chat/utils';
|
||||
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
|
||||
import { connectToDatabase } from '@/service/mongo';
|
||||
import { getUserChatInfoAndAuthTeamPoints } from '@/service/support/permission/auth/team';
|
||||
@@ -31,6 +32,9 @@ import { AuthOutLinkChatProps } from '@fastgpt/global/support/outLink/api';
|
||||
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
|
||||
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
|
||||
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
|
||||
import { setEntryEntries } from '@/service/moduleDispatch/utils';
|
||||
import { UserChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
|
||||
type FastGptWebChatProps = {
|
||||
chatId?: string; // undefined: nonuse history, '': new chat, 'xxxxx': use history
|
||||
@@ -40,7 +44,7 @@ type FastGptWebChatProps = {
|
||||
export type Props = ChatCompletionCreateParams &
|
||||
FastGptWebChatProps &
|
||||
OutLinkChatAuthProps & {
|
||||
messages: ChatMessageItemType[];
|
||||
messages: ChatCompletionMessageParam[];
|
||||
stream?: boolean;
|
||||
detail?: boolean;
|
||||
variables: Record<string, any>;
|
||||
@@ -102,17 +106,19 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
|
||||
let startTime = Date.now();
|
||||
|
||||
const chatMessages = gptMessage2ChatType(messages);
|
||||
const chatMessages = GPTMessages2Chats(messages);
|
||||
if (chatMessages[chatMessages.length - 1].obj !== ChatRoleEnum.Human) {
|
||||
chatMessages.pop();
|
||||
}
|
||||
|
||||
// user question
|
||||
const question = chatMessages.pop();
|
||||
const question = chatMessages.pop() as UserChatItemType;
|
||||
if (!question) {
|
||||
throw new Error('Question is empty');
|
||||
}
|
||||
|
||||
const { text, files } = chatValue2RuntimePrompt(question.value);
|
||||
|
||||
/*
|
||||
1. auth app permission
|
||||
2. auth balance
|
||||
@@ -128,7 +134,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
outLinkUid,
|
||||
chatId,
|
||||
ip: originIp,
|
||||
question: question.value
|
||||
question: text
|
||||
});
|
||||
}
|
||||
// team space chat
|
||||
@@ -161,7 +167,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
const responseChatItemId: string | undefined = messages[messages.length - 1].dataId;
|
||||
|
||||
/* start flow controller */
|
||||
const { responseData, moduleDispatchBills, answerText } = await dispatchModules({
|
||||
const { flowResponses, flowUsages, assistantResponses } = await dispatchWorkFlow({
|
||||
res,
|
||||
mode: 'chat',
|
||||
user,
|
||||
@@ -170,11 +176,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
appId: String(app._id),
|
||||
chatId,
|
||||
responseChatItemId,
|
||||
modules: app.modules,
|
||||
modules: setEntryEntries(app.modules),
|
||||
variables,
|
||||
inputFiles: files,
|
||||
histories: concatHistories,
|
||||
startParams: {
|
||||
userChatInput: question.value
|
||||
userChatInput: text
|
||||
},
|
||||
stream,
|
||||
detail
|
||||
@@ -209,8 +216,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
{
|
||||
dataId: responseChatItemId,
|
||||
obj: ChatRoleEnum.AI,
|
||||
value: answerText,
|
||||
responseData
|
||||
value: assistantResponses,
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: flowResponses
|
||||
}
|
||||
],
|
||||
metadata: {
|
||||
@@ -222,12 +229,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
addLog.info(`completions running time: ${(Date.now() - startTime) / 1000}s`);
|
||||
|
||||
/* select fe response field */
|
||||
const feResponseData = canWrite ? responseData : selectSimpleChatResponse({ responseData });
|
||||
const feResponseData = canWrite
|
||||
? flowResponses
|
||||
: filterPublicNodeResponseData({ flowResponses });
|
||||
|
||||
if (stream) {
|
||||
responseWrite({
|
||||
res,
|
||||
event: detail ? sseResponseEventEnum.answer : undefined,
|
||||
event: detail ? SseResponseEventEnum.answer : undefined,
|
||||
data: textAdaptGptResponse({
|
||||
text: null,
|
||||
finish_reason: 'stop'
|
||||
@@ -235,20 +244,26 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
});
|
||||
responseWrite({
|
||||
res,
|
||||
event: detail ? sseResponseEventEnum.answer : undefined,
|
||||
event: detail ? SseResponseEventEnum.answer : undefined,
|
||||
data: '[DONE]'
|
||||
});
|
||||
|
||||
if (responseDetail && detail) {
|
||||
responseWrite({
|
||||
res,
|
||||
event: sseResponseEventEnum.appStreamResponse,
|
||||
event: SseResponseEventEnum.flowResponses,
|
||||
data: JSON.stringify(feResponseData)
|
||||
});
|
||||
}
|
||||
|
||||
res.end();
|
||||
} else {
|
||||
const responseContent = (() => {
|
||||
if (assistantResponses.length === 0) return '';
|
||||
if (assistantResponses.length === 1 && assistantResponses[0].text?.content)
|
||||
return assistantResponses[0].text?.content;
|
||||
return assistantResponses;
|
||||
})();
|
||||
res.json({
|
||||
...(detail ? { responseData: feResponseData } : {}),
|
||||
id: chatId || '',
|
||||
@@ -256,7 +271,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },
|
||||
choices: [
|
||||
{
|
||||
message: { role: 'assistant', content: answerText },
|
||||
message: { role: 'assistant', content: responseContent },
|
||||
finish_reason: 'stop',
|
||||
index: 0
|
||||
}
|
||||
@@ -271,11 +286,11 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
|
||||
teamId,
|
||||
tmbId: tmbId,
|
||||
source: getUsageSourceByAuthType({ shareId, authType }),
|
||||
moduleDispatchBills
|
||||
flowUsages
|
||||
});
|
||||
|
||||
if (shareId) {
|
||||
pushResult2Remote({ outLinkUid, shareId, appName: app.name, responseData });
|
||||
pushResult2Remote({ outLinkUid, shareId, appName: app.name, flowResponses });
|
||||
addOutLinkUsage({
|
||||
shareId,
|
||||
totalPoints
|
||||
|
||||
@@ -12,7 +12,7 @@ import dynamic from 'next/dynamic';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import MyTooltip from '@/components/MyTooltip';
|
||||
import ChatTest, { type ChatTestComponentRef } from '@/components/core/module/Flow/ChatTest';
|
||||
import { getFlowStore } from '@/components/core/module/Flow/FlowProvider';
|
||||
import { useFlowProviderStore } from '@/components/core/module/Flow/FlowProvider';
|
||||
import { flowNode2Modules, filterExportModules } from '@/components/core/module/utils';
|
||||
import { useAppStore } from '@/web/core/app/store/useAppStore';
|
||||
import { useToast } from '@fastgpt/web/hooks/useToast';
|
||||
@@ -42,17 +42,18 @@ const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
|
||||
});
|
||||
const { isOpen: isOpenImport, onOpen: onOpenImport, onClose: onCloseImport } = useDisclosure();
|
||||
const { updateAppDetail } = useAppStore();
|
||||
const { nodes, edges, splitToolInputs } = useFlowProviderStore();
|
||||
|
||||
const flow2ModulesAndCheck = useCallback(async () => {
|
||||
const { nodes, edges } = await getFlowStore();
|
||||
|
||||
const modules = flowNode2Modules({ nodes, edges });
|
||||
// check required connect
|
||||
for (let i = 0; i < modules.length; i++) {
|
||||
const item = modules[i];
|
||||
|
||||
const { isTool } = splitToolInputs(item.inputs, item.moduleId);
|
||||
|
||||
const unconnected = item.inputs.find((input) => {
|
||||
if (!input.required || input.connected) {
|
||||
if (!input.required || input.connected || (isTool && input.toolDescription)) {
|
||||
return false;
|
||||
}
|
||||
if (input.value === undefined || input.value === '' || input.value?.length === 0) {
|
||||
@@ -72,7 +73,7 @@ const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
|
||||
}
|
||||
}
|
||||
return modules;
|
||||
}, [t, toast]);
|
||||
}, [edges, nodes, splitToolInputs, t, toast]);
|
||||
|
||||
const { mutate: onclickSave, isLoading } = useRequest({
|
||||
mutationFn: async (modules: ModuleItemType[]) => {
|
||||
@@ -97,6 +98,7 @@ const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
|
||||
borderBottom={theme.borders.base}
|
||||
alignItems={'center'}
|
||||
userSelect={'none'}
|
||||
bg={'myGray.25'}
|
||||
>
|
||||
<IconButton
|
||||
size={'smSquare'}
|
||||
|
||||
@@ -4,7 +4,7 @@ import Header from './Header';
|
||||
import Flow from '@/components/core/module/Flow';
|
||||
import FlowProvider, { useFlowProviderStore } from '@/components/core/module/Flow/FlowProvider';
|
||||
import type { FlowModuleTemplateType } from '@fastgpt/global/core/module/type.d';
|
||||
import { appSystemModuleTemplates } from '@/web/core/modules/template/system';
|
||||
import { appSystemModuleTemplates } from '@fastgpt/global/core/module/template/constants';
|
||||
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { usePluginStore } from '@/web/core/plugin/store/plugin';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
|
||||
@@ -21,7 +21,8 @@ import { ChatSourceMap } from '@fastgpt/global/core/chat/constants';
|
||||
import { HUMAN_ICON } from '@fastgpt/global/common/system/constants';
|
||||
import { AppLogsListItemType } from '@/types/app';
|
||||
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
||||
import ChatBox, { type ComponentRef } from '@/components/ChatBox';
|
||||
import ChatBox from '@/components/ChatBox';
|
||||
import type { ComponentRef } from '@/components/ChatBox/type.d';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { getInitChatInfo } from '@/web/core/chat/api';
|
||||
import Tag from '@/components/Tag';
|
||||
@@ -30,6 +31,7 @@ import { addDays } from 'date-fns';
|
||||
import MyBox from '@/components/common/MyBox';
|
||||
import { usePagination } from '@fastgpt/web/hooks/usePagination';
|
||||
import DateRangePicker, { DateRangeType } from '@fastgpt/web/components/common/DateRangePicker';
|
||||
import { formatChatValue2InputType } from '@/components/ChatBox/utils';
|
||||
|
||||
const Logs = ({ appId }: { appId: string }) => {
|
||||
const { t } = useTranslation();
|
||||
@@ -248,7 +250,8 @@ const DetailLogsModal = ({
|
||||
const history = useMemo(() => (chat?.history ? chat.history : []), [chat]);
|
||||
|
||||
const title = useMemo(() => {
|
||||
return history[history.length - 2]?.value?.slice(0, 8);
|
||||
const { text } = formatChatValue2InputType(history[history.length - 2]?.value);
|
||||
return text?.slice(0, 8);
|
||||
}, [history]);
|
||||
|
||||
return (
|
||||
|
||||
@@ -128,9 +128,7 @@ const Share = ({ appId }: { appId: string }) => {
|
||||
<Th>{item?.limit?.hookUrl ? '✔' : '✖'}</Th>
|
||||
</>
|
||||
)}
|
||||
<Td>
|
||||
{item.lastTime ? t(formatTimeToChatTime(item.lastTime)) : t('common.Un used')}
|
||||
</Td>
|
||||
<Td>{item.lastTime ? formatTimeToChatTime(item.lastTime) : t('common.Un used')}</Td>
|
||||
<Td display={'flex'} alignItems={'center'}>
|
||||
<MyMenu
|
||||
Button={
|
||||
|
||||
@@ -3,7 +3,8 @@ import { useUserStore } from '@/web/support/user/useUserStore';
|
||||
import { Box, Flex, IconButton } from '@chakra-ui/react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import React, { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
|
||||
import ChatBox from '@/components/ChatBox';
|
||||
import type { ComponentRef, StartChatFnProps } from '@/components/ChatBox/type.d';
|
||||
import { ModuleItemType } from '@fastgpt/global/core/module/type';
|
||||
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { streamFetch } from '@/web/common/api/fetch';
|
||||
|
||||
@@ -2,13 +2,13 @@ import React, { useMemo } from 'react';
|
||||
import { Flex, useTheme, Box } from '@chakra-ui/react';
|
||||
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import Tag from '@/components/Tag';
|
||||
import Avatar from '@/components/Avatar';
|
||||
import ToolMenu from './ToolMenu';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { useRouter } from 'next/router';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
|
||||
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
|
||||
import FillTag from '@fastgpt/web/components/common/Tag/Fill';
|
||||
|
||||
const ChatHeader = ({
|
||||
history,
|
||||
@@ -33,10 +33,8 @@ const ChatHeader = ({
|
||||
const { isPc } = useSystemStore();
|
||||
const title = useMemo(
|
||||
() =>
|
||||
chatContentReplaceBlock(history[history.length - 2]?.value)?.slice(0, 8) ||
|
||||
appName ||
|
||||
t('core.chat.New Chat'),
|
||||
[appName, history]
|
||||
getChatTitleFromChatMessage(history[history.length - 2], appName || t('core.chat.New Chat')),
|
||||
[appName, history, t]
|
||||
);
|
||||
|
||||
return (
|
||||
@@ -52,19 +50,19 @@ const ChatHeader = ({
|
||||
<Box mr={3} color={'myGray.1000'}>
|
||||
{title}
|
||||
</Box>
|
||||
<Tag>
|
||||
<FillTag>
|
||||
<MyIcon name={'history'} w={'14px'} />
|
||||
<Box ml={1}>
|
||||
{history.length === 0
|
||||
? t('core.chat.New Chat')
|
||||
: t('core.chat.History Amount', { amount: history.length })}
|
||||
</Box>
|
||||
</Tag>
|
||||
</FillTag>
|
||||
{!!chatModels && chatModels.length > 0 && (
|
||||
<Tag ml={2} colorSchema={'green'}>
|
||||
<FillTag ml={2} colorSchema={'green'}>
|
||||
<MyIcon name={'core/chat/chatModelTag'} w={'14px'} />
|
||||
<Box ml={1}>{chatModels.join(',')}</Box>
|
||||
</Tag>
|
||||
</FillTag>
|
||||
)}
|
||||
<Box flex={1} />
|
||||
</>
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import { useChatBox } from '@/components/ChatBox';
|
||||
import { useChatBox } from '@/components/ChatBox/hooks/useChatBox';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { Menu, MenuButton, MenuList, MenuItem, Box } from '@chakra-ui/react';
|
||||
import { Menu, MenuButton, MenuList, MenuItem, Box, IconButton } from '@chakra-ui/react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import { useRouter } from 'next/router';
|
||||
import MyMenu from '@/components/MyMenu';
|
||||
|
||||
const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
|
||||
const { t } = useTranslation();
|
||||
@@ -41,30 +42,21 @@ const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
|
||||
onClick: () => onExportChat({ type: 'pdf', history })
|
||||
}
|
||||
],
|
||||
[history, onExportChat, router]
|
||||
[history, onExportChat, router, t]
|
||||
);
|
||||
|
||||
return history.length > 0 ? (
|
||||
<Menu autoSelect={false} isLazy>
|
||||
<MenuButton
|
||||
_hover={{ bg: 'myWhite.600 ' }}
|
||||
cursor={'pointer'}
|
||||
borderRadius={'md'}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
}}
|
||||
>
|
||||
<MyIcon name={'more'} w={'14px'} p={2} />
|
||||
</MenuButton>
|
||||
<MenuList color={'myGray.700'} minW={`120px !important`} zIndex={10}>
|
||||
{menuList.map((item) => (
|
||||
<MenuItem key={item.label} onClick={item.onClick} py={[2, 3]}>
|
||||
<MyIcon name={item.icon as any} w={['14px', '16px']} />
|
||||
<Box ml={[1, 2]}>{item.label}</Box>
|
||||
</MenuItem>
|
||||
))}
|
||||
</MenuList>
|
||||
</Menu>
|
||||
<MyMenu
|
||||
Button={
|
||||
<IconButton
|
||||
icon={<MyIcon name={'more'} w={'14px'} p={2} />}
|
||||
aria-label={''}
|
||||
size={'sm'}
|
||||
variant={'whitePrimary'}
|
||||
/>
|
||||
}
|
||||
menuList={menuList}
|
||||
/>
|
||||
) : (
|
||||
<Box w={'28px'} h={'28px'} />
|
||||
);
|
||||
|
||||
@@ -22,7 +22,8 @@ const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
|
||||
import type { ChatHistoryItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
|
||||
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
|
||||
import ChatBox from '@/components/ChatBox';
|
||||
import type { ComponentRef, StartChatFnProps } from '@/components/ChatBox/type.d';
|
||||
import PageContainer from '@/components/PageContainer';
|
||||
import SideBar from '@/components/SideBar';
|
||||
import ChatHistorySlider from './components/ChatHistorySlider';
|
||||
@@ -33,8 +34,9 @@ import { useUserStore } from '@/web/support/user/useUserStore';
|
||||
import { serviceSideProps } from '@/web/common/utils/i18n';
|
||||
import { useAppStore } from '@/web/core/app/store/useAppStore';
|
||||
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
|
||||
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
|
||||
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
|
||||
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
|
||||
|
||||
const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
|
||||
const router = useRouter();
|
||||
@@ -83,10 +85,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
|
||||
abortCtrl: controller
|
||||
});
|
||||
|
||||
const newTitle =
|
||||
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
|
||||
prompts[1]?.value?.slice(0, 20) ||
|
||||
t('core.chat.New Chat');
|
||||
const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats(prompts)[0]);
|
||||
|
||||
// new chat
|
||||
if (completionChatId !== chatId) {
|
||||
@@ -126,7 +125,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
|
||||
|
||||
return { responseText, responseData, isNewChat: forbidRefresh.current };
|
||||
},
|
||||
[appId, chatId, histories, pushHistory, router, setChatData, t, updateHistory]
|
||||
[appId, chatId, histories, pushHistory, router, setChatData, updateHistory]
|
||||
);
|
||||
|
||||
useQuery(['loadModels'], () => loadMyApps(false));
|
||||
|
||||
@@ -8,13 +8,14 @@ import { useQuery } from '@tanstack/react-query';
|
||||
import { streamFetch } from '@/web/common/api/fetch';
|
||||
import { useShareChatStore } from '@/web/core/chat/storeShareChat';
|
||||
import SideBar from '@/components/SideBar';
|
||||
import { gptMessage2ChatType } from '@/utils/adapt';
|
||||
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
|
||||
import { getErrText } from '@fastgpt/global/common/error/utils';
|
||||
import type { ChatHistoryItemType, ChatSiteItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { customAlphabet } from 'nanoid';
|
||||
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
|
||||
|
||||
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
|
||||
import ChatBox from '@/components/ChatBox';
|
||||
import type { ComponentRef, StartChatFnProps } from '@/components/ChatBox/type.d';
|
||||
import PageContainer from '@/components/PageContainer';
|
||||
import ChatHeader from './components/ChatHeader';
|
||||
import ChatHistorySlider from './components/ChatHistorySlider';
|
||||
@@ -22,9 +23,9 @@ import { serviceSideProps } from '@/web/common/utils/i18n';
|
||||
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import { getInitOutLinkChatInfo } from '@/web/core/chat/api';
|
||||
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
|
||||
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
|
||||
import { useChatStore } from '@/web/core/chat/storeChat';
|
||||
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { ChatRoleEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import MyBox from '@/components/common/MyBox';
|
||||
import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
|
||||
import { OutLinkWithAppType } from '@fastgpt/global/support/outLink/type';
|
||||
@@ -96,10 +97,7 @@ const OutLink = ({
|
||||
abortCtrl: controller
|
||||
});
|
||||
|
||||
const newTitle =
|
||||
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
|
||||
prompts[1]?.value?.slice(0, 20) ||
|
||||
t('core.chat.New Chat');
|
||||
const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats(prompts)[0]);
|
||||
|
||||
// new chat
|
||||
if (completionChatId !== chatId) {
|
||||
@@ -141,19 +139,17 @@ const OutLink = ({
|
||||
}));
|
||||
|
||||
/* post message to report result */
|
||||
const result: ChatSiteItemType[] = gptMessage2ChatType(prompts).map((item) => ({
|
||||
const result: ChatSiteItemType[] = GPTMessages2Chats(prompts).map((item) => ({
|
||||
...item,
|
||||
status: 'finish'
|
||||
}));
|
||||
result[1].value = responseText;
|
||||
result[1].responseData = responseData;
|
||||
|
||||
window.top?.postMessage(
|
||||
{
|
||||
type: 'shareChatFinish',
|
||||
data: {
|
||||
question: result[0]?.value,
|
||||
answer: result[1]?.value
|
||||
answer: responseText
|
||||
}
|
||||
},
|
||||
'*'
|
||||
@@ -166,7 +162,6 @@ const OutLink = ({
|
||||
customVariables,
|
||||
shareId,
|
||||
outLinkUid,
|
||||
t,
|
||||
setChatData,
|
||||
appId,
|
||||
pushHistory,
|
||||
|
||||
@@ -25,14 +25,16 @@ import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
|
||||
import { useChatStore } from '@/web/core/chat/storeChat';
|
||||
import { customAlphabet } from 'nanoid';
|
||||
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
|
||||
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
|
||||
import ChatBox from '@/components/ChatBox';
|
||||
import type { ComponentRef, StartChatFnProps } from '@/components/ChatBox/type.d';
|
||||
import { streamFetch } from '@/web/common/api/fetch';
|
||||
import type { ChatHistoryItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
|
||||
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
|
||||
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { getErrText } from '@fastgpt/global/common/error/utils';
|
||||
import MyBox from '@/components/common/MyBox';
|
||||
import SliderApps from './components/SliderApps';
|
||||
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
|
||||
|
||||
const OutLink = ({
|
||||
teamId,
|
||||
@@ -85,10 +87,8 @@ const OutLink = ({
|
||||
onMessage: generatingMessage,
|
||||
abortCtrl: controller
|
||||
});
|
||||
const newTitle =
|
||||
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
|
||||
prompts[1]?.value?.slice(0, 20) ||
|
||||
t('core.chat.New Chat');
|
||||
|
||||
const newTitle = getChatTitleFromChatMessage(GPTMessages2Chats(prompts)[0]);
|
||||
|
||||
// new chat
|
||||
if (completionChatId !== chatId) {
|
||||
@@ -130,18 +130,7 @@ const OutLink = ({
|
||||
|
||||
return { responseText, responseData, isNewChat: forbidRefresh.current };
|
||||
},
|
||||
[
|
||||
appId,
|
||||
teamToken,
|
||||
chatId,
|
||||
histories,
|
||||
pushHistory,
|
||||
router,
|
||||
setChatData,
|
||||
teamId,
|
||||
t,
|
||||
updateHistory
|
||||
]
|
||||
[appId, teamToken, chatId, histories, pushHistory, router, setChatData, teamId, updateHistory]
|
||||
);
|
||||
|
||||
/* replace router query to last chat */
|
||||
|
||||
@@ -65,7 +65,7 @@ function DataProcess({
|
||||
|
||||
return list.filter(([key, value]) => {
|
||||
if (feConfigs?.isPlus) return true;
|
||||
return value.isPlus;
|
||||
return value.openSource;
|
||||
});
|
||||
}, [feConfigs?.isPlus]);
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import { useRouter } from 'next/router';
|
||||
import { Dispatch, useRef } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 8);
|
||||
|
||||
interface Props {
|
||||
|
||||
@@ -4,7 +4,7 @@ import Header from './Header';
|
||||
import Flow from '@/components/core/module/Flow';
|
||||
import FlowProvider, { useFlowProviderStore } from '@/components/core/module/Flow/FlowProvider';
|
||||
import { FlowModuleTemplateType } from '@fastgpt/global/core/module/type.d';
|
||||
import { pluginSystemModuleTemplates } from '@/web/core/modules/template/system';
|
||||
import { pluginSystemModuleTemplates } from '@fastgpt/global/core/module/template/constants';
|
||||
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { serviceSideProps } from '@/web/common/utils/i18n';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
|
||||
export const initRunningModuleType: Record<string, boolean> = {
|
||||
[FlowNodeTypeEnum.historyNode]: true,
|
||||
[FlowNodeTypeEnum.questionInput]: true,
|
||||
[FlowNodeTypeEnum.pluginInput]: true
|
||||
};
|
||||
@@ -2,7 +2,7 @@ import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/sch
|
||||
import { pushQAUsage } from '@/service/support/wallet/usage/push';
|
||||
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
|
||||
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
|
||||
import { addLog } from '@fastgpt/service/common/system/log';
|
||||
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
|
||||
import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
@@ -101,7 +101,7 @@ export async function generateQA(): Promise<any> {
|
||||
${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
|
||||
|
||||
// request LLM to get QA
|
||||
const messages: ChatMessageItemType[] = [
|
||||
const messages: ChatCompletionMessageParam[] = [
|
||||
{
|
||||
role: 'user',
|
||||
content: prompt
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
|
||||
import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
|
||||
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { filterGPTMessageByMaxTokens } from '@fastgpt/service/core/chat/utils';
|
||||
import {
|
||||
countGptMessagesTokens,
|
||||
countMessagesTokens
|
||||
} from '@fastgpt/global/common/string/tiktoken';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import type {
|
||||
ClassifyQuestionAgentItemType,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
import { Prompt_CQJson } from '@/global/core/prompt/agent';
|
||||
@@ -16,6 +17,13 @@ import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
|
||||
import { getHistories } from '../utils';
|
||||
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
|
||||
import {
|
||||
ChatCompletionCreateParams,
|
||||
ChatCompletionMessageParam,
|
||||
ChatCompletionTool
|
||||
} from '@fastgpt/global/core/ai/type';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
|
||||
type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.aiModel]: string;
|
||||
@@ -24,9 +32,10 @@ type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.userChatInput]: string;
|
||||
[ModuleInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
|
||||
}>;
|
||||
type CQResponse = ModuleDispatchResponse<{
|
||||
type CQResponse = DispatchNodeResultType<{
|
||||
[key: string]: any;
|
||||
}>;
|
||||
type ActionProps = Props & { cqModel: LLMModelItemType };
|
||||
|
||||
const agentFunName = 'classify_question';
|
||||
|
||||
@@ -55,6 +64,13 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
|
||||
cqModel
|
||||
});
|
||||
}
|
||||
if (cqModel.functionCall) {
|
||||
return functionCall({
|
||||
...props,
|
||||
histories: chatHistories,
|
||||
cqModel
|
||||
});
|
||||
}
|
||||
return completions({
|
||||
...props,
|
||||
histories: chatHistories,
|
||||
@@ -72,7 +88,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
|
||||
|
||||
return {
|
||||
[result.key]: true,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
model: modelName,
|
||||
query: userChatInput,
|
||||
@@ -81,7 +97,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
|
||||
cqResult: result.value,
|
||||
contextTotalLen: chatHistories.length + 2
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
|
||||
{
|
||||
moduleName: name,
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
@@ -92,37 +108,43 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
|
||||
};
|
||||
};
|
||||
|
||||
async function toolChoice({
|
||||
user,
|
||||
const getFunctionCallSchema = ({
|
||||
cqModel,
|
||||
histories,
|
||||
params: { agents, systemPrompt, userChatInput }
|
||||
}: Props & { cqModel: LLMModelItemType }) {
|
||||
}: ActionProps) => {
|
||||
const messages: ChatItemType[] = [
|
||||
...histories,
|
||||
{
|
||||
obj: ChatRoleEnum.Human,
|
||||
value: systemPrompt
|
||||
? `<背景知识>
|
||||
${systemPrompt}
|
||||
</背景知识>
|
||||
|
||||
问题: "${userChatInput}"
|
||||
`
|
||||
: userChatInput
|
||||
value: [
|
||||
{
|
||||
type: ChatItemValueTypeEnum.text,
|
||||
text: {
|
||||
content: systemPrompt
|
||||
? `<背景知识>
|
||||
${systemPrompt}
|
||||
</背景知识>
|
||||
|
||||
问题: "${userChatInput}"
|
||||
`
|
||||
: userChatInput
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
const filterMessages = ChatContextFilter({
|
||||
messages,
|
||||
const adaptMessages = chats2GPTMessages({ messages, reserveId: false });
|
||||
const filterMessages = filterGPTMessageByMaxTokens({
|
||||
messages: adaptMessages,
|
||||
maxTokens: cqModel.maxContext
|
||||
});
|
||||
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
|
||||
|
||||
// function body
|
||||
const agentFunction = {
|
||||
name: agentFunName,
|
||||
description: '根据对话记录及背景知识,对问题进行分类,并返回对应的类型字段',
|
||||
description: '结合对话记录及背景知识,对问题进行分类,并返回对应的类型字段',
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
@@ -137,7 +159,19 @@ ${systemPrompt}
|
||||
required: ['type']
|
||||
}
|
||||
};
|
||||
const tools: any = [
|
||||
|
||||
return {
|
||||
agentFunction,
|
||||
filterMessages
|
||||
};
|
||||
};
|
||||
|
||||
const toolChoice = async (props: ActionProps) => {
|
||||
const { user, cqModel } = props;
|
||||
|
||||
const { agentFunction, filterMessages } = getFunctionCallSchema(props);
|
||||
// function body
|
||||
const tools: ChatCompletionTool[] = [
|
||||
{
|
||||
type: 'function',
|
||||
function: agentFunction
|
||||
@@ -152,7 +186,7 @@ ${systemPrompt}
|
||||
const response = await ai.chat.completions.create({
|
||||
model: cqModel.model,
|
||||
temperature: 0,
|
||||
messages: adaptMessages,
|
||||
messages: filterMessages,
|
||||
tools,
|
||||
tool_choice: { type: 'function', function: { name: agentFunName } }
|
||||
});
|
||||
@@ -161,13 +195,19 @@ ${systemPrompt}
|
||||
const arg = JSON.parse(
|
||||
response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || ''
|
||||
);
|
||||
const completeMessages: ChatCompletionMessageParam[] = [
|
||||
...filterMessages,
|
||||
{
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
tool_calls: response.choices?.[0]?.message?.tool_calls
|
||||
}
|
||||
];
|
||||
|
||||
return {
|
||||
arg,
|
||||
tokens: countMessagesTokens(messages, tools)
|
||||
tokens: countGptMessagesTokens(completeMessages, tools)
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(agentFunction.parameters);
|
||||
console.log(response.choices?.[0]?.message);
|
||||
|
||||
console.log('Your model may not support toll_call', error);
|
||||
@@ -177,25 +217,79 @@ ${systemPrompt}
|
||||
tokens: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
async function completions({
|
||||
const functionCall = async (props: ActionProps) => {
|
||||
const { user, cqModel } = props;
|
||||
|
||||
const { agentFunction, filterMessages } = getFunctionCallSchema(props);
|
||||
const functions: ChatCompletionCreateParams.Function[] = [agentFunction];
|
||||
|
||||
const ai = getAIApi({
|
||||
userKey: user.openaiAccount,
|
||||
timeout: 480000
|
||||
});
|
||||
|
||||
const response = await ai.chat.completions.create({
|
||||
model: cqModel.model,
|
||||
temperature: 0,
|
||||
messages: filterMessages,
|
||||
function_call: {
|
||||
name: agentFunName
|
||||
},
|
||||
functions
|
||||
});
|
||||
|
||||
try {
|
||||
const arg = JSON.parse(response?.choices?.[0]?.message?.function_call?.arguments || '');
|
||||
const completeMessages: ChatCompletionMessageParam[] = [
|
||||
...filterMessages,
|
||||
{
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
function_call: response.choices?.[0]?.message?.function_call
|
||||
}
|
||||
];
|
||||
|
||||
return {
|
||||
arg,
|
||||
tokens: countGptMessagesTokens(completeMessages, undefined, functions)
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(response.choices?.[0]?.message);
|
||||
|
||||
console.log('Your model may not support toll_call', error);
|
||||
|
||||
return {
|
||||
arg: {},
|
||||
tokens: 0
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const completions = async ({
|
||||
cqModel,
|
||||
user,
|
||||
histories,
|
||||
params: { agents, systemPrompt = '', userChatInput }
|
||||
}: Props & { cqModel: LLMModelItemType }) {
|
||||
}: ActionProps) => {
|
||||
const messages: ChatItemType[] = [
|
||||
{
|
||||
obj: ChatRoleEnum.Human,
|
||||
value: replaceVariable(cqModel.customCQPrompt || Prompt_CQJson, {
|
||||
systemPrompt: systemPrompt || 'null',
|
||||
typeList: agents
|
||||
.map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
|
||||
.join('\n'),
|
||||
history: histories.map((item) => `${item.obj}:${item.value}`).join('\n'),
|
||||
question: userChatInput
|
||||
})
|
||||
value: [
|
||||
{
|
||||
type: ChatItemValueTypeEnum.text,
|
||||
text: {
|
||||
content: replaceVariable(cqModel.customCQPrompt || Prompt_CQJson, {
|
||||
systemPrompt: systemPrompt || 'null',
|
||||
typeList: agents
|
||||
.map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
|
||||
.join('\n'),
|
||||
history: histories.map((item) => `${item.obj}:${item.value}`).join('\n'),
|
||||
question: userChatInput
|
||||
})
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
@@ -207,7 +301,7 @@ async function completions({
|
||||
const data = await ai.chat.completions.create({
|
||||
model: cqModel.model,
|
||||
temperature: 0.01,
|
||||
messages: adaptChat2GptMessages({ messages, reserveId: false }),
|
||||
messages: chats2GPTMessages({ messages, reserveId: false }),
|
||||
stream: false
|
||||
});
|
||||
const answer = data.choices?.[0].message?.content || '';
|
||||
@@ -219,4 +313,4 @@ async function completions({
|
||||
tokens: countMessagesTokens(messages),
|
||||
arg: { type: id }
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
|
||||
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { filterGPTMessageByMaxTokens } from '@fastgpt/service/core/chat/utils';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import {
|
||||
countGptMessagesTokens,
|
||||
countMessagesTokens
|
||||
} from '@fastgpt/global/common/string/tiktoken';
|
||||
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import type {
|
||||
ContextExtractAgentItemType,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type';
|
||||
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/module/type';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
|
||||
import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
@@ -17,6 +18,13 @@ import { getHistories } from '../utils';
|
||||
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
|
||||
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
|
||||
import json5 from 'json5';
|
||||
import {
|
||||
ChatCompletionCreateParams,
|
||||
ChatCompletionMessageParam,
|
||||
ChatCompletionTool
|
||||
} from '@fastgpt/global/core/ai/type';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
|
||||
type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.history]?: ChatItemType[];
|
||||
@@ -25,12 +33,14 @@ type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.description]: string;
|
||||
[ModuleInputKeyEnum.aiModel]: string;
|
||||
}>;
|
||||
type Response = ModuleDispatchResponse<{
|
||||
type Response = DispatchNodeResultType<{
|
||||
[ModuleOutputKeyEnum.success]?: boolean;
|
||||
[ModuleOutputKeyEnum.failed]?: boolean;
|
||||
[ModuleOutputKeyEnum.contextExtractFields]: string;
|
||||
}>;
|
||||
|
||||
type ActionProps = Props & { extractModel: LLMModelItemType };
|
||||
|
||||
const agentFunName = 'extract_json_data';
|
||||
|
||||
export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
@@ -56,6 +66,13 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
extractModel
|
||||
});
|
||||
}
|
||||
if (extractModel.functionCall) {
|
||||
return functionCall({
|
||||
...props,
|
||||
histories: chatHistories,
|
||||
extractModel
|
||||
});
|
||||
}
|
||||
return completions({
|
||||
...props,
|
||||
histories: chatHistories,
|
||||
@@ -105,7 +122,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
[ModuleOutputKeyEnum.failed]: success ? undefined : true,
|
||||
[ModuleOutputKeyEnum.contextExtractFields]: JSON.stringify(arg),
|
||||
...arg,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
model: modelName,
|
||||
query: content,
|
||||
@@ -114,7 +131,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
extractResult: arg,
|
||||
contextTotalLen: chatHistories.length + 2
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
|
||||
{
|
||||
moduleName: name,
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
@@ -125,30 +142,36 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
};
|
||||
}
|
||||
|
||||
async function toolChoice({
|
||||
const getFunctionCallSchema = ({
|
||||
extractModel,
|
||||
user,
|
||||
histories,
|
||||
params: { content, extractKeys, description }
|
||||
}: Props & { extractModel: LLMModelItemType }) {
|
||||
}: ActionProps) => {
|
||||
const messages: ChatItemType[] = [
|
||||
...histories,
|
||||
{
|
||||
obj: ChatRoleEnum.Human,
|
||||
value: `你的任务是根据上下文获取适当的 JSON 字符串。要求:
|
||||
"""
|
||||
- 字符串不要换行。
|
||||
- 结合上下文和当前问题进行获取。
|
||||
"""
|
||||
|
||||
当前问题: "${content}"`
|
||||
value: [
|
||||
{
|
||||
type: ChatItemValueTypeEnum.text,
|
||||
text: {
|
||||
content: `你的任务是根据上下文获取适当的 JSON 字符串。要求:
|
||||
"""
|
||||
- 字符串不要换行。
|
||||
- 结合上下文和当前问题进行获取。
|
||||
"""
|
||||
|
||||
当前问题: "${content}"`
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
const filterMessages = ChatContextFilter({
|
||||
messages,
|
||||
const adaptMessages = chats2GPTMessages({ messages, reserveId: false });
|
||||
const filterMessages = filterGPTMessageByMaxTokens({
|
||||
messages: adaptMessages,
|
||||
maxTokens: extractModel.maxContext
|
||||
});
|
||||
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
|
||||
|
||||
const properties: Record<
|
||||
string,
|
||||
@@ -164,7 +187,6 @@ async function toolChoice({
|
||||
...(item.enum ? { enum: item.enum.split('\n') } : {})
|
||||
};
|
||||
});
|
||||
|
||||
// function body
|
||||
const agentFunction = {
|
||||
name: agentFunName,
|
||||
@@ -174,7 +196,19 @@ async function toolChoice({
|
||||
properties
|
||||
}
|
||||
};
|
||||
const tools: any = [
|
||||
|
||||
return {
|
||||
filterMessages,
|
||||
agentFunction
|
||||
};
|
||||
};
|
||||
|
||||
const toolChoice = async (props: ActionProps) => {
|
||||
const { user, extractModel } = props;
|
||||
|
||||
const { filterMessages, agentFunction } = getFunctionCallSchema(props);
|
||||
|
||||
const tools: ChatCompletionTool[] = [
|
||||
{
|
||||
type: 'function',
|
||||
function: agentFunction
|
||||
@@ -189,7 +223,7 @@ async function toolChoice({
|
||||
const response = await ai.chat.completions.create({
|
||||
model: extractModel.model,
|
||||
temperature: 0,
|
||||
messages: [...adaptMessages],
|
||||
messages: filterMessages,
|
||||
tools,
|
||||
tool_choice: { type: 'function', function: { name: agentFunName } }
|
||||
});
|
||||
@@ -207,35 +241,96 @@ async function toolChoice({
|
||||
}
|
||||
})();
|
||||
|
||||
const completeMessages: ChatCompletionMessageParam[] = [
|
||||
...filterMessages,
|
||||
{
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
tool_calls: response.choices?.[0]?.message?.tool_calls
|
||||
}
|
||||
];
|
||||
|
||||
return {
|
||||
rawResponse: response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || '',
|
||||
tokens: countMessagesTokens(messages, tools),
|
||||
tokens: countGptMessagesTokens(completeMessages, tools),
|
||||
arg
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
async function completions({
|
||||
const functionCall = async (props: ActionProps) => {
|
||||
const { user, extractModel } = props;
|
||||
|
||||
const { agentFunction, filterMessages } = getFunctionCallSchema(props);
|
||||
const functions: ChatCompletionCreateParams.Function[] = [agentFunction];
|
||||
|
||||
const ai = getAIApi({
|
||||
userKey: user.openaiAccount,
|
||||
timeout: 480000
|
||||
});
|
||||
|
||||
const response = await ai.chat.completions.create({
|
||||
model: extractModel.model,
|
||||
temperature: 0,
|
||||
messages: filterMessages,
|
||||
function_call: {
|
||||
name: agentFunName
|
||||
},
|
||||
functions
|
||||
});
|
||||
|
||||
try {
|
||||
const arg = JSON.parse(response?.choices?.[0]?.message?.function_call?.arguments || '');
|
||||
const completeMessages: ChatCompletionMessageParam[] = [
|
||||
...filterMessages,
|
||||
{
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
function_call: response.choices?.[0]?.message?.function_call
|
||||
}
|
||||
];
|
||||
|
||||
return {
|
||||
arg,
|
||||
tokens: countGptMessagesTokens(completeMessages, undefined, functions)
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(response.choices?.[0]?.message);
|
||||
|
||||
console.log('Your model may not support toll_call', error);
|
||||
|
||||
return {
|
||||
arg: {},
|
||||
tokens: 0
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const completions = async ({
|
||||
extractModel,
|
||||
user,
|
||||
histories,
|
||||
params: { content, extractKeys, description }
|
||||
}: Props & { extractModel: LLMModelItemType }) {
|
||||
}: ActionProps) => {
|
||||
const messages: ChatItemType[] = [
|
||||
{
|
||||
obj: ChatRoleEnum.Human,
|
||||
value: replaceVariable(extractModel.customExtractPrompt || Prompt_ExtractJson, {
|
||||
description,
|
||||
json: extractKeys
|
||||
.map(
|
||||
(item) =>
|
||||
`{"key":"${item.key}", "description":"${item.desc}"${
|
||||
item.enum ? `, "enum":"[${item.enum.split('\n')}]"` : ''
|
||||
}}`
|
||||
)
|
||||
.join('\n'),
|
||||
text: `${histories.map((item) => `${item.obj}:${item.value}`).join('\n')}
|
||||
Human: ${content}`
|
||||
})
|
||||
value: [
|
||||
{
|
||||
type: ChatItemValueTypeEnum.text,
|
||||
text: {
|
||||
content: replaceVariable(extractModel.customExtractPrompt || Prompt_ExtractJson, {
|
||||
description,
|
||||
json: extractKeys
|
||||
.map(
|
||||
(item) =>
|
||||
`{"key":"${item.key}", "description":"${item.desc}"${
|
||||
item.enum ? `, "enum":"[${item.enum.split('\n')}]"` : ''
|
||||
}}`
|
||||
)
|
||||
.join('\n'),
|
||||
text: `${histories.map((item) => `${item.obj}:${item.value}`).join('\n')}
|
||||
Human: ${content}`
|
||||
})
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
@@ -246,7 +341,7 @@ Human: ${content}`
|
||||
const data = await ai.chat.completions.create({
|
||||
model: extractModel.model,
|
||||
temperature: 0.01,
|
||||
messages: adaptChat2GptMessages({ messages, reserveId: false }),
|
||||
messages: chats2GPTMessages({ messages, reserveId: false }),
|
||||
stream: false
|
||||
});
|
||||
const answer = data.choices?.[0].message?.content || '';
|
||||
@@ -276,4 +371,4 @@ Human: ${content}`
|
||||
arg: {}
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -0,0 +1,359 @@
|
||||
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import { filterGPTMessageByMaxTokens } from '@fastgpt/service/core/chat/utils';
|
||||
import {
|
||||
ChatCompletion,
|
||||
StreamChatType,
|
||||
ChatCompletionMessageParam,
|
||||
ChatCompletionCreateParams,
|
||||
ChatCompletionMessageFunctionCall,
|
||||
ChatCompletionFunctionMessageParam,
|
||||
ChatCompletionAssistantMessageParam
|
||||
} from '@fastgpt/global/core/ai/type';
|
||||
import { NextApiResponse } from 'next';
|
||||
import {
|
||||
responseWrite,
|
||||
responseWriteController,
|
||||
responseWriteNodeStatus
|
||||
} from '@fastgpt/service/common/response';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
|
||||
import { dispatchWorkFlow } from '../../index';
|
||||
import { DispatchToolModuleProps, RunToolResponse, ToolModuleItemType } from './type.d';
|
||||
import json5 from 'json5';
|
||||
import { DispatchFlowResponse } from '../../type';
|
||||
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
|
||||
import { getNanoid } from '@fastgpt/global/common/string/tools';
|
||||
|
||||
type ToolRunResponseType = {
|
||||
moduleRunResponse: DispatchFlowResponse;
|
||||
functionCallMsg: ChatCompletionFunctionMessageParam;
|
||||
}[];
|
||||
|
||||
export const runToolWithFunctionCall = async (
|
||||
props: DispatchToolModuleProps & {
|
||||
messages: ChatCompletionMessageParam[];
|
||||
toolModules: ToolModuleItemType[];
|
||||
toolModel: LLMModelItemType;
|
||||
},
|
||||
response?: RunToolResponse
|
||||
): Promise<RunToolResponse> => {
|
||||
const {
|
||||
toolModel,
|
||||
toolModules,
|
||||
messages,
|
||||
res,
|
||||
runtimeModules,
|
||||
detail = false,
|
||||
module,
|
||||
stream
|
||||
} = props;
|
||||
|
||||
const functions: ChatCompletionCreateParams.Function[] = toolModules.map((module) => {
|
||||
const properties: Record<
|
||||
string,
|
||||
{
|
||||
type: string;
|
||||
description: string;
|
||||
required?: boolean;
|
||||
}
|
||||
> = {};
|
||||
module.toolParams.forEach((item) => {
|
||||
properties[item.key] = {
|
||||
type: 'string',
|
||||
description: item.toolDescription || ''
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
name: module.moduleId,
|
||||
description: module.intro,
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties,
|
||||
required: module.toolParams.filter((item) => item.required).map((item) => item.key)
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
const filterMessages = filterGPTMessageByMaxTokens({
|
||||
messages,
|
||||
maxTokens: toolModel.maxContext - 500 // filter token. not response maxToken
|
||||
});
|
||||
|
||||
/* Run llm */
|
||||
const ai = getAIApi({
|
||||
timeout: 480000
|
||||
});
|
||||
const aiResponse = await ai.chat.completions.create(
|
||||
{
|
||||
...toolModel?.defaultConfig,
|
||||
model: toolModel.model,
|
||||
temperature: 0,
|
||||
stream,
|
||||
messages: filterMessages,
|
||||
functions,
|
||||
function_call: 'auto'
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Accept: 'application/json, text/plain, */*'
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const { answer, functionCalls } = await (async () => {
|
||||
if (stream) {
|
||||
return streamResponse({
|
||||
res,
|
||||
detail,
|
||||
toolModules,
|
||||
stream: aiResponse
|
||||
});
|
||||
} else {
|
||||
const result = aiResponse as ChatCompletion;
|
||||
const function_call = result.choices?.[0]?.message?.function_call;
|
||||
const toolModule = toolModules.find((module) => module.moduleId === function_call?.name);
|
||||
|
||||
const toolCalls = function_call
|
||||
? [
|
||||
{
|
||||
...function_call,
|
||||
id: getNanoid(),
|
||||
toolName: toolModule?.name,
|
||||
toolAvatar: toolModule?.avatar
|
||||
}
|
||||
]
|
||||
: [];
|
||||
|
||||
return {
|
||||
answer: result.choices?.[0]?.message?.content || '',
|
||||
functionCalls: toolCalls
|
||||
};
|
||||
}
|
||||
})();
|
||||
|
||||
// Run the selected tool.
|
||||
const toolsRunResponse = (
|
||||
await Promise.all(
|
||||
functionCalls.map(async (tool) => {
|
||||
if (!tool) return;
|
||||
|
||||
const toolModule = toolModules.find((module) => module.moduleId === tool.name);
|
||||
|
||||
if (!toolModule) return;
|
||||
|
||||
const startParams = (() => {
|
||||
try {
|
||||
return json5.parse(tool.arguments);
|
||||
} catch (error) {
|
||||
return {};
|
||||
}
|
||||
})();
|
||||
|
||||
const moduleRunResponse = await dispatchWorkFlow({
|
||||
...props,
|
||||
runtimeModules: runtimeModules.map((module) => ({
|
||||
...module,
|
||||
isEntry: module.moduleId === toolModule.moduleId
|
||||
})),
|
||||
startParams
|
||||
});
|
||||
|
||||
const functionCallMsg: ChatCompletionFunctionMessageParam = {
|
||||
role: ChatCompletionRequestMessageRoleEnum.Function,
|
||||
name: tool.name,
|
||||
content: JSON.stringify(moduleRunResponse.toolResponses, null, 2)
|
||||
};
|
||||
|
||||
if (stream && detail) {
|
||||
responseWrite({
|
||||
res,
|
||||
event: SseResponseEventEnum.toolResponse,
|
||||
data: JSON.stringify({
|
||||
tool: {
|
||||
id: tool.id,
|
||||
toolName: '',
|
||||
toolAvatar: '',
|
||||
params: '',
|
||||
response: JSON.stringify(moduleRunResponse.toolResponses, null, 2)
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
moduleRunResponse,
|
||||
functionCallMsg
|
||||
};
|
||||
})
|
||||
)
|
||||
).filter(Boolean) as ToolRunResponseType;
|
||||
|
||||
const flatToolsResponseData = toolsRunResponse.map((item) => item.moduleRunResponse).flat();
|
||||
|
||||
const functionCall = functionCalls[0];
|
||||
if (functionCall && !res.closed) {
|
||||
// Run the tool, combine its results, and perform another round of AI calls
|
||||
const assistantToolMsgParams: ChatCompletionAssistantMessageParam = {
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
function_call: functionCall
|
||||
};
|
||||
const concatToolMessages = [
|
||||
...filterMessages,
|
||||
assistantToolMsgParams
|
||||
] as ChatCompletionMessageParam[];
|
||||
|
||||
const tokens = countGptMessagesTokens(concatToolMessages, undefined, functions);
|
||||
|
||||
// console.log(tokens, 'tool');
|
||||
|
||||
if (stream && detail) {
|
||||
responseWriteNodeStatus({
|
||||
res,
|
||||
name: module.name
|
||||
});
|
||||
}
|
||||
|
||||
return runToolWithFunctionCall(
|
||||
{
|
||||
...props,
|
||||
messages: [...concatToolMessages, ...toolsRunResponse.map((item) => item?.functionCallMsg)]
|
||||
},
|
||||
{
|
||||
dispatchFlowResponse: response
|
||||
? response.dispatchFlowResponse.concat(flatToolsResponseData)
|
||||
: flatToolsResponseData,
|
||||
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// No tool is invoked, indicating that the process is over
|
||||
const completeMessages = filterMessages.concat({
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
content: answer
|
||||
});
|
||||
|
||||
const tokens = countGptMessagesTokens(completeMessages, undefined, functions);
|
||||
|
||||
// console.log(tokens, 'response token');
|
||||
|
||||
return {
|
||||
dispatchFlowResponse: response?.dispatchFlowResponse || [],
|
||||
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
|
||||
completeMessages
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
async function streamResponse({
|
||||
res,
|
||||
detail,
|
||||
toolModules,
|
||||
stream
|
||||
}: {
|
||||
res: NextApiResponse;
|
||||
detail: boolean;
|
||||
toolModules: ToolModuleItemType[];
|
||||
stream: StreamChatType;
|
||||
}) {
|
||||
const write = responseWriteController({
|
||||
res,
|
||||
readStream: stream
|
||||
});
|
||||
|
||||
let textAnswer = '';
|
||||
let functionCalls: ChatCompletionMessageFunctionCall[] = [];
|
||||
let functionId = getNanoid();
|
||||
|
||||
for await (const part of stream) {
|
||||
if (res.closed) {
|
||||
stream.controller?.abort();
|
||||
break;
|
||||
}
|
||||
|
||||
const responseChoice = part.choices?.[0]?.delta;
|
||||
if (responseChoice.content) {
|
||||
const content = responseChoice?.content || '';
|
||||
textAnswer += content;
|
||||
|
||||
responseWrite({
|
||||
write,
|
||||
event: detail ? SseResponseEventEnum.answer : undefined,
|
||||
data: textAdaptGptResponse({
|
||||
text: content
|
||||
})
|
||||
});
|
||||
} else if (responseChoice.function_call) {
|
||||
const functionCall: {
|
||||
arguments: string;
|
||||
name?: string;
|
||||
} = responseChoice.function_call;
|
||||
|
||||
// 流响应中,每次只会返回一个函数,如果带了name,说明触发某个函数
|
||||
if (functionCall?.name) {
|
||||
functionId = getNanoid();
|
||||
const toolModule = toolModules.find((module) => module.moduleId === functionCall?.name);
|
||||
|
||||
if (toolModule) {
|
||||
if (functionCall?.arguments === undefined) {
|
||||
functionCall.arguments = '';
|
||||
}
|
||||
functionCalls.push({
|
||||
...functionCall,
|
||||
id: functionId,
|
||||
name: functionCall.name,
|
||||
toolName: toolModule.name,
|
||||
toolAvatar: toolModule.avatar
|
||||
});
|
||||
|
||||
if (detail) {
|
||||
responseWrite({
|
||||
write,
|
||||
event: SseResponseEventEnum.toolCall,
|
||||
data: JSON.stringify({
|
||||
tool: {
|
||||
id: functionId,
|
||||
toolName: toolModule.name,
|
||||
toolAvatar: toolModule.avatar,
|
||||
functionName: functionCall.name,
|
||||
params: functionCall.arguments,
|
||||
response: ''
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
/* arg 插入最后一个工具的参数里 */
|
||||
const arg: string = functionCall?.arguments || '';
|
||||
const currentTool = functionCalls[functionCalls.length - 1];
|
||||
if (currentTool) {
|
||||
currentTool.arguments += arg;
|
||||
|
||||
if (detail) {
|
||||
responseWrite({
|
||||
write,
|
||||
event: SseResponseEventEnum.toolParams,
|
||||
data: JSON.stringify({
|
||||
tool: {
|
||||
id: functionId,
|
||||
toolName: '',
|
||||
toolAvatar: '',
|
||||
params: arg,
|
||||
response: ''
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!textAnswer && functionCalls.length === 0) {
|
||||
return Promise.reject('LLM api response empty');
|
||||
}
|
||||
|
||||
return { answer: textAnswer, functionCalls };
|
||||
}
|
||||
147
projects/app/src/service/moduleDispatch/agent/runTool/index.ts
Normal file
147
projects/app/src/service/moduleDispatch/agent/runTool/index.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import type {
|
||||
DispatchNodeResultType,
|
||||
RunningModuleItemType
|
||||
} from '@fastgpt/global/core/module/runtime/type';
|
||||
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
|
||||
import { getHistories } from '../../utils';
|
||||
import { runToolWithToolChoice } from './toolChoice';
|
||||
import { DispatchToolModuleProps, ToolModuleItemType } from './type.d';
|
||||
import { ChatItemType } from '@fastgpt/global/core/chat/type';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import {
|
||||
GPTMessages2Chats,
|
||||
chats2GPTMessages,
|
||||
getSystemPrompt,
|
||||
runtimePrompt2ChatsValue
|
||||
} from '@fastgpt/global/core/chat/adapt';
|
||||
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
|
||||
import { getHistoryPreview } from '@fastgpt/global/core/chat/utils';
|
||||
import { runToolWithFunctionCall } from './functionCall';
|
||||
|
||||
type Response = DispatchNodeResultType<{}>;
|
||||
|
||||
export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<Response> => {
|
||||
const {
|
||||
module: { name, outputs },
|
||||
runtimeModules,
|
||||
histories,
|
||||
params: { model, systemPrompt, userChatInput, history = 6 }
|
||||
} = props;
|
||||
|
||||
const toolModel = getLLMModel(model);
|
||||
const chatHistories = getHistories(history, histories);
|
||||
|
||||
/* get tool params */
|
||||
|
||||
// get tool output targets
|
||||
const toolOutput = outputs.find((output) => output.key === ModuleOutputKeyEnum.selectedTools);
|
||||
|
||||
if (!toolOutput) {
|
||||
return Promise.reject('No tool output found');
|
||||
}
|
||||
|
||||
const targets = toolOutput.targets;
|
||||
|
||||
// Gets the module to which the tool is connected
|
||||
const toolModules = targets
|
||||
.map((item) => {
|
||||
const tool = runtimeModules.find((module) => module.moduleId === item.moduleId);
|
||||
return tool;
|
||||
})
|
||||
.filter(Boolean)
|
||||
.map<ToolModuleItemType>((tool) => {
|
||||
const toolParams = tool?.inputs.filter((input) => !!input.toolDescription) || [];
|
||||
return {
|
||||
...(tool as RunningModuleItemType),
|
||||
toolParams
|
||||
};
|
||||
});
|
||||
|
||||
const messages: ChatItemType[] = [
|
||||
...getSystemPrompt(systemPrompt),
|
||||
...chatHistories,
|
||||
{
|
||||
obj: ChatRoleEnum.Human,
|
||||
value: runtimePrompt2ChatsValue({
|
||||
text: userChatInput,
|
||||
files: []
|
||||
})
|
||||
}
|
||||
];
|
||||
|
||||
const {
|
||||
dispatchFlowResponse,
|
||||
totalTokens,
|
||||
completeMessages = []
|
||||
} = await (async () => {
|
||||
if (toolModel.toolChoice) {
|
||||
return runToolWithToolChoice({
|
||||
...props,
|
||||
toolModules,
|
||||
toolModel,
|
||||
messages: chats2GPTMessages({ messages, reserveId: false })
|
||||
});
|
||||
}
|
||||
if (toolModel.functionCall) {
|
||||
return runToolWithFunctionCall({
|
||||
...props,
|
||||
toolModules,
|
||||
toolModel,
|
||||
messages: chats2GPTMessages({ messages, reserveId: false })
|
||||
});
|
||||
}
|
||||
return {
|
||||
dispatchFlowResponse: [],
|
||||
totalTokens: 0,
|
||||
completeMessages: []
|
||||
};
|
||||
})();
|
||||
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
model,
|
||||
tokens: totalTokens,
|
||||
modelType: ModelTypeEnum.llm
|
||||
});
|
||||
|
||||
const adaptMessages = GPTMessages2Chats(completeMessages);
|
||||
//@ts-ignore
|
||||
const startIndex = adaptMessages.findLastIndex((item) => item.obj === ChatRoleEnum.Human);
|
||||
const assistantResponse = adaptMessages.slice(startIndex + 1);
|
||||
|
||||
// flat child tool response
|
||||
const childToolResponse = dispatchFlowResponse.map((item) => item.flowResponses).flat();
|
||||
|
||||
// concat tool usage
|
||||
const totalPointsUsage =
|
||||
totalPoints +
|
||||
dispatchFlowResponse.reduce((sum, item) => {
|
||||
const childrenTotal = item.flowUsages.reduce((sum, item) => sum + item.totalPoints, 0);
|
||||
return sum + childrenTotal;
|
||||
}, 0);
|
||||
const flatUsages = dispatchFlowResponse.map((item) => item.flowUsages).flat();
|
||||
|
||||
return {
|
||||
[DispatchNodeResponseKeyEnum.assistantResponses]: assistantResponse
|
||||
.map((item) => item.value)
|
||||
.flat(),
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: totalPointsUsage,
|
||||
toolCallTokens: totalTokens,
|
||||
model: modelName,
|
||||
query: userChatInput,
|
||||
historyPreview: getHistoryPreview(GPTMessages2Chats(completeMessages, false)),
|
||||
toolDetail: childToolResponse
|
||||
},
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
|
||||
{
|
||||
moduleName: name,
|
||||
totalPoints,
|
||||
model: modelName,
|
||||
tokens: totalTokens
|
||||
},
|
||||
...flatUsages
|
||||
]
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,371 @@
|
||||
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import { filterGPTMessageByMaxTokens } from '@fastgpt/service/core/chat/utils';
|
||||
import {
|
||||
ChatCompletion,
|
||||
ChatCompletionMessageToolCall,
|
||||
StreamChatType,
|
||||
ChatCompletionToolMessageParam,
|
||||
ChatCompletionAssistantToolParam,
|
||||
ChatCompletionMessageParam,
|
||||
ChatCompletionTool
|
||||
} from '@fastgpt/global/core/ai/type';
|
||||
import { NextApiResponse } from 'next';
|
||||
import {
|
||||
responseWrite,
|
||||
responseWriteController,
|
||||
responseWriteNodeStatus
|
||||
} from '@fastgpt/service/common/response';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
|
||||
import { dispatchWorkFlow } from '../../index';
|
||||
import { DispatchToolModuleProps, RunToolResponse, ToolModuleItemType } from './type.d';
|
||||
import json5 from 'json5';
|
||||
import { DispatchFlowResponse } from '../../type';
|
||||
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
|
||||
|
||||
type ToolRunResponseType = {
|
||||
moduleRunResponse: DispatchFlowResponse;
|
||||
toolMsgParams: ChatCompletionToolMessageParam;
|
||||
}[];
|
||||
|
||||
export const runToolWithToolChoice = async (
|
||||
props: DispatchToolModuleProps & {
|
||||
messages: ChatCompletionMessageParam[];
|
||||
toolModules: ToolModuleItemType[];
|
||||
toolModel: LLMModelItemType;
|
||||
},
|
||||
response?: RunToolResponse
|
||||
): Promise<RunToolResponse> => {
|
||||
const {
|
||||
toolModel,
|
||||
toolModules,
|
||||
messages,
|
||||
res,
|
||||
runtimeModules,
|
||||
detail = false,
|
||||
module,
|
||||
stream
|
||||
} = props;
|
||||
|
||||
const tools: ChatCompletionTool[] = toolModules.map((module) => {
|
||||
const properties: Record<
|
||||
string,
|
||||
{
|
||||
type: string;
|
||||
description: string;
|
||||
required?: boolean;
|
||||
}
|
||||
> = {};
|
||||
module.toolParams.forEach((item) => {
|
||||
properties[item.key] = {
|
||||
type: 'string',
|
||||
description: item.toolDescription || ''
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
type: 'function',
|
||||
function: {
|
||||
name: module.moduleId,
|
||||
description: module.intro,
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties,
|
||||
required: module.toolParams.filter((item) => item.required).map((item) => item.key)
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
const filterMessages = filterGPTMessageByMaxTokens({
|
||||
messages,
|
||||
maxTokens: toolModel.maxContext - 300 // filter token. not response maxToken
|
||||
});
|
||||
|
||||
/* Run llm */
|
||||
const ai = getAIApi({
|
||||
timeout: 480000
|
||||
});
|
||||
const aiResponse = await ai.chat.completions.create(
|
||||
{
|
||||
...toolModel?.defaultConfig,
|
||||
model: toolModel.model,
|
||||
temperature: 0,
|
||||
stream,
|
||||
messages: filterMessages,
|
||||
tools,
|
||||
tool_choice: 'auto'
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
Accept: 'application/json, text/plain, */*'
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const { answer, toolCalls } = await (async () => {
|
||||
if (stream) {
|
||||
return streamResponse({
|
||||
res,
|
||||
detail,
|
||||
toolModules,
|
||||
stream: aiResponse
|
||||
});
|
||||
} else {
|
||||
const result = aiResponse as ChatCompletion;
|
||||
const calls = result.choices?.[0]?.message?.tool_calls || [];
|
||||
|
||||
// 加上name和avatar
|
||||
const toolCalls = calls.map((tool) => {
|
||||
const toolModule = toolModules.find((module) => module.moduleId === tool.function?.name);
|
||||
return {
|
||||
...tool,
|
||||
toolName: toolModule?.name || '',
|
||||
toolAvatar: toolModule?.avatar || ''
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
answer: result.choices?.[0]?.message?.content || '',
|
||||
toolCalls: toolCalls
|
||||
};
|
||||
}
|
||||
})();
|
||||
|
||||
// Run the selected tool.
|
||||
const toolsRunResponse = (
|
||||
await Promise.all(
|
||||
toolCalls.map(async (tool) => {
|
||||
const toolModule = toolModules.find((module) => module.moduleId === tool.function?.name);
|
||||
|
||||
if (!toolModule) return;
|
||||
|
||||
const startParams = (() => {
|
||||
try {
|
||||
return json5.parse(tool.function.arguments);
|
||||
} catch (error) {
|
||||
return {};
|
||||
}
|
||||
})();
|
||||
|
||||
const moduleRunResponse = await dispatchWorkFlow({
|
||||
...props,
|
||||
runtimeModules: runtimeModules.map((module) => ({
|
||||
...module,
|
||||
isEntry: module.moduleId === toolModule.moduleId
|
||||
})),
|
||||
startParams
|
||||
});
|
||||
|
||||
const toolMsgParams: ChatCompletionToolMessageParam = {
|
||||
tool_call_id: tool.id,
|
||||
role: ChatCompletionRequestMessageRoleEnum.Tool,
|
||||
name: tool.function.name,
|
||||
content: JSON.stringify(moduleRunResponse.toolResponses, null, 2)
|
||||
};
|
||||
|
||||
if (stream && detail) {
|
||||
responseWrite({
|
||||
res,
|
||||
event: SseResponseEventEnum.toolResponse,
|
||||
data: JSON.stringify({
|
||||
tool: {
|
||||
id: tool.id,
|
||||
toolName: '',
|
||||
toolAvatar: '',
|
||||
params: '',
|
||||
response: JSON.stringify(moduleRunResponse.toolResponses, null, 2)
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
moduleRunResponse,
|
||||
toolMsgParams
|
||||
};
|
||||
})
|
||||
)
|
||||
).filter(Boolean) as ToolRunResponseType;
|
||||
|
||||
const flatToolsResponseData = toolsRunResponse.map((item) => item.moduleRunResponse).flat();
|
||||
|
||||
if (toolCalls.length > 0 && !res.closed) {
|
||||
// Run the tool, combine its results, and perform another round of AI calls
|
||||
const assistantToolMsgParams: ChatCompletionAssistantToolParam = {
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
tool_calls: toolCalls
|
||||
};
|
||||
const concatToolMessages = [
|
||||
...filterMessages,
|
||||
assistantToolMsgParams
|
||||
] as ChatCompletionMessageParam[];
|
||||
|
||||
const tokens = countGptMessagesTokens(concatToolMessages, tools);
|
||||
// console.log(
|
||||
// JSON.stringify(
|
||||
// {
|
||||
// messages: concatToolMessages,
|
||||
// tools
|
||||
// },
|
||||
// null,
|
||||
// 2
|
||||
// )
|
||||
// );
|
||||
// console.log(tokens, 'tool');
|
||||
|
||||
if (stream && detail) {
|
||||
responseWriteNodeStatus({
|
||||
res,
|
||||
name: module.name
|
||||
});
|
||||
}
|
||||
|
||||
return runToolWithToolChoice(
|
||||
{
|
||||
...props,
|
||||
messages: [...concatToolMessages, ...toolsRunResponse.map((item) => item?.toolMsgParams)]
|
||||
},
|
||||
{
|
||||
dispatchFlowResponse: response
|
||||
? response.dispatchFlowResponse.concat(flatToolsResponseData)
|
||||
: flatToolsResponseData,
|
||||
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// No tool is invoked, indicating that the process is over
|
||||
const completeMessages = filterMessages.concat({
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
content: answer
|
||||
});
|
||||
|
||||
const tokens = countGptMessagesTokens(completeMessages, tools);
|
||||
// console.log(
|
||||
// JSON.stringify(
|
||||
// {
|
||||
// messages: completeMessages,
|
||||
// tools
|
||||
// },
|
||||
// null,
|
||||
// 2
|
||||
// )
|
||||
// );
|
||||
// console.log(tokens, 'response token');
|
||||
|
||||
return {
|
||||
dispatchFlowResponse: response?.dispatchFlowResponse || [],
|
||||
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
|
||||
completeMessages
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
async function streamResponse({
|
||||
res,
|
||||
detail,
|
||||
toolModules,
|
||||
stream
|
||||
}: {
|
||||
res: NextApiResponse;
|
||||
detail: boolean;
|
||||
toolModules: ToolModuleItemType[];
|
||||
stream: StreamChatType;
|
||||
}) {
|
||||
const write = responseWriteController({
|
||||
res,
|
||||
readStream: stream
|
||||
});
|
||||
|
||||
let textAnswer = '';
|
||||
let toolCalls: ChatCompletionMessageToolCall[] = [];
|
||||
|
||||
for await (const part of stream) {
|
||||
if (res.closed) {
|
||||
stream.controller?.abort();
|
||||
break;
|
||||
}
|
||||
|
||||
const responseChoice = part.choices?.[0]?.delta;
|
||||
// console.log(JSON.stringify(responseChoice, null, 2));
|
||||
if (responseChoice.content) {
|
||||
const content = responseChoice?.content || '';
|
||||
textAnswer += content;
|
||||
|
||||
responseWrite({
|
||||
write,
|
||||
event: detail ? SseResponseEventEnum.answer : undefined,
|
||||
data: textAdaptGptResponse({
|
||||
text: content
|
||||
})
|
||||
});
|
||||
} else if (responseChoice.tool_calls?.[0]) {
|
||||
const toolCall: ChatCompletionMessageToolCall = responseChoice.tool_calls[0];
|
||||
|
||||
// 流响应中,每次只会返回一个工具. 如果带了 id,说明是执行一个工具
|
||||
if (toolCall.id) {
|
||||
const toolModule = toolModules.find(
|
||||
(module) => module.moduleId === toolCall.function?.name
|
||||
);
|
||||
|
||||
if (toolModule) {
|
||||
if (toolCall.function?.arguments === undefined) {
|
||||
toolCall.function.arguments = '';
|
||||
}
|
||||
toolCalls.push({
|
||||
...toolCall,
|
||||
toolName: toolModule.name,
|
||||
toolAvatar: toolModule.avatar
|
||||
});
|
||||
|
||||
if (detail) {
|
||||
responseWrite({
|
||||
write,
|
||||
event: SseResponseEventEnum.toolCall,
|
||||
data: JSON.stringify({
|
||||
tool: {
|
||||
id: toolCall.id,
|
||||
toolName: toolModule.name,
|
||||
toolAvatar: toolModule.avatar,
|
||||
functionName: toolCall.function.name,
|
||||
params: toolCall.function.arguments,
|
||||
response: ''
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
/* arg 插入最后一个工具的参数里 */
|
||||
const arg: string = responseChoice.tool_calls?.[0]?.function?.arguments;
|
||||
const currentTool = toolCalls[toolCalls.length - 1];
|
||||
if (currentTool) {
|
||||
currentTool.function.arguments += arg;
|
||||
|
||||
if (detail) {
|
||||
responseWrite({
|
||||
write,
|
||||
event: SseResponseEventEnum.toolParams,
|
||||
data: JSON.stringify({
|
||||
tool: {
|
||||
id: currentTool.id,
|
||||
toolName: '',
|
||||
toolAvatar: '',
|
||||
params: arg,
|
||||
response: ''
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!textAnswer && toolCalls.length === 0) {
|
||||
return Promise.reject('LLM api response empty');
|
||||
}
|
||||
|
||||
return { answer: textAnswer, toolCalls };
|
||||
}
|
||||
26
projects/app/src/service/moduleDispatch/agent/runTool/type.d.ts
vendored
Normal file
26
projects/app/src/service/moduleDispatch/agent/runTool/type.d.ts
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { FlowNodeInputItemType } from '@fastgpt/global/core/module/node/type';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
DispatchNodeResponseType
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { RunningModuleItemType } from '@fastgpt/global/core/module/runtime/type';
|
||||
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
import type { DispatchFlowResponse } from '../../type.d';
|
||||
|
||||
export type DispatchToolModuleProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.history]?: ChatItemType[];
|
||||
[ModuleInputKeyEnum.aiModel]: string;
|
||||
[ModuleInputKeyEnum.aiSystemPrompt]: string;
|
||||
[ModuleInputKeyEnum.userChatInput]: string;
|
||||
}>;
|
||||
|
||||
export type RunToolResponse = {
|
||||
dispatchFlowResponse: DispatchFlowResponse[];
|
||||
totalTokens: number;
|
||||
completeMessages?: ChatCompletionMessageParam[];
|
||||
};
|
||||
export type ToolModuleItemType = RunningModuleItemType & {
|
||||
toolParams: RunningModuleItemType['inputs'];
|
||||
};
|
||||
@@ -1,18 +1,35 @@
|
||||
import type { NextApiResponse } from 'next';
|
||||
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import {
|
||||
filterGPTMessageByMaxTokens,
|
||||
formatGPTMessagesInRequestBefore,
|
||||
loadChatImgToBase64
|
||||
} from '@fastgpt/service/core/chat/utils';
|
||||
import type { ChatItemType, UserChatItemValueItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
|
||||
import { textAdaptGptResponse } from '@/utils/adapt';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
|
||||
import type {
|
||||
ChatCompletion,
|
||||
ChatCompletionMessageParam,
|
||||
StreamChatType
|
||||
} from '@fastgpt/global/core/ai/type.d';
|
||||
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
|
||||
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { postTextCensor } from '@/service/common/censor';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
|
||||
import type { ModuleDispatchResponse, ModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
|
||||
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
|
||||
import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import type { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
import {
|
||||
countGptMessagesTokens,
|
||||
countMessagesTokens
|
||||
} from '@fastgpt/global/common/string/tiktoken';
|
||||
import {
|
||||
chats2GPTMessages,
|
||||
getSystemPrompt,
|
||||
GPTMessages2Chats,
|
||||
runtimePrompt2ChatsValue
|
||||
} from '@fastgpt/global/core/chat/adapt';
|
||||
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
|
||||
import type { AIChatModuleProps } from '@fastgpt/global/core/module/node/type.d';
|
||||
import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
@@ -20,10 +37,11 @@ import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
|
||||
import { getLLMModel, ModelTypeEnum } from '@fastgpt/service/core/ai/model';
|
||||
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
|
||||
import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { getHistories } from '../utils';
|
||||
import { filterSearchResultsByMaxChars } from '@fastgpt/global/core/dataset/search/utils';
|
||||
import { getHistoryPreview } from '@fastgpt/global/core/chat/utils';
|
||||
|
||||
export type ChatProps = ModuleDispatchProps<
|
||||
AIChatModuleProps & {
|
||||
@@ -32,7 +50,7 @@ export type ChatProps = ModuleDispatchProps<
|
||||
[ModuleInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
|
||||
}
|
||||
>;
|
||||
export type ChatResponse = ModuleDispatchResponse<{
|
||||
export type ChatResponse = DispatchNodeResultType<{
|
||||
[ModuleOutputKeyEnum.answerText]: string;
|
||||
[ModuleOutputKeyEnum.history]: ChatItemType[];
|
||||
}>;
|
||||
@@ -46,6 +64,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
user,
|
||||
histories,
|
||||
module: { name, outputs },
|
||||
inputFiles = [],
|
||||
params: {
|
||||
model,
|
||||
temperature = 0,
|
||||
@@ -59,10 +78,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
quotePrompt
|
||||
}
|
||||
} = props;
|
||||
if (!userChatInput) {
|
||||
if (!userChatInput && inputFiles.length === 0) {
|
||||
return Promise.reject('Question is empty');
|
||||
}
|
||||
|
||||
stream = stream && isResponseAnswerText;
|
||||
|
||||
const chatHistories = getHistories(history, histories);
|
||||
@@ -74,7 +92,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
return Promise.reject('The chat model is undefined, you need to select a chat model.');
|
||||
}
|
||||
|
||||
const { filterQuoteQA, quoteText } = filterQuote({
|
||||
const { quoteText } = filterQuote({
|
||||
quoteQA,
|
||||
model: modelConstantsData,
|
||||
quoteTemplate
|
||||
@@ -90,14 +108,16 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
});
|
||||
}
|
||||
|
||||
const { messages, filterMessages } = getChatMessages({
|
||||
const { filterMessages } = getChatMessages({
|
||||
model: modelConstantsData,
|
||||
histories: chatHistories,
|
||||
quoteText,
|
||||
quotePrompt,
|
||||
userChatInput,
|
||||
inputFiles,
|
||||
systemPrompt
|
||||
});
|
||||
|
||||
const { max_tokens } = await getMaxTokens({
|
||||
model: modelConstantsData,
|
||||
maxToken,
|
||||
@@ -121,20 +141,26 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
}
|
||||
]
|
||||
: []),
|
||||
...(await Promise.all(
|
||||
messages.map(async (item) => ({
|
||||
...item,
|
||||
content: modelConstantsData.vision
|
||||
? await formatStr2ChatContent(item.content)
|
||||
: item.content
|
||||
}))
|
||||
))
|
||||
];
|
||||
...formatGPTMessagesInRequestBefore(filterMessages)
|
||||
] as ChatCompletionMessageParam[];
|
||||
|
||||
if (concatMessages.length === 0) {
|
||||
return Promise.reject('core.chat.error.Messages empty');
|
||||
}
|
||||
|
||||
const loadMessages = await Promise.all(
|
||||
concatMessages.map(async (item) => {
|
||||
if (item.role === ChatCompletionRequestMessageRoleEnum.User) {
|
||||
return {
|
||||
...item,
|
||||
content: await loadChatImgToBase64(item.content)
|
||||
};
|
||||
} else {
|
||||
return item;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
const response = await ai.chat.completions.create(
|
||||
{
|
||||
...modelConstantsData?.defaultConfig,
|
||||
@@ -142,7 +168,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
temperature,
|
||||
max_tokens,
|
||||
stream,
|
||||
messages: concatMessages
|
||||
messages: loadMessages
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
@@ -151,7 +177,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
}
|
||||
);
|
||||
|
||||
const { answerText, completeMessages } = await (async () => {
|
||||
const { answerText } = await (async () => {
|
||||
if (stream) {
|
||||
// sse response
|
||||
const { answer } = await streamResponse({
|
||||
@@ -159,35 +185,29 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
detail,
|
||||
stream: response
|
||||
});
|
||||
// count tokens
|
||||
const completeMessages = filterMessages.concat({
|
||||
obj: ChatRoleEnum.AI,
|
||||
value: answer
|
||||
});
|
||||
|
||||
targetResponse({ res, detail, outputs });
|
||||
|
||||
return {
|
||||
answerText: answer,
|
||||
completeMessages
|
||||
answerText: answer
|
||||
};
|
||||
} else {
|
||||
const unStreamResponse = response as ChatCompletion;
|
||||
const answer = unStreamResponse.choices?.[0]?.message?.content || '';
|
||||
|
||||
const completeMessages = filterMessages.concat({
|
||||
obj: ChatRoleEnum.AI,
|
||||
value: answer
|
||||
});
|
||||
|
||||
return {
|
||||
answerText: answer,
|
||||
completeMessages
|
||||
answerText: answer
|
||||
};
|
||||
}
|
||||
})();
|
||||
|
||||
const tokens = countMessagesTokens(completeMessages);
|
||||
const completeMessages = filterMessages.concat({
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
content: answerText
|
||||
});
|
||||
const chatCompleteMessages = GPTMessages2Chats(completeMessages);
|
||||
|
||||
const tokens = countMessagesTokens(chatCompleteMessages);
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
model,
|
||||
tokens,
|
||||
@@ -196,17 +216,16 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
|
||||
return {
|
||||
answerText,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
model: modelName,
|
||||
tokens,
|
||||
query: `${userChatInput}`,
|
||||
maxToken: max_tokens,
|
||||
quoteList: filterQuoteQA,
|
||||
historyPreview: getHistoryPreview(completeMessages),
|
||||
historyPreview: getHistoryPreview(chatCompleteMessages),
|
||||
contextTotalLen: completeMessages.length
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
|
||||
{
|
||||
moduleName: name,
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
@@ -214,7 +233,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
tokens
|
||||
}
|
||||
],
|
||||
history: completeMessages
|
||||
history: chatCompleteMessages
|
||||
};
|
||||
};
|
||||
|
||||
@@ -256,6 +275,7 @@ function getChatMessages({
|
||||
histories = [],
|
||||
systemPrompt,
|
||||
userChatInput,
|
||||
inputFiles,
|
||||
model
|
||||
}: {
|
||||
quotePrompt?: string;
|
||||
@@ -263,9 +283,10 @@ function getChatMessages({
|
||||
histories: ChatItemType[];
|
||||
systemPrompt: string;
|
||||
userChatInput: string;
|
||||
inputFiles: UserChatItemValueItemType['file'][];
|
||||
model: LLMModelItemType;
|
||||
}) {
|
||||
const question = quoteText
|
||||
const replaceInputValue = quoteText
|
||||
? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
|
||||
quote: quoteText,
|
||||
question: userChatInput
|
||||
@@ -273,30 +294,24 @@ function getChatMessages({
|
||||
: userChatInput;
|
||||
|
||||
const messages: ChatItemType[] = [
|
||||
...(systemPrompt
|
||||
? [
|
||||
{
|
||||
obj: ChatRoleEnum.System,
|
||||
value: systemPrompt
|
||||
}
|
||||
]
|
||||
: []),
|
||||
...getSystemPrompt(systemPrompt),
|
||||
...histories,
|
||||
{
|
||||
obj: ChatRoleEnum.Human,
|
||||
value: question
|
||||
value: runtimePrompt2ChatsValue({
|
||||
files: inputFiles,
|
||||
text: replaceInputValue
|
||||
})
|
||||
}
|
||||
];
|
||||
const adaptMessages = chats2GPTMessages({ messages, reserveId: false });
|
||||
|
||||
const filterMessages = ChatContextFilter({
|
||||
messages,
|
||||
const filterMessages = filterGPTMessageByMaxTokens({
|
||||
messages: adaptMessages,
|
||||
maxTokens: model.maxContext - 300 // filter token. not response maxToken
|
||||
});
|
||||
|
||||
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
|
||||
|
||||
return {
|
||||
messages: adaptMessages,
|
||||
filterMessages
|
||||
};
|
||||
}
|
||||
@@ -307,17 +322,17 @@ function getMaxTokens({
|
||||
}: {
|
||||
maxToken: number;
|
||||
model: LLMModelItemType;
|
||||
filterMessages: ChatItemType[];
|
||||
filterMessages: ChatCompletionMessageParam[];
|
||||
}) {
|
||||
maxToken = Math.min(maxToken, model.maxResponse);
|
||||
const tokensLimit = model.maxContext;
|
||||
|
||||
/* count response max token */
|
||||
const promptsToken = countMessagesTokens(filterMessages);
|
||||
const promptsToken = countGptMessagesTokens(filterMessages);
|
||||
maxToken = promptsToken + maxToken > tokensLimit ? tokensLimit - promptsToken : maxToken;
|
||||
|
||||
if (maxToken <= 0) {
|
||||
return Promise.reject('Over max token');
|
||||
maxToken = 200;
|
||||
}
|
||||
return {
|
||||
max_tokens: maxToken
|
||||
@@ -339,7 +354,7 @@ function targetResponse({
|
||||
if (targets.length === 0) return;
|
||||
responseWrite({
|
||||
res,
|
||||
event: detail ? sseResponseEventEnum.answer : undefined,
|
||||
event: detail ? SseResponseEventEnum.answer : undefined,
|
||||
data: textAdaptGptResponse({
|
||||
text: '\n'
|
||||
})
|
||||
@@ -370,7 +385,7 @@ async function streamResponse({
|
||||
|
||||
responseWrite({
|
||||
write,
|
||||
event: detail ? sseResponseEventEnum.answer : undefined,
|
||||
event: detail ? SseResponseEventEnum.answer : undefined,
|
||||
data: textAdaptGptResponse({
|
||||
text: content
|
||||
})
|
||||
@@ -383,14 +398,3 @@ async function streamResponse({
|
||||
|
||||
return { answer };
|
||||
}
|
||||
|
||||
function getHistoryPreview(completeMessages: ChatItemType[]) {
|
||||
return completeMessages.map((item, i) => {
|
||||
if (item.obj === ChatRoleEnum.System) return item;
|
||||
if (i >= completeMessages.length - 2) return item;
|
||||
return {
|
||||
...item,
|
||||
value: item.value.length > 15 ? `${item.value.slice(0, 15)}...` : item.value
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import {
|
||||
DispatchNodeResponseType,
|
||||
DispatchNodeResultType
|
||||
} from '@fastgpt/global/core/module/runtime/type.d';
|
||||
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
|
||||
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
|
||||
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModelTypeEnum, getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
|
||||
import { searchDatasetData } from '@/service/core/dataset/data/controller';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||
import { getHistories } from '../utils';
|
||||
import { datasetSearchQueryExtension } from '@fastgpt/service/core/dataset/search/utils';
|
||||
import { ChatModuleUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
import { checkTeamReRankPermission } from '@fastgpt/service/support/permission/teamLimit';
|
||||
|
||||
type DatasetSearchProps = ModuleDispatchProps<{
|
||||
@@ -26,7 +27,7 @@ type DatasetSearchProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.datasetSearchExtensionModel]: string;
|
||||
[ModuleInputKeyEnum.datasetSearchExtensionBg]: string;
|
||||
}>;
|
||||
export type DatasetSearchResponse = ModuleDispatchResponse<{
|
||||
export type DatasetSearchResponse = DispatchNodeResultType<{
|
||||
[ModuleOutputKeyEnum.datasetIsEmpty]?: boolean;
|
||||
[ModuleOutputKeyEnum.datasetUnEmpty]?: boolean;
|
||||
[ModuleOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
|
||||
@@ -107,7 +108,7 @@ export async function dispatchDatasetSearch(
|
||||
tokens,
|
||||
modelType: ModelTypeEnum.vector
|
||||
});
|
||||
const responseData: moduleDispatchResType & { totalPoints: number } = {
|
||||
const responseData: DispatchNodeResponseType & { totalPoints: number } = {
|
||||
totalPoints,
|
||||
query: concatQueries.join('\n'),
|
||||
model: modelName,
|
||||
@@ -115,9 +116,10 @@ export async function dispatchDatasetSearch(
|
||||
similarity: usingSimilarityFilter ? similarity : undefined,
|
||||
limit,
|
||||
searchMode,
|
||||
searchUsingReRank: searchUsingReRank
|
||||
searchUsingReRank: searchUsingReRank,
|
||||
quoteList: searchRes
|
||||
};
|
||||
const moduleDispatchBills: ChatModuleUsageType[] = [
|
||||
const nodeDispatchUsages: ChatNodeUsageType[] = [
|
||||
{
|
||||
totalPoints,
|
||||
moduleName: module.name,
|
||||
@@ -140,7 +142,7 @@ export async function dispatchDatasetSearch(
|
||||
aiExtensionResult.extensionQueries?.join('\n') ||
|
||||
JSON.stringify(aiExtensionResult.extensionQueries);
|
||||
|
||||
moduleDispatchBills.push({
|
||||
nodeDispatchUsages.push({
|
||||
totalPoints,
|
||||
moduleName: 'core.module.template.Query extension',
|
||||
model: modelName,
|
||||
@@ -152,7 +154,11 @@ export async function dispatchDatasetSearch(
|
||||
isEmpty: searchRes.length === 0 ? true : undefined,
|
||||
unEmpty: searchRes.length > 0 ? true : undefined,
|
||||
quoteQA: searchRes,
|
||||
responseData,
|
||||
moduleDispatchBills
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: responseData,
|
||||
nodeDispatchUsages,
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]: searchRes.map((item) => ({
|
||||
text: `${item.q}\n${item.a}`.trim(),
|
||||
chunkIndex: item.chunkIndex
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
import { NextApiResponse } from 'next';
|
||||
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import type { ChatDispatchProps, RunningModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type { ChatDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import type { RunningModuleItemType } from '@fastgpt/global/core/module/runtime/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import type {
|
||||
AIChatItemValueItemType,
|
||||
ChatHistoryItemResType,
|
||||
ToolRunResponseItemType
|
||||
} from '@fastgpt/global/core/chat/type.d';
|
||||
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { ModuleItemType } from '@fastgpt/global/core/module/type';
|
||||
import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
import { responseWrite } from '@fastgpt/service/common/response';
|
||||
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
|
||||
import { responseWriteNodeStatus } from '@fastgpt/service/common/response';
|
||||
import { getSystemTime } from '@fastgpt/global/common/time/timezone';
|
||||
import { initRunningModuleType } from '../core/modules/constant';
|
||||
|
||||
import { dispatchHistory } from './init/history';
|
||||
import { dispatchChatInput } from './init/userChatInput';
|
||||
@@ -27,8 +31,11 @@ import { dispatchQueryExtension } from './tools/queryExternsion';
|
||||
import { dispatchRunPlugin } from './plugin/run';
|
||||
import { dispatchPluginInput } from './plugin/runInput';
|
||||
import { dispatchPluginOutput } from './plugin/runOutput';
|
||||
import { valueTypeFormat } from './utils';
|
||||
import { ChatModuleUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
import { checkTheModuleConnectedByTool, valueTypeFormat } from './utils';
|
||||
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
import { dispatchRunTools } from './agent/runTool/index';
|
||||
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { DispatchFlowResponse } from './type';
|
||||
|
||||
const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
|
||||
[FlowNodeTypeEnum.historyNode]: dispatchHistory,
|
||||
@@ -46,26 +53,29 @@ const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
|
||||
[FlowNodeTypeEnum.pluginInput]: dispatchPluginInput,
|
||||
[FlowNodeTypeEnum.pluginOutput]: dispatchPluginOutput,
|
||||
[FlowNodeTypeEnum.queryExtension]: dispatchQueryExtension,
|
||||
[FlowNodeTypeEnum.tools]: dispatchRunTools,
|
||||
|
||||
// none
|
||||
[FlowNodeTypeEnum.userGuide]: () => Promise.resolve()
|
||||
};
|
||||
|
||||
/* running */
|
||||
export async function dispatchModules({
|
||||
export async function dispatchWorkFlow({
|
||||
res,
|
||||
modules,
|
||||
histories = [],
|
||||
modules = [],
|
||||
runtimeModules,
|
||||
startParams = {},
|
||||
histories = [],
|
||||
variables = {},
|
||||
user,
|
||||
stream = false,
|
||||
detail = false,
|
||||
...props
|
||||
}: ChatDispatchProps & {
|
||||
modules: ModuleItemType[];
|
||||
startParams?: Record<string, any>;
|
||||
}) {
|
||||
modules?: ModuleItemType[]; // app modules
|
||||
runtimeModules?: RunningModuleItemType[];
|
||||
startParams?: Record<string, any>; // entry module params
|
||||
}): Promise<DispatchFlowResponse> {
|
||||
// set sse response headers
|
||||
if (stream) {
|
||||
res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
|
||||
@@ -78,48 +88,70 @@ export async function dispatchModules({
|
||||
...getSystemVariable({ timezone: user.timezone }),
|
||||
...variables
|
||||
};
|
||||
const runningModules = loadModules(modules, variables);
|
||||
const runningModules = runtimeModules ? runtimeModules : loadModules(modules, variables);
|
||||
|
||||
// let storeData: Record<string, any> = {}; // after module used
|
||||
let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
|
||||
let chatAnswerText = ''; // AI answer
|
||||
let chatModuleBills: ChatModuleUsageType[] = [];
|
||||
let chatResponses: ChatHistoryItemResType[] = []; // response request and save to database
|
||||
let chatAssistantResponse: AIChatItemValueItemType[] = []; // The value will be returned to the user
|
||||
let chatNodeUsages: ChatNodeUsageType[] = [];
|
||||
let toolRunResponse: ToolRunResponseItemType[] = [];
|
||||
let runningTime = Date.now();
|
||||
|
||||
/* Store special response field */
|
||||
function pushStore(
|
||||
{ inputs = [] }: RunningModuleItemType,
|
||||
{
|
||||
answerText = '',
|
||||
responseData,
|
||||
moduleDispatchBills
|
||||
nodeDispatchUsages,
|
||||
toolResponses,
|
||||
assistantResponses
|
||||
}: {
|
||||
answerText?: string;
|
||||
responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
|
||||
moduleDispatchBills?: ChatModuleUsageType[];
|
||||
[ModuleOutputKeyEnum.answerText]?: string;
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]?: ChatHistoryItemResType;
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[];
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]?: ToolRunResponseItemType;
|
||||
[DispatchNodeResponseKeyEnum.assistantResponses]?: AIChatItemValueItemType[]; // tool module, save the response value
|
||||
}
|
||||
) {
|
||||
const time = Date.now();
|
||||
|
||||
if (responseData) {
|
||||
if (Array.isArray(responseData)) {
|
||||
chatResponse = chatResponse.concat(responseData);
|
||||
} else {
|
||||
chatResponse.push({
|
||||
...responseData,
|
||||
runningTime: +((time - runningTime) / 1000).toFixed(2)
|
||||
chatResponses.push({
|
||||
...responseData,
|
||||
runningTime: +((time - runningTime) / 1000).toFixed(2)
|
||||
});
|
||||
}
|
||||
if (nodeDispatchUsages) {
|
||||
chatNodeUsages = chatNodeUsages.concat(nodeDispatchUsages);
|
||||
}
|
||||
if (toolResponses) {
|
||||
if (Array.isArray(toolResponses) && toolResponses.length > 0) {
|
||||
toolRunResponse.push(toolResponses);
|
||||
} else if (Object.keys(toolResponses).length > 0) {
|
||||
toolRunResponse.push(toolResponses);
|
||||
}
|
||||
}
|
||||
if (assistantResponses) {
|
||||
chatAssistantResponse = chatAssistantResponse.concat(assistantResponses);
|
||||
}
|
||||
|
||||
// save assistant text response
|
||||
if (answerText) {
|
||||
const isResponseAnswerText =
|
||||
inputs.find((item) => item.key === ModuleInputKeyEnum.aiChatIsResponseText)?.value ?? true;
|
||||
if (isResponseAnswerText) {
|
||||
chatAssistantResponse.push({
|
||||
type: ChatItemValueTypeEnum.text,
|
||||
text: {
|
||||
content: answerText
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
if (moduleDispatchBills) {
|
||||
chatModuleBills = chatModuleBills.concat(moduleDispatchBills);
|
||||
}
|
||||
runningTime = time;
|
||||
|
||||
const isResponseAnswerText =
|
||||
inputs.find((item) => item.key === ModuleInputKeyEnum.aiChatIsResponseText)?.value ?? true;
|
||||
if (isResponseAnswerText) {
|
||||
chatAnswerText += answerText;
|
||||
}
|
||||
runningTime = time;
|
||||
}
|
||||
/* Inject data into module input */
|
||||
function moduleInput(module: RunningModuleItemType, data: Record<string, any> = {}) {
|
||||
const updateInputValue = (key: string, value: any) => {
|
||||
const index = module.inputs.findIndex((item: any) => item.key === key);
|
||||
@@ -132,6 +164,7 @@ export async function dispatchModules({
|
||||
|
||||
return;
|
||||
}
|
||||
/* Pass the output of the module to the next stage */
|
||||
function moduleOutput(
|
||||
module: RunningModuleItemType,
|
||||
result: Record<string, any> = {}
|
||||
@@ -207,6 +240,7 @@ export async function dispatchModules({
|
||||
stream,
|
||||
detail,
|
||||
module,
|
||||
runtimeModules: runningModules,
|
||||
params
|
||||
};
|
||||
|
||||
@@ -218,20 +252,23 @@ export async function dispatchModules({
|
||||
return {};
|
||||
})();
|
||||
|
||||
// format response data. Add modulename and moduletype
|
||||
const formatResponseData = (() => {
|
||||
if (!dispatchRes[ModuleOutputKeyEnum.responseData]) return undefined;
|
||||
if (Array.isArray(dispatchRes[ModuleOutputKeyEnum.responseData])) {
|
||||
return dispatchRes[ModuleOutputKeyEnum.responseData];
|
||||
}
|
||||
|
||||
// format response data. Add modulename and module type
|
||||
const formatResponseData: ChatHistoryItemResType = (() => {
|
||||
if (!dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]) return undefined;
|
||||
return {
|
||||
moduleName: module.name,
|
||||
moduleType: module.flowType,
|
||||
...dispatchRes[ModuleOutputKeyEnum.responseData]
|
||||
...dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]
|
||||
};
|
||||
})();
|
||||
|
||||
// Add output default value
|
||||
module.outputs.forEach((item) => {
|
||||
if (!item.required) return;
|
||||
if (dispatchRes[item.key] !== undefined) return;
|
||||
dispatchRes[item.key] = valueTypeFormat(item.defaultValue, item.valueType);
|
||||
});
|
||||
|
||||
// Pass userChatInput
|
||||
const hasUserChatInputTarget = !!module.outputs.find(
|
||||
(item) => item.key === ModuleOutputKeyEnum.userChatInput
|
||||
@@ -243,17 +280,17 @@ export async function dispatchModules({
|
||||
? params[ModuleOutputKeyEnum.userChatInput]
|
||||
: undefined,
|
||||
...dispatchRes,
|
||||
[ModuleOutputKeyEnum.responseData]: formatResponseData,
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]:
|
||||
dispatchRes[ModuleOutputKeyEnum.moduleDispatchBills]
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: formatResponseData,
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]:
|
||||
dispatchRes[DispatchNodeResponseKeyEnum.nodeDispatchUsages]
|
||||
});
|
||||
}
|
||||
// start process width initInput
|
||||
const initModules = runningModules.filter((item) => initRunningModuleType[item.flowType]);
|
||||
|
||||
// runningModules.forEach((item) => {
|
||||
// console.log(item);
|
||||
// });
|
||||
const initModules = runningModules.filter((item) => item.isEntry);
|
||||
// reset entry
|
||||
modules.forEach((item) => {
|
||||
item.isEntry = false;
|
||||
});
|
||||
|
||||
initModules.map((module) =>
|
||||
moduleInput(module, {
|
||||
@@ -272,9 +309,11 @@ export async function dispatchModules({
|
||||
}
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.answerText]: chatAnswerText,
|
||||
[ModuleOutputKeyEnum.responseData]: chatResponse,
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: chatModuleBills
|
||||
flowResponses: chatResponses,
|
||||
flowUsages: chatNodeUsages,
|
||||
[DispatchNodeResponseKeyEnum.assistantResponses]:
|
||||
concatAssistantResponseAnswerText(chatAssistantResponse),
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]: toolRunResponse
|
||||
};
|
||||
}
|
||||
|
||||
@@ -287,18 +326,36 @@ function loadModules(
|
||||
.filter((item) => {
|
||||
return ![FlowNodeTypeEnum.userGuide].includes(item.moduleId as any);
|
||||
})
|
||||
.map((module) => {
|
||||
.map<RunningModuleItemType>((module) => {
|
||||
return {
|
||||
moduleId: module.moduleId,
|
||||
name: module.name,
|
||||
avatar: module.avatar,
|
||||
intro: module.intro,
|
||||
flowType: module.flowType,
|
||||
showStatus: module.showStatus,
|
||||
isEntry: module.isEntry,
|
||||
inputs: module.inputs
|
||||
.filter(
|
||||
(item) =>
|
||||
item.type === FlowNodeInputTypeEnum.systemInput ||
|
||||
item.connected ||
|
||||
item.value !== undefined
|
||||
/*
|
||||
1. system input must be save
|
||||
2. connected by source handle
|
||||
3. manual input value or have default value
|
||||
4. For the module connected by the tool, leave the toolDescription input
|
||||
*/
|
||||
(item) => {
|
||||
const isTool = checkTheModuleConnectedByTool(modules, module);
|
||||
|
||||
if (isTool && item.toolDescription) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return (
|
||||
item.type === FlowNodeInputTypeEnum.systemInput ||
|
||||
item.connected ||
|
||||
item.value !== undefined
|
||||
);
|
||||
}
|
||||
) // filter unconnected target input
|
||||
.map((item) => {
|
||||
const replace = ['string'].includes(typeof item.value);
|
||||
@@ -307,12 +364,16 @@ function loadModules(
|
||||
key: item.key,
|
||||
// variables replace
|
||||
value: replace ? replaceVariable(item.value, variables) : item.value,
|
||||
valueType: item.valueType
|
||||
valueType: item.valueType,
|
||||
required: item.required,
|
||||
toolDescription: item.toolDescription
|
||||
};
|
||||
}),
|
||||
outputs: module.outputs
|
||||
.map((item) => ({
|
||||
key: item.key,
|
||||
required: item.required,
|
||||
defaultValue: item.defaultValue,
|
||||
answer: item.key === ModuleOutputKeyEnum.answerText,
|
||||
value: undefined,
|
||||
valueType: item.valueType,
|
||||
@@ -339,13 +400,9 @@ export function responseStatus({
|
||||
name?: string;
|
||||
}) {
|
||||
if (!name) return;
|
||||
responseWrite({
|
||||
responseWriteNodeStatus({
|
||||
res,
|
||||
event: sseResponseEventEnum.moduleStatus,
|
||||
data: JSON.stringify({
|
||||
status: 'running',
|
||||
name
|
||||
})
|
||||
name
|
||||
});
|
||||
}
|
||||
|
||||
@@ -355,3 +412,22 @@ export function getSystemVariable({ timezone }: { timezone: string }) {
|
||||
cTime: getSystemTime(timezone)
|
||||
};
|
||||
}
|
||||
|
||||
export const concatAssistantResponseAnswerText = (response: AIChatItemValueItemType[]) => {
|
||||
const result: AIChatItemValueItemType[] = [];
|
||||
// 合并连续的text
|
||||
for (let i = 0; i < response.length; i++) {
|
||||
const item = response[i];
|
||||
if (item.type === ChatItemValueTypeEnum.text) {
|
||||
let text = item.text?.content || '';
|
||||
const lastItem = result[result.length - 1];
|
||||
if (lastItem && lastItem.type === ChatItemValueTypeEnum.text && lastItem.text?.content) {
|
||||
lastItem.text.content += text;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
result.push(item);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
@@ -1,30 +1,25 @@
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import { dispatchModules } from '../index';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { dispatchWorkFlow } from '../index';
|
||||
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import {
|
||||
DYNAMIC_INPUT_KEY,
|
||||
ModuleInputKeyEnum,
|
||||
ModuleOutputKeyEnum
|
||||
} from '@fastgpt/global/core/module/constants';
|
||||
import { DYNAMIC_INPUT_KEY, ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { getPluginRuntimeById } from '@fastgpt/service/core/plugin/controller';
|
||||
import { authPluginCanUse } from '@fastgpt/service/support/permission/auth/plugin';
|
||||
import { setEntryEntries } from '../utils';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
|
||||
type RunPluginProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.pluginId]: string;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
type RunPluginResponse = ModuleDispatchResponse<{
|
||||
[ModuleOutputKeyEnum.answerText]: string;
|
||||
}>;
|
||||
type RunPluginResponse = DispatchNodeResultType<{}>;
|
||||
|
||||
export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
|
||||
const {
|
||||
mode,
|
||||
teamId,
|
||||
tmbId,
|
||||
module,
|
||||
params: { pluginId, ...data }
|
||||
} = props;
|
||||
|
||||
@@ -59,45 +54,46 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
|
||||
return params;
|
||||
})();
|
||||
|
||||
const { responseData, moduleDispatchBills, answerText } = await dispatchModules({
|
||||
const { flowResponses, flowUsages, assistantResponses } = await dispatchWorkFlow({
|
||||
...props,
|
||||
modules: plugin.modules.map((module) => ({
|
||||
modules: setEntryEntries(plugin.modules).map((module) => ({
|
||||
...module,
|
||||
showStatus: false
|
||||
})),
|
||||
runtimeModules: undefined, // must reset
|
||||
startParams
|
||||
});
|
||||
|
||||
const output = responseData.find((item) => item.moduleType === FlowNodeTypeEnum.pluginOutput);
|
||||
const output = flowResponses.find((item) => item.moduleType === FlowNodeTypeEnum.pluginOutput);
|
||||
|
||||
if (output) {
|
||||
output.moduleLogo = plugin.avatar;
|
||||
}
|
||||
|
||||
return {
|
||||
answerText,
|
||||
assistantResponses,
|
||||
// responseData, // debug
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
moduleLogo: plugin.avatar,
|
||||
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
|
||||
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0),
|
||||
totalPoints: flowResponses.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
|
||||
pluginOutput: output?.pluginOutput,
|
||||
pluginDetail:
|
||||
mode === 'test' && plugin.teamId === teamId
|
||||
? responseData.filter((item) => {
|
||||
? flowResponses.filter((item) => {
|
||||
const filterArr = [FlowNodeTypeEnum.pluginOutput];
|
||||
return !filterArr.includes(item.moduleType as any);
|
||||
})
|
||||
: undefined
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
|
||||
{
|
||||
moduleName: plugin.name,
|
||||
totalPoints: moduleDispatchBills.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
|
||||
totalPoints: flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
|
||||
model: plugin.name,
|
||||
tokens: 0
|
||||
}
|
||||
],
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]: output?.pluginOutput ? output.pluginOutput : {},
|
||||
...(output ? output.pluginOutput : {})
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,19 +1,17 @@
|
||||
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type.d';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
|
||||
export type PluginOutputProps = ModuleDispatchProps<{
|
||||
[key: string]: any;
|
||||
}>;
|
||||
export type PluginOutputResponse = {
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
|
||||
};
|
||||
export type PluginOutputResponse = DispatchNodeResultType<{}>;
|
||||
|
||||
export const dispatchPluginOutput = (props: PluginOutputProps): PluginOutputResponse => {
|
||||
const { params } = props;
|
||||
|
||||
return {
|
||||
responseData: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: 0,
|
||||
pluginOutput: params
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { responseWrite } from '@fastgpt/service/common/response';
|
||||
import { textAdaptGptResponse } from '@/utils/adapt';
|
||||
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
export type AnswerProps = ModuleDispatchProps<{
|
||||
@@ -23,7 +23,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
|
||||
if (stream) {
|
||||
responseWrite({
|
||||
res,
|
||||
event: detail ? sseResponseEventEnum.response : undefined,
|
||||
event: detail ? SseResponseEventEnum.fastAnswer : undefined,
|
||||
data: textAdaptGptResponse({
|
||||
text: `\n${formatText}`
|
||||
})
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import {
|
||||
DYNAMIC_INPUT_KEY,
|
||||
ModuleInputKeyEnum,
|
||||
ModuleOutputKeyEnum
|
||||
} from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import axios from 'axios';
|
||||
import { valueTypeFormat } from '../utils';
|
||||
import { SERVICE_LOCAL_HOST } from '@fastgpt/service/common/system/tools';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
|
||||
type HttpRequestProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.abandon_httpUrl]: string;
|
||||
@@ -19,7 +17,7 @@ type HttpRequestProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.httpHeaders]: string;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
type HttpResponse = ModuleDispatchResponse<{
|
||||
type HttpResponse = DispatchNodeResultType<{
|
||||
[ModuleOutputKeyEnum.failed]?: boolean;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
@@ -99,7 +97,7 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
|
||||
}
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: 0,
|
||||
body: formatBody,
|
||||
httpResult: response
|
||||
@@ -111,7 +109,7 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.failed]: true,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: 0,
|
||||
body: formatBody,
|
||||
httpResult: { error }
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import {
|
||||
DYNAMIC_INPUT_KEY,
|
||||
ModuleInputKeyEnum,
|
||||
ModuleOutputKeyEnum
|
||||
} from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import axios from 'axios';
|
||||
import { valueTypeFormat } from '../utils';
|
||||
import { SERVICE_LOCAL_HOST } from '@fastgpt/service/common/system/tools';
|
||||
import { addLog } from '@fastgpt/service/common/system/log';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
|
||||
type PropsArrType = {
|
||||
key: string;
|
||||
@@ -27,7 +26,7 @@ type HttpRequestProps = ModuleDispatchProps<{
|
||||
[DYNAMIC_INPUT_KEY]: Record<string, any>;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
type HttpResponse = ModuleDispatchResponse<{
|
||||
type HttpResponse = DispatchNodeResultType<{
|
||||
[ModuleOutputKeyEnum.failed]?: boolean;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
@@ -40,7 +39,7 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
|
||||
chatId,
|
||||
responseChatItemId,
|
||||
variables,
|
||||
module: { outputs },
|
||||
module: { moduleId, outputs },
|
||||
histories,
|
||||
params: {
|
||||
system_httpMethod: httpMethod = 'POST',
|
||||
@@ -119,20 +118,22 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
|
||||
}
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: 0,
|
||||
params: Object.keys(params).length > 0 ? params : undefined,
|
||||
body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
|
||||
headers: Object.keys(headers).length > 0 ? headers : undefined,
|
||||
httpResult: rawResponse
|
||||
},
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]: results,
|
||||
[ModuleOutputKeyEnum.httpRawResponse]: rawResponse,
|
||||
...results
|
||||
};
|
||||
} catch (error) {
|
||||
addLog.error('Http request error', error);
|
||||
return {
|
||||
[ModuleOutputKeyEnum.failed]: true,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints: 0,
|
||||
params: Object.keys(params).length > 0 ? params : undefined,
|
||||
body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
|
||||
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
|
||||
import { queryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
|
||||
import { getHistories } from '../utils';
|
||||
import { hashStr } from '@fastgpt/global/common/string/tools';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
|
||||
type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.aiModel]: string;
|
||||
@@ -16,7 +15,7 @@ type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
|
||||
[ModuleInputKeyEnum.userChatInput]: string;
|
||||
}>;
|
||||
type Response = ModuleDispatchResponse<{
|
||||
type Response = DispatchNodeResultType<{
|
||||
[ModuleOutputKeyEnum.text]: string;
|
||||
}>;
|
||||
|
||||
@@ -57,14 +56,14 @@ export const dispatchQueryExtension = async ({
|
||||
});
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
totalPoints,
|
||||
model: modelName,
|
||||
tokens,
|
||||
query: userChatInput,
|
||||
textOutput: JSON.stringify(filterSameQueries)
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
|
||||
{
|
||||
moduleName: module.name,
|
||||
totalPoints,
|
||||
|
||||
@@ -1,24 +1,24 @@
|
||||
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { SelectAppItemType } from '@fastgpt/global/core/module/type';
|
||||
import { dispatchModules } from '../index';
|
||||
import { dispatchWorkFlow } from '../index';
|
||||
import { MongoApp } from '@fastgpt/service/core/app/schema';
|
||||
import { responseWrite } from '@fastgpt/service/common/response';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
|
||||
import { textAdaptGptResponse } from '@/utils/adapt';
|
||||
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { getHistories } from '../utils';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { getHistories, setEntryEntries } from '../utils';
|
||||
import { chatValue2RuntimePrompt, runtimePrompt2ChatsValue } from '@fastgpt/global/core/chat/adapt';
|
||||
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
|
||||
|
||||
type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.userChatInput]: string;
|
||||
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
|
||||
app: SelectAppItemType;
|
||||
}>;
|
||||
type Response = ModuleDispatchResponse<{
|
||||
type Response = DispatchNodeResultType<{
|
||||
[ModuleOutputKeyEnum.answerText]: string;
|
||||
[ModuleOutputKeyEnum.history]: ChatItemType[];
|
||||
}>;
|
||||
@@ -30,6 +30,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
stream,
|
||||
detail,
|
||||
histories,
|
||||
inputFiles,
|
||||
params: { userChatInput, history, app }
|
||||
} = props;
|
||||
let start = Date.now();
|
||||
@@ -50,7 +51,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
if (stream) {
|
||||
responseWrite({
|
||||
res,
|
||||
event: detail ? sseResponseEventEnum.answer : undefined,
|
||||
event: detail ? SseResponseEventEnum.answer : undefined,
|
||||
data: textAdaptGptResponse({
|
||||
text: '\n'
|
||||
})
|
||||
@@ -59,11 +60,13 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
|
||||
const chatHistories = getHistories(history, histories);
|
||||
|
||||
const { responseData, moduleDispatchBills, answerText } = await dispatchModules({
|
||||
const { flowResponses, flowUsages, assistantResponses } = await dispatchWorkFlow({
|
||||
...props,
|
||||
appId: app.id,
|
||||
modules: appData.modules,
|
||||
modules: setEntryEntries(appData.modules),
|
||||
runtimeModules: undefined, // must reset
|
||||
histories: chatHistories,
|
||||
inputFiles,
|
||||
startParams: {
|
||||
userChatInput
|
||||
}
|
||||
@@ -72,28 +75,33 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
const completeMessages = chatHistories.concat([
|
||||
{
|
||||
obj: ChatRoleEnum.Human,
|
||||
value: userChatInput
|
||||
value: runtimePrompt2ChatsValue({
|
||||
files: inputFiles,
|
||||
text: userChatInput
|
||||
})
|
||||
},
|
||||
{
|
||||
obj: ChatRoleEnum.AI,
|
||||
value: answerText
|
||||
value: assistantResponses
|
||||
}
|
||||
]);
|
||||
|
||||
const { text } = chatValue2RuntimePrompt(assistantResponses);
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
[DispatchNodeResponseKeyEnum.nodeResponse]: {
|
||||
moduleLogo: appData.avatar,
|
||||
query: userChatInput,
|
||||
textOutput: answerText,
|
||||
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
|
||||
textOutput: text,
|
||||
totalPoints: flowResponses.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
|
||||
{
|
||||
moduleName: appData.name,
|
||||
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
|
||||
totalPoints: flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
|
||||
}
|
||||
],
|
||||
answerText: answerText,
|
||||
answerText: text,
|
||||
history: completeMessages
|
||||
};
|
||||
};
|
||||
|
||||
16
projects/app/src/service/moduleDispatch/type.d.ts
vendored
Normal file
16
projects/app/src/service/moduleDispatch/type.d.ts
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
import {
|
||||
AIChatItemValueItemType,
|
||||
ChatHistoryItemResType,
|
||||
ChatItemValueItemType,
|
||||
ToolRunResponseItemType
|
||||
} from '@fastgpt/global/core/chat/type';
|
||||
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
|
||||
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
|
||||
export type DispatchFlowResponse = {
|
||||
flowResponses: ChatHistoryItemResType[];
|
||||
flowUsages: ChatNodeUsageType[];
|
||||
// [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: ChatNodeUsageType[];
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]: ToolRunResponseItemType[];
|
||||
[DispatchNodeResponseKeyEnum.assistantResponses]: AIChatItemValueItemType[];
|
||||
};
|
||||
@@ -1,5 +1,43 @@
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { DYNAMIC_INPUT_KEY, ModuleIOValueTypeEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { ModuleIOValueTypeEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
|
||||
export const setEntryEntries = (modules: ModuleItemType[]) => {
|
||||
const initRunningModuleType: Record<string, boolean> = {
|
||||
[FlowNodeTypeEnum.historyNode]: true,
|
||||
[FlowNodeTypeEnum.questionInput]: true,
|
||||
[FlowNodeTypeEnum.pluginInput]: true
|
||||
};
|
||||
|
||||
modules.forEach((item) => {
|
||||
if (initRunningModuleType[item.flowType]) {
|
||||
item.isEntry = true;
|
||||
}
|
||||
});
|
||||
return modules;
|
||||
};
|
||||
|
||||
export const checkTheModuleConnectedByTool = (
|
||||
modules: ModuleItemType[],
|
||||
module: ModuleItemType
|
||||
) => {
|
||||
let sign = false;
|
||||
const toolModules = modules.filter((item) => item.flowType === FlowNodeTypeEnum.tools);
|
||||
|
||||
toolModules.forEach((item) => {
|
||||
const toolOutput = item.outputs.find(
|
||||
(output) => output.key === ModuleOutputKeyEnum.selectedTools
|
||||
);
|
||||
toolOutput?.targets.forEach((target) => {
|
||||
if (target.moduleId === module.moduleId) {
|
||||
sign = true;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return sign;
|
||||
};
|
||||
|
||||
export const getHistories = (history?: ChatItemType[] | number, histories: ChatItemType[] = []) => {
|
||||
if (!history) return [];
|
||||
|
||||
@@ -3,7 +3,7 @@ import { ModelTypeEnum } from '@fastgpt/service/core/ai/model';
|
||||
import { addLog } from '@fastgpt/service/common/system/log';
|
||||
import { createUsage, concatUsage } from './controller';
|
||||
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
|
||||
import { ChatModuleUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
|
||||
export const pushChatUsage = ({
|
||||
appName,
|
||||
@@ -11,16 +11,16 @@ export const pushChatUsage = ({
|
||||
teamId,
|
||||
tmbId,
|
||||
source,
|
||||
moduleDispatchBills
|
||||
flowUsages
|
||||
}: {
|
||||
appName: string;
|
||||
appId: string;
|
||||
teamId: string;
|
||||
tmbId: string;
|
||||
source: `${UsageSourceEnum}`;
|
||||
moduleDispatchBills: ChatModuleUsageType[];
|
||||
flowUsages: ChatNodeUsageType[];
|
||||
}) => {
|
||||
const totalPoints = moduleDispatchBills.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
|
||||
const totalPoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
|
||||
|
||||
createUsage({
|
||||
teamId,
|
||||
@@ -29,7 +29,7 @@ export const pushChatUsage = ({
|
||||
appId,
|
||||
totalPoints,
|
||||
source,
|
||||
list: moduleDispatchBills.map((item) => ({
|
||||
list: flowUsages.map((item) => ({
|
||||
moduleName: item.moduleName,
|
||||
amount: item.totalPoints || 0,
|
||||
model: item.model,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user