diff --git a/client/src/api/chat.ts b/client/src/api/chat.ts
index b1f3cac72..fffe7f152 100644
--- a/client/src/api/chat.ts
+++ b/client/src/api/chat.ts
@@ -4,7 +4,7 @@ import type { InitChatResponse, InitShareChatResponse } from './response/chat';
import { RequestPaging } from '../types/index';
import type { ShareChatSchema } from '@/types/mongoSchema';
import type { ShareChatEditType } from '@/types/app';
-import type { QuoteItemType } from '@/pages/api/app/modules/kb/search';
+import type { QuoteItemType } from '@/types/chat';
import type { Props as UpdateHistoryProps } from '@/pages/api/chat/history/updateChatHistory';
/**
diff --git a/client/src/api/fetch.ts b/client/src/api/fetch.ts
index 58f295c66..0255866cb 100644
--- a/client/src/api/fetch.ts
+++ b/client/src/api/fetch.ts
@@ -1,8 +1,7 @@
import { sseResponseEventEnum } from '@/constants/chat';
import { getErrText } from '@/utils/tools';
import { parseStreamChunk } from '@/utils/adapt';
-import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
-import { rawSearchKey } from '@/constants/chat';
+import { QuoteItemType } from '@/types/chat';
interface StreamFetchProps {
url?: string;
@@ -20,7 +19,6 @@ export const streamFetch = ({
responseText: string;
errMsg: string;
newChatId: string | null;
- [rawSearchKey]: QuoteItemType[];
}>(async (resolve, reject) => {
try {
const response = await window.fetch(url, {
@@ -43,7 +41,6 @@ export const streamFetch = ({
// response data
let responseText = '';
- let rawSearch: QuoteItemType[] = [];
let errMsg = '';
const newChatId = response.headers.get('newChatId');
@@ -55,8 +52,7 @@ export const streamFetch = ({
return resolve({
responseText,
errMsg,
- newChatId,
- rawSearch
+ newChatId
});
} else {
return reject({
@@ -82,7 +78,6 @@ export const streamFetch = ({
onMessage(answer);
responseText += answer;
} else if (item.event === sseResponseEventEnum.appStreamResponse) {
- rawSearch = data?.[rawSearchKey] ? data[rawSearchKey] : rawSearch;
} else if (item.event === sseResponseEventEnum.error) {
errMsg = getErrText(data, '流响应错误');
}
@@ -93,8 +88,7 @@ export const streamFetch = ({
return resolve({
responseText,
errMsg,
- newChatId,
- rawSearch
+ newChatId
});
}
reject(getErrText(err, '请求异常'));
diff --git a/client/src/components/ChatBox/QuoteModal.tsx b/client/src/components/ChatBox/QuoteModal.tsx
index 0af5dbd35..6bf3ef68e 100644
--- a/client/src/components/ChatBox/QuoteModal.tsx
+++ b/client/src/components/ChatBox/QuoteModal.tsx
@@ -17,7 +17,7 @@ import { useQuery } from '@tanstack/react-query';
import { getHistoryQuote, updateHistoryQuote } from '@/api/chat';
import { useToast } from '@/hooks/useToast';
import { getErrText } from '@/utils/tools';
-import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
+import { QuoteItemType } from '@/types/chat';
const QuoteModal = ({
chatId,
diff --git a/client/src/components/ChatBox/index.tsx b/client/src/components/ChatBox/index.tsx
index f8c53eb55..b39fa71ad 100644
--- a/client/src/components/ChatBox/index.tsx
+++ b/client/src/components/ChatBox/index.tsx
@@ -22,7 +22,6 @@ import { Box, Card, Flex, Input, Textarea, Button, useTheme } from '@chakra-ui/r
import { useUserStore } from '@/store/user';
import { feConfigs } from '@/store/static';
import { Types } from 'mongoose';
-import { HUMAN_ICON, quoteLenKey, rawSearchKey } from '@/constants/chat';
import { EventNameEnum } from '../Markdown/constant';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
@@ -35,7 +34,7 @@ import { fileDownload } from '@/utils/file';
import { htmlTemplate } from '@/constants/common';
import { useRouter } from 'next/router';
import { useGlobalStore } from '@/store/global';
-import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
+import { QuoteItemType } from '@/types/chat';
import { FlowModuleTypeEnum } from '@/constants/flow';
import dynamic from 'next/dynamic';
@@ -598,7 +597,7 @@ const ChatBox = (
source={item.value}
isChatting={index === chatHistory.length - 1 && isChatting}
/>
- {(!!item[quoteLenKey] || !!item[rawSearchKey]?.length) && (
+ {/* {(!!item[quoteLenKey] || !!item[rawSearchKey]?.length) && (
- )}
+ )} */}
diff --git a/client/src/constants/app.ts b/client/src/constants/app.ts
index 758471851..e950479a6 100644
--- a/client/src/constants/app.ts
+++ b/client/src/constants/app.ts
@@ -1,12 +1,4 @@
/* app */
-export enum AppModuleItemTypeEnum {
- 'variable' = 'variable',
- 'userGuide' = 'userGuide',
- 'initInput' = 'initInput',
- 'http' = 'http', // send a http request
- 'switch' = 'switch', // one input and two outputs
- 'answer' = 'answer' // redirect response
-}
export enum SystemInputEnum {
'welcomeText' = 'welcomeText',
'variables' = 'variables',
@@ -14,10 +6,7 @@ export enum SystemInputEnum {
'history' = 'history',
'userChatInput' = 'userChatInput'
}
-export enum TaskResponseKeyEnum {
- 'answerText' = 'answerText', // answer module text key
- 'responseData' = 'responseData'
-}
+
export enum VariableInputEnum {
input = 'input',
select = 'select'
diff --git a/client/src/constants/chat.ts b/client/src/constants/chat.ts
index 3f0ed25b6..695346a04 100644
--- a/client/src/constants/chat.ts
+++ b/client/src/constants/chat.ts
@@ -12,6 +12,11 @@ export enum ChatRoleEnum {
AI = 'AI'
}
+export enum TaskResponseKeyEnum {
+ 'answerText' = 'answerText', // answer module text key
+ 'responseData' = 'responseData'
+}
+
export const ChatRoleMap = {
[ChatRoleEnum.System]: {
name: '系统提示词'
@@ -46,10 +51,5 @@ export const ChatSourceMap = {
}
};
-export const responseDataKey = 'responseData';
-
-export const rawSearchKey = 'rawSearch';
-export const quoteLenKey = 'quoteLen';
-
export const HUMAN_ICON = `https://fastgpt.run/icon/human.png`;
export const LOGO_ICON = `https://fastgpt.run/icon/logo.png`;
diff --git a/client/src/constants/flow/ModuleTemplate.ts b/client/src/constants/flow/ModuleTemplate.ts
index 44020104c..68aa13d5c 100644
--- a/client/src/constants/flow/ModuleTemplate.ts
+++ b/client/src/constants/flow/ModuleTemplate.ts
@@ -1,13 +1,14 @@
-import { AppModuleItemTypeEnum, SystemInputEnum, TaskResponseKeyEnum } from '../app';
+import { SystemInputEnum } from '../app';
+import { TaskResponseKeyEnum } from '../chat';
import { FlowModuleTypeEnum, FlowInputItemTypeEnum, FlowOutputItemTypeEnum } from './index';
-import type { AppItemType, AppModuleTemplateItemType } from '@/types/app';
+import type { AppItemType } from '@/types/app';
+import type { FlowModuleTemplateType } from '@/types/flow';
import { chatModelList } from '@/store/static';
import {
Input_Template_History,
Input_Template_TFSwitch,
Input_Template_UserChatInput
} from './inputTemplate';
-import { rawSearchKey } from '../chat';
export const ChatModelSystemTip =
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}';
@@ -17,13 +18,12 @@ export const userGuideTip = '可以添加特殊的对话前后引导模块,更
export const welcomeTextTip =
'每次对话开始前,发送一个初始内容。支持标准 Markdown 语法,可使用的额外标记:\n[快捷按键]: 用户点击后可以直接发送该问题';
-export const VariableModule: AppModuleTemplateItemType = {
+export const VariableModule: FlowModuleTemplateType = {
logo: '/imgs/module/variable.png',
name: '全局变量',
intro: '可以在对话开始前,要求用户填写一些内容作为本轮对话的变量。该模块位于开场引导之后。',
description:
'全局变量可以通过 {{变量key}} 的形式注入到其他模块的文本中。目前支持:提示词、限定词。',
- type: AppModuleItemTypeEnum.variable,
flowType: FlowModuleTypeEnum.variable,
inputs: [
{
@@ -35,11 +35,10 @@ export const VariableModule: AppModuleTemplateItemType = {
],
outputs: []
};
-export const UserGuideModule: AppModuleTemplateItemType = {
+export const UserGuideModule: FlowModuleTemplateType = {
logo: '/imgs/module/userGuide.png',
name: '用户引导',
intro: userGuideTip,
- type: AppModuleItemTypeEnum.userGuide,
flowType: FlowModuleTypeEnum.userGuide,
inputs: [
{
@@ -50,12 +49,11 @@ export const UserGuideModule: AppModuleTemplateItemType = {
],
outputs: []
};
-export const UserInputModule: AppModuleTemplateItemType = {
+export const UserInputModule: FlowModuleTemplateType = {
logo: '/imgs/module/userChatInput.png',
name: '用户问题',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
- type: AppModuleItemTypeEnum.initInput,
- flowType: FlowModuleTypeEnum.questionInputNode,
+ flowType: FlowModuleTypeEnum.questionInput,
url: '/app/modules/init/userChatInput',
inputs: [
{
@@ -73,11 +71,10 @@ export const UserInputModule: AppModuleTemplateItemType = {
}
]
};
-export const HistoryModule: AppModuleTemplateItemType = {
+export const HistoryModule: FlowModuleTemplateType = {
logo: '/imgs/module/history.png',
name: '聊天记录',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
- type: AppModuleItemTypeEnum.initInput,
flowType: FlowModuleTypeEnum.historyNode,
url: '/app/modules/init/history',
inputs: [
@@ -106,12 +103,11 @@ export const HistoryModule: AppModuleTemplateItemType = {
};
const defaultModel = chatModelList[0];
-export const ChatModule: AppModuleTemplateItemType = {
+export const ChatModule: FlowModuleTemplateType = {
logo: '/imgs/module/AI.png',
name: 'AI 对话',
intro: 'AI 大模型对话',
flowType: FlowModuleTypeEnum.chatNode,
- type: AppModuleItemTypeEnum.http,
url: '/app/modules/chat/gpt',
inputs: [
{
@@ -166,9 +162,9 @@ export const ChatModule: AppModuleTemplateItemType = {
placeholder: ChatModelLimitTip,
value: ''
},
- // Input_Template_TFSwitch,
+ Input_Template_TFSwitch,
{
- key: 'quotePrompt',
+ key: 'quoteQA',
type: FlowInputItemTypeEnum.target,
label: '引用内容'
},
@@ -186,12 +182,11 @@ export const ChatModule: AppModuleTemplateItemType = {
]
};
-export const KBSearchModule: AppModuleTemplateItemType = {
+export const KBSearchModule: FlowModuleTemplateType = {
logo: '/imgs/module/db.png',
name: '知识库搜索',
intro: '去知识库中搜索对应的答案。可作为 AI 对话引用参考。',
flowType: FlowModuleTypeEnum.kbSearchNode,
- type: AppModuleItemTypeEnum.http,
url: '/app/modules/kb/search',
inputs: [
{
@@ -232,13 +227,6 @@ export const KBSearchModule: AppModuleTemplateItemType = {
Input_Template_UserChatInput
],
outputs: [
- {
- key: rawSearchKey,
- label: '源搜索数据',
- type: FlowOutputItemTypeEnum.hidden,
- response: true,
- targets: []
- },
{
key: 'isEmpty',
label: '搜索结果为空',
@@ -246,7 +234,13 @@ export const KBSearchModule: AppModuleTemplateItemType = {
targets: []
},
{
- key: 'quotePrompt',
+ key: 'unEmpty',
+ label: '搜索结果不为空',
+ type: FlowOutputItemTypeEnum.source,
+ targets: []
+ },
+ {
+ key: 'quoteQA',
label: '引用内容',
description: '搜索结果为空时不返回',
type: FlowOutputItemTypeEnum.source,
@@ -255,16 +249,15 @@ export const KBSearchModule: AppModuleTemplateItemType = {
]
};
-export const AnswerModule: AppModuleTemplateItemType = {
+export const AnswerModule: FlowModuleTemplateType = {
logo: '/imgs/module/reply.png',
name: '指定回复',
intro: '该模块可以直接回复一段指定的内容。常用于引导、提示。',
- type: AppModuleItemTypeEnum.answer,
flowType: FlowModuleTypeEnum.answerNode,
inputs: [
Input_Template_TFSwitch,
{
- key: TaskResponseKeyEnum.answerText,
+ key: 'text',
value: '',
type: FlowInputItemTypeEnum.textarea,
label: '回复的内容'
@@ -272,11 +265,10 @@ export const AnswerModule: AppModuleTemplateItemType = {
],
outputs: []
};
-export const TFSwitchModule: AppModuleTemplateItemType = {
+export const TFSwitchModule: FlowModuleTemplateType = {
logo: '',
name: 'TF开关',
intro: '可以判断输入的内容为 True 或者 False,从而执行不同操作。',
- type: AppModuleItemTypeEnum.switch,
flowType: FlowModuleTypeEnum.tfSwitchNode,
inputs: [
{
@@ -300,13 +292,12 @@ export const TFSwitchModule: AppModuleTemplateItemType = {
}
]
};
-export const ClassifyQuestionModule: AppModuleTemplateItemType = {
+export const ClassifyQuestionModule: FlowModuleTemplateType = {
logo: '/imgs/module/cq.png',
name: '问题分类',
intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
description:
'根据用户的历史记录和当前问题判断该次提问的类型。可以添加多组问题类型,下面是一个模板例子:\n类型1: 打招呼\n类型2: 关于 laf 通用问题\n类型3: 关于 laf 代码问题\n类型4: 其他问题',
- type: AppModuleItemTypeEnum.http,
url: '/app/modules/agent/classifyQuestion',
flowType: FlowModuleTypeEnum.classifyQuestion,
inputs: [
@@ -362,6 +353,15 @@ export const ClassifyQuestionModule: AppModuleTemplateItemType = {
}
]
};
+export const EmptyModule: FlowModuleTemplateType = {
+ logo: '/imgs/module/cq.png',
+ name: '该模块已被移除',
+ intro: '',
+ description: '',
+ flowType: FlowModuleTypeEnum.empty,
+ inputs: [],
+ outputs: []
+};
export const ModuleTemplates = [
{
@@ -381,1302 +381,1301 @@ export const ModuleTemplates = [
list: [ClassifyQuestionModule]
}
];
+export const ModuleTemplatesFlat = ModuleTemplates.map((templates) => templates.list)?.flat();
// template
export const appTemplates: (AppItemType & { avatar: string; intro: string })[] = [
- {
- id: 'simpleChat',
- avatar: '/imgs/module/AI.png',
- name: '简单的对话',
- intro: '一个极其简单的 AI 对话应用',
- modules: [
- {
- ...UserInputModule,
- inputs: [
- {
- key: 'userChatInput',
- type: 'systemInput',
- label: '用户问题',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'userChatInput',
- label: '用户问题',
- type: 'source',
- targets: [
- {
- moduleId: '7pacf0',
- key: 'userChatInput'
- }
- ]
- }
- ],
- position: {
- x: 477.9074315528994,
- y: 1604.2106242223683
- },
- moduleId: '7z5g5h'
- },
- {
- ...ChatModule,
- inputs: [
- {
- key: 'model',
- type: 'custom',
- label: '对话模型',
- value: 'gpt-3.5-turbo-16k',
- list: [
- {
- label: 'FastAI-4k',
- value: 'gpt-3.5-turbo'
- },
- {
- label: 'FastAI-16k',
- value: 'gpt-3.5-turbo-16k'
- },
- {
- label: 'FastAI-Plus',
- value: 'gpt-4'
- }
- ],
- connected: false
- },
- {
- key: 'temperature',
- type: 'custom',
- label: '温度',
- value: 0,
- min: 0,
- max: 10,
- step: 1,
- markList: [
- {
- label: '严谨',
- value: 0
- },
- {
- label: '发散',
- value: 10
- }
- ],
- connected: false
- },
- {
- key: 'maxToken',
- type: 'custom',
- label: '回复上限',
- value: 8000,
- min: 100,
- max: 16000,
- step: 50,
- markList: [
- {
- label: '0',
- value: 0
- },
- {
- label: '16000',
- value: 16000
- }
- ],
- connected: false
- },
- {
- key: 'systemPrompt',
- type: 'textarea',
- label: '系统提示词',
- description:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- placeholder:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- value: '',
- connected: false
- },
- {
- key: 'limitPrompt',
- type: 'textarea',
- label: '限定词',
- description:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- placeholder:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- value: '',
- connected: false
- },
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: false
- },
- {
- key: 'quotePrompt',
- type: 'target',
- label: '引用内容',
- connected: false
- },
- {
- key: 'history',
- type: 'target',
- label: '聊天记录',
- connected: true
- },
- {
- key: 'userChatInput',
- type: 'target',
- label: '用户问题',
- connected: true
- }
- ],
- outputs: [
- {
- key: 'answerText',
- label: '模型回复',
- description: '直接响应,无需配置',
- type: 'hidden',
- targets: []
- }
- ],
- position: {
- x: 981.9682828103937,
- y: 890.014595014464
- },
- moduleId: '7pacf0'
- },
- {
- ...HistoryModule,
- inputs: [
- {
- key: 'maxContext',
- type: 'numberInput',
- label: '最长记录数',
- value: 10,
- min: 0,
- max: 50,
- connected: false
- },
- {
- key: 'history',
- type: 'hidden',
- label: '聊天记录',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'history',
- label: '聊天记录',
- type: 'source',
- targets: [
- {
- moduleId: '7pacf0',
- key: 'history'
- }
- ]
- }
- ],
- position: {
- x: 452.5466249541586,
- y: 1276.3930310334215
- },
- moduleId: 'xj0c9p'
- }
- ]
- },
- {
- id: 'simpleKbChat',
- avatar: '/imgs/module/db.png',
- name: '知识库 + 对话引导',
- intro: '每次提问时进行一次知识库搜索,将搜索结果注入 LLM 模型进行参考回答',
- modules: [
- {
- ...UserInputModule,
- inputs: [
- {
- key: 'userChatInput',
- type: 'systemInput',
- label: '用户问题',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'userChatInput',
- label: '用户问题',
- type: 'source',
- targets: [
- {
- moduleId: 'q9v14m',
- key: 'userChatInput'
- },
- {
- moduleId: 'qbf8td',
- key: 'userChatInput'
- }
- ]
- }
- ],
- position: {
- x: -196.84632684738483,
- y: 797.3401378431948
- },
- moduleId: 'v0nc1s'
- },
- {
- ...HistoryModule,
- inputs: [
- {
- key: 'maxContext',
- type: 'numberInput',
- label: '最长记录数',
- value: 10,
- min: 0,
- max: 50,
- connected: false
- },
- {
- key: 'history',
- type: 'hidden',
- label: '聊天记录',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'history',
- label: '聊天记录',
- type: 'source',
- targets: [
- {
- moduleId: 'qbf8td',
- key: 'history'
- }
- ]
- }
- ],
- position: {
- x: 211.58250540918442,
- y: 611.8700401034965
- },
- moduleId: 'k9y3jm'
- },
- {
- ...ChatModule,
- inputs: [
- {
- key: 'model',
- type: 'custom',
- label: '对话模型',
- value: 'gpt-3.5-turbo-16k',
- list: [
- {
- label: 'FastAI-4k',
- value: 'gpt-3.5-turbo'
- },
- {
- label: 'FastAI-16k',
- value: 'gpt-3.5-turbo-16k'
- },
- {
- label: 'FastAI-Plus',
- value: 'gpt-4'
- }
- ],
- connected: false
- },
- {
- key: 'temperature',
- type: 'custom',
- label: '温度',
- value: 0,
- min: 0,
- max: 10,
- step: 1,
- markList: [
- {
- label: '严谨',
- value: 0
- },
- {
- label: '发散',
- value: 10
- }
- ],
- connected: false
- },
- {
- key: 'maxToken',
- type: 'custom',
- label: '回复上限',
- value: 8000,
- min: 100,
- max: 16000,
- step: 50,
- markList: [
- {
- label: '0',
- value: 0
- },
- {
- label: '16000',
- value: 16000
- }
- ],
- connected: false
- },
- {
- key: 'systemPrompt',
- type: 'textarea',
- label: '系统提示词',
- description:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- placeholder:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- value: '',
- connected: false
- },
- {
- key: 'limitPrompt',
- type: 'textarea',
- label: '限定词',
- description:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- placeholder:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- value: '知识库是关于 Laf 的内容,参考知识库回答我的问题。',
- connected: false
- },
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: true
- },
- {
- key: 'quotePrompt',
- type: 'target',
- label: '引用内容',
- connected: true
- },
- {
- key: 'history',
- type: 'target',
- label: '聊天记录',
- connected: true
- },
- {
- key: 'userChatInput',
- type: 'target',
- label: '用户问题',
- connected: true
- }
- ],
- outputs: [
- {
- key: 'answerText',
- label: '模型回复',
- description: '直接响应,无需配置',
- type: 'hidden',
- targets: []
- }
- ],
- position: {
- x: 745.484449528062,
- y: 259.9361900288137
- },
- moduleId: 'qbf8td'
- },
- {
- ...KBSearchModule,
- inputs: [
- {
- key: 'kbList',
- type: 'custom',
- label: '关联的知识库',
- value: [],
- list: [],
- connected: false
- },
- {
- key: 'similarity',
- type: 'custom',
- label: '相似度',
- value: 0.8,
- min: 0,
- max: 1,
- step: 0.01,
- markList: [
- {
- label: '0',
- value: 0
- },
- {
- label: '1',
- value: 1
- }
- ],
- connected: false
- },
- {
- key: 'limit',
- type: 'custom',
- label: '单次搜索上限',
- description: '最多取 n 条记录作为本次问题引用',
- value: 5,
- min: 1,
- max: 20,
- step: 1,
- markList: [
- {
- label: '1',
- value: 1
- },
- {
- label: '20',
- value: 20
- }
- ],
- connected: false
- },
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: true
- },
- {
- key: 'userChatInput',
- type: 'target',
- label: '用户问题',
- connected: true
- }
- ],
- outputs: [
- {
- key: rawSearchKey,
- label: '源搜索数据',
- type: 'hidden',
- response: true,
- targets: []
- },
- {
- key: 'isEmpty',
- label: '搜索结果为空',
- type: 'source',
- targets: [
- {
- moduleId: 'w8av9y',
- key: 'switch'
- }
- ]
- },
- {
- key: 'quotePrompt',
- label: '引用内容',
- description: '搜索结果为空时不返回',
- type: 'source',
- targets: [
- {
- moduleId: 'qbf8td',
- key: 'quotePrompt'
- }
- ]
- }
- ],
- position: {
- x: 101.2612930583856,
- y: -31.342317423453437
- },
- moduleId: 'q9v14m'
- },
- {
- ...AnswerModule,
- inputs: [
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: true
- },
- {
- key: 'answerText',
- value: '对不起,我没有找到你的问题',
- type: 'input',
- label: '回复的内容',
- connected: false
- }
- ],
- outputs: [],
- position: {
- x: 673.6108151684664,
- y: -84.13355134221933
- },
- moduleId: 'w8av9y'
- },
- {
- ...UserGuideModule,
- inputs: [
- {
- key: 'welcomeText',
- type: 'input',
- label: '开场白',
- value:
- '你好,我是 Laf 助手,请问有什么可以帮助你的么?\n[laf 是什么?]\n[官网是多少?]',
- connected: false
- }
- ],
- outputs: [],
- position: {
- x: -338.02984747117785,
- y: 203.21398144017178
- },
- moduleId: 'v7lq0x'
- }
- ]
- },
- {
- id: 'chatGuide',
- avatar: '/imgs/module/userGuide.png',
- name: '对话引导 + 变量',
- intro: '可以在对话开始发送一段提示,或者让用户填写一些内容,作为本次对话的变量',
- modules: [
- {
- ...UserInputModule,
- inputs: [
- {
- key: 'userChatInput',
- type: 'systemInput',
- label: '用户问题',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'userChatInput',
- label: '用户问题',
- type: 'source',
- targets: [
- {
- moduleId: '7pacf0',
- key: 'userChatInput'
- }
- ]
- }
- ],
- position: {
- x: 485.8457451202796,
- y: 1601.0352987954163
- },
- moduleId: '7z5g5h'
- },
- {
- ...ChatModule,
- inputs: [
- {
- key: 'model',
- type: 'custom',
- label: '对话模型',
- value: 'gpt-3.5-turbo-16k',
- list: [
- {
- label: 'FastAI-4k',
- value: 'gpt-3.5-turbo'
- },
- {
- label: 'FastAI-16k',
- value: 'gpt-3.5-turbo-16k'
- },
- {
- label: 'FastAI-Plus',
- value: 'gpt-4'
- }
- ],
- connected: false
- },
- {
- key: 'temperature',
- type: 'custom',
- label: '温度',
- value: 0,
- min: 0,
- max: 10,
- step: 1,
- markList: [
- {
- label: '严谨',
- value: 0
- },
- {
- label: '发散',
- value: 10
- }
- ],
- connected: false
- },
- {
- key: 'maxToken',
- type: 'custom',
- label: '回复上限',
- value: 8000,
- min: 100,
- max: 16000,
- step: 50,
- markList: [
- {
- label: '0',
- value: 0
- },
- {
- label: '16000',
- value: 16000
- }
- ],
- connected: false
- },
- {
- key: 'systemPrompt',
- type: 'textarea',
- label: '系统提示词',
- description:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- placeholder:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- value: '',
- connected: false
- },
- {
- key: 'limitPrompt',
- type: 'textarea',
- label: '限定词',
- description:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- placeholder:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- value: '将我发送的任何内容,直接翻译成{{language}}',
- connected: false
- },
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: false
- },
- {
- key: 'quotePrompt',
- type: 'target',
- label: '引用内容',
- connected: false
- },
- {
- key: 'history',
- type: 'target',
- label: '聊天记录',
- connected: true
- },
- {
- key: 'userChatInput',
- type: 'target',
- label: '用户问题',
- connected: true
- }
- ],
- outputs: [
- {
- key: 'answerText',
- label: '模型回复',
- description: '直接响应,无需配置',
- type: 'hidden',
- targets: []
- }
- ],
- position: {
- x: 981.9682828103937,
- y: 890.014595014464
- },
- moduleId: '7pacf0'
- },
- {
- ...HistoryModule,
- inputs: [
- {
- key: 'maxContext',
- type: 'numberInput',
- label: '最长记录数',
- value: 10,
- min: 0,
- max: 50,
- connected: false
- },
- {
- key: 'history',
- type: 'hidden',
- label: '聊天记录',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'history',
- label: '聊天记录',
- type: 'source',
- targets: [
- {
- moduleId: '7pacf0',
- key: 'history'
- }
- ]
- }
- ],
- position: {
- x: 446.2698477029736,
- y: 1281.1006139718102
- },
- moduleId: 'xj0c9p'
- },
- {
- ...VariableModule,
- inputs: [
- {
- key: 'variables',
- type: 'systemInput',
- label: '变量输入',
- value: [
- {
- id: 'z3bs2f',
- key: 'language',
- label: '目标语言',
- type: 'select',
- required: true,
- maxLen: 50,
- enums: [
- {
- value: '英语'
- },
- {
- value: '法语'
- },
- {
- value: '日语'
- }
- ]
- }
- ],
- connected: false
- }
- ],
- outputs: [],
- position: {
- x: 513.9049244392417,
- y: 996.8739106932076
- },
- moduleId: '7blchb'
- },
- {
- ...UserGuideModule,
- inputs: [
- {
- key: 'welcomeText',
- type: 'input',
- label: '开场白',
- value: '你好,我是翻译助手,可以帮你翻译任何语言。请告诉我,你需要翻译成什么语言?',
- connected: false
- }
- ],
- outputs: [],
- position: {
- x: 173.17995039750167,
- y: 982.945778706804
- },
- moduleId: 'w35iml'
- }
- ]
- },
- {
- id: 'CQ',
- avatar: '/imgs/module/cq.png',
- name: '问题分类 + 知识库',
- intro: '先对用户的问题进行分类,再根据不同类型问题,执行不同的操作',
- modules: [
- {
- ...UserInputModule,
- inputs: [
- {
- key: 'userChatInput',
- type: 'systemInput',
- label: '用户问题',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'userChatInput',
- label: '用户问题',
- type: 'source',
- targets: [
- {
- moduleId: '3n49vn',
- key: 'userChatInput'
- },
- {
- moduleId: 's7qnhf',
- key: 'userChatInput'
- },
- {
- moduleId: '15c9bv',
- key: 'userChatInput'
- }
- ]
- }
- ],
- position: {
- x: -216.08819066976912,
- y: 585.9302721518841
- },
- moduleId: 'xzj0oo'
- },
- {
- ...HistoryModule,
- inputs: [
- {
- key: 'maxContext',
- type: 'numberInput',
- label: '最长记录数',
- value: 10,
- min: 0,
- max: 50,
- connected: false
- },
- {
- key: 'history',
- type: 'hidden',
- label: '聊天记录',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'history',
- label: '聊天记录',
- type: 'source',
- targets: [
- {
- moduleId: '3n49vn',
- key: 'history'
- }
- ]
- }
- ],
- position: {
- x: 1146.0216647621794,
- y: 236.92269104756855
- },
- moduleId: 'hh6of9'
- },
- {
- ...ChatModule,
- inputs: [
- {
- key: 'model',
- type: 'select',
- label: '对话模型',
- value: 'gpt-3.5-turbo',
- list: [
- {
- label: 'FastAI-16k',
- value: 'gpt-3.5-turbo-16k'
- },
- {
- label: 'FastAI-4k',
- value: 'gpt-3.5-turbo'
- },
- {
- label: 'FastAI-Plus-8k',
- value: 'gpt-4'
- }
- ],
- connected: false
- },
- {
- key: 'temperature',
- type: 'custom',
- label: '温度',
- value: 0,
- min: 0,
- max: 10,
- step: 1,
- markList: [
- {
- label: '严谨',
- value: 0
- },
- {
- label: '发散',
- value: 10
- }
- ],
- connected: false
- },
- {
- key: 'maxToken',
- type: 'custom',
- label: '回复上限',
- value: 3000,
- min: 100,
- max: 4000,
- step: 50,
- markList: [
- {
- label: '0',
- value: 0
- },
- {
- label: '4000',
- value: 4000
- }
- ],
- connected: false
- },
- {
- key: 'systemPrompt',
- type: 'textarea',
- label: '系统提示词',
- description:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- placeholder:
- '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
- value: '你是 Laf 助手,可以回答 Laf 相关问题。',
- connected: false
- },
- {
- key: 'limitPrompt',
- type: 'textarea',
- label: '限定词',
- description:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- placeholder:
- '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
- value: '知识库是 Laf 的内容,参考知识库回答问题。',
- connected: false
- },
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: false
- },
- {
- key: 'quotePrompt',
- type: 'target',
- label: '引用内容',
- connected: true
- },
- {
- key: 'history',
- type: 'target',
- label: '聊天记录',
- connected: true
- },
- {
- key: 'userChatInput',
- type: 'target',
- label: '用户问题',
- connected: true
- }
- ],
- outputs: [
- {
- key: 'answerText',
- label: '模型回复',
- description: '直接响应,无需配置',
- type: 'hidden',
- targets: []
- }
- ],
- position: {
- x: 1494.4843114348841,
- y: -13.57201521210618
- },
- moduleId: '3n49vn'
- },
- {
- ...KBSearchModule,
- inputs: [
- {
- key: 'kbList',
- type: 'custom',
- label: '关联的知识库',
- value: [],
- list: [],
- connected: false
- },
- {
- key: 'similarity',
- type: 'custom',
- label: '相似度',
- value: 0.8,
- min: 0,
- max: 1,
- step: 0.01,
- markList: [
- {
- label: '0',
- value: 0
- },
- {
- label: '1',
- value: 1
- }
- ],
- connected: false
- },
- {
- key: 'limit',
- type: 'custom',
- label: '单次搜索上限',
- description: '最多取 n 条记录作为本次问题引用',
- value: 5,
- min: 1,
- max: 20,
- step: 1,
- markList: [
- {
- label: '1',
- value: 1
- },
- {
- label: '20',
- value: 20
- }
- ],
- connected: false
- },
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: true
- },
- {
- key: 'userChatInput',
- type: 'target',
- label: '用户问题',
- connected: true
- }
- ],
- outputs: [
- {
- key: rawSearchKey,
- label: '源搜索数据',
- type: 'hidden',
- response: true,
- targets: []
- },
- {
- key: 'isEmpty',
- label: '搜索结果为空',
- type: 'source',
- targets: [
- {
- moduleId: 'phwr0u',
- key: 'switch'
- }
- ]
- },
- {
- key: 'quotePrompt',
- label: '引用内容',
- description: '搜索结果为空时不返回',
- type: 'source',
- targets: [
- {
- moduleId: '3n49vn',
- key: 'quotePrompt'
- }
- ]
- }
- ],
- position: {
- x: 690.1930900957847,
- y: 102.10119978743109
- },
- moduleId: 's7qnhf'
- },
- {
- ...HistoryModule,
- inputs: [
- {
- key: 'maxContext',
- type: 'numberInput',
- label: '最长记录数',
- value: 2,
- min: 0,
- max: 50,
- connected: false
- },
- {
- key: 'history',
- type: 'hidden',
- label: '聊天记录',
- connected: false
- }
- ],
- outputs: [
- {
- key: 'history',
- label: '聊天记录',
- type: 'source',
- targets: [
- {
- moduleId: '15c9bv',
- key: 'history'
- }
- ]
- }
- ],
- position: {
- x: -274.2362185453961,
- y: 152.19755525696058
- },
- moduleId: 'qiwrjt'
- },
- {
- ...AnswerModule,
- inputs: [
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: true
- },
- {
- key: 'answerText',
- value: '你好,我是 Laf 助手,有什么可以帮助你的么?',
- type: 'input',
- label: '回复的内容',
- connected: false
- }
- ],
- outputs: [],
- position: {
- x: 686.1260929408212,
- y: -142.16731465682332
- },
- moduleId: 'l4e36k'
- },
- {
- ...AnswerModule,
- inputs: [
- {
- key: 'switch',
- type: 'target',
- label: '触发器',
- connected: true
- },
- {
- key: 'answerText',
- value: '对不起,我无法回答你的问题,请问有什么关于 Laf 的问题么?',
- type: 'input',
- label: '回复的内容',
- connected: false
- }
- ],
- outputs: [],
- position: {
- x: 1469.3636235179692,
- y: 937.5555811306511
- },
- moduleId: 'phwr0u'
- },
- {
- ...ClassifyQuestionModule,
- inputs: [
- {
- key: 'systemPrompt',
- type: 'textarea',
- label: '系统提示词',
- description:
- '你可以添加一些特定内容的介绍,从而更好的识别用户的问题类型。这个内容通常是给模型介绍一个它不知道的内容。',
- placeholder: '例如: \n1. Laf 是一个云函数开发平台……\n2. Sealos 是一个集群操作系统',
- value:
- ' laf 是什么\nlaf 是云开发平台,可以快速的开发应用\nlaf 是一个开源的 BaaS 开发平台(Backend as a Service)\nlaf 是一个开箱即用的 serverless 开发平台\nlaf 是一个集「函数计算」、「数据库」、「对象存储」等于一身的一站式开发平台\nlaf 可以是开源版的腾讯云开发、开源版的 Google Firebase、开源版的 UniCloud\nlaf 让每个开发团队都可以随时拥有一个自己的云开发平台!',
- connected: false
- },
- {
- key: 'history',
- type: 'target',
- label: '聊天记录',
- connected: true
- },
- {
- key: 'userChatInput',
- type: 'target',
- label: '用户问题',
- connected: true
- },
- {
- key: 'agents',
- type: 'custom',
- label: '',
- value: [
- {
- value: '打招呼、问候等',
- key: 'fasw'
- },
- {
- value: '关于 laf 云函数的问题',
- key: 'fqsw'
- },
- {
- value: '其他问题',
- key: 'q73b'
- }
- ],
- connected: false
- }
- ],
- outputs: [
- {
- key: 'fasw',
- label: '',
- type: 'hidden',
- targets: [
- {
- moduleId: 'l4e36k',
- key: 'switch'
- }
- ]
- },
- {
- key: 'fqsw',
- label: '',
- type: 'hidden',
- targets: [
- {
- moduleId: 's7qnhf',
- key: 'switch'
- }
- ]
- },
- {
- key: 'q73b',
- label: '',
- type: 'hidden',
- targets: [
- {
- moduleId: 'phwr0u',
- key: 'switch'
- }
- ]
- }
- ],
- position: {
- x: 154.9724540917009,
- y: -37.48714632270105
- },
- moduleId: '15c9bv'
- }
- ]
- }
+ // {
+ // id: 'simpleChat',
+ // avatar: '/imgs/module/AI.png',
+ // name: '简单的对话',
+ // intro: '一个极其简单的 AI 对话应用',
+ // modules: [
+ // {
+ // ...UserInputModule,
+ // inputs: [
+ // {
+ // key: 'userChatInput',
+ // type: 'systemInput',
+ // label: '用户问题',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'userChatInput',
+ // label: '用户问题',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '7pacf0',
+ // key: 'userChatInput'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 477.9074315528994,
+ // y: 1604.2106242223683
+ // },
+ // moduleId: '7z5g5h'
+ // },
+ // {
+ // ...ChatModule,
+ // inputs: [
+ // {
+ // key: 'model',
+ // type: 'custom',
+ // label: '对话模型',
+ // value: 'gpt-3.5-turbo-16k',
+ // list: [
+ // {
+ // label: 'FastAI-4k',
+ // value: 'gpt-3.5-turbo'
+ // },
+ // {
+ // label: 'FastAI-16k',
+ // value: 'gpt-3.5-turbo-16k'
+ // },
+ // {
+ // label: 'FastAI-Plus',
+ // value: 'gpt-4'
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'temperature',
+ // type: 'custom',
+ // label: '温度',
+ // value: 0,
+ // min: 0,
+ // max: 10,
+ // step: 1,
+ // markList: [
+ // {
+ // label: '严谨',
+ // value: 0
+ // },
+ // {
+ // label: '发散',
+ // value: 10
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'maxToken',
+ // type: 'custom',
+ // label: '回复上限',
+ // value: 8000,
+ // min: 100,
+ // max: 16000,
+ // step: 50,
+ // markList: [
+ // {
+ // label: '0',
+ // value: 0
+ // },
+ // {
+ // label: '16000',
+ // value: 16000
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'systemPrompt',
+ // type: 'textarea',
+ // label: '系统提示词',
+ // description:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // placeholder:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // value: '',
+ // connected: false
+ // },
+ // {
+ // key: 'limitPrompt',
+ // type: 'textarea',
+ // label: '限定词',
+ // description:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // placeholder:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // value: '',
+ // connected: false
+ // },
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: false
+ // },
+ // {
+ // key: 'quoteQA',
+ // type: 'target',
+ // label: '引用内容',
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'target',
+ // label: '聊天记录',
+ // connected: true
+ // },
+ // {
+ // key: 'userChatInput',
+ // type: 'target',
+ // label: '用户问题',
+ // connected: true
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'answerText',
+ // label: '模型回复',
+ // description: '直接响应,无需配置',
+ // type: 'hidden',
+ // targets: []
+ // }
+ // ],
+ // position: {
+ // x: 981.9682828103937,
+ // y: 890.014595014464
+ // },
+ // moduleId: '7pacf0'
+ // },
+ // {
+ // ...HistoryModule,
+ // inputs: [
+ // {
+ // key: 'maxContext',
+ // type: 'numberInput',
+ // label: '最长记录数',
+ // value: 10,
+ // min: 0,
+ // max: 50,
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'hidden',
+ // label: '聊天记录',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'history',
+ // label: '聊天记录',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '7pacf0',
+ // key: 'history'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 452.5466249541586,
+ // y: 1276.3930310334215
+ // },
+ // moduleId: 'xj0c9p'
+ // }
+ // ]
+ // },
+ // {
+ // id: 'simpleKbChat',
+ // avatar: '/imgs/module/db.png',
+ // name: '知识库 + 对话引导',
+ // intro: '每次提问时进行一次知识库搜索,将搜索结果注入 LLM 模型进行参考回答',
+ // modules: [
+ // {
+ // ...UserInputModule,
+ // inputs: [
+ // {
+ // key: 'userChatInput',
+ // type: 'systemInput',
+ // label: '用户问题',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'userChatInput',
+ // label: '用户问题',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: 'q9v14m',
+ // key: 'userChatInput'
+ // },
+ // {
+ // moduleId: 'qbf8td',
+ // key: 'userChatInput'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: -196.84632684738483,
+ // y: 797.3401378431948
+ // },
+ // moduleId: 'v0nc1s'
+ // },
+ // {
+ // ...HistoryModule,
+ // inputs: [
+ // {
+ // key: 'maxContext',
+ // type: 'numberInput',
+ // label: '最长记录数',
+ // value: 10,
+ // min: 0,
+ // max: 50,
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'hidden',
+ // label: '聊天记录',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'history',
+ // label: '聊天记录',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: 'qbf8td',
+ // key: 'history'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 211.58250540918442,
+ // y: 611.8700401034965
+ // },
+ // moduleId: 'k9y3jm'
+ // },
+ // {
+ // ...ChatModule,
+ // inputs: [
+ // {
+ // key: 'model',
+ // type: 'custom',
+ // label: '对话模型',
+ // value: 'gpt-3.5-turbo-16k',
+ // list: [
+ // {
+ // label: 'FastAI-4k',
+ // value: 'gpt-3.5-turbo'
+ // },
+ // {
+ // label: 'FastAI-16k',
+ // value: 'gpt-3.5-turbo-16k'
+ // },
+ // {
+ // label: 'FastAI-Plus',
+ // value: 'gpt-4'
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'temperature',
+ // type: 'custom',
+ // label: '温度',
+ // value: 0,
+ // min: 0,
+ // max: 10,
+ // step: 1,
+ // markList: [
+ // {
+ // label: '严谨',
+ // value: 0
+ // },
+ // {
+ // label: '发散',
+ // value: 10
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'maxToken',
+ // type: 'custom',
+ // label: '回复上限',
+ // value: 8000,
+ // min: 100,
+ // max: 16000,
+ // step: 50,
+ // markList: [
+ // {
+ // label: '0',
+ // value: 0
+ // },
+ // {
+ // label: '16000',
+ // value: 16000
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'systemPrompt',
+ // type: 'textarea',
+ // label: '系统提示词',
+ // description:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // placeholder:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // value: '',
+ // connected: false
+ // },
+ // {
+ // key: 'limitPrompt',
+ // type: 'textarea',
+ // label: '限定词',
+ // description:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // placeholder:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // value: '知识库是关于 Laf 的内容,参考知识库回答我的问题。',
+ // connected: false
+ // },
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: true
+ // },
+ // {
+ // key: 'quoteQA',
+ // type: 'target',
+ // label: '引用内容',
+ // connected: true
+ // },
+ // {
+ // key: 'history',
+ // type: 'target',
+ // label: '聊天记录',
+ // connected: true
+ // },
+ // {
+ // key: 'userChatInput',
+ // type: 'target',
+ // label: '用户问题',
+ // connected: true
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'answerText',
+ // label: '模型回复',
+ // description: '直接响应,无需配置',
+ // type: 'hidden',
+ // targets: []
+ // }
+ // ],
+ // position: {
+ // x: 745.484449528062,
+ // y: 259.9361900288137
+ // },
+ // moduleId: 'qbf8td'
+ // },
+ // {
+ // ...KBSearchModule,
+ // inputs: [
+ // {
+ // key: 'kbList',
+ // type: 'custom',
+ // label: '关联的知识库',
+ // value: [],
+ // list: [],
+ // connected: false
+ // },
+ // {
+ // key: 'similarity',
+ // type: 'custom',
+ // label: '相似度',
+ // value: 0.8,
+ // min: 0,
+ // max: 1,
+ // step: 0.01,
+ // markList: [
+ // {
+ // label: '0',
+ // value: 0
+ // },
+ // {
+ // label: '1',
+ // value: 1
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'limit',
+ // type: 'custom',
+ // label: '单次搜索上限',
+ // description: '最多取 n 条记录作为本次问题引用',
+ // value: 5,
+ // min: 1,
+ // max: 20,
+ // step: 1,
+ // markList: [
+ // {
+ // label: '1',
+ // value: 1
+ // },
+ // {
+ // label: '20',
+ // value: 20
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: true
+ // },
+ // {
+ // key: 'userChatInput',
+ // type: 'target',
+ // label: '用户问题',
+ // connected: true
+ // }
+ // ],
+ // outputs: [
+ // {
+ // label: '源搜索数据',
+ // type: 'hidden',
+ // response: true,
+ // targets: []
+ // },
+ // {
+ // key: 'isEmpty',
+ // label: '搜索结果为空',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: 'w8av9y',
+ // key: 'switch'
+ // }
+ // ]
+ // },
+ // {
+ // key: 'quoteQA',
+ // label: '引用内容',
+ // description: '搜索结果为空时不返回',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: 'qbf8td',
+ // key: 'quoteQA'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 101.2612930583856,
+ // y: -31.342317423453437
+ // },
+ // moduleId: 'q9v14m'
+ // },
+ // {
+ // ...AnswerModule,
+ // inputs: [
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: true
+ // },
+ // {
+ // key: 'answerText',
+ // value: '对不起,我没有找到你的问题',
+ // type: 'input',
+ // label: '回复的内容',
+ // connected: false
+ // }
+ // ],
+ // outputs: [],
+ // position: {
+ // x: 673.6108151684664,
+ // y: -84.13355134221933
+ // },
+ // moduleId: 'w8av9y'
+ // },
+ // {
+ // ...UserGuideModule,
+ // inputs: [
+ // {
+ // key: 'welcomeText',
+ // type: 'input',
+ // label: '开场白',
+ // value:
+ // '你好,我是 Laf 助手,请问有什么可以帮助你的么?\n[laf 是什么?]\n[官网是多少?]',
+ // connected: false
+ // }
+ // ],
+ // outputs: [],
+ // position: {
+ // x: -338.02984747117785,
+ // y: 203.21398144017178
+ // },
+ // moduleId: 'v7lq0x'
+ // }
+ // ]
+ // },
+ // {
+ // id: 'chatGuide',
+ // avatar: '/imgs/module/userGuide.png',
+ // name: '对话引导 + 变量',
+ // intro: '可以在对话开始发送一段提示,或者让用户填写一些内容,作为本次对话的变量',
+ // modules: [
+ // {
+ // ...UserInputModule,
+ // inputs: [
+ // {
+ // key: 'userChatInput',
+ // type: 'systemInput',
+ // label: '用户问题',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'userChatInput',
+ // label: '用户问题',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '7pacf0',
+ // key: 'userChatInput'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 485.8457451202796,
+ // y: 1601.0352987954163
+ // },
+ // moduleId: '7z5g5h'
+ // },
+ // {
+ // ...ChatModule,
+ // inputs: [
+ // {
+ // key: 'model',
+ // type: 'custom',
+ // label: '对话模型',
+ // value: 'gpt-3.5-turbo-16k',
+ // list: [
+ // {
+ // label: 'FastAI-4k',
+ // value: 'gpt-3.5-turbo'
+ // },
+ // {
+ // label: 'FastAI-16k',
+ // value: 'gpt-3.5-turbo-16k'
+ // },
+ // {
+ // label: 'FastAI-Plus',
+ // value: 'gpt-4'
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'temperature',
+ // type: 'custom',
+ // label: '温度',
+ // value: 0,
+ // min: 0,
+ // max: 10,
+ // step: 1,
+ // markList: [
+ // {
+ // label: '严谨',
+ // value: 0
+ // },
+ // {
+ // label: '发散',
+ // value: 10
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'maxToken',
+ // type: 'custom',
+ // label: '回复上限',
+ // value: 8000,
+ // min: 100,
+ // max: 16000,
+ // step: 50,
+ // markList: [
+ // {
+ // label: '0',
+ // value: 0
+ // },
+ // {
+ // label: '16000',
+ // value: 16000
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'systemPrompt',
+ // type: 'textarea',
+ // label: '系统提示词',
+ // description:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // placeholder:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // value: '',
+ // connected: false
+ // },
+ // {
+ // key: 'limitPrompt',
+ // type: 'textarea',
+ // label: '限定词',
+ // description:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // placeholder:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // value: '将我发送的任何内容,直接翻译成{{language}}',
+ // connected: false
+ // },
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: false
+ // },
+ // {
+ // key: 'quoteQA',
+ // type: 'target',
+ // label: '引用内容',
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'target',
+ // label: '聊天记录',
+ // connected: true
+ // },
+ // {
+ // key: 'userChatInput',
+ // type: 'target',
+ // label: '用户问题',
+ // connected: true
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'answerText',
+ // label: '模型回复',
+ // description: '直接响应,无需配置',
+ // type: 'hidden',
+ // targets: []
+ // }
+ // ],
+ // position: {
+ // x: 981.9682828103937,
+ // y: 890.014595014464
+ // },
+ // moduleId: '7pacf0'
+ // },
+ // {
+ // ...HistoryModule,
+ // inputs: [
+ // {
+ // key: 'maxContext',
+ // type: 'numberInput',
+ // label: '最长记录数',
+ // value: 10,
+ // min: 0,
+ // max: 50,
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'hidden',
+ // label: '聊天记录',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'history',
+ // label: '聊天记录',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '7pacf0',
+ // key: 'history'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 446.2698477029736,
+ // y: 1281.1006139718102
+ // },
+ // moduleId: 'xj0c9p'
+ // },
+ // {
+ // ...VariableModule,
+ // inputs: [
+ // {
+ // key: 'variables',
+ // type: 'systemInput',
+ // label: '变量输入',
+ // value: [
+ // {
+ // id: 'z3bs2f',
+ // key: 'language',
+ // label: '目标语言',
+ // type: 'select',
+ // required: true,
+ // maxLen: 50,
+ // enums: [
+ // {
+ // value: '英语'
+ // },
+ // {
+ // value: '法语'
+ // },
+ // {
+ // value: '日语'
+ // }
+ // ]
+ // }
+ // ],
+ // connected: false
+ // }
+ // ],
+ // outputs: [],
+ // position: {
+ // x: 513.9049244392417,
+ // y: 996.8739106932076
+ // },
+ // moduleId: '7blchb'
+ // },
+ // {
+ // ...UserGuideModule,
+ // inputs: [
+ // {
+ // key: 'welcomeText',
+ // type: 'input',
+ // label: '开场白',
+ // value: '你好,我是翻译助手,可以帮你翻译任何语言。请告诉我,你需要翻译成什么语言?',
+ // connected: false
+ // }
+ // ],
+ // outputs: [],
+ // position: {
+ // x: 173.17995039750167,
+ // y: 982.945778706804
+ // },
+ // moduleId: 'w35iml'
+ // }
+ // ]
+ // },
+ // {
+ // id: 'CQ',
+ // avatar: '/imgs/module/cq.png',
+ // name: '问题分类 + 知识库',
+ // intro: '先对用户的问题进行分类,再根据不同类型问题,执行不同的操作',
+ // modules: [
+ // {
+ // ...UserInputModule,
+ // inputs: [
+ // {
+ // key: 'userChatInput',
+ // type: 'systemInput',
+ // label: '用户问题',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'userChatInput',
+ // label: '用户问题',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '3n49vn',
+ // key: 'userChatInput'
+ // },
+ // {
+ // moduleId: 's7qnhf',
+ // key: 'userChatInput'
+ // },
+ // {
+ // moduleId: '15c9bv',
+ // key: 'userChatInput'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: -216.08819066976912,
+ // y: 585.9302721518841
+ // },
+ // moduleId: 'xzj0oo'
+ // },
+ // {
+ // ...HistoryModule,
+ // inputs: [
+ // {
+ // key: 'maxContext',
+ // type: 'numberInput',
+ // label: '最长记录数',
+ // value: 10,
+ // min: 0,
+ // max: 50,
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'hidden',
+ // label: '聊天记录',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'history',
+ // label: '聊天记录',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '3n49vn',
+ // key: 'history'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 1146.0216647621794,
+ // y: 236.92269104756855
+ // },
+ // moduleId: 'hh6of9'
+ // },
+ // {
+ // ...ChatModule,
+ // inputs: [
+ // {
+ // key: 'model',
+ // type: 'select',
+ // label: '对话模型',
+ // value: 'gpt-3.5-turbo',
+ // list: [
+ // {
+ // label: 'FastAI-16k',
+ // value: 'gpt-3.5-turbo-16k'
+ // },
+ // {
+ // label: 'FastAI-4k',
+ // value: 'gpt-3.5-turbo'
+ // },
+ // {
+ // label: 'FastAI-Plus-8k',
+ // value: 'gpt-4'
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'temperature',
+ // type: 'custom',
+ // label: '温度',
+ // value: 0,
+ // min: 0,
+ // max: 10,
+ // step: 1,
+ // markList: [
+ // {
+ // label: '严谨',
+ // value: 0
+ // },
+ // {
+ // label: '发散',
+ // value: 10
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'maxToken',
+ // type: 'custom',
+ // label: '回复上限',
+ // value: 3000,
+ // min: 100,
+ // max: 4000,
+ // step: 50,
+ // markList: [
+ // {
+ // label: '0',
+ // value: 0
+ // },
+ // {
+ // label: '4000',
+ // value: 4000
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'systemPrompt',
+ // type: 'textarea',
+ // label: '系统提示词',
+ // description:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // placeholder:
+ // '模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
+ // value: '你是 Laf 助手,可以回答 Laf 相关问题。',
+ // connected: false
+ // },
+ // {
+ // key: 'limitPrompt',
+ // type: 'textarea',
+ // label: '限定词',
+ // description:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // placeholder:
+ // '限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
+ // value: '知识库是 Laf 的内容,参考知识库回答问题。',
+ // connected: false
+ // },
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: false
+ // },
+ // {
+ // key: 'quoteQA',
+ // type: 'target',
+ // label: '引用内容',
+ // connected: true
+ // },
+ // {
+ // key: 'history',
+ // type: 'target',
+ // label: '聊天记录',
+ // connected: true
+ // },
+ // {
+ // key: 'userChatInput',
+ // type: 'target',
+ // label: '用户问题',
+ // connected: true
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'answerText',
+ // label: '模型回复',
+ // description: '直接响应,无需配置',
+ // type: 'hidden',
+ // targets: []
+ // }
+ // ],
+ // position: {
+ // x: 1494.4843114348841,
+ // y: -13.57201521210618
+ // },
+ // moduleId: '3n49vn'
+ // },
+ // {
+ // ...KBSearchModule,
+ // inputs: [
+ // {
+ // key: 'kbList',
+ // type: 'custom',
+ // label: '关联的知识库',
+ // value: [],
+ // list: [],
+ // connected: false
+ // },
+ // {
+ // key: 'similarity',
+ // type: 'custom',
+ // label: '相似度',
+ // value: 0.8,
+ // min: 0,
+ // max: 1,
+ // step: 0.01,
+ // markList: [
+ // {
+ // label: '0',
+ // value: 0
+ // },
+ // {
+ // label: '1',
+ // value: 1
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'limit',
+ // type: 'custom',
+ // label: '单次搜索上限',
+ // description: '最多取 n 条记录作为本次问题引用',
+ // value: 5,
+ // min: 1,
+ // max: 20,
+ // step: 1,
+ // markList: [
+ // {
+ // label: '1',
+ // value: 1
+ // },
+ // {
+ // label: '20',
+ // value: 20
+ // }
+ // ],
+ // connected: false
+ // },
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: true
+ // },
+ // {
+ // key: 'userChatInput',
+ // type: 'target',
+ // label: '用户问题',
+ // connected: true
+ // }
+ // ],
+ // outputs: [
+ // {
+ // label: '源搜索数据',
+ // type: 'hidden',
+ // response: true,
+ // targets: []
+ // },
+ // {
+ // key: 'isEmpty',
+ // label: '搜索结果为空',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: 'phwr0u',
+ // key: 'switch'
+ // }
+ // ]
+ // },
+ // {
+ // key: 'quoteQA',
+ // label: '引用内容',
+ // description: '搜索结果为空时不返回',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '3n49vn',
+ // key: 'quoteQA'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 690.1930900957847,
+ // y: 102.10119978743109
+ // },
+ // moduleId: 's7qnhf'
+ // },
+ // {
+ // ...HistoryModule,
+ // inputs: [
+ // {
+ // key: 'maxContext',
+ // type: 'numberInput',
+ // label: '最长记录数',
+ // value: 2,
+ // min: 0,
+ // max: 50,
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'hidden',
+ // label: '聊天记录',
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'history',
+ // label: '聊天记录',
+ // type: 'source',
+ // targets: [
+ // {
+ // moduleId: '15c9bv',
+ // key: 'history'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: -274.2362185453961,
+ // y: 152.19755525696058
+ // },
+ // moduleId: 'qiwrjt'
+ // },
+ // {
+ // ...AnswerModule,
+ // inputs: [
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: true
+ // },
+ // {
+ // key: 'answerText',
+ // value: '你好,我是 Laf 助手,有什么可以帮助你的么?',
+ // type: 'input',
+ // label: '回复的内容',
+ // connected: false
+ // }
+ // ],
+ // outputs: [],
+ // position: {
+ // x: 686.1260929408212,
+ // y: -142.16731465682332
+ // },
+ // moduleId: 'l4e36k'
+ // },
+ // {
+ // ...AnswerModule,
+ // inputs: [
+ // {
+ // key: 'switch',
+ // type: 'target',
+ // label: '触发器',
+ // connected: true
+ // },
+ // {
+ // key: 'answerText',
+ // value: '对不起,我无法回答你的问题,请问有什么关于 Laf 的问题么?',
+ // type: 'input',
+ // label: '回复的内容',
+ // connected: false
+ // }
+ // ],
+ // outputs: [],
+ // position: {
+ // x: 1469.3636235179692,
+ // y: 937.5555811306511
+ // },
+ // moduleId: 'phwr0u'
+ // },
+ // {
+ // ...ClassifyQuestionModule,
+ // inputs: [
+ // {
+ // key: 'systemPrompt',
+ // type: 'textarea',
+ // label: '系统提示词',
+ // description:
+ // '你可以添加一些特定内容的介绍,从而更好的识别用户的问题类型。这个内容通常是给模型介绍一个它不知道的内容。',
+ // placeholder: '例如: \n1. Laf 是一个云函数开发平台……\n2. Sealos 是一个集群操作系统',
+ // value:
+ // ' laf 是什么\nlaf 是云开发平台,可以快速的开发应用\nlaf 是一个开源的 BaaS 开发平台(Backend as a Service)\nlaf 是一个开箱即用的 serverless 开发平台\nlaf 是一个集「函数计算」、「数据库」、「对象存储」等于一身的一站式开发平台\nlaf 可以是开源版的腾讯云开发、开源版的 Google Firebase、开源版的 UniCloud\nlaf 让每个开发团队都可以随时拥有一个自己的云开发平台!',
+ // connected: false
+ // },
+ // {
+ // key: 'history',
+ // type: 'target',
+ // label: '聊天记录',
+ // connected: true
+ // },
+ // {
+ // key: 'userChatInput',
+ // type: 'target',
+ // label: '用户问题',
+ // connected: true
+ // },
+ // {
+ // key: 'agents',
+ // type: 'custom',
+ // label: '',
+ // value: [
+ // {
+ // value: '打招呼、问候等',
+ // key: 'fasw'
+ // },
+ // {
+ // value: '关于 laf 云函数的问题',
+ // key: 'fqsw'
+ // },
+ // {
+ // value: '其他问题',
+ // key: 'q73b'
+ // }
+ // ],
+ // connected: false
+ // }
+ // ],
+ // outputs: [
+ // {
+ // key: 'fasw',
+ // label: '',
+ // type: 'hidden',
+ // targets: [
+ // {
+ // moduleId: 'l4e36k',
+ // key: 'switch'
+ // }
+ // ]
+ // },
+ // {
+ // key: 'fqsw',
+ // label: '',
+ // type: 'hidden',
+ // targets: [
+ // {
+ // moduleId: 's7qnhf',
+ // key: 'switch'
+ // }
+ // ]
+ // },
+ // {
+ // key: 'q73b',
+ // label: '',
+ // type: 'hidden',
+ // targets: [
+ // {
+ // moduleId: 'phwr0u',
+ // key: 'switch'
+ // }
+ // ]
+ // }
+ // ],
+ // position: {
+ // x: 154.9724540917009,
+ // y: -37.48714632270105
+ // },
+ // moduleId: '15c9bv'
+ // }
+ // ]
+ // }
];
diff --git a/client/src/constants/flow/index.ts b/client/src/constants/flow/index.ts
index e5d482735..947614d91 100644
--- a/client/src/constants/flow/index.ts
+++ b/client/src/constants/flow/index.ts
@@ -19,9 +19,10 @@ export enum FlowOutputItemTypeEnum {
}
export enum FlowModuleTypeEnum {
+ empty = 'empty',
variable = 'variable',
userGuide = 'userGuide',
- questionInputNode = 'questionInput',
+ questionInput = 'questionInput',
historyNode = 'historyNode',
chatNode = 'chatNode',
kbSearchNode = 'kbSearchNode',
@@ -30,6 +31,11 @@ export enum FlowModuleTypeEnum {
classifyQuestion = 'classifyQuestion'
}
+export const initModuleType: Record = {
+ [FlowModuleTypeEnum.historyNode]: true,
+ [FlowModuleTypeEnum.questionInput]: true
+};
+
export const edgeOptions = {
style: {
strokeWidth: 1,
diff --git a/client/src/pages/api/app/modules/init/history.tsx b/client/src/pages/api/app/modules/init/history.tsx
deleted file mode 100644
index a6fd9ef92..000000000
--- a/client/src/pages/api/app/modules/init/history.tsx
+++ /dev/null
@@ -1,20 +0,0 @@
-// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
-import { SystemInputEnum } from '@/constants/app';
-import { ChatItemType } from '@/types/chat';
-
-export type Props = {
- maxContext: number;
- [SystemInputEnum.history]: ChatItemType[];
-};
-
-export default async function handler(req: NextApiRequest, res: NextApiResponse) {
- const { maxContext = 5, history } = req.body as Props;
-
- jsonRes(res, {
- data: {
- history: history.slice(-maxContext)
- }
- });
-}
diff --git a/client/src/pages/api/app/modules/init/userChatInput.tsx b/client/src/pages/api/app/modules/init/userChatInput.tsx
deleted file mode 100644
index 33de8cb19..000000000
--- a/client/src/pages/api/app/modules/init/userChatInput.tsx
+++ /dev/null
@@ -1,17 +0,0 @@
-// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
-import { SystemInputEnum } from '@/constants/app';
-
-export type Props = {
- [SystemInputEnum.userChatInput]: string;
-};
-
-export default async function handler(req: NextApiRequest, res: NextApiResponse) {
- const { userChatInput } = req.body as Props;
- jsonRes(res, {
- data: {
- userChatInput
- }
- });
-}
diff --git a/client/src/pages/api/app/modules/kb/search.ts b/client/src/pages/api/app/modules/kb/search.ts
deleted file mode 100644
index 984bc6a37..000000000
--- a/client/src/pages/api/app/modules/kb/search.ts
+++ /dev/null
@@ -1,137 +0,0 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
-import { PgClient } from '@/service/pg';
-import { withNextCors } from '@/service/utils/tools';
-import type { ChatItemType } from '@/types/chat';
-import { ChatRoleEnum, rawSearchKey, responseDataKey } from '@/constants/chat';
-import { modelToolMap } from '@/utils/plugin';
-import { getVector } from '@/pages/api/openapi/plugin/vector';
-import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
-import { getModel } from '@/service/utils/data';
-import { authUser } from '@/service/utils/auth';
-import type { SelectedKbType } from '@/types/plugin';
-
-export type QuoteItemType = {
- kb_id: string;
- id: string;
- q: string;
- a: string;
- source?: string;
-};
-type Props = {
- kbList: SelectedKbType;
- history: ChatItemType[];
- similarity: number;
- limit: number;
- maxToken: number;
- userChatInput: string;
- stream?: boolean;
- billId?: string;
-};
-type Response = {
- [responseDataKey]: {
- [rawSearchKey]: QuoteItemType[];
- };
- isEmpty?: boolean;
- quotePrompt?: string;
-};
-
-export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
- try {
- await authUser({ req, authRoot: true });
-
- const { kbList = [], userChatInput } = req.body as Props;
-
- if (!userChatInput) {
- throw new Error('用户输入为空');
- }
-
- if (!Array.isArray(kbList) || kbList.length === 0) {
- throw new Error('没有选择知识库');
- }
-
- const result = await kbSearch({
- ...req.body,
- kbList,
- userChatInput
- });
-
- jsonRes(res, {
- data: result
- });
- } catch (err) {
- console.log(err);
- jsonRes(res, {
- code: 500,
- error: err
- });
- }
-});
-
-export async function kbSearch({
- kbList = [],
- history = [],
- similarity = 0.8,
- limit = 5,
- maxToken = 2500,
- userChatInput,
- billId
-}: Props): Promise {
- if (kbList.length === 0) {
- return Promise.reject('没有选择知识库');
- }
-
- // get vector
- const vectorModel = global.vectorModels[0].model;
- const { vectors, tokenLen } = await getVector({
- model: vectorModel,
- input: [userChatInput]
- });
-
- // search kb
- const [res]: any = await Promise.all([
- PgClient.query(
- `BEGIN;
- SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
- select kb_id,id,q,a,source from modelData where kb_id IN (${kbList
- .map((item) => `'${item.kbId}'`)
- .join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
- vectors[0]
- }]' limit ${limit};
- COMMIT;`
- ),
- pushTaskBillListItem({
- billId,
- moduleName: 'Vector Generate',
- amount: countModelPrice({ model: vectorModel, tokens: tokenLen }),
- model: getModel(vectorModel)?.name,
- tokenLen
- })
- ]);
-
- const searchRes: QuoteItemType[] = res?.[2]?.rows || [];
-
- // filter part quote by maxToken
- const sliceResult = modelToolMap
- .tokenSlice({
- maxToken,
- messages: searchRes.map((item, i) => ({
- obj: ChatRoleEnum.System,
- value: `${i + 1}: [${item.q}\n${item.a}]`
- }))
- })
- .map((item) => item.value)
- .join('\n')
- .trim();
-
- // slice filterSearch
- const rawSearch = searchRes.slice(0, sliceResult.length);
-
- return {
- isEmpty: rawSearch.length === 0 ? true : undefined,
- quotePrompt: sliceResult ? `知识库:\n${sliceResult}` : undefined,
- responseData: {
- rawSearch
- }
- };
-}
diff --git a/client/src/pages/api/chat/chatTest.ts b/client/src/pages/api/chat/chatTest.ts
index 5ebe3cb4f..5ad98951a 100644
--- a/client/src/pages/api/chat/chatTest.ts
+++ b/client/src/pages/api/chat/chatTest.ts
@@ -8,7 +8,7 @@ import { type ChatCompletionRequestMessage } from 'openai';
import { AppModuleItemType } from '@/types/app';
import { dispatchModules } from '../openapi/v1/chat/completions';
import { gptMessage2ChatType } from '@/utils/adapt';
-import { createTaskBill, delTaskBill, finishTaskBill } from '@/service/events/pushBill';
+import { pushTaskBill } from '@/service/events/pushBill';
import { BillSourceEnum } from '@/constants/user';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
@@ -31,7 +31,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
let { modules = [], history = [], prompt, variables = {}, appName, appId } = req.body as Props;
- let billId = '';
try {
if (!history || !modules || !prompt) {
throw new Error('Prams Error');
@@ -45,13 +44,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
/* user auth */
const { userId } = await authUser({ req });
- billId = await createTaskBill({
- userId,
- appName,
- appId,
- source: BillSourceEnum.fastgpt
- });
-
/* start process */
const { responseData } = await dispatchModules({
res,
@@ -61,8 +53,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
history: gptMessage2ChatType(history),
userChatInput: prompt
},
- stream: true,
- billId
+ stream: true
});
sseResponse({
@@ -77,12 +68,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
res.end();
- // bill
- finishTaskBill({
- billId
+ pushTaskBill({
+ appName,
+ appId,
+ userId,
+ source: BillSourceEnum.fastgpt,
+ response: responseData
});
} catch (err: any) {
- delTaskBill(billId);
res.status(500);
sseErrRes(res, err);
res.end();
diff --git a/client/src/pages/api/openapi/v1/chat/completions.ts b/client/src/pages/api/openapi/v1/chat/completions.ts
index 8a4c73ba7..b1f5e5136 100644
--- a/client/src/pages/api/openapi/v1/chat/completions.ts
+++ b/client/src/pages/api/openapi/v1/chat/completions.ts
@@ -2,21 +2,29 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authApp, authShareChat } from '@/service/utils/auth';
import { sseErrRes, jsonRes } from '@/service/response';
-import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
+import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
+import {
+ dispatchHistory,
+ dispatchChatInput,
+ dispatchChatCompletion,
+ dispatchKBSearch,
+ dispatchAnswer,
+ dispatchClassifyQuestion
+} from '@/service/moduleDispatch';
import type { CreateChatCompletionRequest } from 'openai';
-import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
+import { gptMessage2ChatType } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/pages/api/chat/saveChat';
import { sseResponse } from '@/service/utils/tools';
import { type ChatCompletionRequestMessage } from 'openai';
-import { TaskResponseKeyEnum, AppModuleItemTypeEnum } from '@/constants/app';
+import { TaskResponseKeyEnum } from '@/constants/chat';
+import { FlowModuleTypeEnum, initModuleType } from '@/constants/flow';
import { Types } from 'mongoose';
-import { moduleFetch } from '@/service/api/request';
import { AppModuleItemType, RunningModuleItemType } from '@/types/app';
-import { FlowInputItemTypeEnum } from '@/constants/flow';
-import { finishTaskBill, createTaskBill, delTaskBill } from '@/service/events/pushBill';
+import { pushTaskBill } from '@/service/events/pushBill';
import { BillSourceEnum } from '@/constants/user';
+import { ChatHistoryItemResType } from '@/types/chat';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
type FastGptWebChatProps = {
@@ -49,8 +57,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
let { chatId, appId, shareId, stream = false, messages = [], variables = {} } = req.body as Props;
- let billId = '';
-
try {
if (!messages) {
throw new Error('Prams Error');
@@ -105,13 +111,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
res.setHeader('newChatId', String(newChatId));
}
- billId = await createTaskBill({
- userId,
- appName: app.name,
- appId,
- source: authType === 'apikey' ? BillSourceEnum.api : BillSourceEnum.fastgpt
- });
-
/* start process */
const { responseData, answerText } = await dispatchModules({
res,
@@ -121,9 +120,9 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
history: prompts,
userChatInput: prompt.value
},
- stream,
- billId
+ stream
});
+ console.log(responseData, '===', answerText);
if (!answerText) {
throw new Error('回复内容为空,可能模块编排出现问题');
@@ -169,10 +168,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
res.end();
} else {
res.json({
- data: {
- newChatId,
- ...responseData
- },
+ responseData,
id: chatId || '',
model: '',
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
@@ -186,14 +182,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
});
}
- // bill
- finishTaskBill({
- billId,
- shareId
+ pushTaskBill({
+ appName: app.name,
+ appId,
+ userId,
+ source: authType === 'apikey' ? BillSourceEnum.api : BillSourceEnum.fastgpt,
+ response: responseData
});
} catch (err: any) {
- delTaskBill(billId);
-
if (stream) {
sseErrRes(res, err);
res.end();
@@ -211,35 +207,29 @@ export async function dispatchModules({
modules,
params = {},
variables = {},
- stream = false,
- billId
+ stream = false
}: {
res: NextApiResponse;
modules: AppModuleItemType[];
params?: Record;
variables?: Record;
- billId: string;
stream?: boolean;
}) {
const runningModules = loadModules(modules, variables);
// let storeData: Record = {}; // after module used
- let chatResponse: Record = {}; // response request and save to database
- let answerText = ''; // AI answer
+ let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
+ let chatAnswerText = ''; // AI answer
function pushStore({
- answer,
- responseData = {}
+ answerText = '',
+ responseData
}: {
- answer?: string;
- responseData?: Record;
+ answerText?: string;
+ responseData?: ChatHistoryItemResType;
}) {
- chatResponse = {
- ...chatResponse,
- ...responseData
- };
-
- answerText += answer;
+ responseData && chatResponse.push(responseData);
+ chatAnswerText += answerText;
}
function moduleInput(
module: RunningModuleItemType,
@@ -292,63 +282,45 @@ export async function dispatchModules({
}
async function moduleRun(module: RunningModuleItemType): Promise {
if (res.closed) return Promise.resolve();
- console.log('run=========', module.type, module.url);
+ console.log('run=========', module.flowType);
- // direct answer
- if (module.type === AppModuleItemTypeEnum.answer) {
- const text =
- module.inputs.find((item) => item.key === TaskResponseKeyEnum.answerText)?.value || '';
- pushStore({
- answer: text
- });
- return StreamAnswer({
- res,
- stream,
- text: text
- });
- }
+ // get fetch params
+ const params: Record = {};
+ module.inputs.forEach((item: any) => {
+ params[item.key] = item.value;
+ });
+ const props: Record = {
+ res,
+ stream,
+ ...params
+ };
- if (module.type === AppModuleItemTypeEnum.switch) {
- return moduleOutput(module, switchResponse(module));
- }
-
- if (
- (module.type === AppModuleItemTypeEnum.http ||
- module.type === AppModuleItemTypeEnum.initInput) &&
- module.url
- ) {
- // get fetch params
- const params: Record = {};
- module.inputs.forEach((item: any) => {
- params[item.key] = item.value;
- });
- const data = {
- stream,
- billId,
- ...params
+ const dispatchRes = await (async () => {
+ const callbackMap: Record = {
+ [FlowModuleTypeEnum.historyNode]: dispatchHistory,
+ [FlowModuleTypeEnum.questionInput]: dispatchChatInput,
+ [FlowModuleTypeEnum.answerNode]: dispatchAnswer,
+ [FlowModuleTypeEnum.chatNode]: dispatchChatCompletion,
+ [FlowModuleTypeEnum.kbSearchNode]: dispatchKBSearch,
+ [FlowModuleTypeEnum.classifyQuestion]: dispatchClassifyQuestion
};
+ if (callbackMap[module.flowType]) {
+ return callbackMap[module.flowType](props);
+ }
+ return {};
+ })();
- // response data
- const fetchRes = await moduleFetch({
- res,
- url: module.url,
- data
- });
-
- return moduleOutput(module, fetchRes);
- }
+ return moduleOutput(module, dispatchRes);
}
// start process width initInput
- const initModules = runningModules.filter(
- (item) => item.type === AppModuleItemTypeEnum.initInput
- );
+ const initModules = runningModules.filter((item) => initModuleType[item.flowType]);
await Promise.all(initModules.map((module) => moduleInput(module, params)));
return {
- responseData: chatResponse,
- answerText
+ [TaskResponseKeyEnum.answerText]: chatAnswerText,
+ [TaskResponseKeyEnum.responseData]: chatResponse
};
}
@@ -359,10 +331,9 @@ function loadModules(
return modules.map((module) => {
return {
moduleId: module.moduleId,
- type: module.type,
- url: module.url,
+ flowType: module.flowType,
inputs: module.inputs
- .filter((item) => item.type !== FlowInputItemTypeEnum.target || item.connected) // filter unconnected target input
+ .filter((item) => item.connected) // filter unconnected target input
.map((item) => {
if (typeof item.value !== 'string') {
return {
@@ -385,38 +356,9 @@ function loadModules(
outputs: module.outputs.map((item) => ({
key: item.key,
answer: item.key === TaskResponseKeyEnum.answerText,
- response: item.response,
value: undefined,
targets: item.targets
}))
};
});
}
-function StreamAnswer({
- res,
- stream = false,
- text = ''
-}: {
- res: NextApiResponse;
- stream?: boolean;
- text?: string;
-}) {
- if (stream && text) {
- return sseResponse({
- res,
- event: sseResponseEventEnum.answer,
- data: textAdaptGptResponse({
- text: text.replace(/\\n/g, '\n')
- })
- });
- }
- return text;
-}
-function switchResponse(module: RunningModuleItemType) {
- const val = module?.inputs?.[0]?.value;
-
- if (val) {
- return { true: 1 };
- }
- return { false: 1 };
-}
diff --git a/client/src/pages/app/detail/components/Edit/components/Nodes/NodeChat.tsx b/client/src/pages/app/detail/components/Edit/components/Nodes/NodeChat.tsx
index 214415e55..5c7c1c32f 100644
--- a/client/src/pages/app/detail/components/Edit/components/Nodes/NodeChat.tsx
+++ b/client/src/pages/app/detail/components/Edit/components/Nodes/NodeChat.tsx
@@ -43,7 +43,7 @@ const NodeChat = ({
return (
{
onChangeNode({
diff --git a/client/src/pages/app/detail/components/Edit/components/Nodes/NodeEmpty.tsx b/client/src/pages/app/detail/components/Edit/components/Nodes/NodeEmpty.tsx
new file mode 100644
index 000000000..41951bbf7
--- /dev/null
+++ b/client/src/pages/app/detail/components/Edit/components/Nodes/NodeEmpty.tsx
@@ -0,0 +1,9 @@
+import React from 'react';
+import { NodeProps } from 'reactflow';
+import NodeCard from '../modules/NodeCard';
+import { FlowModuleItemType } from '@/types/flow';
+
+const NodeAnswer = ({ data: { ...props } }: NodeProps) => {
+ return ;
+};
+export default React.memo(NodeAnswer);
diff --git a/client/src/pages/app/detail/components/Edit/components/TemplateList.tsx b/client/src/pages/app/detail/components/Edit/components/TemplateList.tsx
index 2d6629e6d..84c9bae8e 100644
--- a/client/src/pages/app/detail/components/Edit/components/TemplateList.tsx
+++ b/client/src/pages/app/detail/components/Edit/components/TemplateList.tsx
@@ -1,7 +1,7 @@
import React, { useRef } from 'react';
import { Box, Flex, useOutsideClick } from '@chakra-ui/react';
import { ModuleTemplates } from '@/constants/flow/ModuleTemplate';
-import type { AppModuleTemplateItemType } from '@/types/app';
+import type { FlowModuleItemType } from '@/types/app';
import type { XYPosition } from 'reactflow';
import { useGlobalStore } from '@/store/global';
import Avatar from '@/components/Avatar';
@@ -12,7 +12,7 @@ const ModuleStoreList = ({
onClose
}: {
isOpen: boolean;
- onAddNode: (e: { template: AppModuleTemplateItemType; position: XYPosition }) => void;
+ onAddNode: (e: { template: FlowModuleItemType; position: XYPosition }) => void;
onClose: () => void;
}) => {
const { isPc } = useGlobalStore();
diff --git a/client/src/pages/app/detail/components/Edit/components/modules/NodeCard.tsx b/client/src/pages/app/detail/components/Edit/components/modules/NodeCard.tsx
index cf298fd00..92c64b18c 100644
--- a/client/src/pages/app/detail/components/Edit/components/modules/NodeCard.tsx
+++ b/client/src/pages/app/detail/components/Edit/components/modules/NodeCard.tsx
@@ -7,7 +7,7 @@ import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
type Props = {
- children: React.ReactNode | React.ReactNode[] | string;
+ children?: React.ReactNode | React.ReactNode[] | string;
logo: string;
name: string;
description?: string;
diff --git a/client/src/pages/app/detail/components/Edit/index.tsx b/client/src/pages/app/detail/components/Edit/index.tsx
index bcbb76312..a31c4db00 100644
--- a/client/src/pages/app/detail/components/Edit/index.tsx
+++ b/client/src/pages/app/detail/components/Edit/index.tsx
@@ -12,13 +12,20 @@ import ReactFlow, {
} from 'reactflow';
import { Box, Flex, IconButton, useTheme, useDisclosure } from '@chakra-ui/react';
import { SmallCloseIcon } from '@chakra-ui/icons';
-import { edgeOptions, connectionLineStyle, FlowModuleTypeEnum } from '@/constants/flow';
+import {
+ edgeOptions,
+ connectionLineStyle,
+ FlowModuleTypeEnum,
+ FlowInputItemTypeEnum
+} from '@/constants/flow';
import { appModule2FlowNode, appModule2FlowEdge } from '@/utils/adapt';
import {
FlowModuleItemType,
+ FlowModuleTemplateType,
FlowOutputTargetItemType,
type FlowModuleItemChangeProps
} from '@/types/flow';
+import { AppModuleItemType } from '@/types/app';
import { customAlphabet } from 'nanoid';
import { putAppById } from '@/api/app';
import { useRequest } from '@/hooks/useRequest';
@@ -61,20 +68,20 @@ const NodeUserGuide = dynamic(() => import('./components/Nodes/NodeUserGuide'),
import 'reactflow/dist/style.css';
import styles from './index.module.scss';
-import { AppModuleItemType, AppModuleTemplateItemType } from '@/types/app';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
const nodeTypes = {
[FlowModuleTypeEnum.userGuide]: NodeUserGuide,
[FlowModuleTypeEnum.variable]: NodeVariable,
- [FlowModuleTypeEnum.questionInputNode]: NodeQuestionInput,
+ [FlowModuleTypeEnum.questionInput]: NodeQuestionInput,
[FlowModuleTypeEnum.historyNode]: NodeHistory,
[FlowModuleTypeEnum.chatNode]: NodeChat,
[FlowModuleTypeEnum.kbSearchNode]: NodeKbSearch,
[FlowModuleTypeEnum.tfSwitchNode]: NodeTFSwitch,
[FlowModuleTypeEnum.answerNode]: NodeAnswer,
[FlowModuleTypeEnum.classifyQuestion]: NodeCQNode
+ // [FlowModuleTypeEnum.empty]: EmptyModule
};
const edgeTypes = {
buttonedge: ButtonEdge
@@ -147,7 +154,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
[setEdges, setNodes]
);
const onAddNode = useCallback(
- ({ template, position }: { template: AppModuleTemplateItemType; position: XYPosition }) => {
+ ({ template, position }: { template: FlowModuleItemType; position: XYPosition }) => {
if (!reactFlowWrapper.current) return;
const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect();
const mouseX = (position.x - reactFlowBounds.left - x) / zoom - 100;
@@ -158,8 +165,8 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
appModule2FlowNode({
item: {
...template,
- position: { x: mouseX, y: mouseY },
- moduleId: nanoid()
+ moduleId: nanoid(),
+ position: { x: mouseX, y: mouseY }
},
onChangeNode,
onDelNode
@@ -169,14 +176,18 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
},
[onChangeNode, onDelNode, setNodes, x, y, zoom]
);
- const flow2Modules = useCallback(() => {
+ const flow2AppModules = useCallback(() => {
const modules: AppModuleItemType[] = nodes.map((item) => ({
- ...item.data,
+ moduleId: item.data.moduleId,
position: item.position,
- onChangeNode: undefined,
- onDelNode: undefined,
- outputs: item.data.outputs.map((output) => ({
- ...output,
+ flowType: item.data.flowType,
+ inputs: item.data.inputs.map((item) => ({
+ key: item.key,
+ value: item.value,
+ connected: item.type !== FlowInputItemTypeEnum.target
+ })),
+ outputs: item.data.outputs.map((item) => ({
+ key: item.key,
targets: [] as FlowOutputTargetItemType[]
}))
}));
@@ -184,9 +195,11 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
// update inputs and outputs
modules.forEach((module) => {
module.inputs.forEach((input) => {
- input.connected = !!edges.find(
- (edge) => edge.target === module.moduleId && edge.targetHandle === input.key
- );
+ input.connected =
+ input.connected ||
+ !!edges.find(
+ (edge) => edge.target === module.moduleId && edge.targetHandle === input.key
+ );
});
module.outputs.forEach((output) => {
output.targets = edges
@@ -233,7 +246,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
const { mutate: onclickSave, isLoading } = useRequest({
mutationFn: () => {
return putAppById(app._id, {
- modules: flow2Modules()
+ modules: flow2AppModules()
});
},
successToast: '保存配置成功',
@@ -270,6 +283,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
useEffect(() => {
initData(JSON.parse(JSON.stringify(app)));
}, [app, initData]);
+ console.log(flow2AppModules());
return (
<>
@@ -340,7 +354,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
aria-label={'save'}
variant={'base'}
onClick={() => {
- setTestModules(flow2Modules());
+ setTestModules(flow2AppModules());
}}
/>
diff --git a/client/src/service/api/axios.ts b/client/src/service/api/axios.ts
deleted file mode 100644
index 764f8c607..000000000
--- a/client/src/service/api/axios.ts
+++ /dev/null
@@ -1,114 +0,0 @@
-import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
-
-interface ConfigType {
- headers?: { [key: string]: string };
- hold?: boolean;
- timeout?: number;
-}
-interface ResponseDataType {
- code: number;
- message: string;
- data: any;
-}
-
-/**
- * 请求开始
- */
-function requestStart(config: InternalAxiosRequestConfig): InternalAxiosRequestConfig {
- if (config.headers) {
- // config.headers.Authorization = getToken();
- }
-
- return config;
-}
-
-/**
- * 请求成功,检查请求头
- */
-function responseSuccess(response: AxiosResponse) {
- return response;
-}
-/**
- * 响应数据检查
- */
-function checkRes(data: ResponseDataType) {
- if (data === undefined) {
- console.log('error->', data, 'data is empty');
- return Promise.reject('服务器异常');
- } else if (data.code < 200 || data.code >= 400) {
- return Promise.reject(data);
- }
- return data.data;
-}
-
-/**
- * 响应错误
- */
-function responseError(err: any) {
- console.log('error->', '请求错误', err);
-
- if (!err) {
- return Promise.reject({ message: '未知错误' });
- }
- if (typeof err === 'string') {
- return Promise.reject({ message: err });
- }
- return Promise.reject(err);
-}
-
-/* 创建请求实例 */
-const instance = axios.create({
- timeout: 60000, // 超时时间
- headers: {
- 'content-type': 'application/json'
- }
-});
-
-/* 请求拦截 */
-instance.interceptors.request.use(requestStart, (err) => Promise.reject(err));
-/* 响应拦截 */
-instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err));
-
-function request(url: string, data: any, config: ConfigType, method: Method): any {
- /* 去空 */
- for (const key in data) {
- if (data[key] === null || data[key] === undefined) {
- delete data[key];
- }
- }
-
- return instance
- .request({
- baseURL: `http://localhost:${process.env.PORT || 3000}/api`,
- url,
- method,
- data: ['POST', 'PUT'].includes(method) ? data : null,
- params: !['POST', 'PUT'].includes(method) ? data : null,
- ...config // 用户自定义配置,可以覆盖前面的配置
- })
- .then((res) => checkRes(res.data))
- .catch((err) => responseError(err));
-}
-
-/**
- * api请求方式
- * @param {String} url
- * @param {Any} params
- * @param {Object} config
- * @returns
- */
-export function GET(url: string, params = {}, config: ConfigType = {}): Promise {
- return request(url, params, config, 'GET');
-}
-
-export function POST(url: string, data = {}, config: ConfigType = {}): Promise {
- return request(url, data, config, 'POST');
-}
-
-export function PUT(url: string, data = {}, config: ConfigType = {}): Promise {
- return request(url, data, config, 'PUT');
-}
-
-export function DELETE(url: string, data = {}, config: ConfigType = {}): Promise {
- return request(url, data, config, 'DELETE');
-}
diff --git a/client/src/service/api/request.ts b/client/src/service/api/request.ts
deleted file mode 100644
index feda9333d..000000000
--- a/client/src/service/api/request.ts
+++ /dev/null
@@ -1,115 +0,0 @@
-import { sseResponseEventEnum } from '@/constants/chat';
-import { getErrText } from '@/utils/tools';
-import { parseStreamChunk } from '@/utils/adapt';
-import { NextApiResponse } from 'next';
-import { sseResponse } from '../utils/tools';
-import { TaskResponseKeyEnum } from '@/constants/app';
-
-interface Props {
- res: NextApiResponse; // 用于流转发
- url: string;
- data: Record;
-}
-export const moduleFetch = ({ url, data, res }: Props) =>
- new Promise>(async (resolve, reject) => {
- try {
- const abortSignal = new AbortController();
- const baseUrl = `http://localhost:${process.env.PORT || 3000}/api`;
- const requestUrl = url.startsWith('/') ? `${baseUrl}${url}` : url;
- const response = await fetch(requestUrl, {
- method: 'POST',
- // @ts-ignore
- headers: {
- 'Content-Type': 'application/json',
- rootkey: process.env.ROOT_KEY
- },
- body: JSON.stringify(data),
- signal: abortSignal.signal
- });
-
- if (response.status >= 300 || response.status < 200) {
- const err = await response.json();
- return reject(err);
- }
-
- if (!response?.body) {
- throw new Error('Request Error');
- }
-
- const responseType = response.headers.get('content-type');
- if (responseType && responseType.includes('application/json')) {
- const jsonResponse = await response.json();
- return resolve(jsonResponse?.data || {});
- }
-
- const reader = response.body?.getReader();
-
- let chatResponse: Record = {
- [TaskResponseKeyEnum.answerText]: ''
- };
-
- const read = async () => {
- try {
- const { done, value } = await reader.read();
- if (done) {
- return resolve(chatResponse);
- } else if (res.closed) {
- resolve(chatResponse);
- abortSignal.abort();
- return;
- }
-
- const chunkResponse = parseStreamChunk(value);
-
- chunkResponse.forEach((item) => {
- // parse json data
- const data = (() => {
- try {
- return JSON.parse(item.data);
- } catch (error) {
- return {};
- }
- })();
- if (!res.closed && item.event === sseResponseEventEnum.moduleFetchResponse) {
- chatResponse = {
- ...chatResponse,
- ...data
- };
- } else if (
- !res.closed &&
- item.event === sseResponseEventEnum.answer &&
- data?.choices?.[0]?.delta
- ) {
- // save answer
- const answer: string = data?.choices?.[0].delta.content || '';
- if (answer) {
- chatResponse = {
- ...chatResponse,
- [TaskResponseKeyEnum.answerText]:
- chatResponse[TaskResponseKeyEnum.answerText] + answer
- };
- }
-
- sseResponse({
- res,
- event: sseResponseEventEnum.answer,
- data: JSON.stringify(data)
- });
- } else if (item.event === sseResponseEventEnum.error) {
- return reject(data);
- }
- });
- read();
- } catch (err: any) {
- if (err?.message === 'The operation was aborted.') {
- return;
- }
- reject(getErrText(err, '请求异常'));
- }
- };
- read();
- } catch (err: any) {
- console.log(err);
- reject(getErrText(err, '请求异常'));
- }
- });
diff --git a/client/src/service/events/pushBill.ts b/client/src/service/events/pushBill.ts
index e0f90b4ae..dc1d4f622 100644
--- a/client/src/service/events/pushBill.ts
+++ b/client/src/service/events/pushBill.ts
@@ -1,93 +1,54 @@
import { connectToDatabase, Bill, User, ShareChat } from '../mongo';
import { BillSourceEnum } from '@/constants/user';
import { getModel } from '../utils/data';
-import type { BillListItemType } from '@/types/mongoSchema';
+import { ChatHistoryItemResType } from '@/types/chat';
import { formatPrice } from '@/utils/user';
-export const createTaskBill = async ({
+export const pushTaskBill = async ({
appName,
appId,
userId,
- source
+ source,
+ shareId,
+ response
}: {
appName: string;
appId: string;
userId: string;
source: `${BillSourceEnum}`;
+ shareId?: string;
+ response: ChatHistoryItemResType[];
}) => {
- const res = await Bill.create({
- userId,
- appName,
- appId,
- total: 0,
- source,
- list: []
- });
- return String(res._id);
-};
+ const total = response.reduce((sum, item) => sum + item.price, 0);
-export const pushTaskBillListItem = async ({
- billId,
- moduleName,
- amount,
- model,
- tokenLen
-}: { billId?: string } & BillListItemType) => {
- if (!billId) return;
- try {
- await Bill.findByIdAndUpdate(billId, {
- $push: {
- list: {
- moduleName,
- amount,
- model,
- tokenLen
- }
- }
- });
- } catch (error) {}
-};
-export const finishTaskBill = async ({ billId, shareId }: { billId: string; shareId?: string }) => {
- try {
- // update bill
- const res = await Bill.findByIdAndUpdate(billId, [
- {
- $set: {
- total: {
- $sum: '$list.amount'
- },
- time: new Date()
- }
- }
- ]);
- if (!res) return;
- const total = res.list.reduce((sum, item) => sum + item.amount, 0) || 0;
-
- if (shareId) {
- updateShareChatBill({
- shareId,
- total
- });
- }
-
- console.log('finish bill:', formatPrice(total));
-
- // 账号扣费
- await User.findByIdAndUpdate(res.userId, {
+ await Promise.allSettled([
+ Bill.create({
+ userId,
+ appName,
+ appId,
+ total,
+ source,
+ list: response.map((item) => ({
+ moduleName: item.moduleName,
+ amount: item.price || 0,
+ model: item.model,
+ tokenLen: item.tokens
+ }))
+ }),
+ User.findByIdAndUpdate(userId, {
$inc: { balance: -total }
- });
- } catch (error) {
- console.log('Finish bill failed:', error);
- billId && Bill.findByIdAndDelete(billId);
- }
-};
+ }),
+ ...(shareId
+ ? [
+ updateShareChatBill({
+ shareId,
+ total
+ })
+ ]
+ : [])
+ ]);
-export const delTaskBill = async (billId?: string) => {
- if (!billId) return;
-
- try {
- await Bill.findByIdAndRemove(billId);
- } catch (error) {}
+ console.log('finish bill:', formatPrice(total));
};
export const updateShareChatBill = async ({
diff --git a/client/src/pages/api/app/modules/agent/classifyQuestion.ts b/client/src/service/moduleDispatch/agent/classifyQuestion.ts
similarity index 52%
rename from client/src/pages/api/app/modules/agent/classifyQuestion.ts
rename to client/src/service/moduleDispatch/agent/classifyQuestion.ts
index eae1fb4c8..1e6f3bfe2 100644
--- a/client/src/pages/api/app/modules/agent/classifyQuestion.ts
+++ b/client/src/service/moduleDispatch/agent/classifyQuestion.ts
@@ -1,58 +1,31 @@
-// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { ChatContextFilter } from '@/service/utils/chat/index';
-import type { ChatItemType } from '@/types/chat';
-import { ChatRoleEnum } from '@/constants/chat';
+import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
+import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
-import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
-import { getModel } from '@/service/utils/data';
-import { authUser } from '@/service/utils/auth';
+import { countModelPrice } from '@/service/events/pushBill';
-export type Props = {
+export type CQProps = {
systemPrompt?: string;
history?: ChatItemType[];
userChatInput: string;
agents: ClassifyQuestionAgentItemType[];
- billId?: string;
};
-export type Response = { history: ChatItemType[] };
+export type CQResponse = {
+ [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+ [key: string]: any;
+};
+const moduleName = 'Classify Question';
const agentModel = 'gpt-3.5-turbo';
const agentFunName = 'agent_user_question';
-
-export default async function handler(req: NextApiRequest, res: NextApiResponse) {
- try {
- await authUser({ req, authRoot: true });
- let { userChatInput } = req.body as Props;
-
- if (!userChatInput) {
- throw new Error('userChatInput is empty');
- }
-
- const response = await classifyQuestion(req.body);
-
- jsonRes(res, {
- data: response
- });
- } catch (err) {
- jsonRes(res, {
- code: 500,
- error: err
- });
- }
-}
+const maxTokens = 2000;
/* request openai chat */
-export async function classifyQuestion({
- agents,
- systemPrompt,
- history = [],
- userChatInput,
- billId
-}: Props) {
+export const dispatchClassifyQuestion = async (props: Record): Promise => {
+ const { agents, systemPrompt, history = [], userChatInput } = props as CQProps;
+
const messages: ChatItemType[] = [
...(systemPrompt
? [
@@ -62,16 +35,16 @@ export async function classifyQuestion({
}
]
: []),
+ ...history,
{
obj: ChatRoleEnum.Human,
value: userChatInput
}
];
const filterMessages = ChatContextFilter({
- // @ts-ignore
model: agentModel,
prompts: messages,
- maxTokens: 1500
+ maxTokens
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
@@ -112,27 +85,19 @@ export async function classifyQuestion({
throw new Error('');
}
- const totalTokens = response.data.usage?.total_tokens || 0;
+ const tokens = response.data.usage?.total_tokens || 0;
- await pushTaskBillListItem({
- billId,
- moduleName: 'Classify Question',
- amount: countModelPrice({ model: agentModel, tokens: totalTokens }),
- model: getModel(agentModel)?.name,
- tokenLen: totalTokens
- });
-
- console.log(agents.map((item) => `${item.value},返回: '${item.key}'`).join(';'), arg);
-
- const result = agents.find((item) => item.key === arg.type);
-
- if (result) {
- return {
- [arg.type]: 1
- };
- }
+ const result = agents.find((item) => item.key === arg.type) || agents[0];
return {
- [agents[0].key]: 1
+ [result.key]: 1,
+ [TaskResponseKeyEnum.responseData]: {
+ moduleName,
+ price: countModelPrice({ model: agentModel, tokens }),
+ model: agentModel,
+ tokens,
+ cqList: agents,
+ cqResult: result.value
+ }
};
-}
+};
diff --git a/client/src/pages/api/app/modules/agent/extract.ts b/client/src/service/moduleDispatch/agent/extract.ts
similarity index 100%
rename from client/src/pages/api/app/modules/agent/extract.ts
rename to client/src/service/moduleDispatch/agent/extract.ts
diff --git a/client/src/pages/api/app/modules/chat/gpt.ts b/client/src/service/moduleDispatch/chat/oneapi.ts
similarity index 67%
rename from client/src/pages/api/app/modules/chat/gpt.ts
rename to client/src/service/moduleDispatch/chat/oneapi.ts
index 2bd51806c..ed0cb2879 100644
--- a/client/src/pages/api/app/modules/chat/gpt.ts
+++ b/client/src/service/moduleDispatch/chat/oneapi.ts
@@ -1,89 +1,57 @@
-// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes, sseErrRes } from '@/service/response';
+import type { NextApiResponse } from 'next';
import { sseResponse } from '@/service/utils/tools';
import { OpenAiChatEnum } from '@/constants/model';
import { adaptChatItem_openAI, countOpenAIToken } from '@/utils/plugin/openai';
import { modelToolMap } from '@/utils/plugin';
import { ChatContextFilter } from '@/service/utils/chat/index';
-import type { ChatItemType } from '@/types/chat';
+import type { ChatItemType, QuoteItemType } from '@/types/chat';
+import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { parseStreamChunk, textAdaptGptResponse } from '@/utils/adapt';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
-import { TaskResponseKeyEnum } from '@/constants/app';
+import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
-import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
-import { authUser } from '@/service/utils/auth';
+import { countModelPrice } from '@/service/events/pushBill';
-export type Props = {
+export type ChatProps = {
+ res: NextApiResponse;
model: `${OpenAiChatEnum}`;
temperature?: number;
maxToken?: number;
history?: ChatItemType[];
userChatInput: string;
stream?: boolean;
- quotePrompt?: string;
+ quoteQA?: QuoteItemType[];
systemPrompt?: string;
limitPrompt?: string;
- billId?: string;
};
-export type Response = { [TaskResponseKeyEnum.answerText]: string; totalTokens: number };
+export type ChatResponse = {
+ [TaskResponseKeyEnum.answerText]: string;
+ [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+};
-export default async function handler(req: NextApiRequest, res: NextApiResponse) {
- let { model, stream } = req.body as Props;
- try {
- await authUser({ req, authRoot: true });
-
- const response = await chatCompletion({
- ...req.body,
- res,
- model
- });
-
- if (stream) {
- sseResponse({
- res,
- event: sseResponseEventEnum.moduleFetchResponse,
- data: JSON.stringify(response)
- });
- res.end();
- } else {
- jsonRes(res, {
- data: response
- });
- }
- } catch (err) {
- if (stream) {
- sseErrRes(res, err);
- res.end();
- } else {
- jsonRes(res, {
- code: 500,
- error: err
- });
- }
- }
-}
+const moduleName = 'AI Chat';
/* request openai chat */
-export async function chatCompletion({
- res,
- model,
- temperature = 0,
- maxToken = 4000,
- stream = false,
- history = [],
- quotePrompt = '',
- userChatInput,
- systemPrompt = '',
- limitPrompt = '',
- billId
-}: Props & { res: NextApiResponse }): Promise {
+export const dispatchChatCompletion = async (props: Record): Promise => {
+ let {
+ res,
+ model,
+ temperature = 0,
+ maxToken = 4000,
+ stream = false,
+ history = [],
+ quoteQA = [],
+ userChatInput,
+ systemPrompt = '',
+ limitPrompt = ''
+ } = props as ChatProps;
+
// temperature adapt
const modelConstantsData = getChatModel(model);
if (!modelConstantsData) {
- return Promise.reject('The chat model is undefined');
+ return Promise.reject('The chat model is undefined, you need to select a chat model.');
}
// FastGpt temperature range: 1~10
@@ -91,12 +59,19 @@ export async function chatCompletion({
const limitText = (() => {
if (limitPrompt) return limitPrompt;
- if (quotePrompt && !limitPrompt) {
- return '根据知识库内容回答问题,仅回复知识库提供的内容。';
+ if (quoteQA.length > 0 && !limitPrompt) {
+ return '根据知识库内容回答问题,仅回复知识库提供的内容,不要对知识库内容做补充说明。';
}
return '';
})();
+ const quotePrompt =
+ quoteQA.length > 0
+ ? `下面是知识库内容:
+${quoteQA.map((item, i) => `${i + 1}. [${item.q}\n${item.a}]`).join('\n')}
+`
+ : '';
+
const messages: ChatItemType[] = [
...(quotePrompt
? [
@@ -138,6 +113,7 @@ export async function chatCompletion({
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const chatAPI = getOpenAIApi();
+ // console.log(adaptMessages);
/* count response max token */
const promptsToken = modelToolMap.countTokens({
@@ -152,8 +128,8 @@ export async function chatCompletion({
temperature: Number(temperature || 0),
max_tokens: maxToken,
messages: adaptMessages,
- // frequency_penalty: 0.5, // 越大,重复内容越少
- // presence_penalty: -0.5, // 越大,越容易出现新内容
+ frequency_penalty: 0.5, // 越大,重复内容越少
+ presence_penalty: -0.5, // 越大,越容易出现新内容
stream
},
{
@@ -163,7 +139,7 @@ export async function chatCompletion({
}
);
- const { answer, totalTokens } = await (async () => {
+ const { answerText, totalTokens, finishMessages } = await (async () => {
if (stream) {
// sse response
const { answer } = await streamResponse({ res, response });
@@ -174,38 +150,45 @@ export async function chatCompletion({
});
const totalTokens = countOpenAIToken({
- messages: finishMessages,
- model: 'gpt-3.5-turbo-16k'
+ messages: finishMessages
});
return {
- answer,
- totalTokens
+ answerText: answer,
+ totalTokens,
+ finishMessages
};
} else {
const answer = stream ? '' : response.data.choices?.[0].message?.content || '';
const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0;
+ const finishMessages = filterMessages.concat({
+ obj: ChatRoleEnum.AI,
+ value: answer
+ });
+
return {
- answer,
- totalTokens
+ answerText: answer,
+ totalTokens,
+ finishMessages
};
}
})();
- await pushTaskBillListItem({
- billId,
- moduleName: 'AI Chat',
- amount: countModelPrice({ model, tokens: totalTokens }),
- model: modelConstantsData.name,
- tokenLen: totalTokens
- });
-
return {
- answerText: answer,
- totalTokens
+ [TaskResponseKeyEnum.answerText]: answerText,
+ [TaskResponseKeyEnum.responseData]: {
+ moduleName,
+ price: countModelPrice({ model, tokens: totalTokens }),
+ model: modelConstantsData.name,
+ tokens: totalTokens,
+ question: userChatInput,
+ answer: answerText,
+ maxToken,
+ finishMessages
+ }
};
-}
+};
async function streamResponse({ res, response }: { res: NextApiResponse; response: any }) {
let answer = '';
diff --git a/client/src/service/moduleDispatch/index.ts b/client/src/service/moduleDispatch/index.ts
new file mode 100644
index 000000000..a84cc7783
--- /dev/null
+++ b/client/src/service/moduleDispatch/index.ts
@@ -0,0 +1,6 @@
+export * from './init/history';
+export * from './init/userChatInput';
+export * from './chat/oneapi';
+export * from './kb/search';
+export * from './tools/answer';
+export * from './agent/classifyQuestion';
diff --git a/client/src/service/moduleDispatch/init/history.tsx b/client/src/service/moduleDispatch/init/history.tsx
new file mode 100644
index 000000000..a96f684be
--- /dev/null
+++ b/client/src/service/moduleDispatch/init/history.tsx
@@ -0,0 +1,15 @@
+import { SystemInputEnum } from '@/constants/app';
+import { ChatItemType } from '@/types/chat';
+
+export type HistoryProps = {
+ maxContext: number;
+ [SystemInputEnum.history]: ChatItemType[];
+};
+
+export const dispatchHistory = (props: Record<string, any>) => {
+ const { maxContext = 5, history = [] } = props as HistoryProps;
+
+ return {
+ history: history.slice(-maxContext)
+ };
+};
diff --git a/client/src/service/moduleDispatch/init/userChatInput.tsx b/client/src/service/moduleDispatch/init/userChatInput.tsx
new file mode 100644
index 000000000..7743b9f24
--- /dev/null
+++ b/client/src/service/moduleDispatch/init/userChatInput.tsx
@@ -0,0 +1,12 @@
+import { SystemInputEnum } from '@/constants/app';
+
+export type UserChatInputProps = {
+ [SystemInputEnum.userChatInput]: string;
+};
+
+export const dispatchChatInput = (props: Record<string, any>) => {
+ const { userChatInput } = props as UserChatInputProps;
+ return {
+ userChatInput
+ };
+};
diff --git a/client/src/service/moduleDispatch/kb/search.ts b/client/src/service/moduleDispatch/kb/search.ts
new file mode 100644
index 000000000..a69d4bdf6
--- /dev/null
+++ b/client/src/service/moduleDispatch/kb/search.ts
@@ -0,0 +1,76 @@
+import { PgClient } from '@/service/pg';
+import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
+import { TaskResponseKeyEnum } from '@/constants/chat';
+import { getVector } from '@/pages/api/openapi/plugin/vector';
+import { countModelPrice } from '@/service/events/pushBill';
+import type { SelectedKbType } from '@/types/plugin';
+import type { QuoteItemType } from '@/types/chat';
+
+type KBSearchProps = {
+ kbList: SelectedKbType;
+ history: ChatItemType[];
+ similarity: number;
+ limit: number;
+ userChatInput: string;
+};
+export type KBSearchResponse = {
+ [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+ isEmpty?: boolean;
+ unEmpty?: boolean;
+ quoteQA: QuoteItemType[];
+};
+
+const moduleName = 'KB Search';
+
+export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> {
+ const {
+ kbList = [],
+ history = [],
+ similarity = 0.8,
+ limit = 5,
+ userChatInput
+ } = props as KBSearchProps;
+
+ if (kbList.length === 0) {
+ return Promise.reject("You didn't choose the knowledge base");
+ }
+
+ if (!userChatInput) {
+ return Promise.reject('Your input is empty');
+ }
+
+ // get vector
+ const vectorModel = global.vectorModels[0];
+ const { vectors, tokenLen } = await getVector({
+ model: vectorModel.model,
+ input: [userChatInput]
+ });
+
+ // search kb
+ const res: any = await PgClient.query(
+ `BEGIN;
+ SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
+ select kb_id,id,q,a,source from modelData where kb_id IN (${kbList
+ .map((item) => `'${item.kbId}'`)
+ .join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
+ vectors[0]
+ }]' limit ${limit};
+ COMMIT;`
+ );
+
+ const searchRes: QuoteItemType[] = res?.[2]?.rows || [];
+
+ return {
+ isEmpty: searchRes.length === 0 ? true : undefined,
+ unEmpty: searchRes.length > 0 ? true : undefined,
+ quoteQA: searchRes,
+ responseData: {
+ moduleName,
+ price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
+ model: vectorModel.name,
+ tokens: tokenLen,
+ similarity,
+ limit
+ }
+ };
+}
diff --git a/client/src/service/moduleDispatch/tools/answer.ts b/client/src/service/moduleDispatch/tools/answer.ts
new file mode 100644
index 000000000..c292b97c7
--- /dev/null
+++ b/client/src/service/moduleDispatch/tools/answer.ts
@@ -0,0 +1,31 @@
+import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
+import { sseResponse } from '@/service/utils/tools';
+import { textAdaptGptResponse } from '@/utils/adapt';
+import type { NextApiResponse } from 'next';
+
+export type AnswerProps = {
+ res: NextApiResponse;
+ text: string;
+ stream: boolean;
+};
+export type AnswerResponse = {
+ [TaskResponseKeyEnum.answerText]: string;
+};
+
+export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
+ const { res, text = '', stream } = props as AnswerProps;
+
+ if (stream) {
+ sseResponse({
+ res,
+ event: sseResponseEventEnum.answer,
+ data: textAdaptGptResponse({
+ text: text.replace(/\\n/g, '\n')
+ })
+ });
+ }
+
+ return {
+ [TaskResponseKeyEnum.answerText]: text
+ };
+};
diff --git a/client/src/types/app.d.ts b/client/src/types/app.d.ts
index f978be00a..b10dbcfb8 100644
--- a/client/src/types/app.d.ts
+++ b/client/src/types/app.d.ts
@@ -5,7 +5,7 @@ import {
ModulesInputItemTypeEnum,
VariableInputEnum
} from '../constants/app';
-import type { FlowInputItemType, FlowOutputItemType } from './flow';
+import type { FlowInputItemType, FlowOutputItemType, FlowOutputTargetItemType } from './flow';
import type { AppSchema, kbSchema } from './mongoSchema';
import { ChatModelType } from '@/constants/model';
@@ -58,21 +58,12 @@ export type VariableItemType = {
};
/* app module */
-export type AppModuleTemplateItemType = {
- logo: string;
- name: string;
- description?: string;
- intro: string;
-
- flowType: `${FlowModuleTypeEnum}`;
- type: `${AppModuleItemTypeEnum}`;
- url?: string;
- inputs: FlowInputItemType[];
- outputs: FlowOutputItemType[];
-};
-export type AppModuleItemType = AppModuleTemplateItemType & {
+export type AppModuleItemType = {
moduleId: string;
position?: XYPosition;
+ flowType: `${FlowModuleTypeEnum}`;
+ inputs: { key: string; value?: any; connected?: boolean }[];
+ outputs: { key: string; targets: FlowOutputTargetItemType[] }[];
};
export type AppItemType = {
@@ -83,8 +74,7 @@ export type AppItemType = {
export type RunningModuleItemType = {
moduleId: string;
- type: `${AppModuleItemTypeEnum}`;
- url?: string;
+ flowType: `${FlowModuleTypeEnum}`;
inputs: {
key: string;
value?: any;
diff --git a/client/src/types/chat.d.ts b/client/src/types/chat.d.ts
index 6e7a0e679..b6011c18e 100644
--- a/client/src/types/chat.d.ts
+++ b/client/src/types/chat.d.ts
@@ -1,6 +1,7 @@
import { ChatRoleEnum, rawSearchKey } from '@/constants/chat';
import type { InitChatResponse, InitShareChatResponse } from '@/api/response/chat';
-import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
+import { TaskResponseKeyEnum } from '@/constants/chat';
+import { ClassifyQuestionAgentItemType } from './app';
export type ExportChatType = 'md' | 'pdf' | 'html';
@@ -37,3 +38,33 @@ export type ShareChatHistoryItemType = HistoryItemType & {
export type ShareChatType = InitShareChatResponse & {
history: ShareChatHistoryItemType;
};
+
+export type QuoteItemType = {
+ kb_id: string;
+ id: string;
+ q: string;
+ a: string;
+ source?: string;
+};
+
+export type ChatHistoryItemResType = {
+ moduleName: string;
+ price: number;
+ model?: string;
+ tokens?: number;
+
+ // chat
+ answer?: string;
+ question?: string;
+ temperature?: number;
+ maxToken?: number;
+ finishMessages?: ChatItemType[];
+
+ // kb search
+ similarity?: number;
+ limit?: number;
+
+ // cq
+ cqList?: ClassifyQuestionAgentItemType[];
+ cqResult?: string;
+};
diff --git a/client/src/types/flow.d.ts b/client/src/types/flow.d.ts
index 99e66540e..a41bc38d0 100644
--- a/client/src/types/flow.d.ts
+++ b/client/src/types/flow.d.ts
@@ -5,6 +5,15 @@ import {
} from '@/constants/flow';
import { Connection } from 'reactflow';
import type { AppModuleItemType } from './app';
+import { FlowModuleTypeEnum } from '@/constants/flow';
+
+export type FlowModuleItemChangeProps = {
+ moduleId: string;
+ type?: 'inputs' | 'outputs';
+ key: string;
+ value: any;
+ valueKey?: keyof FlowInputItemType & keyof FlowBodyItemType;
+};
export type FlowInputItemType = {
key: string; // 字段名
@@ -31,19 +40,21 @@ export type FlowOutputItemType = {
label: string;
description?: string;
type: `${FlowOutputItemTypeEnum}`;
- response?: boolean;
targets: FlowOutputTargetItemType[];
};
-export type FlowModuleItemChangeProps = {
- moduleId: string;
- type?: 'inputs' | 'outputs';
- key: string;
- value: any;
- valueKey?: keyof FlowInputItemType & keyof FlowBodyItemType;
+export type FlowModuleTemplateType = {
+ logo: string;
+ name: string;
+ description?: string;
+ intro: string;
+ flowType: `${FlowModuleTypeEnum}`;
+ url?: string;
+ inputs: FlowInputItemType[];
+ outputs: FlowOutputItemType[];
};
-
-export type FlowModuleItemType = AppModuleItemType & {
+export type FlowModuleItemType = FlowModuleTemplateType & {
+ moduleId: string;
onChangeNode: (e: FlowModuleItemChangeProps) => void;
onDelNode: (id: string) => void;
};
diff --git a/client/src/utils/adapt.ts b/client/src/utils/adapt.ts
index 52e23e3ee..7eab12d9b 100644
--- a/client/src/utils/adapt.ts
+++ b/client/src/utils/adapt.ts
@@ -6,11 +6,11 @@ import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatRoleEnum } from '@/constants/chat';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
import type { AppModuleItemType } from '@/types/app';
-import type { FlowModuleItemType } from '@/types/flow';
+import type { FlowModuleItemType, FlowModuleTemplateType } from '@/types/flow';
import type { Edge, Node } from 'reactflow';
import { connectionLineStyle } from '@/constants/flow';
import { customAlphabet } from 'nanoid';
-import { ModuleTemplates } from '@/constants/flow/ModuleTemplate';
+import { EmptyModule, ModuleTemplates, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
export const adaptBill = (bill: BillSchema): UserBillType => {
@@ -92,48 +92,41 @@ export const appModule2FlowNode = ({
}): Node => {
// init some static data
const template =
- ModuleTemplates.map((templates) => templates.list)
- ?.flat()
- .find((template) => template.flowType === item.flowType) || item;
+ ModuleTemplatesFlat.find((template) => template.flowType === item.flowType) || EmptyModule;
// replace item data
- const moduleItem = {
+ const moduleItem: FlowModuleItemType = {
...item,
logo: template.logo,
name: template.name,
intro: template.intro,
- type: template.type,
url: template.url,
inputs: template.inputs.map((templateInput) => {
// use latest inputs
const itemInput = item.inputs.find((item) => item.key === templateInput.key) || templateInput;
return {
...templateInput,
- key: itemInput.key,
value: itemInput.value
};
}),
- outputs: item.outputs.map((itemOutput) => {
+ outputs: template.outputs.map((templateOutput) => {
// unChange outputs
- const templateOutput =
- template.outputs.find((item) => item.key === itemOutput.key) || itemOutput;
+ const itemOutput =
+ item.outputs.find((item) => item.key === templateOutput.key) || templateOutput;
return {
...templateOutput,
- key: itemOutput.key,
targets: itemOutput.targets || []
};
- })
+ }),
+ onChangeNode,
+ onDelNode
};
return {
id: item.moduleId,
type: item.flowType,
- data: {
- ...moduleItem,
- onChangeNode,
- onDelNode
- },
+ data: moduleItem,
position: item.position || { x: 0, y: 0 }
};
};