4.7-alpha2 (#1027)

* feat: stop toolCall and rename some field. (#46)

* perf: node delete tip;pay tip

* fix: toolCall cannot save child answer

* feat: stop tool

* fix: team modal

* fix feckbackMoal  auth bug (#47)

* 简单的支持提示词运行tool。优化workflow模板 (#49)

* remove templates

* fix: request body undefined

* feat: prompt tool run

* feat: workflow tamplates modal

* perf: plugin start

* 4.7 (#50)

* fix docker-compose download url (#994)

original code is a bad url with '404 NOT FOUND' return.
fix docker-compose download url, add 'v' before docker-compose version

* Update ai_settings.md (#1000)

* Update configuration.md

* Update configuration.md

* Fix history in classifyQuestion and extract modules (#1012)

* Fix history in classifyQuestion and extract modules

* Add chatValue2RuntimePrompt import and update text formatting

* flow controller to packages

* fix: rerank select

* modal ui

* perf: modal code path

* point not sufficient

* feat: http url support variable

* fix http key

* perf: prompt

* perf: ai setting modal

* simple edit ui

---------

Co-authored-by: entorick <entorick11@qq.com>
Co-authored-by: liujianglc <liujianglc@163.com>
Co-authored-by: Fengrui Liu <liufengrui.work@bytedance.com>

* fix team share redirect to login (#51)

* feat: support openapi import plugins (#48)

* feat: support openapi import plugins

* feat: import from url

* fix: add body params parse

* fix build

* fix

* fix

* fix

* tool box ui (#52)

* fix: training queue

* feat: simple edit tool select

* perf: simple edit dataset prompt

* fix: chatbox tool ux

* feat: quote prompt module

* perf: plugin tools sign

* perf: model avatar

* tool selector ui

* feat: max histories

* perf: http plugin import (#53)

* perf: plugin http import

* chatBox ui

* perf: name

* fix: Node template card (#54)

* fix: ts

* setting modal

* package

* package

* feat: add plugins search (#57)

* feat: add plugins search

* perf: change http plugin header input

* Yjl (#56)

* perf: prompt tool call

* perf: chat box ux

* doc

* doc

* price tip

* perf: tool selector

* ui'

* fix: vector queue

* fix: empty tool and empty response

* fix: empty msg

* perf: pg index

* perf: ui tip

* doc

* tool tip

---------

Co-authored-by: yst <77910600+yu-and-liu@users.noreply.github.com>
Co-authored-by: entorick <entorick11@qq.com>
Co-authored-by: liujianglc <liujianglc@163.com>
Co-authored-by: Fengrui Liu <liufengrui.work@bytedance.com>
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer
2024-03-21 13:32:31 +08:00
committed by GitHub
parent 6d4b331db9
commit 9d27de154b
322 changed files with 9282 additions and 6498 deletions

View File

@@ -15,7 +15,7 @@ export enum TeamErrEnum {
const teamErr = [
{ statusText: TeamErrEnum.teamOverSize, message: 'error.team.overSize' },
{ statusText: TeamErrEnum.unAuthTeam, message: '无权操作该团队' },
{ statusText: TeamErrEnum.aiPointsNotEnough, message: 'AI积分已用完~' },
{ statusText: TeamErrEnum.aiPointsNotEnough, message: '' },
{ statusText: TeamErrEnum.datasetSizeNotEnough, message: '知识库容量不足,请先扩容~' },
{ statusText: TeamErrEnum.datasetAmountNotEnough, message: '知识库数量已达上限~' },
{ statusText: TeamErrEnum.appAmountNotEnough, message: '应用数量已达上限~' },

View File

@@ -1,7 +1,7 @@
import { replaceSensitiveText } from '../string/tools';
export const getErrText = (err: any, def = '') => {
const msg: string = typeof err === 'string' ? err : err?.message || def || '';
const msg: string = typeof err === 'string' ? err : err?.message ?? def;
msg && console.log('error =>', msg);
return replaceSensitiveText(msg);
};

View File

@@ -1,3 +1,4 @@
/* mongo fs bucket */
export enum BucketNameEnum {
dataset = 'dataset'
}
@@ -7,4 +8,4 @@ export const bucketNameMap = {
}
};
export const FileBaseUrl = '/api/common/file/read';
export const ReadFileBaseUrl = '/api/common/file/read';

View File

@@ -50,3 +50,7 @@ export const mongoImageTypeMap = {
export const uniqueImageTypeList = Object.entries(mongoImageTypeMap)
.filter(([key, value]) => value.unique)
.map(([key]) => key as `${MongoImageTypeEnum}`);
export const FolderIcon = 'file/fill/folder';
export const FolderImgUrl = '/imgs/files/folder.svg';
export const HttpImgUrl = '/imgs/module/http.png';

View File

@@ -1,2 +1,3 @@
export const HUMAN_ICON = `/icon/human.svg`;
export const LOGO_ICON = `/icon/logo.svg`;
export const HUGGING_FACE_ICON = `/imgs/model/huggingface.svg`;

View File

@@ -1,6 +1,7 @@
export type LLMModelItemType = {
model: string;
name: string;
avatar?: string;
maxContext: number;
maxResponse: number;
quoteMaxToken: number;
@@ -31,6 +32,7 @@ export type LLMModelItemType = {
export type VectorModelItemType = {
model: string;
name: string;
avatar?: string;
defaultToken: number;
charsPointsPrice: number;
maxToken: number;

View File

@@ -0,0 +1,116 @@
import { PromptTemplateItem } from '../type.d';
export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
{
title: '标准模板',
desc: '标准提示词,用于结构不固定的知识库。',
value: `{{q}}
{{a}}`
},
{
title: '问答模板',
desc: '适合 QA 问答结构的知识库可以让AI较为严格的按预设内容回答',
value: `<Question>
{{q}}
</Question>
<Answer>
{{a}}
</Answer>`
},
{
title: '标准严格模板',
desc: '在标准模板基础上,对模型的回答做更严格的要求。',
value: `{{q}}
{{a}}`
},
{
title: '严格问答模板',
desc: '在问答模板基础上,对模型的回答做更严格的要求。',
value: `<Question>
{{q}}
</Question>
<Answer>
{{a}}
</Answer>`
}
];
export const Prompt_QuotePromptList: PromptTemplateItem[] = [
{
title: '标准模板',
desc: '',
value: `使用 <Data></Data> 标记中的内容作为你的知识:
<Data>
{{quote}}
</Data>
回答要求:
- 如果你不清楚答案,你需要澄清。
- 避免提及你是从 <Data></Data> 获取的知识。
- 保持答案与 <Data></Data> 中描述的一致。
- 使用 Markdown 语法优化回答格式。
- 使用与问题相同的语言回答。
问题:"""{{question}}"""`
},
{
title: '问答模板',
desc: '',
value: `使用 <QA></QA> 标记中的问答对进行回答。
<QA>
{{quote}}
</QA>
回答要求:
- 选择其中一个或多个问答对进行回答。
- 回答的内容应尽可能与 <答案></答案> 中的内容一致。
- 如果没有相关的问答对,你需要澄清。
- 避免提及你是从 QA 获取的知识,只需要回复答案。
问题:"""{{question}}"""`
},
{
title: '标准严格模板',
desc: '',
value: `忘记你已有的知识,仅使用 <Data></Data> 标记中的内容作为你的知识:
<Data>
{{quote}}
</Data>
思考流程:
1. 判断问题是否与 <Data></Data> 标记中的内容有关。
2. 如果有关,你按下面的要求回答。
3. 如果无关,你直接拒绝回答本次问题。
回答要求:
- 避免提及你是从 <Data></Data> 获取的知识。
- 保持答案与 <Data></Data> 中描述的一致。
- 使用 Markdown 语法优化回答格式。
- 使用与问题相同的语言回答。
问题:"""{{question}}"""`
},
{
title: '严格问答模板',
desc: '',
value: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。
<QA>
{{quote}}
</QA>}
思考流程:
1. 判断问题是否与 <QA></QA> 标记中的内容有关。
2. 如果无关,你直接拒绝回答本次问题。
3. 判断是否有相近或相同的问题。
4. 如果有相同的问题,直接输出对应答案。
5. 如果只有相近的问题,请把相近的问题和答案一起输出。
最后,避免提及你是从 QA 获取的知识,只需要回复答案。
问题:"""{{question}}"""`
}
];

View File

@@ -0,0 +1,60 @@
export const Prompt_AgentQA = {
description: `<Context></Context> 标记中是一段文本,学习和分析它,并整理学习成果:
- 提出问题并给出每个问题的答案。
- 答案需详细完整,尽可能保留原文描述。
- 答案可以包含普通文字、链接、代码、表格、公示、媒体链接等 Markdown 元素。
- 最多提出 30 个问题。
`,
fixedText: `请按以下格式整理学习成果:
<Context>
文本
</Context>
Q1: 问题。
A1: 答案。
Q2:
A2:
------
我们开始吧!
<Context>
{{text}}
<Context/>
`
};
export const Prompt_ExtractJson = `你可以从 <对话记录></对话记录> 中提取指定 JSON 信息,你仅需返回 JSON 字符串,无需回答问题。
<提取要求>
{{description}}
</提取要求>
<字段说明>
1. 下面的 JSON 字符串均按照 JSON Schema 的规则描述。
2. key 代表字段名description 代表字段的描述enum 是可选值,代表可选的 value。
3. 如果没有可提取的内容,忽略该字段。
4. 本次需提取的JSON Schema{{json}}
</字段说明>
<对话记录>
{{text}}
</对话记录>
`;
export const Prompt_CQJson = `我会给你几个问题类型请参考背景知识可能为空和对话记录判断我“本次问题”的类型并返回一个问题“类型ID”:
<问题类型>
{{typeList}}
</问题类型>
<背景知识>
{{systemPrompt}}
</背景知识>
<对话记录>
{{history}}
</对话记录>
Human"{{question}}"
类型ID=
`;

View File

@@ -12,16 +12,9 @@ export type CreateAppParams = {
export interface AppUpdateParams {
name?: string;
type?: `${AppTypeEnum}`;
simpleTemplateId?: string;
avatar?: string;
intro?: string;
modules?: AppSchema['modules'];
permission?: AppSchema['permission'];
teamTags?: AppSchema['teamTags'];
}
export type FormatForm2ModulesProps = {
formData: AppSimpleEditFormType;
chatModelMaxToken: number;
llmModelList: LLMModelItemType[];
};

View File

@@ -1,7 +1,12 @@
import type { AppTTSConfigType, ModuleItemType, VariableItemType } from '../module/type.d';
import type {
AppTTSConfigType,
FlowNodeTemplateType,
ModuleItemType,
VariableItemType
} from '../module/type.d';
import { AppTypeEnum } from './constants';
import { PermissionTypeEnum } from '../../support/permission/constant';
import type { AIChatModuleProps, DatasetModuleProps } from '../module/node/type.d';
import type { DatasetModuleProps } from '../module/node/type.d';
import { VariableInputEnum } from '../module/constants';
import { SelectedDatasetType } from '../module/api';
import { DatasetSearchModeEnum } from '../dataset/constants';
@@ -13,7 +18,6 @@ export interface AppSchema {
tmbId: string;
name: string;
type: `${AppTypeEnum}`;
simpleTemplateId: string;
avatar: string;
intro: string;
updateTime: number;
@@ -37,19 +41,6 @@ export type AppDetailType = AppSchema & {
canWrite: boolean;
};
// export type AppSimpleEditFormType = {
// aiSettings: AIChatModuleProps;
// dataset: DatasetModuleProps & {
// searchEmptyText: string;
// };
// userGuide: {
// welcomeText: string;
// variables: VariableItemType[];
// questionGuide: boolean;
// tts: AppTTSConfigType;
// };
// };
// Since useform cannot infer enumeration types, all enumeration keys can only be undone manually
export type AppSimpleEditFormType = {
// templateId: string;
aiSettings: {
@@ -58,8 +49,7 @@ export type AppSimpleEditFormType = {
temperature: number;
maxToken: number;
isResponseAnswerText: boolean;
quoteTemplate?: string | undefined;
quotePrompt?: string | undefined;
maxHistories: number;
};
dataset: {
datasets: SelectedDatasetType;
@@ -67,11 +57,11 @@ export type AppSimpleEditFormType = {
similarity?: number;
limit?: number;
usingReRank?: boolean;
searchEmptyText?: string;
datasetSearchUsingExtensionQuery?: boolean;
datasetSearchExtensionModel?: string;
datasetSearchExtensionBg?: string;
};
selectedTools: FlowNodeTemplateType[];
userGuide: {
welcomeText: string;
variables: {
@@ -94,34 +84,3 @@ export type AppSimpleEditFormType = {
};
};
};
/* simple mode template*/
export type AppSimpleEditConfigTemplateType = {
id: string;
name: string;
desc: string;
systemForm: {
aiSettings?: {
model?: boolean;
systemPrompt?: boolean;
temperature?: boolean;
maxToken?: boolean;
quoteTemplate?: boolean;
quotePrompt?: boolean;
};
dataset?: {
datasets?: boolean;
similarity?: boolean;
limit?: boolean;
searchMode: `${DatasetSearchModeEnum}`;
usingReRank: boolean;
searchEmptyText?: boolean;
};
userGuide?: {
welcomeText?: boolean;
variables?: boolean;
questionGuide?: boolean;
tts?: boolean;
};
};
};

View File

@@ -1,6 +1,10 @@
import type { AppSimpleEditFormType } from '../app/type';
import { FlowNodeTypeEnum } from '../module/node/constant';
import { ModuleOutputKeyEnum, ModuleInputKeyEnum } from '../module/constants';
import {
ModuleOutputKeyEnum,
ModuleInputKeyEnum,
FlowNodeTemplateTypeEnum
} from '../module/constants';
import type { FlowNodeInputItemType } from '../module/node/type.d';
import { getGuideModule, splitGuideModule } from '../module/utils';
import { ModuleItemType } from '../module/type.d';
@@ -13,20 +17,19 @@ export const getDefaultAppForm = (): AppSimpleEditFormType => {
systemPrompt: '',
temperature: 0,
isResponseAnswerText: true,
quotePrompt: '',
quoteTemplate: '',
maxHistories: 6,
maxToken: 4000
},
dataset: {
datasets: [],
similarity: 0.4,
limit: 1500,
searchEmptyText: '',
searchMode: DatasetSearchModeEnum.embedding,
usingReRank: false,
datasetSearchUsingExtensionQuery: true,
datasetSearchExtensionBg: ''
},
selectedTools: [],
userGuide: {
welcomeText: '',
variables: [],
@@ -47,7 +50,10 @@ export const appModules2Form = ({ modules }: { modules: ModuleItemType[] }) => {
};
modules.forEach((module) => {
if (module.flowType === FlowNodeTypeEnum.chatNode) {
if (
module.flowType === FlowNodeTypeEnum.chatNode ||
module.flowType === FlowNodeTypeEnum.tools
) {
defaultAppForm.aiSettings.model = findInputValueByKey(
module.inputs,
ModuleInputKeyEnum.aiModel
@@ -64,13 +70,9 @@ export const appModules2Form = ({ modules }: { modules: ModuleItemType[] }) => {
module.inputs,
ModuleInputKeyEnum.aiChatMaxToken
);
defaultAppForm.aiSettings.quoteTemplate = findInputValueByKey(
defaultAppForm.aiSettings.maxHistories = findInputValueByKey(
module.inputs,
ModuleInputKeyEnum.aiChatQuoteTemplate
);
defaultAppForm.aiSettings.quotePrompt = findInputValueByKey(
module.inputs,
ModuleInputKeyEnum.aiChatQuotePrompt
ModuleInputKeyEnum.history
);
} else if (module.flowType === FlowNodeTypeEnum.datasetSearchNode) {
defaultAppForm.dataset.datasets = findInputValueByKey(
@@ -104,17 +106,6 @@ export const appModules2Form = ({ modules }: { modules: ModuleItemType[] }) => {
module.inputs,
ModuleInputKeyEnum.datasetSearchExtensionBg
);
// empty text
const emptyOutputs =
module.outputs.find((item) => item.key === ModuleOutputKeyEnum.datasetIsEmpty)?.targets ||
[];
const emptyOutput = emptyOutputs[0];
if (emptyOutput) {
const target = modules.find((item) => item.moduleId === emptyOutput.moduleId);
defaultAppForm.dataset.searchEmptyText =
target?.inputs?.find((item) => item.key === ModuleInputKeyEnum.answerText)?.value || '';
}
} else if (module.flowType === FlowNodeTypeEnum.userGuide) {
const { welcomeText, variableModules, questionGuide, ttsConfig } = splitGuideModule(
getGuideModule(modules)
@@ -125,6 +116,18 @@ export const appModules2Form = ({ modules }: { modules: ModuleItemType[] }) => {
questionGuide: questionGuide,
tts: ttsConfig
};
} else if (module.flowType === FlowNodeTypeEnum.pluginModule) {
defaultAppForm.selectedTools.push({
id: module.inputs.find((input) => input.key === ModuleInputKeyEnum.pluginId)?.value || '',
name: module.name,
avatar: module.avatar,
intro: module.intro || '',
flowType: module.flowType,
showStatus: module.showStatus,
inputs: module.inputs,
outputs: module.outputs,
templateType: FlowNodeTemplateTypeEnum.other
});
}
});

View File

@@ -83,6 +83,7 @@ export const chats2GPTMessages = ({
});
}
} else {
//AI
item.value.forEach((value) => {
if (value.type === ChatItemValueTypeEnum.tool && value.tools && reserveTool) {
const tool_calls: ChatCompletionMessageToolCall[] = [];

View File

@@ -3,6 +3,8 @@ export type UpdateChatFeedbackProps = {
chatId: string;
chatItemId: string;
shareId?: string;
teamId?: string;
teamToken?: string;
outLinkUid?: string;
userBadFeedback?: string;
userGoodFeedback?: string;

View File

@@ -141,7 +141,7 @@ export type ChatHistoryItemResType = DispatchNodeResponseType & {
};
/* One tool run response */
export type ToolRunResponseItemType = Record<string, any> | Array;
export type ToolRunResponseItemType = any;
/* tool module response */
export type ToolModuleResponseItemType = {
id: string;

View File

@@ -154,8 +154,5 @@ export const SearchScoreTypeMap = {
}
};
export const FolderIcon = 'file/fill/folder';
export const FolderImgUrl = '/imgs/files/folder.svg';
export const CustomCollectionIcon = 'common/linkBlue';
export const LinkCollectionIcon = 'common/linkBlue';

View File

@@ -13,3 +13,10 @@ export type HttpQueryType = {
variables: Record<string, any>;
[key: string]: any;
};
/* http node */
export type HttpParamAndHeaderItemType = {
key: string;
type: string;
value: string;
};

View File

@@ -1,4 +1,4 @@
export enum ModuleTemplateTypeEnum {
export enum FlowNodeTemplateTypeEnum {
userGuide = 'userGuide',
systemInput = 'systemInput',
tools = 'tools',
@@ -87,7 +87,8 @@ export enum ModuleInputKeyEnum {
runAppSelectApp = 'app',
// plugin
pluginId = 'pluginId'
pluginId = 'pluginId',
pluginStart = 'pluginStart'
}
export enum ModuleOutputKeyEnum {
@@ -117,7 +118,10 @@ export enum ModuleOutputKeyEnum {
selectedTools = 'selectedTools',
// http
httpRawResponse = 'httpRawResponse'
httpRawResponse = 'httpRawResponse',
// plugin
pluginStart = 'pluginStart'
}
export enum VariableInputEnum {

View File

@@ -21,10 +21,12 @@ export enum FlowNodeInputTypeEnum {
// ai model select
selectLLMModel = 'selectLLMModel',
settingLLMModel = 'settingLLMModel',
// dataset special input
selectDataset = 'selectDataset',
selectDatasetParamsModal = 'selectDatasetParamsModal',
settingDatasetQuotePrompt = 'settingDatasetQuotePrompt',
hidden = 'hidden',
custom = 'custom'
@@ -57,7 +59,8 @@ export enum FlowNodeTypeEnum {
pluginInput = 'pluginInput',
pluginOutput = 'pluginOutput',
queryExtension = 'cfr',
tools = 'tools'
tools = 'tools',
stopTool = 'stopTool'
// abandon
}

View File

@@ -102,6 +102,13 @@ export type EditNodeFieldType = {
};
/* ------------- item type --------------- */
export type SettingAIDataType = {
model: string;
temperature: number;
maxToken: number;
isResponseAnswerText?: boolean;
maxHistories?: number;
};
/* ai chat modules props */
export type AIChatModuleProps = {
[ModuleInputKeyEnum.aiModel]: string;

View File

@@ -90,6 +90,7 @@ export type DispatchNodeResponseType = {
// tool
toolCallTokens?: number;
toolDetail?: ChatHistoryItemResType[];
toolStop?: boolean;
};
export type DispatchNodeResultType<T> = {

View File

@@ -8,7 +8,9 @@ import { ClassifyQuestionModule } from './system/classifyQuestion';
import { ContextExtractModule } from './system/contextExtract';
import { HttpModule468 } from './system/http468';
import { HttpModule } from './system/abandon/http';
import { ToolModule } from './system/tools';
import { StopToolNode } from './system/stopTool';
import { RunAppModule } from './system/runApp';
import { PluginInputModule } from './system/pluginInput';
@@ -16,11 +18,11 @@ import { PluginOutputModule } from './system/pluginOutput';
import { RunPluginModule } from './system/runPlugin';
import { AiQueryExtension } from './system/queryExtension';
import type { FlowModuleTemplateType, moduleTemplateListType } from '../../module/type.d';
import { ModuleTemplateTypeEnum } from '../../module/constants';
import type { FlowNodeTemplateType, moduleTemplateListType } from '../../module/type.d';
import { FlowNodeTemplateTypeEnum } from '../../module/constants';
/* app flow module templates */
export const appSystemModuleTemplates: FlowModuleTemplateType[] = [
export const appSystemModuleTemplates: FlowNodeTemplateType[] = [
UserGuideModule,
UserInputModule,
AiChatModule,
@@ -29,13 +31,14 @@ export const appSystemModuleTemplates: FlowModuleTemplateType[] = [
DatasetConcatModule,
RunAppModule,
ToolModule,
StopToolNode,
ClassifyQuestionModule,
ContextExtractModule,
HttpModule468,
AiQueryExtension
];
/* plugin flow module templates */
export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [
export const pluginSystemModuleTemplates: FlowNodeTemplateType[] = [
PluginInputModule,
PluginOutputModule,
AiChatModule,
@@ -44,6 +47,7 @@ export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [
DatasetConcatModule,
RunAppModule,
ToolModule,
StopToolNode,
ClassifyQuestionModule,
ContextExtractModule,
HttpModule468,
@@ -51,7 +55,7 @@ export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [
];
/* all module */
export const moduleTemplatesFlat: FlowModuleTemplateType[] = [
export const moduleTemplatesFlat: FlowNodeTemplateType[] = [
UserGuideModule,
UserInputModule,
AiChatModule,
@@ -63,6 +67,7 @@ export const moduleTemplatesFlat: FlowModuleTemplateType[] = [
HttpModule468,
HttpModule,
ToolModule,
StopToolNode,
AiChatModule,
RunAppModule,
PluginInputModule,
@@ -73,43 +78,43 @@ export const moduleTemplatesFlat: FlowModuleTemplateType[] = [
export const moduleTemplatesList: moduleTemplateListType = [
{
type: ModuleTemplateTypeEnum.userGuide,
label: 'core.module.template.Guide module',
type: FlowNodeTemplateTypeEnum.userGuide,
label: '',
list: []
},
{
type: ModuleTemplateTypeEnum.systemInput,
label: 'core.module.template.System input module',
list: []
},
{
type: ModuleTemplateTypeEnum.textAnswer,
type: FlowNodeTemplateTypeEnum.textAnswer,
label: 'core.module.template.Response module',
list: []
},
{
type: ModuleTemplateTypeEnum.functionCall,
type: FlowNodeTemplateTypeEnum.functionCall,
label: 'core.module.template.Function module',
list: []
},
{
type: ModuleTemplateTypeEnum.tools,
type: FlowNodeTemplateTypeEnum.tools,
label: 'core.module.template.Tool module',
list: []
},
{
type: ModuleTemplateTypeEnum.externalCall,
type: FlowNodeTemplateTypeEnum.externalCall,
label: 'core.module.template.External module',
list: []
},
{
type: ModuleTemplateTypeEnum.personalPlugin,
label: 'core.module.template.My plugin module',
type: FlowNodeTemplateTypeEnum.personalPlugin,
label: '',
list: []
},
{
type: ModuleTemplateTypeEnum.other,
type: FlowNodeTemplateTypeEnum.other,
label: '其他',
list: []
},
{
type: FlowNodeTemplateTypeEnum.systemInput,
label: 'core.module.template.System input module',
list: []
}
];

View File

@@ -59,7 +59,7 @@ export const Input_Template_DynamicInput: FlowNodeInputItemType = {
hideInApp: true
};
export const Input_Template_AiModel: FlowNodeInputItemType = {
export const Input_Template_SelectAIModel: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
label: 'core.module.input.label.aiModel',
@@ -68,6 +68,15 @@ export const Input_Template_AiModel: FlowNodeInputItemType = {
showTargetInApp: false,
showTargetInPlugin: false
};
export const Input_Template_SettingAiModel: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.settingLLMModel,
label: 'core.module.input.label.aiModel',
required: true,
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: false,
showTargetInPlugin: false
};
export const Input_Template_System_Prompt: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiSystemPrompt,
@@ -83,7 +92,7 @@ export const Input_Template_System_Prompt: FlowNodeInputItemType = {
export const Input_Template_Dataset_Quote: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiChatDatasetQuote,
type: FlowNodeInputTypeEnum.target,
type: FlowNodeInputTypeEnum.settingDatasetQuotePrompt,
label: '知识库引用',
description: 'core.module.Dataset quote.Input description',
valueType: ModuleIOValueTypeEnum.datasetQuote,

View File

@@ -3,11 +3,11 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../../node/constant';
import { FlowModuleTemplateType } from '../../../type';
import { FlowNodeTemplateType } from '../../../type';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../../constants';
import {
Input_Template_AddInputParam,
@@ -16,9 +16,9 @@ import {
} from '../../input';
import { Output_Template_AddOutput, Output_Template_Finish } from '../../output';
export const HttpModule: FlowModuleTemplateType = {
export const HttpModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.httpRequest,
templateType: ModuleTemplateTypeEnum.externalCall,
templateType: FlowNodeTemplateTypeEnum.externalCall,
flowType: FlowNodeTypeEnum.httpRequest,
avatar: '/imgs/module/http.png',
name: 'core.module.template.Http request',

View File

@@ -3,15 +3,15 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
import {
Input_Template_AiModel,
Input_Template_SettingAiModel,
Input_Template_Dataset_Quote,
Input_Template_History,
Input_Template_Switch,
@@ -21,18 +21,18 @@ import {
import { chatNodeSystemPromptTip } from '../tip';
import { Output_Template_Finish, Output_Template_UserChatInput } from '../output';
export const AiChatModule: FlowModuleTemplateType = {
export const AiChatModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.chatNode,
templateType: ModuleTemplateTypeEnum.textAnswer,
templateType: FlowNodeTemplateTypeEnum.textAnswer,
flowType: FlowNodeTypeEnum.chatNode,
avatar: '/imgs/module/AI.png',
name: 'AI 对话',
intro: 'AI 大模型对话',
showStatus: true,
// isTool: true,
isTool: true,
inputs: [
Input_Template_Switch,
Input_Template_AiModel,
Input_Template_SettingAiModel,
// --- settings modal
{
key: ModuleInputKeyEnum.aiChatTemperature,
@@ -83,14 +83,6 @@ export const AiChatModule: FlowModuleTemplateType = {
showTargetInApp: false,
showTargetInPlugin: false
},
{
key: ModuleInputKeyEnum.aiChatSettingModal,
type: FlowNodeInputTypeEnum.aiSettings,
label: '',
valueType: ModuleIOValueTypeEnum.any,
showTargetInApp: false,
showTargetInPlugin: false
},
// settings modal ---
{
...Input_Template_System_Prompt,

View File

@@ -1,12 +1,16 @@
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { ModuleIOValueTypeEnum, ModuleInputKeyEnum, ModuleTemplateTypeEnum } from '../../constants';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
FlowNodeTemplateTypeEnum
} from '../../constants';
import { Input_Template_Switch } from '../input';
import { Output_Template_Finish } from '../output';
export const AssignedAnswerModule: FlowModuleTemplateType = {
export const AssignedAnswerModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.answerNode,
templateType: ModuleTemplateTypeEnum.textAnswer,
templateType: FlowNodeTemplateTypeEnum.textAnswer,
flowType: FlowNodeTypeEnum.answerNode,
avatar: '/imgs/module/reply.png',
name: '指定回复',

View File

@@ -3,10 +3,14 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { ModuleIOValueTypeEnum, ModuleInputKeyEnum, ModuleTemplateTypeEnum } from '../../constants';
import { FlowNodeTemplateType } from '../../type.d';
import {
Input_Template_AiModel,
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
FlowNodeTemplateTypeEnum
} from '../../constants';
import {
Input_Template_SelectAIModel,
Input_Template_History,
Input_Template_Switch,
Input_Template_UserChatInput
@@ -15,9 +19,9 @@ import { Output_Template_UserChatInput } from '../output';
import { Input_Template_System_Prompt } from '../input';
import { LLMModelTypeEnum } from '../../../ai/constants';
export const ClassifyQuestionModule: FlowModuleTemplateType = {
export const ClassifyQuestionModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.classifyQuestion,
templateType: ModuleTemplateTypeEnum.functionCall,
templateType: FlowNodeTemplateTypeEnum.functionCall,
flowType: FlowNodeTypeEnum.classifyQuestion,
avatar: '/imgs/module/cq.png',
name: '问题分类',
@@ -26,7 +30,7 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
...Input_Template_SelectAIModel,
llmModelType: LLMModelTypeEnum.classify
},
{

View File

@@ -3,19 +3,23 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
import { Input_Template_AiModel, Input_Template_History, Input_Template_Switch } from '../input';
import {
Input_Template_SelectAIModel,
Input_Template_History,
Input_Template_Switch
} from '../input';
import { LLMModelTypeEnum } from '../../../ai/constants';
export const ContextExtractModule: FlowModuleTemplateType = {
export const ContextExtractModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.contentExtract,
templateType: ModuleTemplateTypeEnum.functionCall,
templateType: FlowNodeTemplateTypeEnum.functionCall,
flowType: FlowNodeTypeEnum.contentExtract,
avatar: '/imgs/module/extract.png',
name: '文本内容提取',
@@ -25,7 +29,7 @@ export const ContextExtractModule: FlowModuleTemplateType = {
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
...Input_Template_SelectAIModel,
llmModelType: LLMModelTypeEnum.extractFields
},
{
@@ -35,7 +39,6 @@ export const ContextExtractModule: FlowModuleTemplateType = {
label: '提取要求描述',
description:
'给AI一些对应的背景知识或要求描述引导AI更好的完成任务。\n该输入框可使用全局变量。',
required: true,
placeholder:
'例如: \n1. 当前时间为: {{cTime}}。你是一个实验室预约助手,你的任务是帮助用户预约实验室,从文本中获取对应的预约信息。\n2. 你是谷歌搜索助手,需要从文本中提取出合适的搜索词。',
showTargetInApp: true,

View File

@@ -3,12 +3,12 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
import { Input_Template_Dataset_Quote, Input_Template_Switch } from '../input';
import { Output_Template_Finish } from '../output';
@@ -20,10 +20,10 @@ export const getOneQuoteInputTemplate = (key = getNanoid()) => ({
type: FlowNodeInputTypeEnum.hidden
});
export const DatasetConcatModule: FlowModuleTemplateType = {
export const DatasetConcatModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.datasetConcatNode,
flowType: FlowNodeTypeEnum.datasetConcatNode,
templateType: ModuleTemplateTypeEnum.tools,
templateType: FlowNodeTemplateTypeEnum.other,
avatar: '/imgs/module/concat.svg',
name: '知识库搜索引用合并',
intro: '可以将多个知识库搜索结果进行合并输出。使用 RRF 的合并方式进行最终排序输出。',

View File

@@ -3,20 +3,20 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
import { Input_Template_Switch, Input_Template_UserChatInput } from '../input';
import { Output_Template_Finish, Output_Template_UserChatInput } from '../output';
import { DatasetSearchModeEnum } from '../../../dataset/constants';
export const DatasetSearchModule: FlowModuleTemplateType = {
export const DatasetSearchModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.datasetSearchNode,
templateType: ModuleTemplateTypeEnum.functionCall,
templateType: FlowNodeTemplateTypeEnum.functionCall,
flowType: FlowNodeTypeEnum.datasetSearchNode,
avatar: '/imgs/module/db.png',
name: '知识库搜索',

View File

@@ -3,12 +3,12 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type';
import { FlowNodeTemplateType } from '../../type';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
import {
Input_Template_AddInputParam,
@@ -17,9 +17,9 @@ import {
} from '../input';
import { Output_Template_AddOutput, Output_Template_Finish } from '../output';
export const HttpModule468: FlowModuleTemplateType = {
export const HttpModule468: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.httpRequest468,
templateType: ModuleTemplateTypeEnum.externalCall,
templateType: FlowNodeTemplateTypeEnum.externalCall,
flowType: FlowNodeTypeEnum.httpRequest468,
avatar: '/imgs/module/http.png',
name: 'HTTP 请求',

View File

@@ -1,15 +1,43 @@
import { ModuleTemplateTypeEnum } from '../../constants';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import {
FlowNodeTemplateTypeEnum,
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum
} from '../../constants';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowNodeTemplateType } from '../../type.d';
export const PluginInputModule: FlowModuleTemplateType = {
export const PluginInputModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.pluginInput,
templateType: ModuleTemplateTypeEnum.systemInput,
templateType: FlowNodeTemplateTypeEnum.systemInput,
flowType: FlowNodeTypeEnum.pluginInput,
avatar: '/imgs/module/input.png',
name: '定义插件输入',
intro: '自定义配置外部输入,使用插件时,仅暴露自定义配置的输入',
showStatus: false,
inputs: [],
outputs: []
inputs: [
{
key: ModuleInputKeyEnum.pluginStart,
type: FlowNodeInputTypeEnum.hidden,
valueType: ModuleIOValueTypeEnum.boolean,
label: '插件开始运行',
description:
'插件开始运行时,会输出一个 True 的标识。有时候,插件不会有额外的的输入,为了顺利的进入下一个阶段,你可以将该值连接到下一个节点的触发器中。',
showTargetInApp: true,
showTargetInPlugin: true
}
],
outputs: [
{
key: ModuleOutputKeyEnum.pluginStart,
label: '插件开始运行',
type: FlowNodeOutputTypeEnum.source,
valueType: ModuleIOValueTypeEnum.boolean,
targets: []
}
]
};

View File

@@ -1,10 +1,10 @@
import { ModuleTemplateTypeEnum } from '../../constants';
import { FlowNodeTemplateTypeEnum } from '../../constants';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
export const PluginOutputModule: FlowModuleTemplateType = {
export const PluginOutputModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.pluginOutput,
templateType: ModuleTemplateTypeEnum.systemInput,
templateType: FlowNodeTemplateTypeEnum.systemInput,
flowType: FlowNodeTypeEnum.pluginOutput,
avatar: '/imgs/module/output.png',
name: '定义插件输出',

View File

@@ -3,25 +3,25 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type';
import { FlowNodeTemplateType } from '../../type';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
import {
Input_Template_History,
Input_Template_Switch,
Input_Template_UserChatInput,
Input_Template_AiModel
Input_Template_SelectAIModel
} from '../input';
import { Output_Template_UserChatInput } from '../output';
import { LLMModelTypeEnum } from '../../../ai/constants';
export const AiQueryExtension: FlowModuleTemplateType = {
export const AiQueryExtension: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.chatNode,
templateType: ModuleTemplateTypeEnum.other,
templateType: FlowNodeTemplateTypeEnum.other,
flowType: FlowNodeTypeEnum.queryExtension,
avatar: '/imgs/module/cfr.svg',
name: '问题优化',
@@ -31,7 +31,7 @@ export const AiQueryExtension: FlowModuleTemplateType = {
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
...Input_Template_SelectAIModel,
llmModelType: LLMModelTypeEnum.queryExtension
},
{

View File

@@ -3,12 +3,12 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
import {
Input_Template_History,
@@ -17,9 +17,9 @@ import {
} from '../input';
import { Output_Template_Finish, Output_Template_UserChatInput } from '../output';
export const RunAppModule: FlowModuleTemplateType = {
export const RunAppModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.runApp,
templateType: ModuleTemplateTypeEnum.externalCall,
templateType: FlowNodeTemplateTypeEnum.externalCall,
flowType: FlowNodeTypeEnum.runApp,
avatar: '/imgs/module/app.png',
name: '应用调用',

View File

@@ -1,10 +1,10 @@
import { ModuleTemplateTypeEnum } from '../../constants';
import { FlowNodeTemplateTypeEnum } from '../../constants';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
export const RunPluginModule: FlowModuleTemplateType = {
export const RunPluginModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.pluginModule,
templateType: ModuleTemplateTypeEnum.externalCall,
templateType: FlowNodeTemplateTypeEnum.externalCall,
flowType: FlowNodeTypeEnum.pluginModule,
intro: '',
name: '',

View File

@@ -0,0 +1,16 @@
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowNodeTemplateType } from '../../type.d';
import { FlowNodeTemplateTypeEnum } from '../../constants';
import { Input_Template_Switch } from '../input';
// Workflow node template: "stop tool call".
// Must be paired with the tool-call node: when this node executes, the current
// tool-call round is force-stopped and the AI does not generate a follow-up
// answer from the tool results (see the `intro` text below).
export const StopToolNode: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.stopTool,
templateType: FlowNodeTemplateTypeEnum.functionCall,
flowType: FlowNodeTypeEnum.stopTool,
avatar: '/imgs/module/toolStop.svg',
name: '工具调用终止',
intro:
'该模块需配置工具调用使用。当该模块被执行时本次工具调用将会强制结束并且不再调用AI针对工具调用结果回答问题。',
// Only a trigger input; the node emits nothing.
inputs: [Input_Template_Switch],
outputs: []
};

View File

@@ -1,12 +1,17 @@
import { FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum,
ModuleInputKeyEnum
} from '../../constants';
import {
Input_Template_AiModel,
Input_Template_SettingAiModel,
Input_Template_History,
Input_Template_Switch,
Input_Template_System_Prompt,
@@ -16,19 +21,43 @@ import { chatNodeSystemPromptTip } from '../tip';
import { Output_Template_Finish, Output_Template_UserChatInput } from '../output';
import { LLMModelTypeEnum } from '../../../ai/constants';
export const ToolModule: FlowModuleTemplateType = {
export const ToolModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.tools,
flowType: FlowNodeTypeEnum.tools,
templateType: ModuleTemplateTypeEnum.functionCall,
templateType: FlowNodeTemplateTypeEnum.functionCall,
avatar: '/imgs/module/tool.svg',
name: '工具调用(实验)',
intro: '通过AI模型自动选择一个或多个工具进行调用。工具可以是其他功能块或插件。',
intro: '通过AI模型自动选择一个或多个功能块进行调用,也可以对插件进行调用。',
showStatus: true,
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
llmModelType: LLMModelTypeEnum.toolCall
...Input_Template_SettingAiModel,
llmModelType: LLMModelTypeEnum.all
},
{
key: ModuleInputKeyEnum.aiChatTemperature,
type: FlowNodeInputTypeEnum.hidden, // Set in the pop-up window
label: '',
value: 0,
valueType: ModuleIOValueTypeEnum.number,
min: 0,
max: 10,
step: 1,
showTargetInApp: false,
showTargetInPlugin: false
},
{
key: ModuleInputKeyEnum.aiChatMaxToken,
type: FlowNodeInputTypeEnum.hidden, // Set in the pop-up window
label: '',
value: 2000,
valueType: ModuleIOValueTypeEnum.number,
min: 100,
max: 4000,
step: 50,
showTargetInApp: false,
showTargetInPlugin: false
},
{
...Input_Template_System_Prompt,

View File

@@ -1,11 +1,15 @@
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
import { userGuideTip } from '../tip';
import { ModuleIOValueTypeEnum, ModuleInputKeyEnum, ModuleTemplateTypeEnum } from '../../constants';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
FlowNodeTemplateTypeEnum
} from '../../constants';
export const UserGuideModule: FlowModuleTemplateType = {
export const UserGuideModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.userGuide,
templateType: ModuleTemplateTypeEnum.userGuide,
templateType: FlowNodeTemplateTypeEnum.userGuide,
flowType: FlowNodeTypeEnum.userGuide,
avatar: '/imgs/module/userGuide.png',
name: '全局配置',

View File

@@ -3,17 +3,17 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
FlowNodeTemplateTypeEnum
} from '../../constants';
export const UserInputModule: FlowModuleTemplateType = {
export const UserInputModule: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.questionInput,
templateType: ModuleTemplateTypeEnum.systemInput,
templateType: FlowNodeTemplateTypeEnum.systemInput,
flowType: FlowNodeTypeEnum.questionInput,
avatar: '/imgs/module/userChatInput.svg',
name: '对话入口',

View File

@@ -2,7 +2,7 @@ import { FlowNodeTypeEnum } from './node/constant';
import {
ModuleIOValueTypeEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum,
FlowNodeTemplateTypeEnum,
VariableInputEnum
} from './constants';
import { DispatchNodeResponseKeyEnum } from './runtime/constants';
@@ -15,10 +15,11 @@ import {
} from '../chat/type';
import { ChatNodeUsageType } from '../../support/wallet/bill/type';
import { RunningModuleItemType } from './runtime/type';
import { PluginTypeEnum } from 'core/plugin/constants';
export type FlowModuleTemplateType = {
export type FlowNodeTemplateType = {
id: string; // module id, unique
templateType: `${ModuleTemplateTypeEnum}`;
templateType: `${FlowNodeTemplateTypeEnum}`;
flowType: `${FlowNodeTypeEnum}`; // render node card
avatar?: string;
name: string;
@@ -27,14 +28,18 @@ export type FlowModuleTemplateType = {
showStatus?: boolean; // chatting response step status
inputs: FlowNodeInputItemType[];
outputs: FlowNodeOutputItemType[];
// plugin data
pluginType?: `${PluginTypeEnum}`;
parentId?: string;
};
export type FlowModuleItemType = FlowModuleTemplateType & {
export type FlowModuleItemType = FlowNodeTemplateType & {
moduleId: string;
};
export type moduleTemplateListType = {
type: `${ModuleTemplateTypeEnum}`;
type: `${FlowNodeTemplateTypeEnum}`;
label: string;
list: FlowModuleTemplateType[];
list: FlowNodeTemplateType[];
}[];
// store module type

View File

@@ -9,6 +9,7 @@ import { FlowNodeInputItemType, FlowNodeOutputItemType } from './node/type';
import { AppTTSConfigType, ModuleItemType, VariableItemType } from './type';
import { Input_Template_Switch } from './template/input';
import { EditorVariablePickerType } from '../../../web/components/common/Textarea/PromptEditor/type';
import { Output_Template_Finish } from './template/output';
/* module */
export const getGuideModule = (modules: ModuleItemType[]) =>
@@ -92,13 +93,16 @@ export const plugin2ModuleIO = (
connected: false
}))
]
: [],
: [Input_Template_Switch],
outputs: pluginOutput
? pluginOutput.outputs.map((item) => ({
...item,
edit: false
}))
: []
? [
...pluginOutput.outputs.map((item) => ({
...item,
edit: false
})),
Output_Template_Finish
]
: [Output_Template_Finish]
};
};

View File

@@ -27,6 +27,26 @@ export const defaultModules: ModuleItemType[] = [
}
];
// Plugin category: a folder container, a user-defined workflow plugin,
// or a plugin imported from an HTTP/OpenAPI schema.
export enum PluginTypeEnum {
folder = 'folder',
custom = 'custom',
http = 'http'
}
// UI metadata (display label + icon asset path) for each plugin type.
export const pluginTypeMap = {
[PluginTypeEnum.folder]: {
label: '文件夹',
icon: 'file/fill/folder'
},
[PluginTypeEnum.custom]: {
label: '自定义',
icon: 'common/custom'
},
[PluginTypeEnum.http]: {
label: 'HTTP',
icon: 'common/http'
}
};
export enum PluginSourceEnum {
personal = 'personal',
community = 'community',

View File

@@ -1,21 +1,40 @@
import type { ModuleItemType } from '../module/type.d';
import { PluginTypeEnum } from './constants';
import { HttpAuthMethodType } from './httpPlugin/type';
export type CreateOnePluginParams = {
name: string;
avatar: string;
intro: string;
modules?: ModuleItemType[];
modules: ModuleItemType[];
parentId: string | null;
type: `${PluginTypeEnum}`;
metadata?: {
apiSchemaStr?: string;
customHeaders?: string;
};
};
// Partial-update payload for an existing plugin; only the provided fields change.
export type UpdatePluginParams = {
id: string;
parentId?: string | null;
name?: string;
avatar?: string;
intro?: string;
modules?: ModuleItemType[];
metadata?: {
// raw OpenAPI schema text (http plugins only)
apiSchemaStr?: string;
// JSON object string of extra request headers (http plugins only)
customHeaders?: string;
};
};
// List-view projection of a plugin document (no modules payload).
export type PluginListItemType = {
_id: string;
parentId: string;
type: `${PluginTypeEnum}`;
name: string;
avatar: string;
intro: string;
metadata?: {
apiSchemaStr?: string;
customHeaders?: string;
};
};

View File

@@ -0,0 +1,13 @@
// A single HTTP operation parsed from an OpenAPI/Swagger document.
export type PathDataType = {
// operationId when present, otherwise the path itself (see str2OpenApiSchema)
name: string;
description: string;
// HTTP verb as written in the schema (lowercase, e.g. 'get' | 'post')
method: string;
path: string;
// raw `parameters` array from the schema — shape not validated here
params: any[];
// raw `requestBody` object from the schema
request: any;
};
// Parsed OpenAPI document: flattened operations plus the first server base URL.
export type OpenApiJsonSchema = {
pathData: PathDataType[];
serverPath: string;
};

View File

@@ -0,0 +1,516 @@
import { getNanoid } from '../../../common/string/tools';
import { OpenApiJsonSchema } from './type';
import yaml from 'js-yaml';
import { OpenAPIV3 } from 'openapi-types';
import { PluginTypeEnum } from '../constants';
import { FlowNodeInputItemType, FlowNodeOutputItemType } from '../../module/node/type';
import { FlowNodeInputTypeEnum, FlowNodeOutputTypeEnum } from '../../module/node/constant';
import { ModuleIOValueTypeEnum } from '../../module/constants';
import { PluginInputModule } from '../../module/template/system/pluginInput';
import { PluginOutputModule } from '../../module/template/system/pluginOutput';
import { HttpModule468 } from '../../module/template/system/http468';
import { HttpParamAndHeaderItemType } from '../../module/api';
import { CreateOnePluginParams } from '../controller';
import { ModuleItemType } from '../../module/type';
import { HttpImgUrl } from '../../../common/file/image/constants';
/**
 * Parse an OpenAPI document given as a JSON or YAML string.
 *
 * JSON is attempted first; on failure the text is parsed as YAML using the
 * FAILSAFE schema (scalars stay strings). Deprecated operations and non-verb
 * keys (e.g. `summary`, `parameters`) are dropped.
 *
 * @param yamlStr - OpenAPI document text (JSON or YAML).
 * @returns Flattened (method, path) operations and the first server base URL
 *   ('' when no servers are declared).
 * @throws Error('Invalid Schema') when the text cannot be parsed.
 */
export const str2OpenApiSchema = (yamlStr = ''): OpenApiJsonSchema => {
  try {
    const data: OpenAPIV3.Document = (() => {
      try {
        return JSON.parse(yamlStr);
      } catch (jsonError) {
        return yaml.load(yamlStr, { schema: yaml.FAILSAFE_SCHEMA });
      }
    })();

    // FIX: optional-chain the element access too. `data.servers?.[0].url`
    // threw a TypeError on an empty `servers` array, which the outer catch
    // masked as 'Invalid Schema' for otherwise valid documents.
    const serverPath = data.servers?.[0]?.url || '';

    const pathData = Object.keys(data.paths)
      .map((path) => {
        const methodData: any = data.paths[path];
        return Object.keys(methodData)
          // keep only real HTTP verbs; skips keys like `summary`/`parameters`
          .filter((method) =>
            ['get', 'post', 'put', 'delete', 'patch'].includes(method.toLocaleLowerCase())
          )
          .map((method) => {
            const methodInfo = methodData[method];
            // deprecated operations yield undefined and are filtered below
            if (methodInfo.deprecated) return;
            const result = {
              path,
              method,
              name: methodInfo.operationId || path,
              description: methodInfo.description,
              params: methodInfo.parameters,
              request: methodInfo?.requestBody
            };
            return result;
          });
      })
      .flat()
      .filter(Boolean) as OpenApiJsonSchema['pathData'];

    return { pathData, serverPath };
  } catch (err) {
    throw new Error('Invalid Schema');
  }
};
/**
 * Convert every operation of an OpenAPI schema into a standalone HTTP plugin.
 *
 * For each (method, path) pair this builds a 3-node workflow:
 *   plugin-input  ->  http-request  ->  plugin-output
 * OpenAPI parameters and JSON request-body properties become editable,
 * tool-callable plugin inputs whose values reach the HTTP node as `{{key}}`
 * template variables.
 *
 * @param parentId - id of the folder plugin the generated plugins belong to.
 * @param apiSchemaStr - OpenAPI document text (JSON or YAML).
 * @param customHeader - optional JSON object string of extra request headers.
 * @returns One CreateOnePluginParams per non-deprecated operation.
 * @throws Error('Invalid Schema') if the schema cannot be parsed.
 */
export const httpApiSchema2Plugins = ({
  parentId,
  apiSchemaStr = '',
  customHeader = ''
}: {
  parentId: string;
  apiSchemaStr?: string;
  customHeader?: string;
}): CreateOnePluginParams[] => {
  const jsonSchema = str2OpenApiSchema(apiSchemaStr);
  const baseUrl = jsonSchema.serverPath;

  return jsonSchema.pathData.map((item) => {
    const pluginOutputId = getNanoid();
    const httpId = getNanoid();
    const pluginOutputKey = 'result';

    // JSON request-body properties; undefined when the operation has no JSON body
    const properties = item.request?.content?.['application/json']?.schema?.properties;
    const propsKeys = properties ? Object.keys(properties) : [];

    // Plugin-input node inputs: one editable, tool-callable input per OpenAPI
    // parameter and per JSON-body property.
    const pluginInputs: FlowNodeInputItemType[] = [
      ...(item.params?.map((param: any) => {
        return {
          key: param.name,
          valueType: ModuleIOValueTypeEnum.string,
          label: param.name,
          type: FlowNodeInputTypeEnum.target,
          required: param.required,
          description: param.description,
          edit: true,
          editField: {
            key: true,
            name: true,
            description: true,
            required: true,
            dataType: true,
            inputType: true,
            isToolInput: true
          },
          connected: true,
          toolDescription: param.description
        };
      }) || []),
      ...(propsKeys?.map((key) => {
        const prop = properties[key];
        return {
          key,
          valueType: ModuleIOValueTypeEnum.string,
          label: key,
          type: FlowNodeInputTypeEnum.target,
          required: false,
          description: prop.description,
          edit: true,
          editField: {
            key: true,
            name: true,
            description: true,
            required: true,
            dataType: true,
            inputType: true,
            isToolInput: true
          },
          connected: true,
          toolDescription: prop.description
        };
      }) || [])
    ];

    // Plugin-input node outputs: each value is wired to the HTTP node input
    // with the same key.
    const pluginOutputs: FlowNodeOutputItemType[] = [
      ...(item.params?.map((param: any) => {
        return {
          key: param.name,
          valueType: ModuleIOValueTypeEnum.string,
          label: param.name,
          type: FlowNodeOutputTypeEnum.source,
          edit: true,
          targets: [
            {
              moduleId: httpId,
              key: param.name
            }
          ]
        };
      }) || []),
      ...(propsKeys?.map((key) => {
        return {
          key,
          valueType: ModuleIOValueTypeEnum.string,
          label: key,
          type: FlowNodeOutputTypeEnum.source,
          edit: true,
          targets: [
            {
              moduleId: httpId,
              key
            }
          ]
        };
      }) || [])
    ];

    // HTTP node inputs mirroring the plugin inputs (no tool metadata).
    const httpInputs: FlowNodeInputItemType[] = [
      ...(item.params?.map((param: any) => {
        return {
          key: param.name,
          valueType: ModuleIOValueTypeEnum.string,
          label: param.name,
          type: FlowNodeInputTypeEnum.target,
          description: param.description,
          edit: true,
          editField: {
            key: true,
            description: true,
            dataType: true
          },
          connected: true
        };
      }) || []),
      ...(propsKeys?.map((key) => {
        const prop = properties[key];
        return {
          key,
          valueType: ModuleIOValueTypeEnum.string,
          label: key,
          type: FlowNodeInputTypeEnum.target,
          description: prop.description,
          edit: true,
          editField: {
            key: true,
            description: true,
            dataType: true
          },
          connected: true
        };
      }) || [])
    ];

    /* http node setting */
    const httpNodeParams: HttpParamAndHeaderItemType[] = [];
    const httpNodeHeaders: HttpParamAndHeaderItemType[] = [];
    let httpNodeBody = '{}';
    const requestUrl = `${baseUrl}${item.path}`;

    // Route each OpenAPI parameter into the HTTP node's header / body / query,
    // referencing the plugin input through a `{{name}}` template variable.
    if (item.params && item.params.length > 0) {
      for (const param of item.params) {
        if (param.in === 'header') {
          httpNodeHeaders.push({
            key: param.name,
            type: param.schema?.type || ModuleIOValueTypeEnum.string,
            value: `{{${param.name}}}`
          });
        } else if (param.in === 'body') {
          httpNodeBody = JSON.stringify(
            { ...JSON.parse(httpNodeBody), [param.name]: `{{${param.name}}}` },
            null,
            2
          );
        } else if (param.in === 'query') {
          httpNodeParams.push({
            key: param.name,
            type: param.schema?.type || ModuleIOValueTypeEnum.string,
            value: `{{${param.name}}}`
          });
        }
      }
    }

    if (item.request) {
      // FIX: `Object.keys(properties)` threw a TypeError when the requestBody
      // had no `application/json` content. Reuse the guarded `propsKeys`
      // computed above (the identical expression was shadowed here before).
      if (propsKeys.length > 0) {
        httpNodeBody = JSON.stringify(
          propsKeys.reduce((acc: any, key) => {
            acc[key] = `{{${key}}}`;
            return acc;
          }, {}),
          null,
          2
        );
      }
    }

    // Merge user-supplied headers (JSON object string); invalid JSON is ignored.
    if (customHeader) {
      const headersObj = (() => {
        try {
          return JSON.parse(customHeader) as Record<string, string>;
        } catch (err) {
          return {};
        }
      })();
      for (const key in headersObj) {
        httpNodeHeaders.push({
          key,
          type: 'string',
          // @ts-ignore
          value: headersObj[key]
        });
      }
    }

    /* Combine complete modules */
    const modules: ModuleItemType[] = [
      {
        moduleId: getNanoid(),
        name: PluginInputModule.name,
        intro: PluginInputModule.intro,
        avatar: PluginInputModule.avatar,
        flowType: PluginInputModule.flowType,
        showStatus: PluginInputModule.showStatus,
        position: {
          x: 616.4226348688949,
          y: -165.05298493910115
        },
        inputs: [
          {
            key: 'pluginStart',
            type: 'hidden',
            valueType: 'boolean',
            label: '插件开始运行',
            description:
              '插件开始运行时,会输出一个 True 的标识。有时候,插件不会有额外的的输入,为了顺利的进入下一个阶段,你可以将该值连接到下一个节点的触发器中。',
            showTargetInApp: true,
            showTargetInPlugin: true,
            connected: true
          },
          ...pluginInputs
        ],
        outputs: [
          {
            key: 'pluginStart',
            label: '插件开始运行',
            type: 'source',
            valueType: 'boolean',
            // With no forwarded values, the start flag itself triggers the
            // HTTP node's switch so the flow can proceed.
            targets:
              pluginOutputs.length === 0
                ? [
                    {
                      moduleId: httpId,
                      key: 'switch'
                    }
                  ]
                : []
          },
          ...pluginOutputs
        ]
      },
      {
        moduleId: pluginOutputId,
        name: PluginOutputModule.name,
        intro: PluginOutputModule.intro,
        avatar: PluginOutputModule.avatar,
        flowType: PluginOutputModule.flowType,
        showStatus: PluginOutputModule.showStatus,
        position: {
          x: 1607.7142331269126,
          y: -151.8669210746189
        },
        inputs: [
          {
            key: pluginOutputKey,
            valueType: 'string',
            label: pluginOutputKey,
            type: 'target',
            required: true,
            description: '',
            edit: true,
            editField: {
              key: true,
              name: true,
              description: true,
              required: false,
              dataType: true,
              inputType: false
            },
            connected: true
          }
        ],
        outputs: [
          {
            key: pluginOutputKey,
            valueType: 'string',
            label: pluginOutputKey,
            type: 'source',
            edit: true,
            targets: []
          }
        ]
      },
      {
        moduleId: httpId,
        name: HttpModule468.name,
        intro: HttpModule468.intro,
        avatar: HttpModule468.avatar,
        flowType: HttpModule468.flowType,
        showStatus: true,
        position: {
          x: 1042.549746602742,
          y: -447.77496332641647
        },
        inputs: [
          {
            key: 'switch',
            type: 'target',
            label: 'core.module.input.label.switch',
            description: 'core.module.input.description.Trigger',
            valueType: 'any',
            showTargetInApp: true,
            showTargetInPlugin: true,
            connected: false
          },
          {
            key: 'system_httpMethod',
            type: 'custom',
            valueType: 'string',
            label: '',
            value: item.method.toUpperCase(),
            required: true,
            showTargetInApp: false,
            showTargetInPlugin: false,
            connected: false
          },
          {
            key: 'system_httpReqUrl',
            type: 'hidden',
            valueType: 'string',
            label: '',
            description: 'core.module.input.description.Http Request Url',
            placeholder: 'https://api.ai.com/getInventory',
            required: false,
            showTargetInApp: false,
            showTargetInPlugin: false,
            value: requestUrl,
            connected: false
          },
          {
            key: 'system_httpHeader',
            type: 'custom',
            valueType: 'any',
            value: httpNodeHeaders,
            label: '',
            description: 'core.module.input.description.Http Request Header',
            placeholder: 'core.module.input.description.Http Request Header',
            required: false,
            showTargetInApp: false,
            showTargetInPlugin: false,
            connected: false
          },
          {
            key: 'system_httpParams',
            type: 'hidden',
            valueType: 'any',
            value: httpNodeParams,
            label: '',
            required: false,
            showTargetInApp: false,
            showTargetInPlugin: false,
            connected: false
          },
          {
            key: 'system_httpJsonBody',
            type: 'hidden',
            valueType: 'any',
            value: httpNodeBody,
            label: '',
            required: false,
            showTargetInApp: false,
            showTargetInPlugin: false,
            connected: false
          },
          {
            key: 'DYNAMIC_INPUT_KEY',
            type: 'target',
            valueType: 'any',
            label: 'core.module.inputType.dynamicTargetInput',
            description: 'core.module.input.description.dynamic input',
            required: false,
            showTargetInApp: false,
            showTargetInPlugin: true,
            hideInApp: true,
            connected: false
          },
          {
            key: 'system_addInputParam',
            type: 'addInputParam',
            valueType: 'any',
            label: '',
            required: false,
            showTargetInApp: false,
            showTargetInPlugin: false,
            editField: {
              key: true,
              description: true,
              dataType: true
            },
            defaultEditField: {
              label: '',
              key: '',
              description: '',
              inputType: 'target',
              valueType: 'string'
            },
            connected: false
          },
          ...httpInputs
        ],
        outputs: [
          {
            key: 'finish',
            label: 'core.module.output.label.running done',
            description: 'core.module.output.description.running done',
            valueType: 'boolean',
            type: 'source',
            targets: []
          },
          {
            // Raw HTTP response is routed into the plugin-output node.
            key: 'httpRawResponse',
            label: '原始响应',
            description: 'HTTP请求的原始响应。只能接受字符串或JSON类型响应数据。',
            valueType: 'any',
            type: 'source',
            targets: [
              {
                moduleId: pluginOutputId,
                key: pluginOutputKey
              }
            ]
          },
          {
            key: 'system_addOutputParam',
            type: 'addOutputParam',
            valueType: 'any',
            label: '',
            targets: [],
            editField: {
              key: true,
              description: true,
              dataType: true,
              defaultValue: true
            },
            defaultEditField: {
              label: '',
              key: '',
              description: '',
              outputType: 'source',
              valueType: 'string'
            }
          }
        ]
      }
    ];

    return {
      name: item.name,
      avatar: HttpImgUrl,
      intro: item.description,
      parentId,
      type: PluginTypeEnum.http,
      modules
    };
  });
};

View File

@@ -1,6 +1,7 @@
import { ModuleTemplateTypeEnum } from 'core/module/constants';
import type { FlowModuleTemplateType, ModuleItemType } from '../module/type.d';
import { PluginSourceEnum } from './constants';
import { PluginSourceEnum, PluginTypeEnum } from './constants';
import { MethodType } from './controller';
export type PluginItemSchema = {
_id: string;
@@ -12,6 +13,13 @@ export type PluginItemSchema = {
intro: string;
updateTime: Date;
modules: ModuleItemType[];
parentId: string;
type: `${PluginTypeEnum}`;
metadata?: {
pluginUid?: string;
apiSchemaStr?: string;
customHeaders?: string;
};
};
/* plugin template */
@@ -19,7 +27,7 @@ export type PluginTemplateType = PluginRuntimeType & {
author?: string;
id: string;
source: `${PluginSourceEnum}`;
templateType: FlowModuleTemplateType['templateType'];
templateType: FlowNodeTemplateType['templateType'];
intro: string;
modules: ModuleItemType[];
};
@@ -29,5 +37,6 @@ export type PluginRuntimeType = {
name: string;
avatar: string;
showStatus?: boolean;
isTool?: boolean;
modules: ModuleItemType[];
};

View File

@@ -6,11 +6,15 @@
"dayjs": "^1.11.7",
"encoding": "^0.1.13",
"js-tiktoken": "^1.0.7",
"openapi-types": "^12.1.3",
"openai": "4.28.0",
"nanoid": "^4.0.1",
"timezones-list": "^3.0.2"
"js-yaml": "^4.1.0",
"timezones-list": "^3.0.2",
"next": "13.5.2"
},
"devDependencies": {
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.8.5"
}
}

View File

@@ -0,0 +1,15 @@
import { POST } from './plusRequest';
/**
 * Run Baidu text censorship over `data.text` via the plus service.
 * Resolves quietly when the text is acceptable; rejects with the censor
 * message when the service flags the content (code 5000). Any other error
 * (transport, service down) is swallowed and resolves with '' (best effort).
 */
export const postTextCensor = async (data: { text: string }) => {
  try {
    const res = await POST<{ code?: number; message: string }>(
      '/common/censor/text_baidu',
      data
    );
    if (res?.code === 5000) {
      // flagged — rethrow the envelope so the handler below rejects with its message
      throw res;
    }
  } catch (err: any) {
    if (err?.code === 5000) {
      return Promise.reject(err.message);
    }
    return '';
  }
};

View File

@@ -0,0 +1,114 @@
import { SERVICE_LOCAL_HOST } from '../system/tools';
import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
// Per-request options merged into the axios request config.
interface ConfigType {
headers?: { [key: string]: string };
// NOTE(review): `hold` is not read anywhere in this file — presumably consumed upstream; confirm.
hold?: boolean;
timeout?: number;
}
// Standard envelope returned by the local service: { code, message, data }.
interface ResponseDataType {
code: number;
message: string;
data: any;
}
/**
 * Request interceptor: invoked before each request is sent.
 * Currently a pass-through hook (no headers or auth are added here).
 */
function requestStart(config: InternalAxiosRequestConfig): InternalAxiosRequestConfig {
return config;
}
/**
 * Response interceptor: invoked on a transport-level success.
 * Pass-through; the envelope's `code` is validated separately.
 */
function responseSuccess(response: AxiosResponse<ResponseDataType>) {
return response;
}
/**
 * Validate the unwrapped service envelope.
 * - undefined payload          -> rejected Promise ('服务器异常')
 * - truthy code outside 2xx/3xx -> rejected Promise with the full envelope
 * - otherwise                  -> the inner `data` field
 */
function checkRes(data: ResponseDataType) {
  if (data === undefined) {
    console.log('error->', data, 'data is empty');
    return Promise.reject('服务器异常');
  }
  const code = data?.code;
  const isErrorCode = Boolean(code) && (code < 200 || code >= 400);
  return isErrorCode ? Promise.reject(data) : data.data;
}
/**
 * Normalize any transport/axios failure into a rejected Promise.
 * Falsy error  -> { message: '未知错误' }
 * String error -> { message: err }
 * Axios error carrying a response body -> that body; anything else -> as-is.
 */
function responseError(err: any) {
  const normalized = (() => {
    if (!err) return { message: '未知错误' };
    if (typeof err === 'string') return { message: err };
    // truthiness check (not nullish): an empty response body falls through to err
    return err?.response?.data || err;
  })();
  return Promise.reject(normalized);
}
/* Shared axios instance for calls to the local (plus) service. */
const instance = axios.create({
timeout: 60000, // request timeout: 60s
headers: {
'content-type': 'application/json',
'Cache-Control': 'no-cache'
}
});
/* Register request interceptor */
instance.interceptors.request.use(requestStart, (err) => Promise.reject(err));
/* Register response interceptor */
instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err));
/**
 * Core request helper against the local plus service.
 *
 * Nullish fields are stripped from `data` before sending. For POST/PUT the
 * cleaned data is sent as the request body; for other verbs it is sent as
 * query params. The envelope is validated by checkRes and failures are
 * normalized by responseError.
 *
 * @param url - path relative to the service host
 * @param data - payload / query parameters (not mutated)
 * @param config - per-request overrides (headers, timeout, ...)
 * @param method - HTTP verb
 */
export function request(url: string, data: any, config: ConfigType, method: Method): any {
  // FIX: strip nullish values into a copy — the previous `delete data[key]`
  // loop mutated the caller's object.
  const cleanedData: any = (() => {
    if (data === null || data === undefined) return data;
    const copy: any = {};
    for (const key in data) {
      if (data[key] !== null && data[key] !== undefined) {
        copy[key] = data[key];
      }
    }
    return copy;
  })();

  return instance
    .request({
      baseURL: `http://${SERVICE_LOCAL_HOST}`,
      url,
      method,
      data: ['POST', 'PUT'].includes(method) ? cleanedData : null,
      params: !['POST', 'PUT'].includes(method) ? cleanedData : null,
      ...config // custom config
    })
    .then((res) => checkRes(res.data))
    .catch((err) => responseError(err));
}
/**
 * Typed HTTP verb helpers.
 * `T` is the expected shape of the envelope's `data` field; rejections are
 * normalized by responseError. GET/DELETE send `data` as query params,
 * POST/PUT as the request body (see `request`).
 */
export function GET<T = undefined>(url: string, params = {}, config: ConfigType = {}): Promise<T> {
return request(url, params, config, 'GET');
}
export function POST<T = undefined>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'POST');
}
export function PUT<T = undefined>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'PUT');
}
export function DELETE<T = undefined>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'DELETE');
}

View File

@@ -1,4 +1,6 @@
export const stopWords = new Set([
import { cut } from '@node-rs/jieba';
const stopWords = new Set([
'--',
'?',
'“',
@@ -1506,3 +1508,14 @@ export const stopWords = new Set([
'i'
]
]);
/**
 * Tokenize `text` with jieba into a space-joined token string for full-text
 * indexing/search. CJK punctuation and full-width characters are stripped,
 * stop words removed; returns '' when nothing survives filtering.
 * (Second `cut` argument enables HMM — per the @node-rs/jieba API.)
 */
export function jiebaSplit({ text }: { text: string }) {
  const kept: string[] = [];
  for (const raw of cut(text, true)) {
    // drop CJK punctuation / full-width forms, then trim whitespace
    const token = raw.replace(/[\u3000-\u303f\uff00-\uffef]/g, '').trim();
    if (token && !stopWords.has(token)) {
      kept.push(token);
    }
  }
  return kept.join(' ') || '';
}

View File

@@ -139,11 +139,11 @@ export const embeddingRecall = async (
const results: any = await PgClient.query(
`BEGIN;
SET LOCAL hnsw.ef_search = ${efSearch};
select id, collection_id, (vector <#> '[${vectors[0]}]') * -1 AS score
select id, collection_id, vector <#> '[${vectors[0]}]' AS score
from ${PgDatasetTableName}
where dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
AND vector <#> '[${vectors[0]}]' < -${similarity}
order by score desc limit ${limit};
order by score limit ${limit};
COMMIT;`
);
@@ -153,7 +153,7 @@ export const embeddingRecall = async (
results: rows.map((item) => ({
id: item.id,
collectionId: item.collection_id,
score: item.score
score: item.score * -1
}))
};
} catch (error) {

View File

@@ -2,7 +2,7 @@ import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d'
import { getAIApi } from '../config';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
export const Prompt_QuestionGuide = `我不太清楚问你什么问题,请帮我生成 3 个问题引导我继续提问。问题的长度应小于20个字符按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;
export const Prompt_QuestionGuide = `你是一个AI智能助手可以回答和解决我的问题。请结合前面的对话记录帮我生成 3 个问题引导我继续提问。问题的长度应小于20个字符按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;
export async function createQuestionGuide({
messages,

View File

@@ -9,7 +9,7 @@ import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
可以根据上下文,消除指代性问题以及扩展问题,利于检索。
*/
const defaultPrompt = `作为一个向量检索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高向量检索的语义丰富度,提高向量检索的精度。生成的问题要求指向对象清晰明确,并与原问题语言相同。例如:
const defaultPrompt = `作为一个向量检索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高向量检索的语义丰富度,提高向量检索的精度。生成的问题要求指向对象清晰明确,并与原问题语言相同。例如:
历史记录:
"""
"""

View File

@@ -0,0 +1,34 @@
import { PostReRankProps, PostReRankResponse } from '@fastgpt/global/core/ai/api.d';
import { POST } from '../../../common/api/serverRequest';
/**
 * Score `inputs` against `query` using the first configured rerank model.
 * Rejects with 'no rerank model' when none is configured; any transport or
 * service error is logged and degrades to an empty result list.
 */
export async function reRankRecall({ query, inputs }: PostReRankProps) {
  const model = global.reRankModels[0];
  if (!model || !model?.requestUrl) {
    return Promise.reject('no rerank model');
  }

  const startedAt = Date.now();
  try {
    const data = await POST<PostReRankResponse>(
      model.requestUrl,
      {
        query,
        inputs
      },
      {
        headers: {
          Authorization: `Bearer ${model.requestAuth}`
        },
        timeout: 120000
      }
    );
    console.log('rerank time:', Date.now() - startedAt);
    return data;
  } catch (err) {
    console.log('rerank error:', err);
    return [];
  }
}

View File

@@ -34,10 +34,6 @@ const AppSchema = new Schema({
default: 'advanced',
enum: Object.keys(AppTypeMap)
},
simpleTemplateId: {
type: String,
required: true
},
avatar: {
type: String,
default: '/icon/logo.svg'

View File

@@ -8,6 +8,15 @@ import axios from 'axios';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
/* slice chat context by tokens */
const filterEmptyMessages = (messages: ChatCompletionMessageParam[]) => {
return messages.filter((item) => {
if (item.role === ChatCompletionRequestMessageRoleEnum.System) return !!item.content;
if (item.role === ChatCompletionRequestMessageRoleEnum.User) return !!item.content;
if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant)
return !!item.content || !!item.function_call || !!item.tool_calls;
return true;
});
};
export function filterGPTMessageByMaxTokens({
messages = [],
maxTokens
@@ -38,7 +47,7 @@ export function filterGPTMessageByMaxTokens({
// If the text length is less than half of the maximum token, no calculation is required
if (rawTextLen < maxTokens * 0.5) {
return messages;
return filterEmptyMessages(messages);
}
// filter startWith system prompt
@@ -81,7 +90,7 @@ export function filterGPTMessageByMaxTokens({
}
}
return [...systemPrompts, ...chats];
return filterEmptyMessages([...systemPrompts, ...chats]);
}
export const formatGPTMessagesInRequestBefore = (messages: ChatCompletionMessageParam[]) => {
return messages

View File

@@ -67,7 +67,7 @@ const DatasetSchema = new Schema({
agentModel: {
type: String,
required: true,
default: 'gpt-3.5-turbo-16k'
default: 'gpt-3.5-turbo'
},
intro: {
type: String,

View File

@@ -0,0 +1,407 @@
import {
DatasetSearchModeEnum,
DatasetSearchModeMap,
SearchScoreTypeEnum
} from '@fastgpt/global/core/dataset/constants';
import { recallFromVectorStore } from '../../../common/vectorStore/controller';
import { getVectorsByText } from '../../ai/embedding';
import { getVectorModel } from '../../ai/model';
import { MongoDatasetData } from '../data/schema';
import {
DatasetDataSchemaType,
DatasetDataWithCollectionType,
SearchDataResponseItemType
} from '@fastgpt/global/core/dataset/type';
import { MongoDatasetCollection } from '../collection/schema';
import { reRankRecall } from '../../../core/ai/rerank';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { datasetSearchResultConcat } from '@fastgpt/global/core/dataset/search/utils';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { jiebaSplit } from '../../../common/string/jieba';
// Parameters for searchDatasetData (embedding / full-text / mixed recall).
type SearchDatasetDataProps = {
teamId: string;
// vector model name; resolved via getVectorModel for embedding recall
model: string;
similarity?: number; // min distance
limit: number; // max Token limit
datasetIds: string[];
searchMode?: `${DatasetSearchModeEnum}`;
usingReRank?: boolean;
// single query string used for the rerank stage
reRankQuery: string;
// one or more retrieval queries (e.g. query-extension variants)
queries: string[];
};
/**
 * Hybrid dataset search.
 *
 * Pipeline:
 *  1. Decide per-channel recall limits from the search mode.
 *  2. Multi-query recall: every query runs embedding recall and full-text
 *     recall in parallel; per-channel results are merged with RRF.
 *  3. Optionally rerank the concatenated, deduplicated candidates.
 *  4. RRF-concat all channels, dedupe by normalized q+a text, apply the
 *     similarity filter, and trim to the token budget.
 *
 * Returns the trimmed results plus the effective search parameters so the
 * caller can report how the search was actually executed.
 */
export async function searchDatasetData(props: SearchDatasetDataProps) {
  let {
    teamId,
    reRankQuery,
    queries,
    model,
    similarity = 0,
    limit: maxTokens,
    searchMode = DatasetSearchModeEnum.embedding,
    usingReRank = false,
    datasetIds = []
  } = props;

  /* init params */
  searchMode = DatasetSearchModeMap[searchMode] ? searchMode : DatasetSearchModeEnum.embedding;
  // Rerank only works when a rerank model is configured globally.
  usingReRank = usingReRank && global.reRankModels.length > 0;

  // Compatible with topk limit
  if (maxTokens < 50) {
    maxTokens = 1500;
  }
  let set = new Set<string>();
  let usingSimilarityFilter = false;

  /* function */
  // Per-channel candidate counts for the chosen search mode.
  const countRecallLimit = () => {
    if (searchMode === DatasetSearchModeEnum.embedding) {
      return {
        embeddingLimit: 150,
        fullTextLimit: 0
      };
    }
    if (searchMode === DatasetSearchModeEnum.fullTextRecall) {
      return {
        embeddingLimit: 0,
        fullTextLimit: 150
      };
    }
    return {
      embeddingLimit: 100,
      fullTextLimit: 80
    };
  };

  // Vector recall: embed the query, hit the vector store, then hydrate the
  // matching rows from Mongo and attach the best vector score per row.
  const embeddingRecall = async ({ query, limit }: { query: string; limit: number }) => {
    const { vectors, tokens } = await getVectorsByText({
      model: getVectorModel(model),
      input: query
    });

    const { results } = await recallFromVectorStore({
      vectors,
      limit,
      datasetIds,
      efSearch: global.systemEnv?.pgHNSWEfSearch
    });

    // get q and a
    const dataList = (await MongoDatasetData.find(
      {
        teamId,
        datasetId: { $in: datasetIds },
        'indexes.dataId': { $in: results.map((item) => item.id?.trim()) }
      },
      'datasetId collectionId q a chunkIndex indexes'
    )
      .populate('collectionId', 'name fileId rawLink')
      .lean()) as DatasetDataWithCollectionType[];

    // add score to data(It's already sorted. The first one is the one with the most points)
    const concatResults = dataList.map((data) => {
      const dataIdList = data.indexes.map((item) => item.dataId);

      const maxScoreResult = results.find((item) => {
        return dataIdList.includes(item.id);
      });

      return {
        ...data,
        score: maxScoreResult?.score || 0
      };
    });

    concatResults.sort((a, b) => b.score - a.score);

    const formatResult = concatResults
      .map((data, index) => {
        if (!data.collectionId) {
          console.log('Collection is not found', data);
        }

        const result: SearchDataResponseItemType = {
          id: String(data._id),
          q: data.q,
          a: data.a,
          chunkIndex: data.chunkIndex,
          datasetId: String(data.datasetId),
          collectionId: String(data.collectionId?._id),
          sourceName: data.collectionId?.name || '',
          sourceId: data.collectionId?.fileId || data.collectionId?.rawLink,
          score: [{ type: SearchScoreTypeEnum.embedding, value: data.score, index }]
        };

        return result;
      })
      .filter((item) => item !== null) as SearchDataResponseItemType[];

    return {
      embeddingRecallResults: formatResult,
      tokens
    };
  };

  // Full-text recall: Mongo $text search per dataset (jieba-tokenized query),
  // merged across datasets and re-sorted by textScore.
  const fullTextRecall = async ({
    query,
    limit
  }: {
    query: string;
    limit: number;
  }): Promise<{
    fullTextRecallResults: SearchDataResponseItemType[];
    tokenLen: number;
  }> => {
    if (limit === 0) {
      return {
        fullTextRecallResults: [],
        tokenLen: 0
      };
    }

    let searchResults = (
      await Promise.all(
        datasetIds.map((id) =>
          MongoDatasetData.find(
            {
              teamId,
              datasetId: id,
              $text: { $search: jiebaSplit({ text: query }) }
            },
            {
              score: { $meta: 'textScore' },
              _id: 1,
              datasetId: 1,
              collectionId: 1,
              q: 1,
              a: 1,
              chunkIndex: 1
            }
          )
            .sort({ score: { $meta: 'textScore' } })
            .limit(limit)
            .lean()
        )
      )
    ).flat() as (DatasetDataSchemaType & { score: number })[];

    // resort
    searchResults.sort((a, b) => b.score - a.score);
    // BUG FIX: Array.prototype.slice does not mutate in place. The previous
    // bare `searchResults.slice(0, limit);` discarded its result, so the
    // cross-dataset merge was never trimmed back to `limit`.
    searchResults = searchResults.slice(0, limit);

    const collections = await MongoDatasetCollection.find(
      {
        _id: { $in: searchResults.map((item) => item.collectionId) }
      },
      '_id name fileId rawLink'
    );

    return {
      fullTextRecallResults: searchResults.map((item, index) => {
        const collection = collections.find((col) => String(col._id) === String(item.collectionId));
        return {
          id: String(item._id),
          datasetId: String(item.datasetId),
          collectionId: String(item.collectionId),
          sourceName: collection?.name || '',
          sourceId: collection?.fileId || collection?.rawLink,
          q: item.q,
          a: item.a,
          chunkIndex: item.chunkIndex,
          indexes: item.indexes,
          score: [{ type: SearchScoreTypeEnum.fullText, value: item.score, index }]
        };
      }),
      tokenLen: 0
    };
  };

  // Rerank pass: score candidates with the rerank model and replace their
  // score list with the rerank score. Best-effort: on failure, rerank is
  // disabled for the rest of this request and an empty list is returned.
  const reRankSearchResult = async ({
    data,
    query
  }: {
    data: SearchDataResponseItemType[];
    query: string;
  }): Promise<SearchDataResponseItemType[]> => {
    try {
      const results = await reRankRecall({
        query,
        inputs: data.map((item) => ({
          id: item.id,
          text: `${item.q}\n${item.a}`
        }))
      });

      if (results.length === 0) {
        usingReRank = false;
        return [];
      }

      // add new score to data
      const mergeResult = results
        .map((item, index) => {
          const target = data.find((dataItem) => dataItem.id === item.id);
          if (!target) return null;
          const score = item.score || 0;

          return {
            ...target,
            score: [{ type: SearchScoreTypeEnum.reRank, value: score, index }]
          };
        })
        .filter(Boolean) as SearchDataResponseItemType[];

      return mergeResult;
    } catch (error) {
      usingReRank = false;
      return [];
    }
  };

  // Trim the ranked list to the token budget (with a 500-token grace zone);
  // always keeps at least one result.
  const filterResultsByMaxTokens = (list: SearchDataResponseItemType[], maxTokens: number) => {
    const results: SearchDataResponseItemType[] = [];
    let totalTokens = 0;

    for (let i = 0; i < list.length; i++) {
      const item = list[i];
      totalTokens += countPromptTokens(item.q + item.a);
      if (totalTokens > maxTokens + 500) {
        break;
      }
      results.push(item);
      if (totalTokens > maxTokens) {
        break;
      }
    }

    return results.length === 0 ? list.slice(0, 1) : results;
  };

  // Run both recall channels for every query in parallel, then RRF-merge the
  // per-query result lists per channel.
  const multiQueryRecall = async ({
    embeddingLimit,
    fullTextLimit
  }: {
    embeddingLimit: number;
    fullTextLimit: number;
  }) => {
    // multi query recall
    const embeddingRecallResList: SearchDataResponseItemType[][] = [];
    const fullTextRecallResList: SearchDataResponseItemType[][] = [];
    let totalTokens = 0;

    await Promise.all(
      queries.map(async (query) => {
        const [{ tokens, embeddingRecallResults }, { fullTextRecallResults }] = await Promise.all([
          embeddingRecall({
            query,
            limit: embeddingLimit
          }),
          fullTextRecall({
            query,
            limit: fullTextLimit
          })
        ]);
        totalTokens += tokens;

        embeddingRecallResList.push(embeddingRecallResults);
        fullTextRecallResList.push(fullTextRecallResults);
      })
    );

    // rrf concat
    const rrfEmbRecall = datasetSearchResultConcat(
      embeddingRecallResList.map((list) => ({ k: 60, list }))
    ).slice(0, embeddingLimit);
    const rrfFTRecall = datasetSearchResultConcat(
      fullTextRecallResList.map((list) => ({ k: 60, list }))
    ).slice(0, fullTextLimit);

    return {
      tokens: totalTokens,
      embeddingRecallResults: rrfEmbRecall,
      fullTextRecallResults: rrfFTRecall
    };
  };

  /* main step */
  // count limit
  const { embeddingLimit, fullTextLimit } = countRecallLimit();

  // recall
  const { embeddingRecallResults, fullTextRecallResults, tokens } = await multiQueryRecall({
    embeddingLimit,
    fullTextLimit
  });

  // ReRank results
  const reRankResults = await (async () => {
    if (!usingReRank) return [];

    set = new Set<string>(embeddingRecallResults.map((item) => item.id));
    const concatRecallResults = embeddingRecallResults.concat(
      fullTextRecallResults.filter((item) => !set.has(item.id))
    );

    // remove same q and a data
    set = new Set<string>();
    const filterSameDataResults = concatRecallResults.filter((item) => {
      // Strip all punctuation/whitespace and compare the bare text only.
      const str = hashStr(`${item.q}${item.a}`.replace(/[^\p{L}\p{N}]/gu, ''));
      if (set.has(str)) return false;
      set.add(str);
      return true;
    });
    return reRankSearchResult({
      query: reRankQuery,
      data: filterSameDataResults
    });
  })();

  // embedding recall and fullText recall rrf concat
  const rrfConcatResults = datasetSearchResultConcat([
    { k: 60, list: embeddingRecallResults },
    { k: 60, list: fullTextRecallResults },
    { k: 58, list: reRankResults }
  ]);

  // remove same q and a data
  set = new Set<string>();
  const filterSameDataResults = rrfConcatResults.filter((item) => {
    // Strip all punctuation/whitespace and compare the bare text only.
    const str = hashStr(`${item.q}${item.a}`.replace(/[^\p{L}\p{N}]/gu, ''));
    if (set.has(str)) return false;
    set.add(str);
    return true;
  });

  // score filter
  const scoreFilter = (() => {
    if (usingReRank) {
      usingSimilarityFilter = true;

      return filterSameDataResults.filter((item) => {
        const reRankScore = item.score.find((item) => item.type === SearchScoreTypeEnum.reRank);
        if (reRankScore && reRankScore.value < similarity) return false;
        return true;
      });
    }
    if (searchMode === DatasetSearchModeEnum.embedding) {
      usingSimilarityFilter = true;
      return filterSameDataResults.filter((item) => {
        const embeddingScore = item.score.find(
          (item) => item.type === SearchScoreTypeEnum.embedding
        );
        if (embeddingScore && embeddingScore.value < similarity) return false;
        return true;
      });
    }

    return filterSameDataResults;
  })();

  return {
    searchRes: filterResultsByMaxTokens(scoreFilter, maxTokens),
    tokens,
    searchMode,
    limit: maxTokens,
    similarity,
    usingReRank,
    usingSimilarityFilter
  };
}

View File

@@ -31,16 +31,14 @@ export async function pushDataListToTrainingQueue({
data,
prompt,
billId,
trainingMode = TrainingModeEnum.chunk,
vectorModelList = [],
datasetModelList = []
trainingMode = TrainingModeEnum.chunk
}: {
teamId: string;
tmbId: string;
vectorModelList: VectorModelItemType[];
datasetModelList: LLMModelItemType[];
} & PushDatasetDataProps): Promise<PushDatasetDataResponse> {
const vectorModelList = global.vectorModels;
const datasetModelList = global.llmModels;
const {
datasetId: { _id: datasetId, vectorModel, agentModel }
} = await getCollectionWithDataset(collectionId);
@@ -48,11 +46,11 @@ export async function pushDataListToTrainingQueue({
const checkModelValid = async () => {
const agentModelData = datasetModelList?.find((item) => item.model === agentModel);
if (!agentModelData) {
return Promise.reject(`Vector model ${agentModel} is inValid`);
return Promise.reject(`File model ${agentModel} is inValid`);
}
const vectorModelData = vectorModelList?.find((item) => item.model === vectorModel);
if (!vectorModelData) {
return Promise.reject(`File model ${vectorModel} is inValid`);
return Promise.reject(`Vector model ${vectorModel} is inValid`);
}
if (trainingMode === TrainingModeEnum.chunk) {

View File

@@ -13,11 +13,6 @@ import {
export const DatasetTrainingCollectionName = 'dataset.trainings';
const TrainingDataSchema = new Schema({
userId: {
// abandon
type: Schema.Types.ObjectId,
ref: 'user'
},
teamId: {
type: Schema.Types.ObjectId,
ref: TeamCollectionName,
@@ -100,7 +95,7 @@ try {
// lock training data; delete training data
TrainingDataSchema.index({ teamId: 1, collectionId: 1 });
// get training data and sort
TrainingDataSchema.index({ lockTime: 1, mode: 1, weight: -1 });
TrainingDataSchema.index({ mode: 1, lockTime: 1, weight: -1 });
TrainingDataSchema.index({ expireAt: 1 }, { expireAfterSeconds: 7 * 24 * 60 * 60 }); // 7 days
} catch (error) {
console.log(error);

View File

@@ -1,10 +1,11 @@
import { MongoPlugin } from './schema';
import { FlowModuleTemplateType } from '@fastgpt/global/core/module/type';
import { FlowNodeTemplateType } from '@fastgpt/global/core/module/type';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { plugin2ModuleIO } from '@fastgpt/global/core/module/utils';
import { PluginSourceEnum } from '@fastgpt/global/core/plugin/constants';
import type { PluginRuntimeType, PluginTemplateType } from '@fastgpt/global/core/plugin/type.d';
import { ModuleTemplateTypeEnum } from '@fastgpt/global/core/module/constants';
import { FlowNodeTemplateTypeEnum } from '@fastgpt/global/core/module/constants';
import type { PluginItemSchema } from '@fastgpt/global/core/plugin/type.d';
/*
plugin id rule:
@@ -48,7 +49,7 @@ const getPluginTemplateById = async (id: string): Promise<PluginTemplateType> =>
showStatus: true,
source: PluginSourceEnum.personal,
modules: item.modules,
templateType: ModuleTemplateTypeEnum.personalPlugin
templateType: FlowNodeTemplateTypeEnum.personalPlugin
};
}
return Promise.reject('plugin not found');
@@ -59,7 +60,7 @@ export async function getPluginPreviewModule({
id
}: {
id: string;
}): Promise<FlowModuleTemplateType> {
}): Promise<FlowNodeTemplateType> {
const plugin = await getPluginTemplateById(id);
return {
@@ -70,6 +71,7 @@ export async function getPluginPreviewModule({
name: plugin.name,
intro: plugin.intro,
showStatus: plugin.showStatus,
isTool: plugin.isTool,
...plugin2ModuleIO(plugin.id, plugin.modules)
};
}

View File

@@ -1,3 +1,4 @@
import { pluginTypeMap } from '@fastgpt/global/core/plugin/constants';
import { connectionMongo, type Model } from '../../common/mongo';
const { Schema, model, models } = connectionMongo;
import type { PluginItemSchema } from '@fastgpt/global/core/plugin/type.d';
@@ -9,9 +10,10 @@ import {
export const PluginCollectionName = 'plugins';
const PluginSchema = new Schema({
userId: {
parentId: {
type: Schema.Types.ObjectId,
ref: 'user'
ref: PluginCollectionName,
default: null
},
teamId: {
type: Schema.Types.ObjectId,
@@ -23,6 +25,11 @@ const PluginSchema = new Schema({
ref: TeamMemberCollectionName,
required: true
},
type: {
type: String,
enum: Object.keys(pluginTypeMap),
required: true
},
name: {
type: String,
required: true
@@ -42,11 +49,19 @@ const PluginSchema = new Schema({
modules: {
type: Array,
default: []
},
metadata: {
type: {
pluginUid: String,
apiSchemaStr: String,
customHeaders: String
}
}
});
try {
PluginSchema.index({ tmbId: 1 });
PluginSchema.index({ teamId: 1, parentId: 1 });
PluginSchema.index({ teamId: 1, name: 1, intro: 1 });
} catch (error) {
console.log(error);
}

View File

@@ -0,0 +1,319 @@
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { filterGPTMessageByMaxTokens } from '../../../chat/utils';
import {
countGptMessagesTokens,
countMessagesTokens
} from '@fastgpt/global/common/string/tiktoken';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '../../../ai/config';
import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_CQJson } from '@fastgpt/global/core/ai/prompt/agent';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { ModelTypeEnum, getLLMModel } from '../../../ai/model';
import { getHistories } from '../utils';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
import {
ChatCompletionCreateParams,
ChatCompletionMessageParam,
ChatCompletionTool
} from '@fastgpt/global/core/ai/type';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
// Dispatch-node inputs for the question-classification module.
type Props = ModuleDispatchProps<{
  [ModuleInputKeyEnum.aiModel]: string;
  [ModuleInputKeyEnum.aiSystemPrompt]?: string;
  [ModuleInputKeyEnum.history]?: ChatItemType[] | number; // explicit history list or a "last N" count
  [ModuleInputKeyEnum.userChatInput]: string;
  [ModuleInputKeyEnum.agents]: ClassifyQuestionAgentItemType[]; // candidate classification types
}>;
// Keyed by the matched agent key (set to true) plus standard node metadata.
type CQResponse = DispatchNodeResultType<{
  [key: string]: any;
}>;
type ActionProps = Props & { cqModel: LLMModelItemType };

// Name of the synthetic function/tool the LLM is asked to call.
const agentFunName = 'classify_question';
/* request openai chat */
/**
 * Classify the user question into one of the configured agent types.
 * Strategy is chosen by model capability: tool_choice > function_call >
 * plain prompt completion. Resolves to `{ [matchedAgentKey]: true }` plus
 * billing/usage metadata for the dispatch pipeline.
 */
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
  const {
    user,
    module: { name },
    histories,
    params: { model, history = 6, agents, userChatInput }
  } = props as Props;

  if (!userChatInput) {
    return Promise.reject('Input is empty');
  }

  const cqModel = getLLMModel(model);
  const chatHistories = getHistories(history, histories);

  // Pick the classification strategy supported by the model.
  const { arg, tokens } = await (async () => {
    if (cqModel.toolChoice) {
      return toolChoice({
        ...props,
        histories: chatHistories,
        cqModel
      });
    }
    if (cqModel.functionCall) {
      return functionCall({
        ...props,
        histories: chatHistories,
        cqModel
      });
    }
    return completions({
      ...props,
      histories: chatHistories,
      cqModel
    });
  })();

  // Fall back to the LAST configured agent when the model returns an unknown type.
  const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];

  const { totalPoints, modelName } = formatModelChars2Points({
    model: cqModel.model,
    tokens,
    modelType: ModelTypeEnum.llm
  });

  return {
    [result.key]: true,
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      // Users on their own OpenAI key are not billed points.
      totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
      model: modelName,
      query: userChatInput,
      tokens,
      cqList: agents,
      cqResult: result.value,
      contextTotalLen: chatHistories.length + 2
    },
    [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
      {
        moduleName: name,
        totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
        model: modelName,
        tokens
      }
    ]
  };
};
/**
 * Build the classification function schema plus the token-filtered message
 * list shared by the toolChoice and functionCall strategies.
 * (Template-literal content below is runtime prompt data — kept verbatim.)
 */
const getFunctionCallSchema = ({
  cqModel,
  histories,
  params: { agents, systemPrompt, userChatInput }
}: ActionProps) => {
  const messages: ChatItemType[] = [
    ...histories,
    {
      obj: ChatRoleEnum.Human,
      value: [
        {
          type: ChatItemValueTypeEnum.text,
          text: {
            // Wrap the question with background knowledge when a system prompt is set.
            content: systemPrompt
              ? `<背景知识>
${systemPrompt}
</背景知识>
问题: "${userChatInput}"
`
              : userChatInput
          }
        }
      ]
    }
  ];
  const adaptMessages = chats2GPTMessages({ messages, reserveId: false });
  const filterMessages = filterGPTMessageByMaxTokens({
    messages: adaptMessages,
    maxTokens: cqModel.maxContext
  });

  // function body
  const agentFunction = {
    name: agentFunName,
    description: '结合对话记录及背景知识,对问题进行分类,并返回对应的类型字段',
    parameters: {
      type: 'object',
      properties: {
        type: {
          type: 'string',
          description: `问题类型。下面是几种可选的问题类型: ${agents
            .map((item) => `${item.value},返回:'${item.key}'`)
            .join('')}`,
          enum: agents.map((item) => item.key)
        }
      },
      required: ['type']
    }
  };

  return {
    agentFunction,
    filterMessages
  };
};
/**
 * Classify via the tools / tool_choice API: forces the model to call the
 * classification function and parses its arguments.
 * Falls back to `{ arg: {}, tokens: 0 }` when the response cannot be parsed
 * (e.g. the model does not actually support tool calls).
 */
const toolChoice = async (props: ActionProps) => {
  const { user, cqModel } = props;
  const { agentFunction, filterMessages } = getFunctionCallSchema(props);

  // function body
  const tools: ChatCompletionTool[] = [
    {
      type: 'function',
      function: agentFunction
    }
  ];

  const ai = getAIApi({
    userKey: user.openaiAccount,
    timeout: 480000
  });

  const response = await ai.chat.completions.create({
    model: cqModel.model,
    temperature: 0,
    messages: filterMessages,
    tools,
    // Force the model to invoke the classification function.
    tool_choice: { type: 'function', function: { name: agentFunName } }
  });

  try {
    const arg = JSON.parse(
      response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || ''
    );
    const completeMessages: ChatCompletionMessageParam[] = [
      ...filterMessages,
      {
        role: ChatCompletionRequestMessageRoleEnum.Assistant,
        tool_calls: response.choices?.[0]?.message?.tool_calls
      }
    ];
    return {
      arg,
      tokens: countGptMessagesTokens(completeMessages, tools)
    };
  } catch (error) {
    console.log(response.choices?.[0]?.message);
    // Typo fixed in log message: 'toll_call' -> 'tool_call'.
    console.log('Your model may not support tool_call', error);
    return {
      arg: {},
      tokens: 0
    };
  }
};
/**
 * Classify via the legacy function_call API (for models without tool support).
 * Falls back to `{ arg: {}, tokens: 0 }` when the response cannot be parsed.
 */
const functionCall = async (props: ActionProps) => {
  const { user, cqModel } = props;
  const { agentFunction, filterMessages } = getFunctionCallSchema(props);

  const functions: ChatCompletionCreateParams.Function[] = [agentFunction];

  const ai = getAIApi({
    userKey: user.openaiAccount,
    timeout: 480000
  });

  const response = await ai.chat.completions.create({
    model: cqModel.model,
    temperature: 0,
    messages: filterMessages,
    // Force the model to invoke the classification function.
    function_call: {
      name: agentFunName
    },
    functions
  });

  try {
    const arg = JSON.parse(response?.choices?.[0]?.message?.function_call?.arguments || '');
    const completeMessages: ChatCompletionMessageParam[] = [
      ...filterMessages,
      {
        role: ChatCompletionRequestMessageRoleEnum.Assistant,
        function_call: response.choices?.[0]?.message?.function_call
      }
    ];

    return {
      arg,
      tokens: countGptMessagesTokens(completeMessages, undefined, functions)
    };
  } catch (error) {
    console.log(response.choices?.[0]?.message);
    // Typo fixed in log message: 'toll_call' -> 'tool_call'.
    console.log('Your model may not support tool_call', error);

    return {
      arg: {},
      tokens: 0
    };
  }
};
/**
 * Classification fallback for models without tool/function calling:
 * renders the CQ prompt template and matches the answer text against the
 * agent keys/values.
 */
const completions = async ({
  cqModel,
  user,
  histories,
  params: { agents, systemPrompt = '', userChatInput }
}: ActionProps) => {
  const messages: ChatItemType[] = [
    {
      obj: ChatRoleEnum.Human,
      value: [
        {
          type: ChatItemValueTypeEnum.text,
          text: {
            content: replaceVariable(cqModel.customCQPrompt || Prompt_CQJson, {
              systemPrompt: systemPrompt || 'null',
              typeList: agents
                .map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
                .join('\n'),
              history: histories
                .map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`)
                .join('\n'),
              question: userChatInput
            })
          }
        }
      ]
    }
  ];

  const ai = getAIApi({
    userKey: user.openaiAccount,
    timeout: 480000
  });

  const data = await ai.chat.completions.create({
    model: cqModel.model,
    temperature: 0.01,
    messages: chats2GPTMessages({ messages, reserveId: false }),
    stream: false
  });
  const answer = data.choices?.[0].message?.content || '';

  // Match by agent key or display value; empty string when nothing matches
  // (the caller then falls back to the last agent).
  const id =
    agents.find((item) => answer.includes(item.key) || answer.includes(item.value))?.key || '';

  return {
    tokens: countMessagesTokens(messages),
    arg: { type: id }
  };
};

View File

@@ -0,0 +1,382 @@
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { filterGPTMessageByMaxTokens } from '../../../chat/utils';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import {
countGptMessagesTokens,
countMessagesTokens
} from '@fastgpt/global/common/string/tiktoken';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '../../../ai/config';
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/module/type';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { Prompt_ExtractJson } from '@fastgpt/global/core/ai/prompt/agent';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getHistories } from '../utils';
import { ModelTypeEnum, getLLMModel } from '../../../ai/model';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import json5 from 'json5';
import {
ChatCompletionCreateParams,
ChatCompletionMessageParam,
ChatCompletionTool
} from '@fastgpt/global/core/ai/type';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
// Dispatch-node inputs for the content-extraction module.
type Props = ModuleDispatchProps<{
  [ModuleInputKeyEnum.history]?: ChatItemType[];
  [ModuleInputKeyEnum.contextExtractInput]: string; // the text to extract from
  [ModuleInputKeyEnum.extractKeys]: ContextExtractAgentItemType[]; // fields to extract
  [ModuleInputKeyEnum.description]: string; // task description shown to the model
  [ModuleInputKeyEnum.aiModel]: string;
}>;
// success/failed are mutually exclusive flags; contextExtractFields is the
// JSON-stringified extraction result.
type Response = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.success]?: boolean;
  [ModuleOutputKeyEnum.failed]?: boolean;
  [ModuleOutputKeyEnum.contextExtractFields]: string;
}>;
type ActionProps = Props & { extractModel: LLMModelItemType };

// Name of the synthetic function/tool the LLM is asked to call.
const agentFunName = 'request_function';
/**
 * Extract structured fields from `content` according to `extractKeys`.
 * Strategy by model capability: tool_choice > function_call > prompt
 * completion. Invalid/empty keys are stripped, required keys are back-filled
 * with defaults, then success/failed flags plus usage metadata are returned.
 */
export async function dispatchContentExtract(props: Props): Promise<Response> {
  const {
    user,
    module: { name },
    histories,
    params: { content, history = 6, model, description, extractKeys }
  } = props;

  if (!content) {
    return Promise.reject('Input is empty');
  }

  const extractModel = getLLMModel(model);
  const chatHistories = getHistories(history, histories);

  // Pick the extraction strategy supported by the model.
  const { arg, tokens } = await (async () => {
    if (extractModel.toolChoice) {
      return toolChoice({
        ...props,
        histories: chatHistories,
        extractModel
      });
    }
    if (extractModel.functionCall) {
      return functionCall({
        ...props,
        histories: chatHistories,
        extractModel
      });
    }
    return completions({
      ...props,
      histories: chatHistories,
      extractModel
    });
  })();

  // remove invalid key
  for (let key in arg) {
    const item = extractKeys.find((item) => item.key === key);
    if (!item) {
      delete arg[key];
    }
    if (arg[key] === '') {
      delete arg[key];
    }
  }

  // auto fill required fields
  extractKeys.forEach((item) => {
    if (item.required && !arg[item.key]) {
      arg[item.key] = item.defaultValue || '';
    }
  });

  // auth fields: success only when every configured key is present in arg
  let success = !extractKeys.find((item) => !(item.key in arg));
  // auth empty value
  // NOTE(review): unknown keys were already deleted in the loop above, so
  // this check can never flip `success` to false — looks like dead code;
  // confirm intent before removing.
  if (success) {
    for (const key in arg) {
      const item = extractKeys.find((item) => item.key === key);
      if (!item) {
        success = false;
        break;
      }
    }
  }

  const { totalPoints, modelName } = formatModelChars2Points({
    model: extractModel.model,
    tokens,
    modelType: ModelTypeEnum.llm
  });

  return {
    [ModuleOutputKeyEnum.success]: success ? true : undefined,
    [ModuleOutputKeyEnum.failed]: success ? undefined : true,
    [ModuleOutputKeyEnum.contextExtractFields]: JSON.stringify(arg),
    ...arg,
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      // Users on their own OpenAI key are not billed points.
      totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
      model: modelName,
      query: content,
      tokens,
      extractDescription: description,
      extractResult: arg,
      contextTotalLen: chatHistories.length + 2
    },
    [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
      {
        moduleName: name,
        totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
        model: modelName,
        tokens
      }
    ]
  };
}
/**
 * Build the extraction function schema (one string property per extract key)
 * plus the token-filtered message list shared by the toolChoice and
 * functionCall strategies.
 * (Template-literal content below is runtime prompt data — kept verbatim.)
 */
const getFunctionCallSchema = ({
  extractModel,
  histories,
  params: { content, extractKeys, description }
}: ActionProps) => {
  const messages: ChatItemType[] = [
    ...histories,
    {
      obj: ChatRoleEnum.Human,
      value: [
        {
          type: ChatItemValueTypeEnum.text,
          text: {
            content: `我正在执行一个函数,需要你提供一些参数,请以 JSON 字符串格式返回这些参数,要求:
"""
${description ? `- ${description}` : ''}
- 不是每个参数都是必须生成的,如果没有合适的参数值,不要生成该参数,或返回空字符串。
- 需要结合前面的对话内容,一起生成合适的参数。
"""
本次输入内容: ${content}
`
          }
        }
      ]
    }
  ];
  const adaptMessages = chats2GPTMessages({ messages, reserveId: false });
  const filterMessages = filterGPTMessageByMaxTokens({
    messages: adaptMessages,
    maxTokens: extractModel.maxContext
  });

  const properties: Record<
    string,
    {
      type: string;
      description: string;
    }
  > = {};
  extractKeys.forEach((item) => {
    properties[item.key] = {
      type: 'string',
      description: item.desc,
      // Multi-line enum text becomes a JSON-schema enum list.
      ...(item.enum ? { enum: item.enum.split('\n') } : {})
    };
  });

  // function body
  const agentFunction = {
    name: agentFunName,
    description: '需要执行的函数',
    parameters: {
      type: 'object',
      properties
    }
  };

  return {
    filterMessages,
    agentFunction
  };
};
/**
 * Extract fields via the tools / tool_choice API: forces the model to call
 * the extraction function and json5-parses its arguments. A parse failure is
 * logged and yields an empty argument object; tokens are still counted.
 */
const toolChoice = async (props: ActionProps) => {
  const { user, extractModel } = props;
  const { filterMessages, agentFunction } = getFunctionCallSchema(props);

  const tools: ChatCompletionTool[] = [
    {
      type: 'function',
      function: agentFunction
    }
  ];

  const ai = getAIApi({
    userKey: user.openaiAccount,
    timeout: 480000
  });

  const chatResponse = await ai.chat.completions.create({
    model: extractModel.model,
    temperature: 0,
    messages: filterMessages,
    tools,
    // Force the model to invoke the extraction function.
    tool_choice: { type: 'function', function: { name: agentFunName } }
  });

  // Parse the tool-call arguments; default to an empty object on failure.
  let extractedArgs: Record<string, any> = {};
  const rawArguments =
    chatResponse?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || '{}';
  try {
    extractedArgs = json5.parse(rawArguments);
  } catch (error) {
    console.log(agentFunction.parameters);
    console.log(chatResponse.choices?.[0]?.message?.tool_calls?.[0]?.function);
    console.log('Your model may not support tool_call', error);
  }

  const completeMessages: ChatCompletionMessageParam[] = [
    ...filterMessages,
    {
      role: ChatCompletionRequestMessageRoleEnum.Assistant,
      tool_calls: chatResponse.choices?.[0]?.message?.tool_calls
    }
  ];

  return {
    tokens: countGptMessagesTokens(completeMessages, tools),
    arg: extractedArgs
  };
};
/**
 * Extract fields via the legacy function_call API (for models without tool
 * support). Falls back to `{ arg: {}, tokens: 0 }` when the response cannot
 * be parsed.
 */
const functionCall = async (props: ActionProps) => {
  const { user, extractModel } = props;
  const { agentFunction, filterMessages } = getFunctionCallSchema(props);

  const functions: ChatCompletionCreateParams.Function[] = [agentFunction];

  const ai = getAIApi({
    userKey: user.openaiAccount,
    timeout: 480000
  });

  const response = await ai.chat.completions.create({
    model: extractModel.model,
    temperature: 0,
    messages: filterMessages,
    // Force the model to invoke the extraction function.
    function_call: {
      name: agentFunName
    },
    functions
  });

  try {
    const arg = JSON.parse(response?.choices?.[0]?.message?.function_call?.arguments || '');
    const completeMessages: ChatCompletionMessageParam[] = [
      ...filterMessages,
      {
        role: ChatCompletionRequestMessageRoleEnum.Assistant,
        function_call: response.choices?.[0]?.message?.function_call
      }
    ];

    return {
      arg,
      tokens: countGptMessagesTokens(completeMessages, undefined, functions)
    };
  } catch (error) {
    console.log(response.choices?.[0]?.message);
    // Typo fixed in log message: 'toll_call' -> 'tool_call'.
    console.log('Your model may not support tool_call', error);

    return {
      arg: {},
      tokens: 0
    };
  }
};
/**
 * Extraction fallback for models without tool/function calling: renders the
 * extraction prompt template and json5-parses the first {...} span found in
 * the free-form answer text.
 */
const completions = async ({
  extractModel,
  user,
  histories,
  params: { content, extractKeys, description }
}: ActionProps) => {
  const messages: ChatItemType[] = [
    {
      obj: ChatRoleEnum.Human,
      value: [
        {
          type: ChatItemValueTypeEnum.text,
          text: {
            content: replaceVariable(extractModel.customExtractPrompt || Prompt_ExtractJson, {
              description,
              json: extractKeys
                .map(
                  (item) =>
                    `{"key":"${item.key}", "description":"${item.desc}"${
                      item.enum ? `, "enum":"[${item.enum.split('\n')}]"` : ''
                    }}`
                )
                .join('\n'),
              text: `${histories.map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`).join('\n')}
Human: ${content}`
            })
          }
        }
      ]
    }
  ];

  const ai = getAIApi({
    userKey: user.openaiAccount,
    timeout: 480000
  });

  const data = await ai.chat.completions.create({
    model: extractModel.model,
    temperature: 0.01,
    messages: chats2GPTMessages({ messages, reserveId: false }),
    stream: false
  });
  const answer = data.choices?.[0].message?.content || '';

  // parse response
  // Locate the outermost {...} span in the free-form answer.
  const start = answer.indexOf('{');
  const end = answer.lastIndexOf('}');

  if (start === -1 || end === -1) {
    return {
      rawResponse: answer,
      tokens: countMessagesTokens(messages),
      arg: {}
    };
  }

  // NOTE(review): stripping escapes and ALL spaces also mangles spaces inside
  // extracted string values — confirm this aggressive cleanup is intended.
  const jsonStr = answer
    .substring(start, end + 1)
    .replace(/(\\n|\\)/g, '')
    .replace(/ /g, '');

  try {
    return {
      rawResponse: answer,
      tokens: countMessagesTokens(messages),
      arg: json5.parse(jsonStr) as Record<string, any>
    };
  } catch (error) {
    console.log(error);
    return {
      rawResponse: answer,
      tokens: countMessagesTokens(messages),
      arg: {}
    };
  }
};

View File

@@ -0,0 +1,36 @@
// Prompt template (zh-CN) that teaches a plain-completion model to emulate
// tool calling: each answer must start with "0:" (direct reply) or "1:"
// (a JSON tool invocation with toolId + arguments). Placeholders filled at
// runtime: {{toolsPrompt}} (tool JSON-Schema list) and {{question}}.
// The template text below is runtime data and is kept verbatim.
export const Prompt_Tool_Call = `<Instruction>
你是一个智能机器人,除了可以回答用户问题外,你还掌握工具的使用能力。有时候,你可以依赖工具的运行结果,来更准确的回答用户。
下面是你可以使用的工具,使用 JSON Schema 的格式声明,其中 toolId 是工具的 description 是工具的描述parameters 是工具的参数包括参数的类型和描述required 是必填参数的列表。
"""
{{toolsPrompt}}
"""
接下来请你根据工具描述决定回答问题或是使用工具。在完成任务过程中USER代表用户的输入TOOL_RESPONSE代表工具运行结果。ASSISTANT 代表你的输出。
你的每次输出都必须以0,1开头代表是否需要调用工具
0: 不使用工具,直接回答内容。
1: 使用工具,返回工具调用的参数。
例如:
USER: 你好呀
ANSWER: 0: 你好,有什么可以帮助你的么?
USER: 今天杭州的天气如何
ANSWER: 1: {"toolId":"w2121",arguments:{"city": "杭州"}}
TOOL_RESPONSE: """
晴天......
"""
ANSWER: 0: 今天杭州是晴天。
USER: 今天杭州的天气适合去哪里玩?
ANSWER: 1: {"toolId":"as21da",arguments:{"query": "杭州 天气 去哪里玩"}}
TOOL_RESPONSE: """
晴天. 西湖、灵隐寺、千岛湖……
"""
ANSWER: 0: 今天杭州是晴天,适合去西湖、灵隐寺、千岛湖等地玩。
</Instruction>
现在,我们开始吧!
USER: {{question}}
ANSWER:
`;

View File

@@ -0,0 +1,409 @@
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getAIApi } from '../../../../ai/config';
import { filterGPTMessageByMaxTokens } from '../../../../chat/utils';
import {
ChatCompletion,
StreamChatType,
ChatCompletionMessageParam,
ChatCompletionCreateParams,
ChatCompletionMessageFunctionCall,
ChatCompletionFunctionMessageParam,
ChatCompletionAssistantMessageParam
} from '@fastgpt/global/core/ai/type';
import { NextApiResponse } from 'next';
import {
responseWrite,
responseWriteController,
responseWriteNodeStatus
} from '../../../../../common/response';
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
import { dispatchWorkFlow } from '../../index';
import { DispatchToolModuleProps, RunToolResponse, ToolModuleItemType } from './type.d';
import json5 from 'json5';
import { DispatchFlowResponse } from '../../type';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { AIChatItemType, AIChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
// Result of running the tool modules selected in one function-call round:
// the workflow dispatch response paired with the `function`-role message that
// feeds the tool result back into the chat history.
type FunctionRunResponseType = {
  moduleRunResponse: DispatchFlowResponse;
  functionCallMsg: ChatCompletionFunctionMessageParam;
}[];
export const runToolWithFunctionCall = async (
props: DispatchToolModuleProps & {
messages: ChatCompletionMessageParam[];
toolModules: ToolModuleItemType[];
toolModel: LLMModelItemType;
},
response?: RunToolResponse
): Promise<RunToolResponse> => {
const {
toolModel,
toolModules,
messages,
res,
runtimeModules,
detail = false,
module,
stream
} = props;
const assistantResponses = response?.assistantResponses || [];
const functions: ChatCompletionCreateParams.Function[] = toolModules.map((module) => {
const properties: Record<
string,
{
type: string;
description: string;
required?: boolean;
}
> = {};
module.toolParams.forEach((item) => {
properties[item.key] = {
type: 'string',
description: item.toolDescription || ''
};
});
return {
name: module.moduleId,
description: module.intro,
parameters: {
type: 'object',
properties,
required: module.toolParams.filter((item) => item.required).map((item) => item.key)
}
};
});
const filterMessages = filterGPTMessageByMaxTokens({
messages,
maxTokens: toolModel.maxContext - 500 // filter token. not response maxToken
});
/* Run llm */
const ai = getAIApi({
timeout: 480000
});
const aiResponse = await ai.chat.completions.create(
{
...toolModel?.defaultConfig,
model: toolModel.model,
temperature: 0,
stream,
messages: filterMessages,
functions,
function_call: 'auto'
},
{
headers: {
Accept: 'application/json, text/plain, */*'
}
}
);
const { answer, functionCalls } = await (async () => {
if (stream) {
return streamResponse({
res,
detail,
toolModules,
stream: aiResponse
});
} else {
const result = aiResponse as ChatCompletion;
const function_call = result.choices?.[0]?.message?.function_call;
const toolModule = toolModules.find((module) => module.moduleId === function_call?.name);
const toolCalls = function_call
? [
{
...function_call,
id: getNanoid(),
toolName: toolModule?.name,
toolAvatar: toolModule?.avatar
}
]
: [];
return {
answer: result.choices?.[0]?.message?.content || '',
functionCalls: toolCalls
};
}
})();
// Run the selected tool.
const toolsRunResponse = (
await Promise.all(
functionCalls.map(async (tool) => {
if (!tool) return;
const toolModule = toolModules.find((module) => module.moduleId === tool.name);
if (!toolModule) return;
const startParams = (() => {
try {
return json5.parse(tool.arguments);
} catch (error) {
return {};
}
})();
const moduleRunResponse = await dispatchWorkFlow({
...props,
runtimeModules: runtimeModules.map((module) => ({
...module,
isEntry: module.moduleId === toolModule.moduleId
})),
startParams
});
const stringToolResponse = (() => {
if (typeof moduleRunResponse.toolResponses === 'object') {
return JSON.stringify(moduleRunResponse.toolResponses, null, 2);
}
return moduleRunResponse.toolResponses ? String(moduleRunResponse.toolResponses) : 'none';
})();
const functionCallMsg: ChatCompletionFunctionMessageParam = {
role: ChatCompletionRequestMessageRoleEnum.Function,
name: tool.name,
content: stringToolResponse
};
if (stream && detail) {
responseWrite({
res,
event: SseResponseEventEnum.toolResponse,
data: JSON.stringify({
tool: {
id: tool.id,
toolName: '',
toolAvatar: '',
params: '',
response: stringToolResponse
}
})
});
}
return {
moduleRunResponse,
functionCallMsg
};
})
)
).filter(Boolean) as FunctionRunResponseType;
const flatToolsResponseData = toolsRunResponse.map((item) => item.moduleRunResponse).flat();
const functionCall = functionCalls[0];
if (functionCall && !res.closed) {
// Run the tool, combine its results, and perform another round of AI calls
const assistantToolMsgParams: ChatCompletionAssistantMessageParam = {
role: ChatCompletionRequestMessageRoleEnum.Assistant,
function_call: functionCall
};
const concatToolMessages = [
...filterMessages,
assistantToolMsgParams
] as ChatCompletionMessageParam[];
const tokens = countGptMessagesTokens(concatToolMessages, undefined, functions);
const completeMessages = [
...concatToolMessages,
...toolsRunResponse.map((item) => item?.functionCallMsg)
];
// console.log(tokens, 'tool');
if (stream && detail) {
responseWriteNodeStatus({
res,
name: module.name
});
}
// tool assistant
const toolAssistants = toolsRunResponse
.map((item) => {
const assistantResponses = item.moduleRunResponse.assistantResponses || [];
return assistantResponses;
})
.flat();
// tool node assistant
const adaptChatMessages = GPTMessages2Chats(completeMessages);
const toolNodeAssistant = adaptChatMessages.pop() as AIChatItemType;
const toolNodeAssistants = [
...assistantResponses,
...toolAssistants,
...toolNodeAssistant.value
];
// concat tool responses
const dispatchFlowResponse = response
? response.dispatchFlowResponse.concat(flatToolsResponseData)
: flatToolsResponseData;
/* check stop signal */
const hasStopSignal = flatToolsResponseData.some(
(item) => !!item.flowResponses?.find((item) => item.toolStop)
);
if (hasStopSignal) {
return {
dispatchFlowResponse,
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
completeMessages: filterMessages,
assistantResponses: toolNodeAssistants
};
}
return runToolWithFunctionCall(
{
...props,
messages: completeMessages
},
{
dispatchFlowResponse,
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
assistantResponses: toolNodeAssistants
}
);
} else {
// No tool is invoked, indicating that the process is over
const gptAssistantResponse: ChatCompletionAssistantMessageParam = {
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: answer
};
const completeMessages = filterMessages.concat(gptAssistantResponse);
const tokens = countGptMessagesTokens(completeMessages, undefined, functions);
// console.log(tokens, 'response token');
// concat tool assistant
const toolNodeAssistant = GPTMessages2Chats([gptAssistantResponse])[0] as AIChatItemType;
return {
dispatchFlowResponse: response?.dispatchFlowResponse || [],
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
completeMessages,
assistantResponses: [...assistantResponses, ...toolNodeAssistant.value]
};
}
};
/**
 * Consume a streamed `function_call` completion: forward text deltas to the
 * client as answer events, collect function-call deltas into complete calls,
 * and emit toolCall/toolParams SSE events when `detail` is on.
 *
 * Bug fix: the first delta of a function call can carry both `name` and an
 * initial `arguments` fragment. The original code stored that fragment when
 * pushing the call AND re-appended it in the shared append step below, so the
 * assembled arguments started with a duplicated fragment. The call is now
 * pushed with empty `arguments` and every fragment (including the first) is
 * added exactly once by the append step.
 *
 * @returns accumulated answer text and the decorated function calls.
 * @throws rejects with 'LLM api response empty' when the stream produced
 *         neither text nor a function call.
 */
async function streamResponse({
  res,
  detail,
  toolModules,
  stream
}: {
  res: NextApiResponse;
  detail: boolean;
  toolModules: ToolModuleItemType[];
  stream: StreamChatType;
}) {
  const write = responseWriteController({
    res,
    readStream: stream
  });

  let textAnswer = '';
  const functionCalls: ChatCompletionMessageFunctionCall[] = [];
  let functionId = getNanoid();

  for await (const part of stream) {
    if (res.closed) {
      stream.controller?.abort();
      break;
    }

    // Guard with ?. — a keep-alive chunk may have no choices at all.
    const responseChoice = part.choices?.[0]?.delta;

    if (responseChoice?.content) {
      // Plain text delta: forward to the client as an answer event.
      const content = responseChoice.content || '';
      textAnswer += content;

      responseWrite({
        write,
        event: detail ? SseResponseEventEnum.answer : undefined,
        data: textAdaptGptResponse({
          text: content
        })
      });
    } else if (responseChoice?.function_call) {
      const functionCall: {
        arguments?: string;
        name?: string;
      } = responseChoice.function_call;

      // The stream only runs one function at a time; a delta carrying `name`
      // starts a new function call.
      if (functionCall?.name) {
        functionId = getNanoid();
        const toolModule = toolModules.find((module) => module.moduleId === functionCall?.name);

        if (toolModule) {
          functionCalls.push({
            ...functionCall,
            id: functionId,
            name: functionCall.name,
            // Seed empty: the shared append below adds this delta's fragment,
            // so keeping it here would duplicate the first argument chunk.
            arguments: '',
            toolName: toolModule.name,
            toolAvatar: toolModule.avatar
          });

          if (detail) {
            responseWrite({
              write,
              event: SseResponseEventEnum.toolCall,
              data: JSON.stringify({
                tool: {
                  id: functionId,
                  toolName: toolModule.name,
                  toolAvatar: toolModule.avatar,
                  functionName: functionCall.name,
                  // Arguments are delivered incrementally via toolParams events.
                  params: '',
                  response: ''
                }
              })
            });
          }
        }
      }

      /* Append this delta's argument fragment to the current (last) call. */
      const arg: string = functionCall?.arguments || '';
      const currentTool = functionCalls[functionCalls.length - 1];

      if (currentTool && arg) {
        currentTool.arguments += arg;

        if (detail) {
          responseWrite({
            write,
            event: SseResponseEventEnum.toolParams,
            data: JSON.stringify({
              tool: {
                id: functionId,
                toolName: '',
                toolAvatar: '',
                params: arg,
                response: ''
              }
            })
          });
        }
      }
    }
  }

  if (!textAnswer && functionCalls.length === 0) {
    return Promise.reject('LLM api response empty');
  }

  return { answer: textAnswer, functionCalls };
}

View File

@@ -0,0 +1,157 @@
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import type {
DispatchNodeResultType,
RunningModuleItemType
} from '@fastgpt/global/core/module/runtime/type';
import { ModelTypeEnum, getLLMModel } from '../../../../ai/model';
import { getHistories } from '../../utils';
import { runToolWithToolChoice } from './toolChoice';
import { DispatchToolModuleProps, ToolModuleItemType } from './type.d';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import {
GPTMessages2Chats,
chats2GPTMessages,
getSystemPrompt,
runtimePrompt2ChatsValue
} from '@fastgpt/global/core/chat/adapt';
import { formatModelChars2Points } from '../../../../../support/wallet/usage/utils';
import { getHistoryPreview } from '@fastgpt/global/core/chat/utils';
import { runToolWithFunctionCall } from './functionCall';
import { runToolWithPromptCall } from './promptCall';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_Tool_Call } from './constants';
// The tool node itself has no static outputs; everything is reported through
// the dispatch-node response keys.
type Response = DispatchNodeResultType<{}>;

/**
 * Entry point of the tool-call node.
 *
 * Collects the tool modules connected to the `selectedTools` output, builds the
 * chat message list (system prompt + history + user input), then runs one of
 * three strategies depending on the model's capabilities:
 * - `tool_choice` (native parallel tool calls),
 * - `function_call` (legacy single-function protocol),
 * - prompt-based tool calling for models with neither.
 * Finally it aggregates points/token usage of the node and all child tool runs.
 *
 * @returns assistant responses, the node response (billing + preview data) and
 *          the per-module usage list.
 */
export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<Response> => {
  const {
    module: { name, outputs },
    runtimeModules,
    histories,
    params: { model, systemPrompt, userChatInput, history = 6 }
  } = props;

  const toolModel = getLLMModel(model);
  const chatHistories = getHistories(history, histories);

  /* get tool params */

  // get tool output targets
  const toolOutput = outputs.find((output) => output.key === ModuleOutputKeyEnum.selectedTools);

  if (!toolOutput) {
    return Promise.reject('No tool output found');
  }

  const targets = toolOutput.targets;

  // Gets the module to which the tool is connected
  const toolModules = targets
    .map((item) => {
      const tool = runtimeModules.find((module) => module.moduleId === item.moduleId);
      return tool;
    })
    .filter(Boolean)
    .map<ToolModuleItemType>((tool) => {
      // Only inputs with a toolDescription are exposed as tool parameters.
      const toolParams = tool?.inputs.filter((input) => !!input.toolDescription) || [];
      return {
        ...(tool as RunningModuleItemType),
        toolParams
      };
    });

  const messages: ChatItemType[] = [
    ...getSystemPrompt(systemPrompt),
    ...chatHistories,
    {
      obj: ChatRoleEnum.Human,
      value: runtimePrompt2ChatsValue({
        text: userChatInput,
        files: []
      })
    }
  ];

  // Pick the tool-calling strategy supported by the model.
  const {
    dispatchFlowResponse, // tool flow response
    totalTokens,
    completeMessages = [], // The actual message sent to AI(just save text)
    assistantResponses = [] // FastGPT system store assistant.value response
  } = await (async () => {
    const adaptMessages = chats2GPTMessages({ messages, reserveId: false });

    if (toolModel.toolChoice) {
      return runToolWithToolChoice({
        ...props,
        toolModules,
        toolModel,
        messages: adaptMessages
      });
    }
    if (toolModel.functionCall) {
      return runToolWithFunctionCall({
        ...props,
        toolModules,
        toolModel,
        messages: adaptMessages
      });
    }

    // Prompt-based fallback: rewrite the last user message with the tool-call
    // prompt template (only plain-text user input is supported here).
    const lastMessage = adaptMessages[adaptMessages.length - 1];
    if (typeof lastMessage.content !== 'string') {
      return Promise.reject('暂时只支持纯文本');
    }

    lastMessage.content = replaceVariable(Prompt_Tool_Call, {
      question: userChatInput
    });

    return runToolWithPromptCall({
      ...props,
      toolModules,
      toolModel,
      messages: adaptMessages
    });
  })();

  const { totalPoints, modelName } = formatModelChars2Points({
    model,
    tokens: totalTokens,
    modelType: ModelTypeEnum.llm
  });

  // flat child tool response
  const childToolResponse = dispatchFlowResponse.map((item) => item.flowResponses).flat();

  // concat tool usage: node's own LLM points plus every child workflow's points
  const totalPointsUsage =
    totalPoints +
    dispatchFlowResponse.reduce((sum, item) => {
      const childrenTotal = item.flowUsages.reduce((sum, item) => sum + item.totalPoints, 0);
      return sum + childrenTotal;
    }, 0);
  const flatUsages = dispatchFlowResponse.map((item) => item.flowUsages).flat();

  return {
    [DispatchNodeResponseKeyEnum.assistantResponses]: assistantResponses,
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      totalPoints: totalPointsUsage,
      toolCallTokens: totalTokens,
      model: modelName,
      query: userChatInput,
      historyPreview: getHistoryPreview(GPTMessages2Chats(completeMessages, false)),
      toolDetail: childToolResponse
    },
    [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
      {
        moduleName: name,
        totalPoints,
        model: modelName,
        tokens: totalTokens
      },
      ...flatUsages
    ]
  };
};

View File

@@ -0,0 +1,385 @@
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getAIApi } from '../../../../ai/config';
import { filterGPTMessageByMaxTokens } from '../../../../chat/utils';
import {
ChatCompletion,
StreamChatType,
ChatCompletionMessageParam,
ChatCompletionAssistantMessageParam
} from '@fastgpt/global/core/ai/type';
import { NextApiResponse } from 'next';
import {
responseWrite,
responseWriteController,
responseWriteNodeStatus
} from '../../../../../common/response';
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
import { dispatchWorkFlow } from '../../index';
import { DispatchToolModuleProps, RunToolResponse, ToolModuleItemType } from './type.d';
import json5 from 'json5';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import { getNanoid, replaceVariable } from '@fastgpt/global/common/string/tools';
import { AIChatItemType } from '@fastgpt/global/core/chat/type';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
// A tool invocation parsed from a "1:"-prefixed answer, decorated with the
// tool's display name/avatar for the client.
type FunctionCallCompletion = {
  id: string;
  name: string;
  arguments: string;
  toolName?: string;
  toolAvatar?: string;
};

/**
 * Run the tool node via plain prompting, for models that support neither
 * `tool_choice` nor `function_call`.
 *
 * The tool schemas are serialized to JSON and injected into the last user
 * message ({{toolsPrompt}}); the model is expected to answer with a protocol
 * prefix that parseAnswer understands ("1:" = tool call, anything else = plain
 * text). The tool loop is carried inside the user prompt itself: this function
 * appends the answer and tool response to the last message and recurses.
 *
 * NOTE: `messages` (specifically the last message's content) is mutated in
 * place; the recursion relies on this.
 *
 * @param response Accumulator across recursive rounds; undefined on first call.
 */
export const runToolWithPromptCall = async (
  props: DispatchToolModuleProps & {
    messages: ChatCompletionMessageParam[];
    toolModules: ToolModuleItemType[];
    toolModel: LLMModelItemType;
  },
  response?: RunToolResponse
): Promise<RunToolResponse> => {
  const {
    toolModel,
    toolModules,
    messages,
    res,
    runtimeModules,
    detail = false,
    module,
    stream
  } = props;
  const assistantResponses = response?.assistantResponses || [];

  // Describe every connected tool as a JSON schema; injected into the prompt
  // through the {{toolsPrompt}} variable below.
  const toolsPrompt = JSON.stringify(
    toolModules.map((module) => {
      const properties: Record<
        string,
        {
          type: string;
          description: string;
          required?: boolean;
        }
      > = {};
      module.toolParams.forEach((item) => {
        properties[item.key] = {
          type: 'string',
          description: item.toolDescription || ''
        };
      });

      return {
        toolId: module.moduleId,
        description: module.intro,
        parameters: {
          type: 'object',
          properties,
          required: module.toolParams.filter((item) => item.required).map((item) => item.key)
        }
      };
    })
  );

  const lastMessage = messages[messages.length - 1];
  if (typeof lastMessage.content !== 'string') {
    return Promise.reject('暂时只支持纯文本');
  }
  lastMessage.content = replaceVariable(lastMessage.content, {
    toolsPrompt
  });

  const filterMessages = filterGPTMessageByMaxTokens({
    messages,
    maxTokens: toolModel.maxContext - 500 // filter token. not response maxToken
  });

  // console.log(JSON.stringify(filterMessages, null, 2));
  /* Run llm */
  const ai = getAIApi({
    timeout: 480000
  });
  // Note: no functions/tools payload — the tools live in the prompt text.
  const aiResponse = await ai.chat.completions.create(
    {
      ...toolModel?.defaultConfig,
      model: toolModel.model,
      temperature: 0,
      stream,
      messages: filterMessages
    },
    {
      headers: {
        Accept: 'application/json, text/plain, */*'
      }
    }
  );

  const answer = await (async () => {
    if (stream) {
      const { answer } = await streamResponse({
        res,
        detail,
        toolModules,
        stream: aiResponse
      });

      return answer;
    } else {
      const result = aiResponse as ChatCompletion;

      return result.choices?.[0]?.message?.content || '';
    }
  })();

  // "1:"-prefixed answers parse into a tool call; anything else stays a string.
  const parseAnswerResult = parseAnswer(answer);
  // console.log(answer, '==11==');
  // No tools
  if (typeof parseAnswerResult === 'string') {
    // No tool is invoked, indicating that the process is over
    const gptAssistantResponse: ChatCompletionAssistantMessageParam = {
      role: ChatCompletionRequestMessageRoleEnum.Assistant,
      content: parseAnswerResult
    };
    const completeMessages = filterMessages.concat(gptAssistantResponse);
    const tokens = countGptMessagesTokens(completeMessages, undefined);
    // console.log(tokens, 'response token');

    // concat tool assistant
    const toolNodeAssistant = GPTMessages2Chats([gptAssistantResponse])[0] as AIChatItemType;

    return {
      dispatchFlowResponse: response?.dispatchFlowResponse || [],
      totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
      completeMessages,
      assistantResponses: [...assistantResponses, ...toolNodeAssistant.value]
    };
  }

  // Run the selected tool.
  const toolsRunResponse = await (async () => {
    if (!parseAnswerResult) return Promise.reject('tool run error');

    const toolModule = toolModules.find((module) => module.moduleId === parseAnswerResult.name);
    if (!toolModule) return Promise.reject('tool not found');

    parseAnswerResult.toolName = toolModule.name;
    parseAnswerResult.toolAvatar = toolModule.avatar;

    // SSE response to client
    if (stream && detail) {
      responseWrite({
        res,
        event: SseResponseEventEnum.toolCall,
        data: JSON.stringify({
          tool: {
            id: parseAnswerResult.id,
            toolName: toolModule.name,
            toolAvatar: toolModule.avatar,
            functionName: parseAnswerResult.name,
            params: parseAnswerResult.arguments,
            response: ''
          }
        })
      });
    }

    // run tool flow
    // Arguments come from the model; fall back to {} on malformed JSON.
    const startParams = (() => {
      try {
        return json5.parse(parseAnswerResult.arguments);
      } catch (error) {
        return {};
      }
    })();
    const moduleRunResponse = await dispatchWorkFlow({
      ...props,
      runtimeModules: runtimeModules.map((module) => ({
        ...module,
        isEntry: module.moduleId === toolModule.moduleId
      })),
      startParams
    });

    const stringToolResponse = (() => {
      if (typeof moduleRunResponse.toolResponses === 'object') {
        return JSON.stringify(moduleRunResponse.toolResponses, null, 2);
      }

      return moduleRunResponse.toolResponses ? String(moduleRunResponse.toolResponses) : 'none';
    })();

    if (stream && detail) {
      responseWrite({
        res,
        event: SseResponseEventEnum.toolResponse,
        data: JSON.stringify({
          tool: {
            id: parseAnswerResult.id,
            toolName: '',
            toolAvatar: '',
            params: '',
            response: stringToolResponse
          }
        })
      });
    }

    return {
      moduleRunResponse,
      toolResponsePrompt: stringToolResponse
    };
  })();

  if (stream && detail) {
    responseWriteNodeStatus({
      res,
      name: module.name
    });
  }

  // Merge the tool call result; stored using the functionCall message format.
  const assistantToolMsgParams: ChatCompletionAssistantMessageParam = {
    role: ChatCompletionRequestMessageRoleEnum.Assistant,
    function_call: parseAnswerResult
  };
  const concatToolMessages = [
    ...filterMessages,
    assistantToolMsgParams
  ] as ChatCompletionMessageParam[];
  const tokens = countGptMessagesTokens(concatToolMessages, undefined);
  const completeMessages: ChatCompletionMessageParam[] = [
    ...concatToolMessages,
    {
      role: ChatCompletionRequestMessageRoleEnum.Function,
      name: parseAnswerResult.name,
      content: toolsRunResponse.toolResponsePrompt
    }
  ];

  // tool assistant
  const toolAssistants = toolsRunResponse.moduleRunResponse.assistantResponses || [];
  // tool node assistant
  const adaptChatMessages = GPTMessages2Chats(completeMessages);
  const toolNodeAssistant = adaptChatMessages.pop() as AIChatItemType;
  const toolNodeAssistants = [...assistantResponses, ...toolAssistants, ...toolNodeAssistant.value];

  const dispatchFlowResponse = response
    ? response.dispatchFlowResponse.concat(toolsRunResponse.moduleRunResponse)
    : [toolsRunResponse.moduleRunResponse];

  // get the next user prompt
  // Append this round's answer and the tool response to the (mutated) last
  // user message so the next round continues the dialogue from "ANSWER: ".
  lastMessage.content += `${answer}
TOOL_RESPONSE: ${toolsRunResponse.toolResponsePrompt}
ANSWER: `;

  /* check stop signal */
  const hasStopSignal = toolsRunResponse.moduleRunResponse.flowResponses.some(
    (item) => !!item.toolStop
  );
  if (hasStopSignal) {
    // NOTE(review): returns `filterMessages` (without the tool-result message)
    // while the tool_choice variant returns the full list — confirm intent.
    return {
      dispatchFlowResponse,
      totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
      completeMessages: filterMessages,
      assistantResponses: toolNodeAssistants
    };
  }

  // Recurse with the original (mutated) messages rather than completeMessages:
  // in this protocol the tool loop is carried inside the user prompt itself.
  return runToolWithPromptCall(
    {
      ...props,
      messages
    },
    {
      dispatchFlowResponse,
      totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
      assistantResponses: toolNodeAssistants
    }
  );
};
/**
 * Stream handler for the prompt-based tool protocol.
 *
 * The model is prompted to prefix its answer ("0..." = plain answer, "1:" =
 * tool call — see parseAnswer; presumably defined by Prompt_Tool_Call, confirm
 * there). Nothing is forwarded to the client until at least 3 characters have
 * arrived to classify the answer: "0"-prefixed text is streamed with the
 * prefix (up to the first ':') stripped; anything else is only buffered and
 * returned for tool parsing.
 */
async function streamResponse({
  res,
  detail,
  // toolModules is part of the shared signature but unused by this protocol.
  stream
}: {
  res: NextApiResponse;
  detail: boolean;
  toolModules: ToolModuleItemType[];
  stream: StreamChatType;
}) {
  const write = responseWriteController({
    res,
    readStream: stream
  });

  // Becomes true once the answer is classified as plain text.
  let startResponseWrite = false;
  let textAnswer = '';

  for await (const part of stream) {
    if (res.closed) {
      stream.controller?.abort();
      break;
    }

    const responseChoice = part.choices?.[0]?.delta;
    if (responseChoice.content) {
      const content = responseChoice?.content || '';
      textAnswer += content;

      if (startResponseWrite) {
        responseWrite({
          write,
          event: detail ? SseResponseEventEnum.answer : undefined,
          data: textAdaptGptResponse({
            text: content
          })
        });
      } else if (textAnswer.length >= 3) {
        textAnswer = textAnswer.trim();
        if (textAnswer.startsWith('0')) {
          startResponseWrite = true;
          // find first : index
          // NOTE(review): if no ':' has arrived yet, indexOf returns -1 and
          // substring(0) keeps the leading "0" in the streamed text — confirm
          // this edge is acceptable.
          const firstIndex = textAnswer.indexOf(':');
          textAnswer = textAnswer.substring(firstIndex + 1).trim();
          responseWrite({
            write,
            event: detail ? SseResponseEventEnum.answer : undefined,
            data: textAdaptGptResponse({
              text: textAnswer
            })
          });
        }
      }
    }
  }

  if (!textAnswer) {
    return Promise.reject('LLM api response empty');
  }
  // console.log(textAnswer, '---===');
  return { answer: textAnswer.trim() };
}
/**
 * Classify a prompt-protocol answer.
 *
 * Answers starting with "1:" are parsed (via json5, tolerating loose JSON) as
 * a tool call `{ toolId, arguments }`; anything else — including unparseable
 * "1:" payloads — is returned unchanged as a plain text answer.
 *
 * Fix: when the parsed object has no `arguments`, `JSON.stringify(undefined)`
 * returned `undefined` even though the field is typed `string`; default to an
 * empty object so downstream `json5.parse` receives valid JSON.
 */
const parseAnswer = (str: string): FunctionCallCompletion | string => {
  const prefix = '1:';
  str = str.trim();

  if (str.startsWith(prefix)) {
    const toolString = str.substring(prefix.length).trim();

    try {
      const toolCall = json5.parse(toolString);
      return {
        id: getNanoid(),
        name: toolCall.toolId,
        arguments: JSON.stringify(toolCall.arguments ?? {})
      };
    } catch (error) {
      // Malformed tool payload: fall back to treating it as a plain answer.
      return str;
    }
  } else {
    return str;
  }
};

View File

@@ -0,0 +1,14 @@
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
export type AnswerProps = ModuleDispatchProps<{}>;
export type AnswerResponse = DispatchNodeResultType<{}>;

/**
 * Terminal node of a tool run: its only effect is emitting a `toolStop` flag
 * in the node response, which the tool dispatchers read as a stop signal to
 * end the recursive tool-call loop. The incoming props are intentionally
 * ignored.
 */
export const dispatchStopToolCall = (props: Record<string, any>): AnswerResponse => {
  const nodeResponse = { toolStop: true };

  return {
    [DispatchNodeResponseKeyEnum.nodeResponse]: nodeResponse
  };
};

View File

@@ -0,0 +1,413 @@
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getAIApi } from '../../../../ai/config';
import { filterGPTMessageByMaxTokens } from '../../../../chat/utils';
import {
ChatCompletion,
ChatCompletionMessageToolCall,
StreamChatType,
ChatCompletionToolMessageParam,
ChatCompletionAssistantToolParam,
ChatCompletionMessageParam,
ChatCompletionTool,
ChatCompletionAssistantMessageParam
} from '@fastgpt/global/core/ai/type';
import { NextApiResponse } from 'next';
import {
responseWrite,
responseWriteController,
responseWriteNodeStatus
} from '../../../../../common/response';
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
import { dispatchWorkFlow } from '../../index';
import { DispatchToolModuleProps, RunToolResponse, ToolModuleItemType } from './type.d';
import json5 from 'json5';
import { DispatchFlowResponse } from '../../type';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { AIChatItemType } from '@fastgpt/global/core/chat/type';
// One entry per dispatched tool: the tool sub-workflow's response plus the
// `tool`-role message that feeds the tool output back to the model.
type ToolRunResponseType = {
  moduleRunResponse: DispatchFlowResponse;
  toolMsgParams: ChatCompletionToolMessageParam;
}[];

/*
  Execution outline:
  1. `messages` holds the conversation sent to the AI.
  2. `response` accumulates the results of the recursive rounds
     (dispatchFlowResponse, totalTokens and assistantResponses).
  3. When tools are run, their results are appended to dispatchFlowResponse,
     the tokens consumed this round are added to totalTokens, and
     assistantResponses records the output of the current tool run.
*/
export const runToolWithToolChoice = async (
  props: DispatchToolModuleProps & {
    messages: ChatCompletionMessageParam[];
    toolModules: ToolModuleItemType[];
    toolModel: LLMModelItemType;
  },
  response?: RunToolResponse
): Promise<RunToolResponse> => {
  const {
    toolModel,
    toolModules,
    messages,
    res,
    runtimeModules,
    detail = false,
    module,
    stream
  } = props;
  const assistantResponses = response?.assistantResponses || [];

  // Build one tool schema per connected tool module, keyed by moduleId.
  // Every tool param is exposed as a string property.
  const tools: ChatCompletionTool[] = toolModules.map((module) => {
    const properties: Record<
      string,
      {
        type: string;
        description: string;
        required?: boolean;
      }
    > = {};
    module.toolParams.forEach((item) => {
      properties[item.key] = {
        type: 'string',
        description: item.toolDescription || ''
      };
    });

    return {
      type: 'function',
      function: {
        name: module.moduleId,
        description: module.intro,
        parameters: {
          type: 'object',
          properties,
          required: module.toolParams.filter((item) => item.required).map((item) => item.key)
        }
      }
    };
  });

  // NOTE(review): margin here is 300 while the function_call/prompt variants
  // use 500 — confirm whether the difference is intentional.
  const filterMessages = filterGPTMessageByMaxTokens({
    messages,
    maxTokens: toolModel.maxContext - 300 // filter token. not response maxToken
  });

  /* Run llm */
  const ai = getAIApi({
    timeout: 480000
  });
  const aiResponse = await ai.chat.completions.create(
    {
      ...toolModel?.defaultConfig,
      model: toolModel.model,
      temperature: 0,
      stream,
      messages: filterMessages,
      tools,
      tool_choice: 'auto'
    },
    {
      headers: {
        Accept: 'application/json, text/plain, */*'
      }
    }
  );

  // Normalize streamed / non-streamed responses into the answer text plus the
  // triggered tool calls.
  const { answer, toolCalls } = await (async () => {
    if (stream) {
      return streamResponse({
        res,
        detail,
        toolModules,
        stream: aiResponse
      });
    } else {
      const result = aiResponse as ChatCompletion;
      const calls = result.choices?.[0]?.message?.tool_calls || [];

      // Attach the tool's display name and avatar for the client.
      const toolCalls = calls.map((tool) => {
        const toolModule = toolModules.find((module) => module.moduleId === tool.function?.name);

        return {
          ...tool,
          toolName: toolModule?.name || '',
          toolAvatar: toolModule?.avatar || ''
        };
      });

      return {
        answer: result.choices?.[0]?.message?.content || '',
        toolCalls: toolCalls
      };
    }
  })();

  // Run the selected tools (possibly several in parallel): dispatch each
  // matching module as a sub-workflow and build its `tool` reply message.
  const toolsRunResponse = (
    await Promise.all(
      toolCalls.map(async (tool) => {
        const toolModule = toolModules.find((module) => module.moduleId === tool.function?.name);
        if (!toolModule) return;

        // Arguments come from the model; fall back to {} on malformed JSON.
        const startParams = (() => {
          try {
            return json5.parse(tool.function.arguments);
          } catch (error) {
            return {};
          }
        })();

        const moduleRunResponse = await dispatchWorkFlow({
          ...props,
          runtimeModules: runtimeModules.map((module) => ({
            ...module,
            isEntry: module.moduleId === toolModule.moduleId
          })),
          startParams
        });

        const stringToolResponse = (() => {
          if (typeof moduleRunResponse.toolResponses === 'object') {
            return JSON.stringify(moduleRunResponse.toolResponses, null, 2);
          }

          return moduleRunResponse.toolResponses ? String(moduleRunResponse.toolResponses) : 'none';
        })();

        const toolMsgParams: ChatCompletionToolMessageParam = {
          tool_call_id: tool.id,
          role: ChatCompletionRequestMessageRoleEnum.Tool,
          name: tool.function.name,
          content: stringToolResponse
        };

        // Push the tool result to the client over SSE.
        if (stream && detail) {
          responseWrite({
            res,
            event: SseResponseEventEnum.toolResponse,
            data: JSON.stringify({
              tool: {
                id: tool.id,
                toolName: '',
                toolAvatar: '',
                params: '',
                response: stringToolResponse
              }
            })
          });
        }

        return {
          moduleRunResponse,
          toolMsgParams
        };
      })
    )
  ).filter(Boolean) as ToolRunResponseType;

  const flatToolsResponseData = toolsRunResponse.map((item) => item.moduleRunResponse).flat();

  if (toolCalls.length > 0 && !res.closed) {
    // Run the tool, combine its results, and perform another round of AI calls
    const assistantToolMsgParams: ChatCompletionAssistantToolParam = {
      role: ChatCompletionRequestMessageRoleEnum.Assistant,
      tool_calls: toolCalls
    };
    const concatToolMessages = [
      ...filterMessages,
      assistantToolMsgParams
    ] as ChatCompletionMessageParam[];
    const tokens = countGptMessagesTokens(concatToolMessages, tools);
    const completeMessages = [
      ...concatToolMessages,
      ...toolsRunResponse.map((item) => item?.toolMsgParams)
    ];

    // console.log(tokens, 'tool');

    if (stream && detail) {
      responseWriteNodeStatus({
        res,
        name: module.name
      });
    }

    // tool assistant: values produced inside the tool sub-workflows
    const toolAssistants = toolsRunResponse
      .map((item) => {
        const assistantResponses = item.moduleRunResponse.assistantResponses || [];
        return assistantResponses;
      })
      .flat();

    // tool node assistant: the last chat item derived from the complete message
    // list is this round's assistant output (tool calls + text).
    const adaptChatMessages = GPTMessages2Chats(completeMessages);
    const toolNodeAssistant = adaptChatMessages.pop() as AIChatItemType;
    const toolNodeAssistants = [
      ...assistantResponses,
      ...toolAssistants,
      ...toolNodeAssistant.value
    ];

    // concat tool responses
    const dispatchFlowResponse = response
      ? response.dispatchFlowResponse.concat(flatToolsResponseData)
      : flatToolsResponseData;

    /* check stop signal */
    const hasStopSignal = flatToolsResponseData.some(
      (item) => !!item.flowResponses?.find((item) => item.toolStop)
    );
    if (hasStopSignal) {
      return {
        dispatchFlowResponse,
        totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
        completeMessages,
        assistantResponses: toolNodeAssistants
      };
    }

    // Recurse with the tool results appended so the model can continue.
    return runToolWithToolChoice(
      {
        ...props,
        messages: completeMessages
      },
      {
        dispatchFlowResponse,
        totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
        assistantResponses: toolNodeAssistants
      }
    );
  } else {
    // No tool is invoked, indicating that the process is over
    const gptAssistantResponse: ChatCompletionAssistantMessageParam = {
      role: ChatCompletionRequestMessageRoleEnum.Assistant,
      content: answer
    };
    const completeMessages = filterMessages.concat(gptAssistantResponse);
    const tokens = countGptMessagesTokens(completeMessages, tools);
    // console.log(tokens, 'response token');

    // concat tool assistant
    const toolNodeAssistant = GPTMessages2Chats([gptAssistantResponse])[0] as AIChatItemType;

    return {
      dispatchFlowResponse: response?.dispatchFlowResponse || [],
      totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
      completeMessages,
      assistantResponses: [...assistantResponses, ...toolNodeAssistant.value]
    };
  }
};
/**
 * Consume a streamed `tool_choice` completion: forward text deltas as answer
 * events, assemble tool-call deltas into complete calls, and emit
 * toolCall/toolParams SSE events when `detail` is on.
 *
 * Bug fixes vs. the original:
 * - The first delta of a tool call can carry both `id`/`name` and an initial
 *   `arguments` fragment; it was stored on push AND re-appended by the shared
 *   append step, duplicating the fragment. Calls are now pushed with empty
 *   arguments and every fragment is appended exactly once.
 * - `arguments` on a continuation delta may be undefined; appending it
 *   produced the literal string "undefined". It now defaults to ''.
 *
 * @returns accumulated answer text and the decorated tool calls.
 * @throws rejects with 'LLM api response empty' when the stream produced
 *         neither text nor a tool call.
 */
async function streamResponse({
  res,
  detail,
  toolModules,
  stream
}: {
  res: NextApiResponse;
  detail: boolean;
  toolModules: ToolModuleItemType[];
  stream: StreamChatType;
}) {
  const write = responseWriteController({
    res,
    readStream: stream
  });

  let textAnswer = '';
  const toolCalls: ChatCompletionMessageToolCall[] = [];

  for await (const part of stream) {
    if (res.closed) {
      stream.controller?.abort();
      break;
    }

    // Guard with ?. — a keep-alive chunk may have no choices at all.
    const responseChoice = part.choices?.[0]?.delta;

    if (responseChoice?.content) {
      // Plain text delta: forward to the client as an answer event.
      const content = responseChoice.content || '';
      textAnswer += content;

      responseWrite({
        write,
        event: detail ? SseResponseEventEnum.answer : undefined,
        data: textAdaptGptResponse({
          text: content
        })
      });
    } else if (responseChoice?.tool_calls?.[0]) {
      const toolCall: ChatCompletionMessageToolCall = responseChoice.tool_calls[0];

      // A delta carrying an id starts a new tool call (the stream delivers one
      // tool call at a time).
      if (toolCall.id) {
        const toolModule = toolModules.find(
          (module) => module.moduleId === toolCall.function?.name
        );

        if (toolModule) {
          toolCalls.push({
            ...toolCall,
            function: {
              name: toolCall.function?.name || '',
              // Seed empty: the shared append below adds this delta's fragment,
              // so keeping it here would duplicate the first argument chunk.
              arguments: ''
            },
            toolName: toolModule.name,
            toolAvatar: toolModule.avatar
          });

          if (detail) {
            responseWrite({
              write,
              event: SseResponseEventEnum.toolCall,
              data: JSON.stringify({
                tool: {
                  id: toolCall.id,
                  toolName: toolModule.name,
                  toolAvatar: toolModule.avatar,
                  functionName: toolCall.function?.name || '',
                  // Arguments are delivered incrementally via toolParams events.
                  params: '',
                  response: ''
                }
              })
            });
          }
        }
      }

      /* Append this delta's argument fragment to the current (last) call. */
      const arg: string = toolCall.function?.arguments || '';
      const currentTool = toolCalls[toolCalls.length - 1];

      if (currentTool && arg) {
        currentTool.function.arguments += arg;

        if (detail) {
          responseWrite({
            write,
            event: SseResponseEventEnum.toolParams,
            data: JSON.stringify({
              tool: {
                id: currentTool.id,
                toolName: '',
                toolAvatar: '',
                params: arg,
                response: ''
              }
            })
          });
        }
      }
    }
  }

  if (!textAnswer && toolCalls.length === 0) {
    return Promise.reject('LLM api response empty');
  }

  return { answer: textAnswer, toolCalls };
}

View File

@@ -0,0 +1,28 @@
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { FlowNodeInputItemType } from '@fastgpt/global/core/module/node/type';
import type {
ModuleDispatchProps,
DispatchNodeResponseType
} from '@fastgpt/global/core/module/type.d';
import type { RunningModuleItemType } from '@fastgpt/global/core/module/runtime/type';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import type { DispatchFlowResponse } from '../../type.d';
import { AIChatItemValueItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
// Inputs for the tool-call agent node: chat history, model id, system prompt
// and the user's question.
export type DispatchToolModuleProps = ModuleDispatchProps<{
  // NOTE(review): ChatItemType is not imported in this file — confirm it resolves.
  [ModuleInputKeyEnum.history]?: ChatItemType[];
  [ModuleInputKeyEnum.aiModel]: string;
  [ModuleInputKeyEnum.aiSystemPrompt]: string;
  [ModuleInputKeyEnum.userChatInput]: string;
}>;
// Aggregated result of a tool-call round.
export type RunToolResponse = {
  dispatchFlowResponse: DispatchFlowResponse[]; // one nested workflow response per executed tool
  totalTokens: number;
  completeMessages?: ChatCompletionMessageParam[];
  assistantResponses?: AIChatItemValueItemType[];
};
// A runtime module exposed to the model as a callable tool; toolParams are the
// inputs surfaced as the tool's argument schema.
export type ToolModuleItemType = RunningModuleItemType & {
  toolParams: RunningModuleItemType['inputs'];
};

View File

@@ -0,0 +1,404 @@
import type { NextApiResponse } from 'next';
import {
filterGPTMessageByMaxTokens,
formatGPTMessagesInRequestBefore,
loadChatImgToBase64
} from '../../../chat/utils';
import type { ChatItemType, UserChatItemValueItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
import { getAIApi } from '../../../ai/config';
import type {
ChatCompletion,
ChatCompletionMessageParam,
StreamChatType
} from '@fastgpt/global/core/ai/type.d';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '../../../../common/api/requestPlusApi';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
import type { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
import {
countGptMessagesTokens,
countMessagesTokens
} from '@fastgpt/global/common/string/tiktoken';
import {
chats2GPTMessages,
getSystemPrompt,
GPTMessages2Chats,
runtimePrompt2ChatsValue
} from '@fastgpt/global/core/chat/adapt';
import {
Prompt_QuotePromptList,
Prompt_QuoteTemplateList
} from '@fastgpt/global/core/ai/prompt/AIChat';
import type { AIChatModuleProps } from '@fastgpt/global/core/module/node/type.d';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { responseWrite, responseWriteController } from '../../../../common/response';
import { getLLMModel, ModelTypeEnum } from '../../../ai/model';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import { getHistories } from '../utils';
import { filterSearchResultsByMaxChars } from '@fastgpt/global/core/dataset/search/utils';
import { getHistoryPreview } from '@fastgpt/global/core/chat/utils';
// AI-chat node inputs: the shared AI settings plus question, history and
// optional dataset quotes.
export type ChatProps = ModuleDispatchProps<
  AIChatModuleProps & {
    [ModuleInputKeyEnum.userChatInput]: string;
    [ModuleInputKeyEnum.history]?: ChatItemType[] | number; // a number means "last N items"
    [ModuleInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
  }
>;
// AI-chat node outputs: the answer text and the updated chat history.
export type ChatResponse = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.answerText]: string;
  [ModuleOutputKeyEnum.history]: ChatItemType[];
}>;
/* request openai chat */
// Dispatch the AI-chat node: assemble system prompt, dataset quotes, history
// and the user's question into a GPT message list, call the chat-completions
// API (streaming over SSE when enabled) and return the answer plus
// billing/usage metadata.
export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResponse> => {
  let {
    res,
    stream = false,
    detail = false,
    user,
    histories,
    module: { name, outputs },
    inputFiles = [],
    params: {
      model,
      temperature = 0,
      maxToken = 4000,
      history = 6,
      quoteQA = [],
      userChatInput,
      isResponseAnswerText = true,
      systemPrompt = '',
      quoteTemplate,
      quotePrompt
    }
  } = props;
  if (!userChatInput && inputFiles.length === 0) {
    return Promise.reject('Question is empty');
  }
  // Never stream when this node is configured not to surface its answer text.
  stream = stream && isResponseAnswerText;
  const chatHistories = getHistories(history, histories);
  // temperature adapt
  const modelConstantsData = getLLMModel(model);
  if (!modelConstantsData) {
    return Promise.reject('The chat model is undefined, you need to select a chat model.');
  }
  // Render the dataset quotes through the quote template (budget-limited).
  const { quoteText } = filterQuote({
    quoteQA,
    model: modelConstantsData,
    quoteTemplate
  });
  // censor model and system key
  // Text censoring only applies when billing goes through the system key.
  if (modelConstantsData.censor && !user.openaiAccount?.key) {
    await postTextCensor({
      text: `${systemPrompt}
${quoteText}
${userChatInput}
`
    });
  }
  const { filterMessages } = getChatMessages({
    model: modelConstantsData,
    histories: chatHistories,
    quoteText,
    quotePrompt,
    userChatInput,
    inputFiles,
    systemPrompt
  });
  // Clamp the response budget to what the context window leaves available.
  const { max_tokens } = await getMaxTokens({
    model: modelConstantsData,
    maxToken,
    filterMessages
  });
  // FastGPT temperature range: 1~10
  // Map the UI scale onto the model's own max temperature, floored at 0.01.
  temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
  temperature = Math.max(temperature, 0.01);
  const ai = getAIApi({
    userKey: user.openaiAccount,
    timeout: 480000
  });
  // Prepend the model's default system prompt when one is configured.
  const concatMessages = [
    ...(modelConstantsData.defaultSystemChatPrompt
      ? [
          {
            role: ChatCompletionRequestMessageRoleEnum.System,
            content: modelConstantsData.defaultSystemChatPrompt
          }
        ]
      : []),
    ...formatGPTMessagesInRequestBefore(filterMessages)
  ] as ChatCompletionMessageParam[];
  if (concatMessages.length === 0) {
    return Promise.reject('core.chat.error.Messages empty');
  }
  // Inline image content in user messages (presumably URL -> base64; confirm
  // against loadChatImgToBase64 in chat/utils).
  const loadMessages = await Promise.all(
    concatMessages.map(async (item) => {
      if (item.role === ChatCompletionRequestMessageRoleEnum.User) {
        return {
          ...item,
          content: await loadChatImgToBase64(item.content)
        };
      } else {
        return item;
      }
    })
  );
  const response = await ai.chat.completions.create(
    {
      ...modelConstantsData?.defaultConfig,
      model: modelConstantsData.model,
      temperature,
      max_tokens,
      stream,
      messages: loadMessages
    },
    {
      headers: {
        Accept: 'application/json, text/plain, */*'
      }
    }
  );
  // Either drain the SSE stream or read the one-shot completion.
  const { answerText } = await (async () => {
    if (stream) {
      // sse response
      const { answer } = await streamResponse({
        res,
        detail,
        stream: response
      });
      targetResponse({ res, detail, outputs });
      return {
        answerText: answer
      };
    } else {
      const unStreamResponse = response as ChatCompletion;
      const answer = unStreamResponse.choices?.[0]?.message?.content || '';
      return {
        answerText: answer
      };
    }
  })();
  const completeMessages = filterMessages.concat({
    role: ChatCompletionRequestMessageRoleEnum.Assistant,
    content: answerText
  });
  const chatCompleteMessages = GPTMessages2Chats(completeMessages);
  // Bill on the full conversation (prompt + answer) token count.
  const tokens = countMessagesTokens(chatCompleteMessages);
  const { totalPoints, modelName } = formatModelChars2Points({
    model,
    tokens,
    modelType: ModelTypeEnum.llm
  });
  return {
    answerText,
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      // Users running on their own OpenAI key are not charged points.
      totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
      model: modelName,
      tokens,
      query: `${userChatInput}`,
      maxToken: max_tokens,
      historyPreview: getHistoryPreview(chatCompleteMessages),
      contextTotalLen: completeMessages.length
    },
    [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
      {
        moduleName: name,
        totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
        model: modelName,
        tokens
      }
    ],
    [DispatchNodeResponseKeyEnum.toolResponses]: answerText,
    history: chatCompleteMessages
  };
};
// Trim dataset quotes to the model's quote token budget and render them into
// a single prompt-ready text block (items separated by "------").
function filterQuote({
  quoteQA = [],
  model,
  quoteTemplate
}: {
  quoteQA: ChatProps['params']['quoteQA'];
  model: LLMModelItemType;
  quoteTemplate?: string;
}) {
  // Keep only as many search results as fit the model's quote budget.
  const filterQuoteQA = filterSearchResultsByMaxChars(quoteQA, model.quoteMaxToken);

  // Render one quote item through the (possibly custom) template.
  const renderQuoteItem = (item: SearchDataResponseItemType, index: number) =>
    replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
      q: item.q,
      a: item.a,
      source: item.sourceName,
      sourceId: String(item.sourceId || 'UnKnow'),
      index: index + 1
    });

  const quoteText =
    filterQuoteQA.length === 0
      ? ''
      : filterQuoteQA.map((item, i) => renderQuoteItem(item, i).trim()).join('\n------\n');

  return {
    filterQuoteQA,
    quoteText
  };
}
// Build the GPT message list for one completion call: system prompt, prior
// history, then the user turn (question optionally wrapped in the quote
// prompt), trimmed to the model's context window.
function getChatMessages({
  quotePrompt,
  quoteText,
  histories = [],
  systemPrompt,
  userChatInput,
  inputFiles,
  model
}: {
  quotePrompt?: string;
  quoteText: string;
  histories: ChatItemType[];
  systemPrompt: string;
  userChatInput: string;
  inputFiles: UserChatItemValueItemType['file'][];
  model: LLMModelItemType;
}) {
  // When quotes exist, embed them and the question into the quote prompt
  // template; otherwise send the raw user input.
  const question = quoteText
    ? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
        quote: quoteText,
        question: userChatInput
      })
    : userChatInput;

  const humanItem: ChatItemType = {
    obj: ChatRoleEnum.Human,
    value: runtimePrompt2ChatsValue({
      files: inputFiles,
      text: question
    })
  };
  const chatItems: ChatItemType[] = [...getSystemPrompt(systemPrompt), ...histories, humanItem];

  // Convert to GPT message format and trim to the context window, reserving
  // ~300 tokens headroom (response budget is handled separately).
  const gptMessages = chats2GPTMessages({ messages: chatItems, reserveId: false });
  const filterMessages = filterGPTMessageByMaxTokens({
    messages: gptMessages,
    maxTokens: model.maxContext - 300 // filter token. not response maxToken
  });

  return {
    filterMessages
  };
}
// Compute the `max_tokens` for the completion call: the requested budget,
// capped by the model's max response and by whatever the prompt leaves free
// in the context window (with a floor of 200).
function getMaxTokens({
  maxToken,
  model,
  filterMessages = []
}: {
  maxToken: number;
  model: LLMModelItemType;
  filterMessages: ChatCompletionMessageParam[];
}) {
  const requested = Math.min(maxToken, model.maxResponse);
  const promptTokens = countGptMessagesTokens(filterMessages);

  /* count response max token */
  let responseBudget =
    promptTokens + requested > model.maxContext ? model.maxContext - promptTokens : requested;
  if (responseBudget <= 0) {
    responseBudget = 200;
  }

  return {
    max_tokens: responseBudget
  };
}
// Emit a newline answer chunk, but only when the answer-text output is wired
// to at least one downstream module (acts as a separator between modules).
function targetResponse({
  res,
  outputs,
  detail
}: {
  res: NextApiResponse;
  outputs: ModuleItemType['outputs'];
  detail: boolean;
}) {
  const answerOutput = outputs.find((output) => output.key === ModuleOutputKeyEnum.answerText);
  const hasTargets = (answerOutput?.targets || []).length > 0;
  if (!hasTargets) {
    return;
  }
  responseWrite({
    res,
    event: detail ? SseResponseEventEnum.answer : undefined,
    data: textAdaptGptResponse({
      text: '\n'
    })
  });
}
// Forward a chat-completions SSE stream to the client chunk by chunk while
// accumulating the full answer text. Rejects when the model produced nothing.
async function streamResponse({
  res,
  detail,
  stream
}: {
  res: NextApiResponse;
  detail: boolean;
  stream: StreamChatType;
}) {
  const write = responseWriteController({
    res,
    readStream: stream
  });

  let accumulated = '';
  for await (const chunk of stream) {
    // Stop pulling from the model as soon as the client disconnects.
    if (res.closed) {
      stream.controller?.abort();
      break;
    }
    const delta = chunk.choices?.[0]?.delta?.content || '';
    accumulated += delta;
    responseWrite({
      write,
      event: detail ? SseResponseEventEnum.answer : undefined,
      data: textAdaptGptResponse({
        text: delta
      })
    });
  }

  if (!accumulated) {
    return Promise.reject('core.chat.Chat API is error or undefined');
  }
  return { answer: accumulated };
}

View File

@@ -0,0 +1,35 @@
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { datasetSearchResultConcat } from '@fastgpt/global/core/dataset/search/utils';
import { filterSearchResultsByMaxChars } from '@fastgpt/global/core/dataset/search/utils';
// Inputs: a token budget plus any number of quote arrays produced by upstream
// dataset-search nodes (arbitrary keys).
type DatasetConcatProps = ModuleDispatchProps<
  {
    [ModuleInputKeyEnum.datasetMaxTokens]: number;
  } & { [key: string]: SearchDataResponseItemType[] }
>;
// Output: the fused, budget-limited quote list.
type DatasetConcatResponse = {
  [ModuleOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
};
// Merge the quote lists from several search nodes with reciprocal-rank fusion
// and cap the result by the configured budget.
export async function dispatchDatasetConcat(
  props: DatasetConcatProps
): Promise<DatasetConcatResponse> {
  // Every param other than `limit` is expected to be a quote array.
  const {
    params: { limit = 1500, ...quoteMap }
  } = props as DatasetConcatProps;

  const quoteLists = Object.values(quoteMap).filter((value) => Array.isArray(value));

  // RRF merge with k = 60 for every list.
  const fusedResults = datasetSearchResultConcat(
    quoteLists.map((list) => ({
      k: 60,
      list
    }))
  );

  return {
    [ModuleOutputKeyEnum.datasetQuoteQA]: filterSearchResultsByMaxChars(fusedResults, limit)
  };
}

View File

@@ -0,0 +1,164 @@
import {
DispatchNodeResponseType,
DispatchNodeResultType
} from '@fastgpt/global/core/module/runtime/type.d';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModelTypeEnum, getLLMModel, getVectorModel } from '../../../ai/model';
import { searchDatasetData } from '../../../dataset/search/controller';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
import { getHistories } from '../utils';
import { datasetSearchQueryExtension } from '../../../dataset/search/utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { checkTeamReRankPermission } from '../../../../support/permission/teamLimit';
// Dataset-search node inputs: selected datasets, search tuning (similarity,
// budget, mode, re-rank) and optional LLM query-extension settings.
type DatasetSearchProps = ModuleDispatchProps<{
  [ModuleInputKeyEnum.datasetSelectList]: SelectedDatasetType;
  [ModuleInputKeyEnum.datasetSimilarity]: number;
  [ModuleInputKeyEnum.datasetMaxTokens]: number;
  [ModuleInputKeyEnum.datasetSearchMode]: `${DatasetSearchModeEnum}`;
  [ModuleInputKeyEnum.userChatInput]: string;
  [ModuleInputKeyEnum.datasetSearchUsingReRank]: boolean;
  [ModuleInputKeyEnum.datasetSearchUsingExtensionQuery]: boolean;
  [ModuleInputKeyEnum.datasetSearchExtensionModel]: string;
  [ModuleInputKeyEnum.datasetSearchExtensionBg]: string;
}>;
// Outputs: the quote list plus two mutually-exclusive boolean handles used to
// branch on "found results" / "found nothing".
export type DatasetSearchResponse = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.datasetIsEmpty]?: boolean;
  [ModuleOutputKeyEnum.datasetUnEmpty]?: boolean;
  [ModuleOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
}>;
// Run a dataset-search node: optionally extend the query with an LLM, search
// the selected datasets (with optional re-rank), then account usage for both
// the vector model and the extension model.
export async function dispatchDatasetSearch(
  props: DatasetSearchProps
): Promise<DatasetSearchResponse> {
  const {
    teamId,
    histories,
    module,
    params: {
      datasets = [],
      similarity,
      limit = 1500,
      usingReRank,
      searchMode,
      userChatInput,
      datasetSearchUsingExtensionQuery,
      datasetSearchExtensionModel,
      datasetSearchExtensionBg
    }
  } = props as DatasetSearchProps;
  if (!Array.isArray(datasets)) {
    return Promise.reject('Quote type error');
  }
  if (datasets.length === 0) {
    return Promise.reject('core.chat.error.Select dataset empty');
  }
  if (!userChatInput) {
    return Promise.reject('core.chat.error.User input empty');
  }
  // query extension
  const extensionModel =
    datasetSearchUsingExtensionQuery && datasetSearchExtensionModel
      ? getLLMModel(datasetSearchExtensionModel)
      : undefined;
  // Rewrite/expand the question, using the last 6 history items as context.
  const { concatQueries, rewriteQuery, aiExtensionResult } = await datasetSearchQueryExtension({
    query: userChatInput,
    extensionModel,
    extensionBg: datasetSearchExtensionBg,
    histories: getHistories(6, histories)
  });
  // console.log(concatQueries, rewriteQuery, aiExtensionResult);
  // get vector
  // NOTE(review): uses the first dataset's vector model — presumably all
  // selected datasets share it; confirm upstream validation.
  const vectorModel = getVectorModel(datasets[0]?.vectorModel?.model);
  // start search
  const {
    searchRes,
    tokens,
    usingSimilarityFilter,
    usingReRank: searchUsingReRank
  } = await searchDatasetData({
    teamId,
    reRankQuery: `${rewriteQuery}`,
    queries: concatQueries,
    model: vectorModel.model,
    similarity,
    limit,
    datasetIds: datasets.map((item) => item.datasetId),
    searchMode,
    // Re-rank is additionally gated on the team's permission.
    usingReRank: usingReRank && (await checkTeamReRankPermission(teamId))
  });
  // count bill results
  // vector
  const { totalPoints, modelName } = formatModelChars2Points({
    model: vectorModel.model,
    tokens,
    modelType: ModelTypeEnum.vector
  });
  const responseData: DispatchNodeResponseType & { totalPoints: number } = {
    totalPoints,
    query: concatQueries.join('\n'),
    model: modelName,
    tokens,
    similarity: usingSimilarityFilter ? similarity : undefined,
    limit,
    searchMode,
    searchUsingReRank: searchUsingReRank,
    quoteList: searchRes
  };
  const nodeDispatchUsages: ChatNodeUsageType[] = [
    {
      totalPoints,
      moduleName: module.name,
      model: modelName,
      tokens
    }
  ];
  // Append the query-extension LLM cost on top of the vector cost.
  if (aiExtensionResult) {
    const { totalPoints, modelName } = formatModelChars2Points({
      model: aiExtensionResult.model,
      tokens: aiExtensionResult.tokens,
      modelType: ModelTypeEnum.llm
    });
    responseData.totalPoints += totalPoints;
    // NOTE(review): this overwrites the vector token count with the LLM token
    // count — confirm that is intended.
    responseData.tokens = aiExtensionResult.tokens;
    responseData.extensionModel = modelName;
    responseData.extensionResult =
      aiExtensionResult.extensionQueries?.join('\n') ||
      JSON.stringify(aiExtensionResult.extensionQueries);
    nodeDispatchUsages.push({
      totalPoints,
      moduleName: 'core.module.template.Query extension',
      model: modelName,
      tokens: aiExtensionResult.tokens
    });
  }
  return {
    // isEmpty/unEmpty drive the two boolean output handles.
    isEmpty: searchRes.length === 0 ? true : undefined,
    unEmpty: searchRes.length > 0 ? true : undefined,
    quoteQA: searchRes,
    [DispatchNodeResponseKeyEnum.nodeResponse]: responseData,
    nodeDispatchUsages,
    // Compact form consumed when this node runs as a tool.
    [DispatchNodeResponseKeyEnum.toolResponses]: searchRes.map((item) => ({
      id: item.id,
      text: `${item.q}\n${item.a}`.trim()
    }))
  };
}

View File

@@ -0,0 +1,435 @@
import { NextApiResponse } from 'next';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ChatDispatchProps } from '@fastgpt/global/core/module/type.d';
import type { RunningModuleItemType } from '@fastgpt/global/core/module/runtime/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import type {
AIChatItemValueItemType,
ChatHistoryItemResType,
ToolRunResponseItemType
} from '@fastgpt/global/core/chat/type.d';
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleItemType } from '@fastgpt/global/core/module/type';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { responseWriteNodeStatus } from '../../../common/response';
import { getSystemTime } from '@fastgpt/global/common/time/timezone';
import { dispatchHistory } from './init/history';
import { dispatchChatInput } from './init/userChatInput';
import { dispatchChatCompletion } from './chat/oneapi';
import { dispatchDatasetSearch } from './dataset/search';
import { dispatchDatasetConcat } from './dataset/concat';
import { dispatchAnswer } from './tools/answer';
import { dispatchClassifyQuestion } from './agent/classifyQuestion';
import { dispatchContentExtract } from './agent/extract';
import { dispatchHttpRequest } from './tools/http';
import { dispatchHttp468Request } from './tools/http468';
import { dispatchAppRequest } from './tools/runApp';
import { dispatchQueryExtension } from './tools/queryExternsion';
import { dispatchRunPlugin } from './plugin/run';
import { dispatchPluginInput } from './plugin/runInput';
import { dispatchPluginOutput } from './plugin/runOutput';
import { checkTheModuleConnectedByTool, valueTypeFormat } from './utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { dispatchRunTools } from './agent/runTool/index';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { DispatchFlowResponse } from './type';
import { dispatchStopToolCall } from './agent/runTool/stopTool';
// Dispatch table: maps every flow node type to the function that executes it.
// `userGuide` is configuration-only and resolves to a no-op.
const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
  [FlowNodeTypeEnum.historyNode]: dispatchHistory,
  [FlowNodeTypeEnum.questionInput]: dispatchChatInput,
  [FlowNodeTypeEnum.answerNode]: dispatchAnswer,
  [FlowNodeTypeEnum.chatNode]: dispatchChatCompletion,
  [FlowNodeTypeEnum.datasetSearchNode]: dispatchDatasetSearch,
  [FlowNodeTypeEnum.datasetConcatNode]: dispatchDatasetConcat,
  [FlowNodeTypeEnum.classifyQuestion]: dispatchClassifyQuestion,
  [FlowNodeTypeEnum.contentExtract]: dispatchContentExtract,
  [FlowNodeTypeEnum.httpRequest]: dispatchHttpRequest,
  [FlowNodeTypeEnum.httpRequest468]: dispatchHttp468Request,
  [FlowNodeTypeEnum.runApp]: dispatchAppRequest,
  [FlowNodeTypeEnum.pluginModule]: dispatchRunPlugin,
  [FlowNodeTypeEnum.pluginInput]: dispatchPluginInput,
  [FlowNodeTypeEnum.pluginOutput]: dispatchPluginOutput,
  [FlowNodeTypeEnum.queryExtension]: dispatchQueryExtension,
  [FlowNodeTypeEnum.tools]: dispatchRunTools,
  [FlowNodeTypeEnum.stopTool]: dispatchStopToolCall,
  // none
  [FlowNodeTypeEnum.userGuide]: () => Promise.resolve()
};
/* running */
// Execute a workflow: seed the entry modules with startParams, propagate each
// module's outputs along its connections until the queue drains, then collect
// responses, usages and assistant values for the caller.
export async function dispatchWorkFlow({
  res,
  modules = [],
  runtimeModules,
  startParams = {},
  histories = [],
  variables = {},
  user,
  stream = false,
  detail = false,
  ...props
}: ChatDispatchProps & {
  modules?: ModuleItemType[]; // app modules
  runtimeModules?: RunningModuleItemType[];
  startParams?: Record<string, any>; // entry module params
}): Promise<DispatchFlowResponse> {
  // set sse response headers
  if (stream) {
    res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('X-Accel-Buffering', 'no');
    res.setHeader('Cache-Control', 'no-cache, no-transform');
  }
  // Caller-supplied variables win over the generated system variables.
  variables = {
    ...getSystemVariable({ timezone: user.timezone }),
    ...variables
  };
  // Reuse prepared runtime modules (e.g. from a nested run) or build them now.
  const runningModules = runtimeModules ? runtimeModules : loadModules(modules, variables);
  let chatResponses: ChatHistoryItemResType[] = []; // response request and save to database
  let chatAssistantResponse: AIChatItemValueItemType[] = []; // The value will be returned to the user
  let chatNodeUsages: ChatNodeUsageType[] = [];
  let toolRunResponse: ToolRunResponseItemType;
  let runningTime = Date.now();
  /* Store special response field */
  // Accumulates one module's result into the run-level collections above.
  function pushStore(
    { inputs = [] }: RunningModuleItemType,
    {
      answerText = '',
      responseData,
      nodeDispatchUsages,
      toolResponses,
      assistantResponses
    }: {
      [ModuleOutputKeyEnum.answerText]?: string;
      [DispatchNodeResponseKeyEnum.nodeResponse]?: ChatHistoryItemResType;
      [DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[];
      [DispatchNodeResponseKeyEnum.toolResponses]?: ToolRunResponseItemType;
      [DispatchNodeResponseKeyEnum.assistantResponses]?: AIChatItemValueItemType[]; // tool module, save the response value
    }
  ) {
    const time = Date.now();
    if (responseData) {
      // Attach per-module wall-clock time (seconds, 2 decimals).
      chatResponses.push({
        ...responseData,
        runningTime: +((time - runningTime) / 1000).toFixed(2)
      });
    }
    if (nodeDispatchUsages) {
      chatNodeUsages = chatNodeUsages.concat(nodeDispatchUsages);
    }
    if (toolResponses !== undefined) {
      // Ignore empty tool responses so they don't clobber a previous value.
      // NOTE(review): these early returns also skip the assistant/answer
      // handling and the runningTime update below — confirm intended.
      if (Array.isArray(toolResponses) && toolResponses.length === 0) return;
      if (typeof toolResponses === 'object' && Object.keys(toolResponses).length === 0) {
        return;
      }
      toolRunResponse = toolResponses;
    }
    if (assistantResponses) {
      chatAssistantResponse = chatAssistantResponse.concat(assistantResponses);
    }
    // save assistant text response
    if (answerText) {
      // Honor the node's "respond with answer text" switch (defaults to true).
      const isResponseAnswerText =
        inputs.find((item) => item.key === ModuleInputKeyEnum.aiChatIsResponseText)?.value ?? true;
      if (isResponseAnswerText) {
        chatAssistantResponse.push({
          type: ChatItemValueTypeEnum.text,
          text: {
            content: answerText
          }
        });
      }
    }
    runningTime = time;
  }
  /* Inject data into module input */
  function moduleInput(module: RunningModuleItemType, data: Record<string, any> = {}) {
    const updateInputValue = (key: string, value: any) => {
      const index = module.inputs.findIndex((item: any) => item.key === key);
      if (index === -1) return;
      module.inputs[index].value = value;
    };
    Object.entries(data).map(([key, val]: any) => {
      updateInputValue(key, val);
    });
    return;
  }
  /* Pass the output of the module to the next stage */
  function moduleOutput(
    module: RunningModuleItemType,
    result: Record<string, any> = {}
  ): Promise<any> {
    pushStore(module, result);
    const nextRunModules: RunningModuleItemType[] = [];
    // Assign the output value to the next module
    module.outputs.map((outputItem) => {
      if (result[outputItem.key] === undefined) return;
      /* update output value */
      outputItem.value = result[outputItem.key];
      /* update target */
      outputItem.targets.map((target: any) => {
        // find module
        const targetModule = runningModules.find((item) => item.moduleId === target.moduleId);
        if (!targetModule) return;
        // push to running queue
        nextRunModules.push(targetModule);
        // update input
        moduleInput(targetModule, { [target.key]: outputItem.value });
      });
    });
    // Ensure the uniqueness of running modules
    const set = new Set<string>();
    const filterModules = nextRunModules.filter((module) => {
      if (set.has(module.moduleId)) return false;
      set.add(module.moduleId);
      return true;
    });
    return checkModulesCanRun(filterModules);
  }
  // Run every queued module whose inputs are all satisfied (no undefined value).
  function checkModulesCanRun(modules: RunningModuleItemType[] = []) {
    return Promise.all(
      modules.map((module) => {
        if (!module.inputs.find((item: any) => item.value === undefined)) {
          // remove switch
          moduleInput(module, { [ModuleInputKeyEnum.switch]: undefined });
          return moduleRun(module);
        }
      })
    );
  }
  // Execute one module: report status, format params, dispatch by flow type,
  // then propagate the result via moduleOutput.
  async function moduleRun(module: RunningModuleItemType): Promise<any> {
    // Bail out silently once the client has disconnected.
    if (res.closed) return Promise.resolve();
    if (stream && detail && module.showStatus) {
      responseStatus({
        res,
        name: module.name,
        status: 'running'
      });
    }
    // get module running params
    const params: Record<string, any> = {};
    module.inputs.forEach((item) => {
      params[item.key] = valueTypeFormat(item.value, item.valueType);
    });
    const dispatchData: ModuleDispatchProps<Record<string, any>> = {
      ...props,
      res,
      variables,
      histories,
      user,
      stream,
      detail,
      module,
      runtimeModules: runningModules,
      params
    };
    // run module
    // Unknown flow types resolve to an empty result rather than throwing.
    const dispatchRes: Record<string, any> = await (async () => {
      if (callbackMap[module.flowType]) {
        return callbackMap[module.flowType](dispatchData);
      }
      return {};
    })();
    // format response data. Add module name and module type
    const formatResponseData: ChatHistoryItemResType = (() => {
      if (!dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]) return undefined;
      return {
        moduleName: module.name,
        moduleType: module.flowType,
        ...dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]
      };
    })();
    // Add output default value
    module.outputs.forEach((item) => {
      if (!item.required) return;
      if (dispatchRes[item.key] !== undefined) return;
      dispatchRes[item.key] = valueTypeFormat(item.defaultValue, item.valueType);
    });
    // Pass userChatInput
    const hasUserChatInputTarget = !!module.outputs.find(
      (item) => item.key === ModuleOutputKeyEnum.userChatInput
    )?.targets?.length;
    return moduleOutput(module, {
      [ModuleOutputKeyEnum.finish]: true,
      // NOTE(review): params is indexed with the *output* key enum here —
      // presumably the input/output key strings are identical; confirm.
      [ModuleOutputKeyEnum.userChatInput]: hasUserChatInputTarget
        ? params[ModuleOutputKeyEnum.userChatInput]
        : undefined,
      ...dispatchRes,
      [DispatchNodeResponseKeyEnum.nodeResponse]: formatResponseData,
      [DispatchNodeResponseKeyEnum.nodeDispatchUsages]:
        dispatchRes[DispatchNodeResponseKeyEnum.nodeDispatchUsages]
    });
  }
  // start process with initial input
  const initModules = runningModules.filter((item) => item.isEntry);
  // reset entry
  // NOTE(review): this clears isEntry on the *store* modules, not on
  // runningModules — confirm that is the intended side effect.
  modules.forEach((item) => {
    item.isEntry = false;
  });
  initModules.map((module) =>
    moduleInput(module, {
      ...startParams,
      history: [] // abandon history field. History module will get histories from other fields.
    })
  );
  await checkModulesCanRun(initModules);
  // focus try to run pluginOutput
  // The plugin-output module is forced to run even if not every input arrived.
  const pluginOutputModule = runningModules.find(
    (item) => item.flowType === FlowNodeTypeEnum.pluginOutput
  );
  if (pluginOutputModule) {
    await moduleRun(pluginOutputModule);
  }
  return {
    flowResponses: chatResponses,
    flowUsages: chatNodeUsages,
    [DispatchNodeResponseKeyEnum.assistantResponses]:
      concatAssistantResponseAnswerText(chatAssistantResponse),
    [DispatchNodeResponseKeyEnum.toolResponses]: toolRunResponse
  };
}
/* Init stored modules into runnable modules: drop guide-only nodes, resolve
   {{variables}} in string inputs, filter irrelevant inputs, and order outputs
   so `finish` always fires last. */
function loadModules(
  modules: ModuleItemType[],
  variables: Record<string, any>
): RunningModuleItemType[] {
  return modules
    .filter((item) => {
      // Exclude display-only guide nodes from the run queue.
      // Fix: compare against the node *type* — the original compared the enum
      // to `item.moduleId` (hidden behind an `as any` cast), which only worked
      // when a template happened to use the type name as its module id.
      return ![FlowNodeTypeEnum.userGuide].includes(item.flowType as any);
    })
    .map<RunningModuleItemType>((module) => {
      // Whether some tool node drives this module — invariant per module, so
      // compute it once instead of once per input item.
      const isToolConnected = checkTheModuleConnectedByTool(modules, module);
      return {
        moduleId: module.moduleId,
        name: module.name,
        avatar: module.avatar,
        intro: module.intro,
        flowType: module.flowType,
        showStatus: module.showStatus,
        isEntry: module.isEntry,
        inputs: module.inputs
          .filter(
            /*
              1. system input must be save
              2. connected by source handle
              3. manual input value or have default value
              4. For the module connected by the tool, leave the toolDescription input
            */
            (item) => {
              if (isToolConnected && item.toolDescription) {
                return true;
              }
              return (
                item.type === FlowNodeInputTypeEnum.systemInput ||
                item.connected ||
                item.value !== undefined
              );
            }
          ) // filter unconnected target input
          .map((item) => {
            // Only string values can contain {{variable}} placeholders.
            const replace = ['string'].includes(typeof item.value);
            return {
              key: item.key,
              // variables replace
              value: replace ? replaceVariable(item.value, variables) : item.value,
              valueType: item.valueType,
              required: item.required,
              toolDescription: item.toolDescription
            };
          }),
        outputs: module.outputs
          .map((item) => ({
            key: item.key,
            required: item.required,
            defaultValue: item.defaultValue,
            answer: item.key === ModuleOutputKeyEnum.answerText,
            value: undefined,
            valueType: item.valueType,
            targets: item.targets
          }))
          .sort((a, b) => {
            // finish output always at last
            if (a.key === ModuleOutputKeyEnum.finish) return 1;
            if (b.key === ModuleOutputKeyEnum.finish) return -1;
            return 0;
          })
      };
    });
}
/* SSE: push a module running-status event to the client. */
export function responseStatus({
  res,
  status,
  name
}: {
  res: NextApiResponse;
  status?: 'running' | 'finish';
  name?: string;
}) {
  // Nothing to report without a module name. (`status` is accepted for API
  // symmetry but the underlying writer only consumes the name.)
  if (!name) {
    return;
  }
  responseWriteNodeStatus({
    res,
    name
  });
}
/* get system variable */
// Built-in variables injected into every workflow run.
export function getSystemVariable({ timezone }: { timezone: string }) {
  const cTime = getSystemTime(timezone);
  return { cTime };
}
// Collapse runs of consecutive text values into a single text item so the
// client receives one merged answer instead of fragments.
// Note: merging appends into the previously pushed item in place, matching
// the original behavior.
export const concatAssistantResponseAnswerText = (response: AIChatItemValueItemType[]) => {
  const merged: AIChatItemValueItemType[] = [];
  for (const item of response) {
    const prev = merged[merged.length - 1];
    // Merge only when both current and previous are non-empty-ish text items.
    if (
      item.type === ChatItemValueTypeEnum.text &&
      prev &&
      prev.type === ChatItemValueTypeEnum.text &&
      prev.text?.content
    ) {
      prev.text.content += item.text?.content || '';
      continue;
    }
    merged.push(item);
  }
  return merged;
};

View File

@@ -0,0 +1,19 @@
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { getHistories } from '../utils';
// History node inputs: an optional window size plus the raw history list.
export type HistoryProps = ModuleDispatchProps<{
  maxContext?: number;
  [ModuleInputKeyEnum.history]: ChatItemType[];
}>;
// Slice the shared chat history down to this node's configured window.
export const dispatchHistory = (props: Record<string, any>) => {
  const { histories, params } = props as HistoryProps;
  return {
    history: getHistories(params.maxContext, histories)
  };
};

View File

@@ -0,0 +1,14 @@
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
// Question-input node: simply carries the user's chat input downstream.
export type UserChatInputProps = ModuleDispatchProps<{
  [ModuleInputKeyEnum.userChatInput]: string;
}>;
// Pass the user's question through as this node's output.
export const dispatchChatInput = (props: Record<string, any>) => {
  const userChatInput = props.params?.userChatInput;
  return { userChatInput };
};

View File

@@ -0,0 +1,99 @@
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { dispatchWorkFlow } from '../index';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { DYNAMIC_INPUT_KEY, ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import { getPluginRuntimeById } from '../../../plugin/controller';
import { authPluginCanUse } from '../../../../support/permission/auth/plugin';
import { setEntryEntries } from '../utils';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
// Plugin-run node inputs: the plugin id plus arbitrary plugin input values.
type RunPluginProps = ModuleDispatchProps<{
  [ModuleInputKeyEnum.pluginId]: string;
  [key: string]: any;
}>;
// Plugin outputs are spread dynamically into the result, so none are declared.
type RunPluginResponse = DispatchNodeResultType<{}>;
// Run a plugin as a nested workflow: auth the plugin, map caller params onto
// the plugin's input module (folding undeclared keys into the dynamic input),
// execute the inner flow, and surface the plugin-output module's values.
export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
  const {
    mode,
    teamId,
    tmbId,
    module,
    params: { pluginId, ...data }
  } = props;
  if (!pluginId) {
    return Promise.reject('pluginId can not find');
  }
  await authPluginCanUse({ id: pluginId, teamId, tmbId });
  const plugin = await getPluginRuntimeById(pluginId);
  // concat dynamic inputs
  const inputModule = plugin.modules.find((item) => item.flowType === FlowNodeTypeEnum.pluginInput);
  if (!inputModule) return Promise.reject('Plugin error, It has no set input.');
  const hasDynamicInput = inputModule.inputs.find((input) => input.key === DYNAMIC_INPUT_KEY);
  // Declared inputs pass through by key; any other key is grouped under the
  // dynamic-input key (only when the plugin declares one).
  const startParams: Record<string, any> = (() => {
    if (!hasDynamicInput) return data;
    const params: Record<string, any> = {
      [DYNAMIC_INPUT_KEY]: {}
    };
    for (const key in data) {
      const input = inputModule.inputs.find((input) => input.key === key);
      if (input) {
        params[key] = data[key];
      } else {
        params[DYNAMIC_INPUT_KEY][key] = data[key];
      }
    }
    return params;
  })();
  const { flowResponses, flowUsages, assistantResponses } = await dispatchWorkFlow({
    ...props,
    modules: setEntryEntries(plugin.modules).map((module) => ({
      ...module,
      showStatus: false // hide inner node status from the end user
    })),
    runtimeModules: undefined, // must reset
    startParams
  });
  const output = flowResponses.find((item) => item.moduleType === FlowNodeTypeEnum.pluginOutput);
  if (output) {
    output.moduleLogo = plugin.avatar;
  }
  return {
    assistantResponses,
    // responseData, // debug
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      moduleLogo: plugin.avatar,
      totalPoints: flowResponses.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
      pluginOutput: output?.pluginOutput,
      // Inner node detail is only exposed to the owning team in test mode.
      pluginDetail:
        mode === 'test' && plugin.teamId === teamId
          ? flowResponses.filter((item) => {
              const filterArr = [FlowNodeTypeEnum.pluginOutput];
              return !filterArr.includes(item.moduleType as any);
            })
          : undefined
    },
    [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
      {
        moduleName: plugin.name,
        totalPoints: flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
        model: plugin.name,
        tokens: 0
      }
    ],
    [DispatchNodeResponseKeyEnum.toolResponses]: output?.pluginOutput ? output.pluginOutput : {},
    // Spread the plugin outputs so downstream modules can consume them directly.
    ...(output ? output.pluginOutput : {})
  };
};

View File

@@ -0,0 +1,11 @@
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
export type PluginInputProps = ModuleDispatchProps<{
  [key: string]: any;
}>;

// Plugin entry node: simply forwards the received params as its outputs.
export const dispatchPluginInput = ({ params }: PluginInputProps) => params;

View File

@@ -0,0 +1,19 @@
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type.d';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
export type PluginOutputProps = ModuleDispatchProps<{
  [key: string]: any;
}>;
export type PluginOutputResponse = DispatchNodeResultType<{}>;

// Plugin exit node: wraps the collected params as the plugin's output
// payload. The node itself consumes no points (totalPoints: 0).
export const dispatchPluginOutput = ({ params }: PluginOutputProps): PluginOutputResponse => ({
  [DispatchNodeResponseKeyEnum.nodeResponse]: {
    totalPoints: 0,
    pluginOutput: params
  }
});

View File

@@ -0,0 +1,37 @@
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
import { responseWrite } from '../../../../common/response';
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
export type AnswerProps = ModuleDispatchProps<{
  text: string;
}>;
export type AnswerResponse = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.answerText]: string;
}>;

/**
 * Emits a fixed answer text. When streaming, the text is also pushed to the
 * client immediately as an SSE "fastAnswer" event; either way the formatted
 * text is returned as the node's answerText output.
 */
export const dispatchAnswer = (props: AnswerProps): AnswerResponse => {
  // Typing the parameter directly removes the unsafe
  // `Record<string, any>` signature + `as AnswerProps` cast.
  const {
    res,
    detail,
    stream,
    params: { text = '' }
  } = props;

  // Non-string inputs (e.g. objects wired from other nodes) are pretty-printed as JSON.
  const formatText = typeof text === 'string' ? text : JSON.stringify(text, null, 2);

  if (stream) {
    responseWrite({
      res,
      // Only label the SSE event when the client requested detailed events.
      event: detail ? SseResponseEventEnum.fastAnswer : undefined,
      data: textAdaptGptResponse({
        text: `\n${formatText}`
      })
    });
  }

  return {
    [ModuleOutputKeyEnum.answerText]: formatText
  };
};

View File

@@ -0,0 +1,251 @@
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import {
DYNAMIC_INPUT_KEY,
ModuleInputKeyEnum,
ModuleOutputKeyEnum
} from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import axios from 'axios';
import { valueTypeFormat } from '../utils';
import { SERVICE_LOCAL_HOST } from '../../../../common/system/tools';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
type HttpRequestProps = ModuleDispatchProps<{
  [ModuleInputKeyEnum.abandon_httpUrl]: string;
  [ModuleInputKeyEnum.httpMethod]: string;
  [ModuleInputKeyEnum.httpReqUrl]: string;
  [ModuleInputKeyEnum.httpHeaders]: string;
  [key: string]: any;
}>;
type HttpResponse = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.failed]?: boolean;
  [key: string]: any;
}>;

// Lift the values nested under the dynamic-input container up to the top
// level of the params object; the container key itself is kept but blanked.
const flatDynamicParams = (params: Record<string, any>) => {
  const nested = params[DYNAMIC_INPUT_KEY];
  if (!nested) return params;
  const merged = {
    ...params,
    ...nested
  };
  merged[DYNAMIC_INPUT_KEY] = undefined;
  return merged;
};
/**
 * Legacy HTTP request node (v1): sends the node inputs to an external
 * endpoint and maps the flattened JSON response onto the node's declared
 * outputs. On transport/parse failure it marks the node as failed rather
 * than throwing, so the workflow can branch on the `failed` output.
 */
export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<HttpResponse> => {
  let {
    appId,
    chatId,
    responseChatItemId,
    variables,
    module: { outputs },
    params: {
      system_httpMethod: httpMethod = 'POST',
      system_httpReqUrl: httpReqUrl,
      system_httpHeader: httpHeader,
      ...body
    }
  } = props;

  if (!httpReqUrl) {
    return Promise.reject('Http url is empty');
  }

  // Lift values nested under the dynamic-input key to the top level.
  body = flatDynamicParams(body);

  // POST payload: chat context plus the node inputs under `data`.
  const requestBody = {
    appId,
    chatId,
    responseChatItemId,
    variables,
    data: body
  };
  // GET query string: chat context, variables and inputs flattened together.
  const requestQuery = {
    appId,
    chatId,
    ...variables,
    ...body
  };

  // Convert dotted keys ("a.b") into nested objects before sending.
  // NOTE(review): transformFlatJson mutates nested objects in place; the
  // shallow copy here only protects the top level.
  const formatBody = transformFlatJson({ ...requestBody });

  // parse header
  const headers = await (() => {
    try {
      if (!httpHeader) return {};
      return JSON.parse(httpHeader);
    } catch (error) {
      return Promise.reject('Header 为非法 JSON 格式');
    }
  })();

  try {
    const response = await fetchData({
      method: httpMethod,
      url: httpReqUrl,
      headers,
      body: formatBody,
      query: requestQuery
    });

    // format output value type
    const results: Record<string, any> = {};
    for (const key in response) {
      const output = outputs.find((item) => item.key === key);
      if (!output) continue;
      results[key] = valueTypeFormat(response[key], output.valueType);
    }

    return {
      [DispatchNodeResponseKeyEnum.nodeResponse]: {
        totalPoints: 0,
        body: formatBody,
        httpResult: response
      },
      ...results
    };
  } catch (error) {
    console.log(error);

    return {
      // Signal failure to the flow engine instead of throwing.
      [ModuleOutputKeyEnum.failed]: true,
      [DispatchNodeResponseKeyEnum.nodeResponse]: {
        totalPoints: 0,
        body: formatBody,
        httpResult: { error }
      }
    };
  }
};
/**
 * Executes the HTTP call and flattens the JSON response so nested values
 * become addressable by dotted / indexed path (e.g. "user.name",
 * "list[0].age") when matching against output keys.
 */
async function fetchData({
  method,
  url,
  headers,
  body,
  query
}: {
  method: string;
  url: string;
  headers: Record<string, any>;
  body: Record<string, any>;
  query: Record<string, any>;
}): Promise<Record<string, any>> {
  const { data: response } = await axios<Record<string, any>>({
    method,
    // NOTE(review): the node url is resolved against the local service host
    // baseURL — confirm this is intended for arbitrary external urls.
    baseURL: `http://${SERVICE_LOCAL_HOST}`,
    url,
    headers: {
      'Content-Type': 'application/json',
      ...headers
    },
    timeout: 360000,
    // Only GET carries the query object; only POST carries the body.
    params: method === 'GET' ? query : {},
    data: method === 'POST' ? body : {}
  });

  /*
    parse the json:
    {
      user: {
        name: 'xxx',
        age: 12
      },
      list: [
        {
          name: 'xxx',
          age: 50
        },
        [{ test: 22 }]
      ],
      psw: 'xxx'
    }
    result: {
      'user': { name: 'xxx', age: 12 },
      'user.name': 'xxx',
      'user.age': 12,
      'list': [ { name: 'xxx', age: 50 }, [ [Object] ] ],
      'list[0]': { name: 'xxx', age: 50 },
      'list[0].name': 'xxx',
      'list[0].age': 50,
      'list[1]': [ { test: 22 } ],
      'list[1][0]': { test: 22 },
      'list[1][0].test': 22,
      'psw': 'xxx'
    }
  */
  // Recursively index every value by its flattened path while also keeping
  // the original nested value at each level.
  const parseJson = (obj: Record<string, any>, prefix = '') => {
    let result: Record<string, any> = {};

    if (Array.isArray(obj)) {
      for (let i = 0; i < obj.length; i++) {
        result[`${prefix}[${i}]`] = obj[i];

        if (Array.isArray(obj[i])) {
          result = {
            ...result,
            ...parseJson(obj[i], `${prefix}[${i}]`)
          };
        } else if (typeof obj[i] === 'object') {
          result = {
            ...result,
            ...parseJson(obj[i], `${prefix}[${i}].`)
          };
        }
      }
    } else if (typeof obj == 'object') {
      for (const key in obj) {
        result[`${prefix}${key}`] = obj[key];

        if (Array.isArray(obj[key])) {
          result = {
            ...result,
            ...parseJson(obj[key], `${prefix}${key}`)
          };
        } else if (typeof obj[key] === 'object') {
          result = {
            ...result,
            ...parseJson(obj[key], `${prefix}${key}.`)
          };
        }
      }
    }

    return result;
  };

  return parseJson(response);
}
// Expand dotted keys into nested objects, in place: { 'a.b': 1 } becomes
// { a: { b: 1 } }. Existing nested objects under the head key are merged,
// and the expansion recurses until no dotted key remains.
function transformFlatJson(obj: Record<string, any>) {
  for (const key in obj) {
    const value = obj[key];
    if (typeof value === 'object') {
      transformFlatJson(value);
    }
    if (!key.includes('.')) continue;

    const segments = key.split('.');
    if (segments.length <= 1) continue;
    const head = segments.shift();
    if (!head) continue;
    const rest = segments.join('.');

    obj[head] = obj[head] ? { ...obj[head], [rest]: value } : { [rest]: value };
    transformFlatJson(obj[head]);
    delete obj[key];
  }
  return obj;
}

View File

@@ -0,0 +1,293 @@
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import {
DYNAMIC_INPUT_KEY,
ModuleInputKeyEnum,
ModuleOutputKeyEnum
} from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import axios from 'axios';
import { valueTypeFormat } from '../utils';
import { SERVICE_LOCAL_HOST } from '../../../../common/system/tools';
import { addLog } from '../../../../common/system/log';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
// One key/value row configured in the HTTP node editor (header or query param).
type PropsArrType = {
  key: string;
  type: string;
  value: string;
};
type HttpRequestProps = ModuleDispatchProps<{
  [ModuleInputKeyEnum.abandon_httpUrl]: string;
  [ModuleInputKeyEnum.httpMethod]: string;
  [ModuleInputKeyEnum.httpReqUrl]: string;
  [ModuleInputKeyEnum.httpHeaders]: PropsArrType[];
  [ModuleInputKeyEnum.httpParams]: PropsArrType[];
  [ModuleInputKeyEnum.httpJsonBody]: string;
  [DYNAMIC_INPUT_KEY]: Record<string, any>;
  [key: string]: any;
}>;
type HttpResponse = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.failed]?: boolean;
  [key: string]: any;
}>;
// Placeholder written by replaceVariable when a {{var}} resolves to
// undefined; removeUndefinedSign converts it back to real undefined after
// JSON.parse.
const UNDEFINED_SIGN = 'UNDEFINED_SIGN';
/**
 * HTTP request node (v468): resolves {{variable}} placeholders in the url,
 * headers, query params and JSON body, performs the request, then maps the
 * flattened response onto the node's declared outputs. Failures are
 * reported via the `failed` output instead of throwing.
 */
export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<HttpResponse> => {
  let {
    appId,
    chatId,
    responseChatItemId,
    variables,
    module: { outputs },
    histories,
    params: {
      system_httpMethod: httpMethod = 'POST',
      system_httpReqUrl: httpReqUrl,
      system_httpHeader: httpHeader,
      system_httpParams: httpParams = [],
      system_httpJsonBody: httpJsonBody,
      [DYNAMIC_INPUT_KEY]: dynamicInput,
      ...body
    }
  } = props;

  if (!httpReqUrl) {
    return Promise.reject('Http url is empty');
  }

  // Everything that can be referenced as {{key}} in url/headers/params/body.
  const concatVariables = {
    appId,
    chatId,
    responseChatItemId,
    ...variables,
    // NOTE(review): slice(0, 10) keeps the 10 *oldest* messages — confirm
    // that is the intent (vs. slice(-10) for the most recent).
    histories: histories.slice(0, 10),
    ...body
  };

  httpReqUrl = replaceVariable(httpReqUrl, concatVariables);

  // parse header
  const headers = await (() => {
    try {
      if (!httpHeader || httpHeader.length === 0) return {};
      // array
      return httpHeader.reduce((acc: Record<string, string>, item) => {
        const key = replaceVariable(item.key, concatVariables);
        const value = replaceVariable(item.value, concatVariables);
        acc[key] = valueTypeFormat(value, 'string');
        return acc;
      }, {});
    } catch (error) {
      return Promise.reject('Header 为非法 JSON 格式');
    }
  })();

  // Query params with variables resolved.
  const params = httpParams.reduce((acc: Record<string, string>, item) => {
    const key = replaceVariable(item.key, concatVariables);
    const value = replaceVariable(item.value, concatVariables);
    acc[key] = valueTypeFormat(value, 'string');
    return acc;
  }, {});

  // JSON body: interpolate variables, parse, then strip the UNDEFINED_SIGN
  // placeholders back to real undefined values.
  const requestBody = await (() => {
    if (!httpJsonBody) return { [DYNAMIC_INPUT_KEY]: dynamicInput };
    httpJsonBody = replaceVariable(httpJsonBody, concatVariables);
    try {
      const jsonParse = JSON.parse(httpJsonBody);
      const removeSignJson = removeUndefinedSign(jsonParse);
      return { [DYNAMIC_INPUT_KEY]: dynamicInput, ...removeSignJson };
    } catch (error) {
      console.log(error);
      return Promise.reject(`Invalid JSON body: ${httpJsonBody}`);
    }
  })();

  try {
    const { formatResponse, rawResponse } = await fetchData({
      method: httpMethod,
      url: httpReqUrl,
      headers,
      body: requestBody,
      params
    });

    // format output value type
    const results: Record<string, any> = {};
    for (const key in formatResponse) {
      const output = outputs.find((item) => item.key === key);
      if (!output) continue;
      results[key] = valueTypeFormat(formatResponse[key], output.valueType);
    }

    return {
      [DispatchNodeResponseKeyEnum.nodeResponse]: {
        totalPoints: 0,
        params: Object.keys(params).length > 0 ? params : undefined,
        body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
        headers: Object.keys(headers).length > 0 ? headers : undefined,
        httpResult: rawResponse
      },
      [DispatchNodeResponseKeyEnum.toolResponses]: results,
      [ModuleOutputKeyEnum.httpRawResponse]: rawResponse,
      ...results
    };
  } catch (error) {
    addLog.error('Http request error', error);

    return {
      // Mark the node as failed instead of throwing so the flow can branch.
      [ModuleOutputKeyEnum.failed]: true,
      [DispatchNodeResponseKeyEnum.nodeResponse]: {
        totalPoints: 0,
        params: Object.keys(params).length > 0 ? params : undefined,
        body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
        headers: Object.keys(headers).length > 0 ? headers : undefined,
        httpResult: { error: formatHttpError(error) }
      }
    };
  }
};
/**
 * Performs the HTTP call and returns both the raw JSON response and a
 * flattened view keyed by path ("user.name", "list[0].age", …) used for
 * matching against output keys.
 */
async function fetchData({
  method,
  url,
  headers,
  body,
  params
}: {
  method: string;
  url: string;
  headers: Record<string, any>;
  body: Record<string, any>;
  params: Record<string, any>;
}): Promise<Record<string, any>> {
  const { data: response } = await axios<Record<string, any>>({
    method,
    // NOTE(review): the node url is resolved against the local service host
    // baseURL — confirm this is intended for arbitrary external urls.
    baseURL: `http://${SERVICE_LOCAL_HOST}`,
    url,
    headers: {
      'Content-Type': 'application/json',
      ...headers
    },
    params: params,
    // Only body-carrying verbs send a payload.
    data: ['POST', 'PUT', 'PATCH'].includes(method) ? body : undefined
  });

  /*
    parse the json:
    {
      user: {
        name: 'xxx',
        age: 12
      },
      list: [
        {
          name: 'xxx',
          age: 50
        },
        [{ test: 22 }]
      ],
      psw: 'xxx'
    }
    result: {
      'user': { name: 'xxx', age: 12 },
      'user.name': 'xxx',
      'user.age': 12,
      'list': [ { name: 'xxx', age: 50 }, [ [Object] ] ],
      'list[0]': { name: 'xxx', age: 50 },
      'list[0].name': 'xxx',
      'list[0].age': 50,
      'list[1]': [ { test: 22 } ],
      'list[1][0]': { test: 22 },
      'list[1][0].test': 22,
      'psw': 'xxx'
    }
  */
  // Recursively index every value by its flattened path while also keeping
  // the original nested value at each level.
  const parseJson = (obj: Record<string, any>, prefix = '') => {
    let result: Record<string, any> = {};

    if (Array.isArray(obj)) {
      for (let i = 0; i < obj.length; i++) {
        result[`${prefix}[${i}]`] = obj[i];

        if (Array.isArray(obj[i])) {
          result = {
            ...result,
            ...parseJson(obj[i], `${prefix}[${i}]`)
          };
        } else if (typeof obj[i] === 'object') {
          result = {
            ...result,
            ...parseJson(obj[i], `${prefix}[${i}].`)
          };
        }
      }
    } else if (typeof obj == 'object') {
      for (const key in obj) {
        result[`${prefix}${key}`] = obj[key];

        if (Array.isArray(obj[key])) {
          result = {
            ...result,
            ...parseJson(obj[key], `${prefix}${key}`)
          };
        } else if (typeof obj[key] === 'object') {
          result = {
            ...result,
            ...parseJson(obj[key], `${prefix}${key}.`)
          };
        }
      }
    }

    return result;
  };

  return {
    formatResponse: parseJson(response),
    rawResponse: response
  };
}
/**
 * Replaces every {{key}} occurrence in `text` with the JSON-serialized value
 * (string values are inserted without their surrounding quotes); undefined
 * values become the UNDEFINED_SIGN placeholder.
 *
 * Fixes vs. the original:
 * - regex metacharacters in keys (e.g. "list[0]") are escaped so the key
 *   cannot break or silently change the pattern;
 * - a replacer function is used so "$&"/"$1" sequences in the value are
 *   inserted literally instead of being treated as replacement patterns.
 */
function replaceVariable(text: string, obj: Record<string, any>) {
  for (const [key, value] of Object.entries(obj)) {
    // Escape regex metacharacters in the variable name.
    const pattern = new RegExp(`{{${key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}}}`, 'g');

    if (value === undefined) {
      text = text.replace(pattern, UNDEFINED_SIGN);
    } else {
      const replacement = JSON.stringify(value);
      const unquotedReplacement =
        replacement.startsWith('"') && replacement.endsWith('"')
          ? replacement.slice(1, -1)
          : replacement;
      // Replacer function => the value is inserted verbatim.
      text = text.replace(pattern, () => unquotedReplacement);
    }
  }
  return text || '';
}
// Walk the parsed JSON in place, converting every UNDEFINED_SIGN placeholder
// back into a real `undefined` value (recursing into arrays and objects).
function removeUndefinedSign(obj: Record<string, any>) {
  for (const key in obj) {
    const value = obj[key];
    if (value === UNDEFINED_SIGN) {
      obj[key] = undefined;
    } else if (Array.isArray(value)) {
      obj[key] = value.map((item: any) => {
        if (item === UNDEFINED_SIGN) return undefined;
        if (typeof item === 'object') {
          removeUndefinedSign(item);
        }
        return item;
      });
    } else if (typeof value === 'object') {
      removeUndefinedSign(value);
    }
  }
  return obj;
}
// Reduce an (axios) error to a plain, JSON-serializable summary for logging
// and for the node's httpResult output. Tolerates any input shape.
function formatHttpError(error: any) {
  const requestConfig = error?.config;
  return {
    message: error?.message,
    name: error?.name,
    method: requestConfig?.method,
    baseURL: requestConfig?.baseURL,
    url: requestConfig?.url,
    code: error?.code,
    status: error?.status
  };
}

View File

@@ -0,0 +1,76 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import { ModelTypeEnum, getLLMModel } from '../../../../core/ai/model';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import { queryExtension } from '../../../../core/ai/functions/queryExtension';
import { getHistories } from '../utils';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
type Props = ModuleDispatchProps<{
  [ModuleInputKeyEnum.aiModel]: string;
  [ModuleInputKeyEnum.aiSystemPrompt]?: string;
  [ModuleInputKeyEnum.history]?: ChatItemType[] | number;
  [ModuleInputKeyEnum.userChatInput]: string;
}>;
type Response = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.text]: string;
}>;

// Expands the user's question into several rephrased queries via an LLM,
// dedupes them (ignoring punctuation/whitespace), and returns the list as a
// JSON string, along with usage/billing information.
export const dispatchQueryExtension = async ({
  histories,
  module,
  params: { model, systemPrompt, history, userChatInput }
}: Props): Promise<Response> => {
  if (!userChatInput) {
    return Promise.reject('Question is empty');
  }

  const extensionModel = getLLMModel(model);
  const historyList = getHistories(history, histories);

  const { extensionQueries, tokens } = await queryExtension({
    chatBg: systemPrompt,
    query: userChatInput,
    histories: historyList,
    model: extensionModel.model
  });

  // The original question always comes first.
  extensionQueries.unshift(userChatInput);

  const { totalPoints, modelName } = formatModelChars2Points({
    model: extensionModel.model,
    tokens,
    modelType: ModelTypeEnum.llm
  });

  // 删除所有的标点符号与空格等,只对文本进行比较
  const seen = new Set<string>();
  const uniqueQueries = extensionQueries.filter((query) => {
    const fingerprint = hashStr(query.replace(/[^\p{L}\p{N}]/gu, ''));
    if (seen.has(fingerprint)) return false;
    seen.add(fingerprint);
    return true;
  });
  const queriesJson = JSON.stringify(uniqueQueries);

  return {
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      totalPoints,
      model: modelName,
      tokens,
      query: userChatInput,
      textOutput: queriesJson
    },
    [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
      {
        moduleName: module.name,
        totalPoints,
        model: modelName,
        tokens
      }
    ],
    [ModuleOutputKeyEnum.text]: queriesJson
  };
};

View File

@@ -0,0 +1,107 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { SelectAppItemType } from '@fastgpt/global/core/module/type';
import { dispatchWorkFlow } from '../index';
import { MongoApp } from '../../../../core/app/schema';
import { responseWrite } from '../../../../common/response';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
import { textAdaptGptResponse } from '@fastgpt/global/core/module/runtime/utils';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import { getHistories, setEntryEntries } from '../utils';
import { chatValue2RuntimePrompt, runtimePrompt2ChatsValue } from '@fastgpt/global/core/chat/adapt';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
type Props = ModuleDispatchProps<{
  [ModuleInputKeyEnum.userChatInput]: string;
  [ModuleInputKeyEnum.history]?: ChatItemType[] | number;
  app: SelectAppItemType;
}>;
type Response = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.answerText]: string;
  [ModuleOutputKeyEnum.history]: ChatItemType[];
}>;

/**
 * Runs another app (of the same team) as a nested workflow and returns its
 * answer text plus the chat history extended with this question/answer pair.
 */
export const dispatchAppRequest = async (props: Props): Promise<Response> => {
  const {
    res,
    teamId,
    stream,
    detail,
    histories,
    inputFiles,
    params: { userChatInput, history, app }
  } = props;
  // Removed the unused `let start = Date.now();` local.

  if (!userChatInput) {
    return Promise.reject('Input is empty');
  }

  // The target app must belong to the caller's team.
  const appData = await MongoApp.findOne({
    _id: app.id,
    teamId
  });

  if (!appData) {
    return Promise.reject('App not found');
  }

  if (stream) {
    // Emit a newline first so the sub-app's answer starts on its own line.
    responseWrite({
      res,
      event: detail ? SseResponseEventEnum.answer : undefined,
      data: textAdaptGptResponse({
        text: '\n'
      })
    });
  }

  const chatHistories = getHistories(history, histories);

  const { flowResponses, flowUsages, assistantResponses } = await dispatchWorkFlow({
    ...props,
    appId: app.id,
    modules: setEntryEntries(appData.modules),
    runtimeModules: undefined, // must reset
    histories: chatHistories,
    inputFiles,
    startParams: {
      userChatInput
    }
  });

  // History returned to the caller: previous turns + this Q/A pair.
  const completeMessages = chatHistories.concat([
    {
      obj: ChatRoleEnum.Human,
      value: runtimePrompt2ChatsValue({
        files: inputFiles,
        text: userChatInput
      })
    },
    {
      obj: ChatRoleEnum.AI,
      value: assistantResponses
    }
  ]);

  const { text } = chatValue2RuntimePrompt(assistantResponses);

  return {
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      moduleLogo: appData.avatar,
      query: userChatInput,
      textOutput: text,
      totalPoints: flowResponses.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
    },
    [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
      {
        moduleName: appData.name,
        totalPoints: flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
      }
    ],
    answerText: text,
    history: completeMessages
  };
};

View File

@@ -0,0 +1,15 @@
import {
AIChatItemValueItemType,
ChatHistoryItemResType,
ChatItemValueItemType,
ToolRunResponseItemType
} from '@fastgpt/global/core/chat/type';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
// Aggregated result of dispatchWorkFlow: per-node response details, the
// usage/billing entries accumulated by the run, the tool-call responses and
// the assistant answer values produced by the flow.
export type DispatchFlowResponse = {
  flowResponses: ChatHistoryItemResType[];
  flowUsages: ChatNodeUsageType[];
  [DispatchNodeResponseKeyEnum.toolResponses]: ToolRunResponseItemType;
  [DispatchNodeResponseKeyEnum.assistantResponses]: AIChatItemValueItemType[];
};

View File

@@ -0,0 +1,62 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ModuleIOValueTypeEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
// Flag every module whose type can act as a workflow entry point
// (history / question input / plugin input) as an entry node, in place.
export const setEntryEntries = (modules: ModuleItemType[]) => {
  const entryTypes: Record<string, boolean> = {
    [FlowNodeTypeEnum.historyNode]: true,
    [FlowNodeTypeEnum.questionInput]: true,
    [FlowNodeTypeEnum.pluginInput]: true
  };

  for (const mod of modules) {
    if (entryTypes[mod.flowType]) {
      mod.isEntry = true;
    }
  }
  return modules;
};
// True when any "tools" node wires its selectedTools output to this module,
// i.e. the module is invoked as a tool rather than as a regular flow node.
export const checkTheModuleConnectedByTool = (
  modules: ModuleItemType[],
  module: ModuleItemType
) => {
  return modules
    .filter((item) => item.flowType === FlowNodeTypeEnum.tools)
    .some((toolNode) => {
      const selectedTools = toolNode.outputs.find(
        (output) => output.key === ModuleOutputKeyEnum.selectedTools
      );
      return !!selectedTools?.targets.some((target) => target.moduleId === module.moduleId);
    });
};
// Resolve a node's history input: a number means "last N messages from the
// running context", an explicit array is used as-is, anything falsy → [].
export const getHistories = (history?: ChatItemType[] | number, histories: ChatItemType[] = []) => {
  if (!history) return [];
  if (Array.isArray(history)) return history;
  if (typeof history === 'number') return histories.slice(-history);
  return [];
};
/* value type format */
// Coerce a raw value to the declared IO value type; undefined passes
// through untouched, unknown types return the value unchanged.
export const valueTypeFormat = (value: any, type?: `${ModuleIOValueTypeEnum}`) => {
  if (value === undefined) return;

  switch (type) {
    case 'string':
      return typeof value === 'object' ? JSON.stringify(value) : String(value);
    case 'number':
      return Number(value);
    case 'boolean':
      return Boolean(value);
    default:
      return value;
  }
};

View File

@@ -3,11 +3,14 @@
"version": "1.0.0",
"dependencies": {
"@fastgpt/global": "workspace:*",
"@node-rs/jieba": "1.10.0",
"axios": "^1.5.1",
"cheerio": "1.0.0-rc.12",
"cookie": "^0.5.0",
"date-fns": "2.30.0",
"dayjs": "^1.11.7",
"encoding": "^0.1.13",
"json5": "^2.2.3",
"jsonwebtoken": "^9.0.2",
"mongoose": "^7.0.2",
"multer": "1.4.5-lts.1",
@@ -15,7 +18,6 @@
"nextjs-cors": "^2.1.2",
"node-cron": "^3.0.3",
"pg": "^8.10.0",
"date-fns": "2.30.0",
"tunnel": "^0.0.6"
},
"devDependencies": {

View File

@@ -69,6 +69,7 @@ export const iconPaths = {
'core/app/simpleMode/template': () => import('./icons/core/app/simpleMode/template.svg'),
'core/app/simpleMode/tts': () => import('./icons/core/app/simpleMode/tts.svg'),
'core/app/simpleMode/variable': () => import('./icons/core/app/simpleMode/variable.svg'),
'core/app/toolCall': () => import('./icons/core/app/toolCall.svg'),
'core/app/ttsFill': () => import('./icons/core/app/ttsFill.svg'),
'core/app/variable/external': () => import('./icons/core/app/variable/external.svg'),
'core/app/variable/input': () => import('./icons/core/app/variable/input.svg'),
@@ -101,8 +102,11 @@ export const iconPaths = {
'core/dataset/rerank': () => import('./icons/core/dataset/rerank.svg'),
'core/dataset/tableCollection': () => import('./icons/core/dataset/tableCollection.svg'),
'core/dataset/websiteDataset': () => import('./icons/core/dataset/websiteDataset.svg'),
'core/modules/basicNode': () => import('./icons/core/modules/basicNode.svg'),
'core/modules/flowLight': () => import('./icons/core/modules/flowLight.svg'),
'core/modules/previewLight': () => import('./icons/core/modules/previewLight.svg'),
'core/modules/systemPlugin': () => import('./icons/core/modules/systemPlugin.svg'),
'core/modules/teamPlugin': () => import('./icons/core/modules/teamPlugin.svg'),
'core/modules/variable': () => import('./icons/core/modules/variable.svg'),
'core/modules/welcomeText': () => import('./icons/core/modules/welcomeText.svg'),
date: () => import('./icons/date.svg'),
@@ -131,6 +135,7 @@ export const iconPaths = {
kbTest: () => import('./icons/kbTest.svg'),
menu: () => import('./icons/menu.svg'),
minus: () => import('./icons/minus.svg'),
'modal/concat': () => import('./icons/modal/concat.svg'),
'modal/confirmPay': () => import('./icons/modal/confirmPay.svg'),
'modal/edit': () => import('./icons/modal/edit.svg'),
'modal/manualDataset': () => import('./icons/modal/manualDataset.svg'),

View File

@@ -1,4 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 13 12" fill="none">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 13 12">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M6.5 2C6.77614 2 7 2.22386 7 2.5V5.5H10C10.2761 5.5 10.5 5.72386 10.5 6C10.5 6.27614 10.2761 6.5 10 6.5H7V9.5C7 9.77614 6.77614 10 6.5 10C6.22386 10 6 9.77614 6 9.5V6.5H3C2.72386 6.5 2.5 6.27614 2.5 6C2.5 5.72386 2.72386 5.5 3 5.5H6V2.5C6 2.22386 6.22386 2 6.5 2Z" />
</svg>

Before

Width:  |  Height:  |  Size: 408 B

After

Width:  |  Height:  |  Size: 396 B

View File

@@ -0,0 +1,12 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 21" fill="none">
<path
d="M8.96208 3.83326H11.2335C11.9698 3.83326 12.5668 4.43022 12.5668 5.1666V5.44866H14.1665C14.189 5.44866 14.2113 5.44866 14.2335 5.44866V5.1666C14.2335 3.50974 12.8903 2.1666 11.2335 2.1666H8.96208C7.30522 2.1666 5.96208 3.50974 5.96208 5.1666V5.44866H7.62874V5.1666C7.62874 4.43022 8.2257 3.83326 8.96208 3.83326Z"
fill="#487FFF" />
<path
d="M18.6966 9.04808H1.30328C1.31483 8.05698 1.36761 7.46917 1.60886 6.99569C1.8805 6.46256 2.31395 6.02912 2.84708 5.75747C3.45316 5.44866 4.24657 5.44866 5.83338 5.44866H14.1665C15.7534 5.44866 16.5468 5.44866 17.1529 5.75747C17.686 6.02912 18.1194 6.46256 18.3911 6.99569C18.6323 7.46917 18.6851 8.05698 18.6966 9.04808Z"
fill="#487FFF" />
<path
d="M12.1963 10.5314H18.6999V14.3C18.6999 15.8868 18.6999 16.6802 18.3911 17.2863C18.1194 17.8194 17.686 18.2529 17.1529 18.5245C16.5468 18.8333 15.7534 18.8333 14.1665 18.8333H5.83338C4.24657 18.8333 3.45316 18.8333 2.84708 18.5245C2.31395 18.2529 1.8805 17.8194 1.60886 17.2863C1.30005 16.6802 1.30005 15.8868 1.30005 14.3V10.5314H7.80364V13.2689C7.80364 13.545 8.02749 13.7689 8.30364 13.7689H11.6963C11.9724 13.7689 12.1963 13.545 12.1963 13.2689V10.5314Z"
fill="#487FFF" />
<path d="M9.13697 10.5314H10.863V12.4356H9.13697V10.5314Z" fill="#487FFF" />
</svg>

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 18">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M8.69761 0.888662C8.89698 0.847659 9.1026 0.847659 9.30197 0.888662C9.53242 0.93606 9.7384 1.05143 9.90216 1.14315C9.91741 1.15169 9.93229 1.16003 9.94679 1.16809L15.4968 4.25142C15.5121 4.25994 15.5279 4.26865 15.5441 4.27757C15.7175 4.37305 15.9355 4.49311 16.1028 4.67474C16.2476 4.83178 16.3571 5.0179 16.4241 5.22066C16.5016 5.45519 16.5006 5.70402 16.4999 5.90193C16.4999 5.92042 16.4998 5.93847 16.4998 5.95603V12.0439C16.4998 12.0614 16.4999 12.0795 16.4999 12.098C16.5006 12.2959 16.5016 12.5447 16.4241 12.7792C16.3571 12.982 16.2476 13.1681 16.1028 13.3251C15.9355 13.5068 15.7175 13.6268 15.5442 13.7223C15.5279 13.7312 15.5121 13.7399 15.4968 13.7485L9.94679 16.8318C9.93229 16.8399 9.9174 16.8482 9.90215 16.8567C9.7384 16.9485 9.53242 17.0638 9.30197 17.1112C9.1026 17.1522 8.89698 17.1522 8.69761 17.1112C8.46716 17.0638 8.26118 16.9485 8.09742 16.8567C8.08217 16.8482 8.06729 16.8399 8.05279 16.8318L2.50279 13.7485C2.48744 13.7399 2.47164 13.7312 2.45544 13.7223C2.28209 13.6268 2.06412 13.5068 1.89674 13.3251C1.75203 13.1681 1.64251 12.982 1.57552 12.7792C1.49803 12.5447 1.49893 12.2959 1.49966 12.098C1.49972 12.0795 1.49979 12.0614 1.49979 12.0439V5.95603C1.49979 5.93847 1.49972 5.92042 1.49966 5.90193C1.49893 5.70403 1.49803 5.45519 1.57552 5.22066C1.64251 5.0179 1.75202 4.83178 1.89674 4.67474C2.06412 4.49311 2.28208 4.37306 2.45543 4.27758C2.47163 4.26865 2.48744 4.25994 2.50278 4.25142L8.05279 1.16809C8.06729 1.16003 8.08217 1.15169 8.09742 1.14315C8.26118 1.05143 8.46716 0.93606 8.69761 0.888662ZM8.99979 2.36168C8.99865 2.36223 8.99745 2.3628 8.99621 2.3634C8.95292 2.38427 8.89491 2.41618 8.78125 2.47932L3.79414 5.24994L8.99977 8.14195L14.2054 5.24992L9.21833 2.47932C9.10467 2.41618 9.04666 2.38427 9.00338 2.3634C9.00213 2.3628 9.00094 2.36223 8.99979 2.36168ZM14.9998 6.52454V12.0439C14.9998 12.1811 14.9995 12.2513 14.9964 12.3022C14.9963 12.3037 14.9962 12.3051 14.9961 12.3065C14.995 12.3072 14.9938 12.308 14.9925 12.3088C14.9495 12.3362 14.8883 
12.3706 14.7683 12.4372L9.74979 15.2253L9.74977 9.44122L14.9998 6.52454ZM8.24977 9.44122L2.99979 6.52457V12.0439C2.99979 12.1811 3.0001 12.2513 3.00316 12.3022C3.00325 12.3037 3.00335 12.3051 3.00344 12.3065C3.00459 12.3072 3.0058 12.308 3.00707 12.3088C3.05004 12.3362 3.11129 12.3706 3.23125 12.4372L8.24979 15.2253L8.24977 9.44122Z" />
</svg>

After

Width:  |  Height:  |  Size: 2.4 KiB

View File

@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 18">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M8.97102 5.68723L8.93214 4.22716C8.91418 3.5528 8.36099 3.01099 7.68174 3.01099C7.00248 3.01099 6.4493 3.5528 6.43134 4.22716L6.39245 5.68723H3.375C3.16789 5.68723 3 5.85512 3 6.06223V7.77955C4.3982 7.83251 5.51546 8.98273 5.51546 10.3939C5.51546 11.8051 4.3982 12.9553 3 13.0082V14.6325C3 14.8396 3.16789 15.0075 3.375 15.0075H5.13833C5.23045 13.6831 6.33396 12.6374 7.68172 12.6374C9.02947 12.6374 10.133 13.6831 10.2251 15.0075H11.9452C12.1523 15.0075 12.3202 14.8396 12.3202 14.6325V11.7055L13.7725 11.6593C14.4518 11.6377 14.9966 11.0792 14.9966 10.394C14.9966 9.70889 14.4518 9.1504 13.7725 9.12879L12.3202 9.08261V6.06222C12.3202 5.85512 12.1523 5.68723 11.9452 5.68723H8.97102ZM8.73132 16.5075V15.187C8.73132 14.6073 8.2614 14.1374 7.68172 14.1374C7.10203 14.1374 6.63211 14.6073 6.63211 15.187V16.5075H3.375C2.33947 16.5075 1.5 15.668 1.5 14.6325V11.5101H2.8992C3.51569 11.5101 4.01546 11.0104 4.01546 10.3939C4.01546 9.7774 3.51569 9.27764 2.8992 9.27764L1.5 9.27764V6.06223C1.5 5.02669 2.33947 4.18722 3.375 4.18722H4.93187C4.97141 2.70246 6.18743 1.51099 7.68174 1.51099C9.17604 1.51099 10.3921 2.70246 10.4316 4.18722H11.9452C12.9807 4.18722 13.8202 5.02669 13.8202 6.06222V7.62955C15.3063 7.67682 16.4966 8.89643 16.4966 10.394C16.4966 11.8916 15.3063 13.1112 13.8202 13.1585V14.6325C13.8202 15.668 12.9807 16.5075 11.9452 16.5075H8.73132Z" />
</svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 19 18">
<path fill-rule="evenodd" clip-rule="evenodd"
d="M9.49989 3C8.05014 3 6.87489 4.17525 6.87489 5.625C6.87489 7.07475 8.05014 8.25 9.49989 8.25C10.9496 8.25 12.1249 7.07475 12.1249 5.625C12.1249 4.17525 10.9496 3 9.49989 3ZM5.37489 5.625C5.37489 3.34683 7.22171 1.5 9.49989 1.5C11.7781 1.5 13.6249 3.34683 13.6249 5.625C13.6249 7.90317 11.7781 9.75 9.49989 9.75C7.22171 9.75 5.37489 7.90317 5.37489 5.625ZM9.49989 12C7.37062 12 5.47275 13.027 4.26027 14.6316C4.1861 14.7298 4.12933 14.805 4.08211 14.8711C4.04626 14.9213 4.0218 14.9581 4.00477 14.9861C4.11624 14.9988 4.27506 15 4.57031 15H14.4295C14.7247 15 14.8835 14.9988 14.995 14.9861C14.978 14.9581 14.9535 14.9213 14.9177 14.8711C14.8704 14.805 14.8137 14.7298 14.7395 14.6316C13.527 13.027 11.6292 12 9.49989 12ZM3.0635 13.7273C4.54322 11.769 6.87405 10.5 9.49989 10.5C12.1257 10.5 14.4566 11.769 15.9363 13.7273C15.943 13.7362 15.9497 13.7451 15.9564 13.754C16.0837 13.9222 16.2201 14.1026 16.3176 14.2767C16.4356 14.4873 16.523 14.731 16.5176 15.0274C16.5133 15.2657 16.4411 15.4887 16.3534 15.6648C16.2658 15.8409 16.1313 16.0329 15.9439 16.18C15.6939 16.3762 15.4217 16.4457 15.1799 16.4747C14.9678 16.5001 14.7164 16.5001 14.4636 16.5C14.4523 16.5 14.4409 16.5 14.4295 16.5H4.57031C4.55892 16.5 4.54753 16.5 4.53613 16.5C4.28339 16.5001 4.032 16.5001 3.81987 16.4747C3.57808 16.4457 3.3059 16.3762 3.05591 16.18C2.86845 16.0329 2.73403 15.8409 2.64637 15.6648C2.55871 15.4887 2.48653 15.2657 2.48217 15.0274C2.47675 14.731 2.56421 14.4873 2.68221 14.2767C2.7797 14.1026 2.91611 13.9222 3.04334 13.754C3.05009 13.7451 3.05681 13.7362 3.0635 13.7273Z" />
</svg>

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

@@ -0,0 +1,12 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1700745458924"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="30191"
xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128">
<path d="M0 512a512 512 0 1 0 1024 0 512 512 0 1 0-1024 0Z" fill="#EFF2FF" p-id="30192"></path>
<path
d="M682.666667 682.666667c9.898667 9.841778-9.841778-9.841778 0 0zM338.488889 685.511111l5.688889-5.688889c-1.080889 0.682667-2.161778 2.104889-2.844445 2.844445a10.752 10.752 0 0 0-2.844444 2.844444z"
fill="#000000" p-id="30193"></path>
<path
d="M784.896 740.693333c23.324444-40.504889 10.410667-71.68-30.947556-102.798222-48.696889-36.579556-96.199111-61.212444-130.389333-21.788444 0 0-36.295111 43.064889-143.075555-57.685334C356.352 440.490667 408.462222 398.677333 408.462222 398.677333c43.178667-43.235556 15.701333-75.548444-20.48-124.416-36.238222-48.924444-72.817778-64.341333-125.326222-22.300444-101.262222 81.009778 41.528889 271.189333 113.607111 345.144889 0 0 109.738667 113.095111 178.801778 150.755555l36.920889 20.593778c52.906667 27.079111 112.469333 39.480889 154.339555 14.336 0 0 20.195556-10.353778 38.570667-42.097778z"
fill="#4D4DEE" p-id="30194"></path>
</svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@@ -78,7 +78,7 @@ export default function Editor({
);
return (
<Box position={'relative'} width={'full'} h={`${h}px`} cursor={'text'}>
<Box position={'relative'} width={'full'} h={`${h}px`} cursor={'text'} overflowY={'visible'}>
<LexicalComposer initialConfig={initialConfig} key={key}>
<PlainTextPlugin
contentEditable={<ContentEditable className={styles.contentEditable} />}

View File

@@ -18,10 +18,9 @@ export interface MyModalProps extends ModalContentProps {
isCentered?: boolean;
isOpen: boolean;
onClose?: () => void;
isPc?: boolean;
}
const CustomModal = ({
const MyModal = ({
isOpen,
onClose,
iconSrc,
@@ -92,4 +91,4 @@ const CustomModal = ({
);
};
export default CustomModal;
export default React.memo(MyModal);

View File

@@ -92,7 +92,8 @@ const MySelect = (
: `${width} !important`;
})()}
w={'auto'}
p={'6px'}
px={'6px'}
py={'6px'}
border={'1px solid #fff'}
boxShadow={
'0px 2px 4px rgba(161, 167, 179, 0.25), 0px 0px 1px rgba(121, 141, 159, 0.25);'

View File

@@ -0,0 +1,17 @@
import React from 'react';
import MyTooltip from '.';
import { IconProps, QuestionOutlineIcon } from '@chakra-ui/icons';
type Props = IconProps & {
label?: string;
};
const QuestionTip = ({ label, ...props }: Props) => {
return (
<MyTooltip label={label}>
<QuestionOutlineIcon {...props} />
</MyTooltip>
);
};
export default QuestionTip;

View File

@@ -1,8 +1,8 @@
import React from 'react';
import { Flex, Box } from '@chakra-ui/react';
import { Flex, Box, BoxProps } from '@chakra-ui/react';
import MyIcon from '../Icon';
type Props = {
type Props = BoxProps & {
list: {
icon?: string;
label: string | React.ReactNode;
@@ -12,7 +12,7 @@ type Props = {
onChange: (e: string) => void;
};
const RowTabs = ({ list, value, onChange }: Props) => {
const RowTabs = ({ list, value, onChange, py = '7px', px = '12px', ...props }: Props) => {
return (
<Box
display={'inline-flex'}
@@ -23,16 +23,19 @@ const RowTabs = ({ list, value, onChange }: Props) => {
borderColor={'borderColor.base'}
bg={'myGray.50'}
gap={'4px'}
{...props}
>
{list.map((item) => (
<Flex
key={item.value}
flex={'1 0 0'}
alignItems={'center'}
cursor={'pointer'}
borderRadius={'md'}
px={'12px'}
py={'7px'}
px={px}
py={py}
userSelect={'none'}
whiteSpace={'noWrap'}
{...(value === item.value
? {
bg: 'white',

Some files were not shown because too many files have changed in this diff Show More