v4.6 -1 (#459)
@@ -1,15 +1,15 @@
 import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
-import type { moduleDispatchResType, ChatItemType } from '@/types/chat';
-import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
+import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
+import { ChatRoleEnum, TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
-import type { ClassifyQuestionAgentItemType } from '@/types/app';
+import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/module/type.d';
 import { SystemInputEnum } from '@/constants/app';
 import { FlowNodeSpecialInputKeyEnum } from '@fastgpt/global/core/module/node/constant';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { replaceVariable } from '@/global/common/string/tools';
 import { Prompt_CQJson } from '@/global/core/prompt/agent';
-import { FunctionModelItemType } from '@/types/model';
+import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
 import { getCQModel } from '@/service/core/ai/model';
 
 type Props = ModuleDispatchProps<{
@@ -88,7 +88,7 @@ ${systemPrompt}
 
   const filterMessages = ChatContextFilter({
     messages,
-    maxTokens: cqModel.maxToken
+    maxTokens: cqModel.maxContext
   });
   const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
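The second hunk is the behavioral change in this file: the classify-question agent's history filter now budgets against the model's full context window (`cqModel.maxContext`) rather than its response cap (`maxToken`). Below is a minimal, self-contained sketch of what a `ChatContextFilter`-style helper does with that budget; the real helper lives in `@/service/common/tiktoken` and counts with tiktoken, so the type, signature, and estimator here are illustrative assumptions only.

```ts
// Illustrative stand-ins; the real code uses ChatItemType and tiktoken.
type ChatItem = { obj: 'Human' | 'AI' | 'System'; value: string };
const estimateTokens = (text: string) => Math.ceil(text.length / 4); // rough ~4 chars/token

// Keep the newest messages that fit inside maxTokens, dropping the oldest first.
function chatContextFilterSketch({
  messages,
  maxTokens
}: {
  messages: ChatItem[];
  maxTokens: number;
}): ChatItem[] {
  const kept: ChatItem[] = [];
  let used = 0;
  for (let i = messages.length - 1; i >= 0; i--) {
    used += estimateTokens(messages[i].value);
    if (used > maxTokens) break; // budget exhausted: drop this message and everything older
    kept.unshift(messages[i]);
  }
  return kept;
}
```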
@@ -1,14 +1,14 @@
 import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
-import type { moduleDispatchResType, ChatItemType } from '@/types/chat';
-import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
+import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
+import { ChatRoleEnum, TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
-import type { ContextExtractAgentItemType } from '@/types/app';
+import type { ContextExtractAgentItemType } from '@fastgpt/global/core/module/type';
 import { ContextExtractEnum } from '@/constants/flow/flowField';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
 import { replaceVariable } from '@/global/common/string/tools';
-import { FunctionModelItemType } from '@/types/model';
+import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
 
 type Props = ModuleDispatchProps<{
   history?: ChatItemType[];
@@ -98,7 +98,7 @@ async function functionCall({
   ];
   const filterMessages = ChatContextFilter({
     messages,
-    maxTokens: extractModel.maxToken
+    maxTokens: extractModel.maxContext
   });
   const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
@@ -1,14 +1,15 @@
 import type { NextApiResponse } from 'next';
 import { ChatContextFilter } from '@/service/common/tiktoken';
-import type { ChatItemType, moduleDispatchResType } from '@/types/chat';
-import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
+import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
+import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
+import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
 import { textAdaptGptResponse } from '@/utils/adapt';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
 import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
-import { TaskResponseKeyEnum } from '@/constants/chat';
-import { countModelPrice } from '@/service/common/bill/push';
-import { ChatModelItemType } from '@/types/model';
-import { postTextCensor } from '@/web/common/plusApi/censor';
+import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
+import { countModelPrice } from '@/service/support/wallet/bill/utils';
+import type { ChatModelItemType } from '@fastgpt/global/core/ai/model.d';
+import { postTextCensor } from '@/service/common/censor';
 import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
 import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
 import { countMessagesTokens, sliceMessagesTB } from '@/global/common/tiktoken';
@@ -99,7 +100,6 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     maxToken,
     filterMessages
   });
-  // console.log(messages);
 
   // FastGPT temperature range: 1~10
   temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
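The surviving context line above also documents a quirk worth knowing when reading this file: FastGPT exposes temperature on a 1–10 scale in the UI and rescales it onto the model's real range at dispatch time. A worked instance of that formula, copied from the diff into a standalone helper:

```ts
// Map the UI's 1–10 temperature onto the model's own range (same formula as the diff).
const mapTemperature = (ui: number, maxTemperature: number) =>
  +(maxTemperature * (ui / 10)).toFixed(2);

mapTemperature(7, 2);    // 2 * (7 / 10) = 1.4
mapTemperature(10, 1.2); // 1.2, i.e. the model's maximum
```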
@@ -281,7 +281,7 @@ function getChatMessages({
 
   const filterMessages = ChatContextFilter({
     messages,
-    maxTokens: Math.ceil(model.maxToken - 300) // filter token. not response maxToken
+    maxTokens: Math.ceil(model.maxContext - 300) // filter token. not response maxToken
   });
 
   const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
@@ -300,13 +300,13 @@ function getMaxTokens({
   model: ChatModelItemType;
   filterMessages: ChatProps['inputs']['history'];
 }) {
-  const tokensLimit = model.maxToken;
-  /* count response max token */
+  const tokensLimit = model.maxContext;
+
+  /* count response max token */
   const promptsToken = countMessagesTokens({
     messages: filterMessages
   });
-  maxToken = maxToken + promptsToken > tokensLimit ? tokensLimit - promptsToken : maxToken;
+  maxToken = promptsToken + model.maxResponse > tokensLimit ? tokensLimit - promptsToken : maxToken;
 
   return {
     max_tokens: maxToken
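The `getMaxTokens` hunk is the substantive fix in this file: the old code clamped against `model.maxToken`, which conflated the context window with the completion budget, while the new code budgets the window with `model.maxContext` and reserves room with `model.maxResponse`. A self-contained sketch of the arithmetic, with the field names taken from the diff but the surrounding shape assumed:

```ts
type ModelLimits = { maxContext: number; maxResponse: number };

// If prompt tokens plus the model's full response budget would overflow the
// context window, shrink max_tokens to the room that is actually left.
function clampMaxTokens(model: ModelLimits, promptsToken: number, maxToken: number) {
  const tokensLimit = model.maxContext;
  return promptsToken + model.maxResponse > tokensLimit
    ? tokensLimit - promptsToken
    : maxToken;
}

// e.g. a 16k-context model with a 4k response budget and a 14k-token prompt:
clampMaxTokens({ maxContext: 16384, maxResponse: 4096 }, 14000, 4096); // → 2384
```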
@@ -1,17 +1,11 @@
-import { PgClient } from '@/service/pg';
-import type { moduleDispatchResType } from '@/types/chat';
-import { TaskResponseKeyEnum } from '@/constants/chat';
-import { getVector } from '@/pages/api/openapi/plugin/vector';
-import { countModelPrice } from '@/service/common/bill/push';
+import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
+import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
+import { countModelPrice } from '@/service/support/wallet/bill/utils';
 import type { SelectedDatasetType } from '@/types/core/dataset';
-import type {
-  SearchDataResponseItemType,
-  SearchDataResultItemType
-} from '@fastgpt/global/core/dataset/type';
-import { PgDatasetTableName } from '@/constants/plugin';
+import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { ModelTypeEnum } from '@/service/core/ai/model';
-import { getDatasetDataItemInfo } from '@/pages/api/core/dataset/data/getDataById';
+import { searchDatasetData } from '@/service/core/dataset/data/utils';
 
 type DatasetSearchProps = ModuleDispatchProps<{
   datasets: SelectedDatasetType;
@@ -28,7 +22,8 @@ export type KBSearchResponse = {
 
 export async function dispatchDatasetSearch(props: Record<string, any>): Promise<KBSearchResponse> {
   const {
-    user,
+    teamId,
+    tmbId,
     inputs: { datasets = [], similarity = 0.4, limit = 5, userChatInput }
   } = props as DatasetSearchProps;
@@ -42,34 +37,15 @@ export async function dispatchDatasetSearch(props: Record<string, any>): Promise
 
-  // get vector
   const vectorModel = datasets[0]?.vectorModel || global.vectorModels[0];
-  const { vectors, tokenLen } = await getVector({
+
+  const { searchRes, tokenLen } = await searchDatasetData({
+    text: userChatInput,
     model: vectorModel.model,
-    input: [userChatInput]
+    similarity,
+    limit,
+    datasetIds: datasets.map((item) => item.datasetId)
   });
 
-  // search kb
-  const results: any = await PgClient.query(
-    `BEGIN;
-    SET LOCAL hnsw.ef_search = ${global.systemEnv.pgHNSWEfSearch || 100};
-    select id, q, a, dataset_id, collection_id, (vector <#> '[${
-      vectors[0]
-    }]') * -1 AS score from ${PgDatasetTableName} where user_id='${
-      user._id
-    }' AND dataset_id IN (${datasets
-      .map((item) => `'${item.datasetId}'`)
-      .join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
-      vectors[0]
-    }]' limit ${limit};
-    COMMIT;`
-  );
-
-  const rows = results?.[2]?.rows as SearchDataResultItemType[];
-  const collectionsData = await getDatasetDataItemInfo({ pgDataList: rows });
-  const searchRes: SearchDataResponseItemType[] = collectionsData.map((item, index) => ({
-    ...item,
-    score: rows[index].score
-  }));
-
   return {
     isEmpty: searchRes.length === 0 ? true : undefined,
     unEmpty: searchRes.length > 0 ? true : undefined,
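The dataset-search dispatcher loses its inline pgvector SQL in favor of the shared `searchDatasetData` helper. For readers of the removed query: `<#>` is pgvector's negative-inner-product operator, so `(vector <#> q) * -1` recovers a similarity score, `vector <#> q < -${similarity}` keeps only rows above the threshold, and `SET LOCAL hnsw.ef_search` tunes HNSW recall for that one transaction. A stripped-down template of the same ranking trick, with table and column names assumed from the removed code:

```ts
// Illustrative only: the ranking trick used by the removed SQL. As in the
// original, values are interpolated directly into the statement, which is
// acceptable only for trusted, internally generated numbers and vectors.
const vectorSearchSql = (table: string, vector: number[], similarity: number, limit: number) => `
BEGIN;
SET LOCAL hnsw.ef_search = 100;
SELECT id, q, a, (vector <#> '[${vector.join(',')}]') * -1 AS score
FROM ${table}
WHERE vector <#> '[${vector.join(',')}]' < -${similarity}
ORDER BY vector <#> '[${vector.join(',')}]'
LIMIT ${limit};
COMMIT;`;
```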
@@ -1,12 +1,303 @@
-export * from './init/history';
-export * from './init/userChatInput';
-export * from './chat/oneapi';
-export * from './dataset/search';
-export * from './tools/answer';
-export * from './tools/http';
-export * from './tools/runApp';
-export * from './agent/classifyQuestion';
-export * from './agent/extract';
-export * from './plugin/run';
-export * from './plugin/runInput';
-export * from './plugin/runOutput';
+import { NextApiResponse } from 'next';
+import { SystemInputEnum, SystemOutputEnum } from '@/constants/app';
+import { RunningModuleItemType } from '@/types/app';
+import { ModuleDispatchProps } from '@/types/core/chat/type';
+import { ChatHistoryItemResType } from '@fastgpt/global/core/chat/api';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
+import { ModuleItemType } from '@fastgpt/global/core/module/type';
+import { UserType } from '@fastgpt/global/support/user/type';
+import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
+import { replaceVariable } from '@/global/common/string/tools';
+import { responseWrite } from '@fastgpt/service/common/response';
+import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
+import { getSystemTime } from '@fastgpt/global/common/time/timezone';
+import { initModuleType } from '@/constants/flow';
+
+import { dispatchHistory } from './init/history';
+import { dispatchChatInput } from './init/userChatInput';
+import { dispatchChatCompletion } from './chat/oneapi';
+import { dispatchDatasetSearch } from './dataset/search';
+import { dispatchAnswer } from './tools/answer';
+import { dispatchClassifyQuestion } from './agent/classifyQuestion';
+import { dispatchContentExtract } from './agent/extract';
+import { dispatchHttpRequest } from './tools/http';
+import { dispatchAppRequest } from './tools/runApp';
+import { dispatchRunPlugin } from './plugin/run';
+import { dispatchPluginInput } from './plugin/runInput';
+import { dispatchPluginOutput } from './plugin/runOutput';
+
+/* running */
+export async function dispatchModules({
+  res,
+  chatId,
+  modules,
+  user,
+  teamId,
+  tmbId,
+  params = {},
+  variables = {},
+  stream = false,
+  detail = false
+}: {
+  res: NextApiResponse;
+  chatId?: string;
+  modules: ModuleItemType[];
+  user: UserType;
+  teamId: string;
+  tmbId: string;
+  params?: Record<string, any>;
+  variables?: Record<string, any>;
+  stream?: boolean;
+  detail?: boolean;
+}) {
+  variables = {
+    ...getSystemVariable({ timezone: user.timezone }),
+    ...variables
+  };
+  const runningModules = loadModules(modules, variables);
+
+  // let storeData: Record<string, any> = {}; // after module used
+  let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
+  let chatAnswerText = ''; // AI answer
+  let runningTime = Date.now();
+
+  function pushStore(
+    { inputs = [] }: RunningModuleItemType,
+    {
+      answerText = '',
+      responseData
+    }: {
+      answerText?: string;
+      responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
+    }
+  ) {
+    const time = Date.now();
+    if (responseData) {
+      if (Array.isArray(responseData)) {
+        chatResponse = chatResponse.concat(responseData);
+      } else {
+        chatResponse.push({
+          ...responseData,
+          runningTime: +((time - runningTime) / 1000).toFixed(2)
+        });
+      }
+    }
+    runningTime = time;
+
+    const isResponseAnswerText =
+      inputs.find((item) => item.key === SystemInputEnum.isResponseAnswerText)?.value ?? true;
+    if (isResponseAnswerText) {
+      chatAnswerText += answerText;
+    }
+  }
+  function moduleInput(
+    module: RunningModuleItemType,
+    data: Record<string, any> = {}
+  ): Promise<any> {
+    const checkInputFinish = () => {
+      return !module.inputs.find((item: any) => item.value === undefined);
+    };
+    const updateInputValue = (key: string, value: any) => {
+      const index = module.inputs.findIndex((item: any) => item.key === key);
+      if (index === -1) return;
+      module.inputs[index].value = value;
+    };
+
+    const set = new Set();
+
+    return Promise.all(
+      Object.entries(data).map(([key, val]: any) => {
+        updateInputValue(key, val);
+
+        if (!set.has(module.moduleId) && checkInputFinish()) {
+          set.add(module.moduleId);
+          // remove switch
+          updateInputValue(SystemInputEnum.switch, undefined);
+          return moduleRun(module);
+        }
+      })
+    );
+  }
+  function moduleOutput(
+    module: RunningModuleItemType,
+    result: Record<string, any> = {}
+  ): Promise<any> {
+    pushStore(module, result);
+    return Promise.all(
+      module.outputs.map((outputItem) => {
+        if (result[outputItem.key] === undefined) return;
+        /* update output value */
+        outputItem.value = result[outputItem.key];
+
+        /* update target */
+        return Promise.all(
+          outputItem.targets.map((target: any) => {
+            // find module
+            const targetModule = runningModules.find((item) => item.moduleId === target.moduleId);
+            if (!targetModule) return;
+
+            return moduleInput(targetModule, { [target.key]: outputItem.value });
+          })
+        );
+      })
+    );
+  }
+  async function moduleRun(module: RunningModuleItemType): Promise<any> {
+    if (res.closed) return Promise.resolve();
+
+    if (stream && detail && module.showStatus) {
+      responseStatus({
+        res,
+        name: module.name,
+        status: 'running'
+      });
+    }
+
+    // get fetch params
+    const params: Record<string, any> = {};
+    module.inputs.forEach((item: any) => {
+      params[item.key] = item.value;
+    });
+    const props: ModuleDispatchProps<Record<string, any>> = {
+      res,
+      stream,
+      detail,
+      variables,
+      outputs: module.outputs,
+      user,
+      teamId,
+      tmbId,
+      inputs: params
+    };
+
+    const dispatchRes: Record<string, any> = await (async () => {
+      const callbackMap: Record<string, Function> = {
+        [FlowNodeTypeEnum.historyNode]: dispatchHistory,
+        [FlowNodeTypeEnum.questionInput]: dispatchChatInput,
+        [FlowNodeTypeEnum.answerNode]: dispatchAnswer,
+        [FlowNodeTypeEnum.chatNode]: dispatchChatCompletion,
+        [FlowNodeTypeEnum.datasetSearchNode]: dispatchDatasetSearch,
+        [FlowNodeTypeEnum.classifyQuestion]: dispatchClassifyQuestion,
+        [FlowNodeTypeEnum.contentExtract]: dispatchContentExtract,
+        [FlowNodeTypeEnum.httpRequest]: dispatchHttpRequest,
+        [FlowNodeTypeEnum.runApp]: dispatchAppRequest,
+        [FlowNodeTypeEnum.pluginModule]: dispatchRunPlugin,
+        [FlowNodeTypeEnum.pluginInput]: dispatchPluginInput,
+        [FlowNodeTypeEnum.pluginOutput]: dispatchPluginOutput
+      };
+      if (callbackMap[module.flowType]) {
+        return callbackMap[module.flowType](props);
+      }
+      return {};
+    })();
+
+    const formatResponseData = (() => {
+      if (!dispatchRes[TaskResponseKeyEnum.responseData]) return undefined;
+      if (Array.isArray(dispatchRes[TaskResponseKeyEnum.responseData]))
+        return dispatchRes[TaskResponseKeyEnum.responseData];
+      return {
+        ...dispatchRes[TaskResponseKeyEnum.responseData],
+        moduleName: module.name,
+        moduleType: module.flowType
+      };
+    })();
+
+    return moduleOutput(module, {
+      [SystemOutputEnum.finish]: true,
+      ...dispatchRes,
+      [TaskResponseKeyEnum.responseData]: formatResponseData
+    });
+  }
+
+  // start process width initInput
+  const initModules = runningModules.filter((item) => initModuleType[item.flowType]);
+
+  await Promise.all(initModules.map((module) => moduleInput(module, params)));
+
+  // focus running pluginOutput
+  const pluginOutputModule = runningModules.find(
+    (item) => item.flowType === FlowNodeTypeEnum.pluginOutput
+  );
+  if (pluginOutputModule) {
+    await moduleRun(pluginOutputModule);
+  }
+
+  return {
+    [TaskResponseKeyEnum.answerText]: chatAnswerText,
+    [TaskResponseKeyEnum.responseData]: chatResponse
+  };
+}
+
+/* init store modules to running modules */
+function loadModules(
+  modules: ModuleItemType[],
+  variables: Record<string, any>
+): RunningModuleItemType[] {
+  return modules.map((module) => {
+    return {
+      moduleId: module.moduleId,
+      name: module.name,
+      flowType: module.flowType,
+      showStatus: module.showStatus,
+      inputs: module.inputs
+        .filter((item) => item.connected) // filter unconnected target input
+        .map((item) => {
+          if (typeof item.value !== 'string') {
+            return {
+              key: item.key,
+              value: item.value
+            };
+          }
+
+          // variables replace
+          const replacedVal = replaceVariable(item.value, variables);
+
+          return {
+            key: item.key,
+            value: replacedVal
+          };
+        }),
+      outputs: module.outputs
+        .map((item) => ({
+          key: item.key,
+          answer: item.key === TaskResponseKeyEnum.answerText,
+          value: undefined,
+          targets: item.targets
+        }))
+        .sort((a, b) => {
+          // finish output always at last
+          if (a.key === SystemOutputEnum.finish) return 1;
+          if (b.key === SystemOutputEnum.finish) return -1;
+          return 0;
+        })
+    };
+  });
+}
+
+/* sse response modules staus */
+export function responseStatus({
+  res,
+  status,
+  name
+}: {
+  res: NextApiResponse;
+  status?: 'running' | 'finish';
+  name?: string;
+}) {
+  if (!name) return;
+  responseWrite({
+    res,
+    event: sseResponseEventEnum.moduleStatus,
+    data: JSON.stringify({
+      status: 'running',
+      name
+    })
+  });
+}
+
+/* get system variable */
+export function getSystemVariable({ timezone }: { timezone: string }) {
+  return {
+    cTime: getSystemTime(timezone)
+  };
+}
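The new `index.ts` replaces the old barrel of re-exports with a small dataflow scheduler: `moduleInput` fills input slots, a module runs only once every connected input holds a value, and `moduleOutput` pushes each produced output to its connected targets, which may in turn trigger downstream modules. Below is a distilled, self-contained sketch of that loop; the types and names are illustrative, not the production ones.

```ts
// Distilled sketch of the scheduling idea in dispatchModules above.
type Input = { key: string; value?: unknown };
type Output = { key: string; targets: { moduleId: string; key: string }[] };
type Mod = {
  moduleId: string;
  inputs: Input[];
  outputs: Output[];
  run: (params: Record<string, unknown>) => Promise<Record<string, unknown>>;
};

async function fire(mods: Mod[], mod: Mod, data: Record<string, unknown>): Promise<void> {
  // Fill whatever inputs this delivery provides.
  for (const [key, value] of Object.entries(data)) {
    const input = mod.inputs.find((i) => i.key === key);
    if (input) input.value = value;
  }
  // Not ready yet: some connected input is still undefined.
  if (mod.inputs.some((i) => i.value === undefined)) return;
  const result = await mod.run(Object.fromEntries(mod.inputs.map((i) => [i.key, i.value] as const)));
  // Propagate each produced output to its connected targets.
  await Promise.all(
    mod.outputs
      .filter((out) => out.key in result)
      .flatMap((out) =>
        out.targets.map((t) => {
          const next = mods.find((m) => m.moduleId === t.moduleId);
          return next ? fire(mods, next, { [t.key]: result[out.key] }) : Promise.resolve();
        })
      )
  );
}
```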
@@ -1,5 +1,5 @@
 import { SystemInputEnum } from '@/constants/app';
-import { ChatItemType } from '@/types/chat';
+import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 export type HistoryProps = ModuleDispatchProps<{
   maxContext: number;
@@ -1,12 +1,12 @@
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
-import { dispatchModules } from '@/pages/api/v1/chat/completions';
+import { dispatchModules } from '../index';
 import {
   FlowNodeSpecialInputKeyEnum,
   FlowNodeTypeEnum
 } from '@fastgpt/global/core/module/node/constant';
-import { getOnePluginDetail } from '@fastgpt/service/core/plugin/controller';
-import { TaskResponseKeyEnum } from '@/constants/chat';
-import { moduleDispatchResType } from '@/types/chat';
+import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
+import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
+import { MongoPlugin } from '@fastgpt/service/core/plugin/schema';
 
 type RunPluginProps = ModuleDispatchProps<{
   [FlowNodeSpecialInputKeyEnum.pluginId]: string;
@@ -19,11 +19,6 @@ type RunPluginResponse = {
 
 export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
   const {
-    res,
-    variables,
-    user,
-    stream,
-    detail,
     inputs: { pluginId, ...data }
   } = props;
@@ -31,19 +26,15 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
     return Promise.reject('Input is empty');
   }
 
-  const plugin = await getOnePluginDetail({ id: pluginId, userId: user._id });
+  const plugin = await MongoPlugin.findOne({ _id: pluginId });
   if (!plugin) {
     return Promise.reject('Plugin not found');
   }
 
   const { responseData, answerText } = await dispatchModules({
-    res,
+    ...props,
     modules: plugin.modules,
-    user,
-    variables,
-    params: data,
-    stream,
-    detail
+    params: data
   });
 
   const output = responseData.find((item) => item.moduleType === FlowNodeTypeEnum.pluginOutput);
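`dispatchRunPlugin` now forwards its whole props object with `...props` instead of re-listing `res`, `variables`, `user`, `stream`, and `detail`, then overrides only `modules` and `params`. Spread-then-override relies on later keys winning, as a toy example shows:

```ts
// Keys written after the spread win; everything else is inherited unchanged.
const parent = { stream: true, detail: false, modules: ['old'], params: { a: 1 } };
const child = { ...parent, modules: ['plugin'], params: { pluginInput: 'x' } };
// child.stream === true, child.modules === ['plugin'], child.params === { pluginInput: 'x' }
console.log(child);
```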
@@ -1,5 +1,5 @@
-import { TaskResponseKeyEnum } from '@/constants/chat';
-import { moduleDispatchResType } from '@/types/chat';
+import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
+import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 
 export type PluginOutputProps = ModuleDispatchProps<{
@@ -1,4 +1,5 @@
-import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
+import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
+import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
 import { responseWrite } from '@fastgpt/service/common/response';
 import { textAdaptGptResponse } from '@/utils/adapt';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
@@ -1,6 +1,6 @@
-import { TaskResponseKeyEnum } from '@/constants/chat';
 import { HttpPropsEnum } from '@/constants/flow/flowField';
-import { moduleDispatchResType } from '@/types/chat';
+import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
+import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 export type HttpRequestProps = ModuleDispatchProps<{
   [HttpPropsEnum.url]: string;
@@ -14,13 +14,15 @@ export type HttpResponse = {
 
 export const dispatchHttpRequest = async (props: Record<string, any>): Promise<HttpResponse> => {
   const {
+    chatId,
     variables,
     inputs: { url, ...body }
   } = props as HttpRequestProps;
 
   const requestBody = {
-    variables,
-    ...body
+    ...body,
+    chatId,
+    variables
   };
 
   try {
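The HTTP module's outbound payload now carries `chatId` alongside the flow variables, and the spread order flips so the reserved keys are written after `...body` and cannot be clobbered by user-supplied fields. A sketch of the new shape, with the input type assumed:

```ts
type HttpInputs = { url: string; [key: string]: unknown };

function buildRequestBody(
  { url: _url, ...body }: HttpInputs,
  chatId: string | undefined,
  variables: Record<string, unknown>
) {
  // Spread body first so chatId and variables, written last, always win.
  return { ...body, chatId, variables };
}
```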
@@ -1,10 +1,11 @@
-import { moduleDispatchResType, ChatItemType } from '@/types/chat';
+import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { SelectAppItemType } from '@fastgpt/global/core/module/type';
-import { dispatchModules } from '@/pages/api/v1/chat/completions';
-import { App } from '@/service/mongo';
+import { dispatchModules } from '../index';
+import { MongoApp } from '@fastgpt/service/core/app/schema';
 import { responseWrite } from '@fastgpt/service/common/response';
-import { ChatRoleEnum, TaskResponseKeyEnum, sseResponseEventEnum } from '@/constants/chat';
+import { ChatRoleEnum, TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
+import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
 import { textAdaptGptResponse } from '@/utils/adapt';
 
 type Props = ModuleDispatchProps<{
@@ -18,21 +19,20 @@ type Response = {
   [TaskResponseKeyEnum.history]: ChatItemType[];
 };
 
-export const dispatchAppRequest = async (props: Record<string, any>): Promise<Response> => {
+export const dispatchAppRequest = async (props: Props): Promise<Response> => {
   const {
     res,
     variables,
     user,
     stream,
     detail,
     inputs: { userChatInput, history = [], app }
-  } = props as Props;
+  } = props;
 
   if (!userChatInput) {
     return Promise.reject('Input is empty');
   }
 
-  const appData = await App.findOne({
+  const appData = await MongoApp.findOne({
     _id: app.id,
     userId: user._id
   });
@@ -52,16 +52,12 @@ export const dispatchAppRequest = async (props: Record<string, any>): Promise<Re
   }
 
   const { responseData, answerText } = await dispatchModules({
-    res,
+    ...props,
     modules: appData.modules,
-    user,
-    variables,
     params: {
       history,
       userChatInput
-    },
-    stream,
-    detail
+    }
   });
 
   const completeMessages = history.concat([