This commit is contained in:
Archer
2023-10-17 10:00:32 +08:00
committed by GitHub
parent dd8f2744bf
commit 3b776b6639
98 changed files with 1525 additions and 983 deletions

View File

@@ -1,6 +1,5 @@
import { SystemInputEnum } from '@/constants/app';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { getChatModel } from '@/service/utils/data';
import { AppModuleItemType, VariableItemType } from '@/types/app';
export const getGuideModule = (modules: AppModuleItemType[]) =>
@@ -23,11 +22,3 @@ export const splitGuideModule = (guideModules?: AppModuleItemType) => {
questionGuide
};
};
/**
 * Collect the display names of the chat models referenced by an app's modules.
 *
 * Walks every module, keeps only chat nodes (FlowModuleTypeEnum.chatNode),
 * resolves each node's `model` input through getChatModel(), and returns the
 * non-empty model names. Nodes whose model cannot be resolved are skipped.
 */
export const getChatModelNameList = (modules: AppModuleItemType[]): string[] => {
  const names: string[] = [];
  for (const module of modules) {
    if (module.flowType !== FlowModuleTypeEnum.chatNode) continue;
    // the selected model id lives in the module input with key 'model'
    const modelValue = module.inputs.find((input) => input.key === 'model')?.value;
    const name = getChatModel(modelValue)?.name;
    if (name) {
      names.push(name);
    }
  }
  return names;
};

View File

@@ -62,7 +62,9 @@ const Markdown = ({ source, isChatting = false }: { source: string; isChatting?:
[]
);
const formatSource = source.replace(/\\n/g, '\n ');
const formatSource = source
.replace(/\\n/g, '\n ')
.replace(/(http[s]?:\/\/[^\s。]+)([。,])/g, '$1 $2');
return (
<ReactMarkdown

View File

@@ -35,8 +35,6 @@ const MyModal = ({
>
<ModalOverlay />
<ModalContent
display={'flex'}
flexDirection={'column'}
w={w}
minW={['90vw', '400px']}
maxW={maxW}
@@ -46,7 +44,7 @@ const MyModal = ({
>
{!!title && <ModalHeader>{title}</ModalHeader>}
{onClose && <ModalCloseButton />}
<Box overflow={'overlay'} h={'100%'}>
<Box overflow={'overlay'} h={'100%'} display={'flex'} flexDirection={'column'}>
{children}
</Box>
</ModalContent>

View File

@@ -0,0 +1,64 @@
import React, { useState } from 'react';
import MyModal from '../MyModal';
import { Box, Button, Grid, useTheme } from '@chakra-ui/react';
import { PromptTemplateItem } from '@fastgpt/core/ai/type';
import { ModalBody, ModalFooter } from '@chakra-ui/react';
/**
 * Modal that lets the user pick one prompt template from a list.
 *
 * Renders `templates` as a clickable grid; the chosen entry is highlighted.
 * Confirming calls `onSuccess` with the template's `value` and closes the modal.
 */
const PromptTemplate = ({
  title,
  templates,
  onClose,
  onSuccess
}: {
  title: string;
  templates: PromptTemplateItem[];
  onClose: () => void;
  onSuccess: (e: string) => void;
}) => {
  const theme = useTheme();
  // currently highlighted template; undefined until the user picks one
  const [selected, setSelected] = useState<PromptTemplateItem>();

  const handleConfirm = () => {
    if (!selected) return;
    onSuccess(selected.value);
    onClose();
  };

  return (
    <MyModal isOpen title={title} onClose={onClose}>
      <ModalBody w={'600px'}>
        <Grid gridTemplateColumns={['1fr', '1fr 1fr']} gridGap={4}>
          {templates.map((item) => {
            // titles double as keys, so they identify the active card
            const isActive = item.title === selected?.title;
            return (
              <Box
                key={item.title}
                border={theme.borders.base}
                py={2}
                px={2}
                borderRadius={'md'}
                cursor={'pointer'}
                {...(isActive ? { bg: 'myBlue.100' } : {})}
                onClick={() => setSelected(item)}
              >
                <Box>{item.title}</Box>
                <Box color={'myGray.600'} fontSize={'sm'} whiteSpace={'pre-wrap'}>
                  {item.value}
                </Box>
              </Box>
            );
          })}
        </Grid>
      </ModalBody>
      <ModalFooter>
        {/* disabled until a template is chosen */}
        <Button disabled={!selected} onClick={handleConfirm}>
        </Button>
      </ModalFooter>
    </MyModal>
  );
};
export default PromptTemplate;

View File

@@ -5,7 +5,8 @@ export enum SystemInputEnum {
'switch' = 'switch', // a trigger switch
'history' = 'history',
'userChatInput' = 'userChatInput',
'questionGuide' = 'questionGuide'
'questionGuide' = 'questionGuide',
isResponseAnswerText = 'isResponseAnswerText'
}
export enum SystemOutputEnum {
finish = 'finish'

View File

@@ -9,7 +9,7 @@ import {
} from './index';
import type { AppItemType } from '@/types/app';
import type { FlowModuleTemplateType } from '@/types/core/app/flow';
import { chatModelList } from '@/web/common/store/static';
import { chatModelList, cqModelList } from '@/web/common/store/static';
import {
Input_Template_History,
Input_Template_TFSwitch,
@@ -136,14 +136,14 @@ export const ChatModule: FlowModuleTemplateType = {
key: 'model',
type: FlowInputItemTypeEnum.selectChatModel,
label: '对话模型',
value: chatModelList[0]?.model,
list: chatModelList.map((item) => ({ label: item.name, value: item.model })),
value: chatModelList?.[0]?.model,
customData: () => chatModelList,
required: true,
valueCheck: (val) => !!val
},
{
key: 'temperature',
type: FlowInputItemTypeEnum.slider,
type: FlowInputItemTypeEnum.hidden,
label: '温度',
value: 0,
min: 0,
@@ -156,20 +156,26 @@ export const ChatModule: FlowModuleTemplateType = {
},
{
key: 'maxToken',
type: FlowInputItemTypeEnum.maxToken,
type: FlowInputItemTypeEnum.hidden,
label: '回复上限',
value: chatModelList[0] ? chatModelList[0].contextMaxToken / 2 : 2000,
value: chatModelList?.[0] ? chatModelList[0].maxToken / 2 : 2000,
min: 100,
max: chatModelList[0]?.contextMaxToken || 4000,
max: chatModelList?.[0]?.maxToken || 4000,
step: 50,
markList: [
{ label: '100', value: 100 },
{
label: `${chatModelList[0]?.contextMaxToken || 4000}`,
value: chatModelList[0]?.contextMaxToken || 4000
label: `${chatModelList?.[0]?.maxToken || 4000}`,
value: chatModelList?.[0]?.maxToken || 4000
}
]
},
{
key: 'aiSettings',
type: FlowInputItemTypeEnum.aiSettings,
label: '',
connected: false
},
{
key: 'systemPrompt',
type: FlowInputItemTypeEnum.textarea,
@@ -180,6 +186,13 @@ export const ChatModule: FlowModuleTemplateType = {
placeholder: ChatModelSystemTip,
value: ''
},
{
key: SystemInputEnum.isResponseAnswerText,
type: FlowInputItemTypeEnum.hidden,
label: '返回AI内容',
valueType: FlowValueTypeEnum.boolean,
value: true
},
{
key: 'quoteTemplate',
type: FlowInputItemTypeEnum.hidden,
@@ -196,7 +209,7 @@ export const ChatModule: FlowModuleTemplateType = {
},
{
key: 'quoteQA',
type: FlowInputItemTypeEnum.quoteList,
type: FlowInputItemTypeEnum.target,
label: '引用内容',
description: "对象数组格式,结构:\n [{q:'问题',a:'回答'}]",
valueType: FlowValueTypeEnum.kbQuote,
@@ -216,7 +229,7 @@ export const ChatModule: FlowModuleTemplateType = {
},
{
key: TaskResponseKeyEnum.answerText,
label: '模型回复',
label: 'AI回复',
description: '将在 stream 回复完毕后触发',
valueType: FlowValueTypeEnum.string,
type: FlowOutputItemTypeEnum.source,
@@ -330,12 +343,21 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
showStatus: true,
inputs: [
Input_Template_TFSwitch,
{
key: 'model',
type: FlowInputItemTypeEnum.selectChatModel,
label: '分类模型',
value: cqModelList?.[0]?.model,
customData: () => cqModelList,
required: true,
valueCheck: (val) => !!val
},
{
key: 'systemPrompt',
type: FlowInputItemTypeEnum.textarea,
valueType: FlowValueTypeEnum.string,
value: '',
label: '系统提示词',
label: '背景知识',
description:
'你可以添加一些特定内容的介绍,从而更好的识别用户的问题类型。这个内容通常是给模型介绍一个它不知道的内容。',
placeholder: '例如: \n1. Laf 是一个云函数开发平台……\n2. Sealos 是一个集群操作系统'
@@ -504,7 +526,7 @@ export const AppModule: FlowModuleTemplateType = {
},
{
key: TaskResponseKeyEnum.answerText,
label: '模型回复',
label: 'AI回复',
description: '将在应用完全结束后触发',
valueType: FlowValueTypeEnum.string,
type: FlowOutputItemTypeEnum.source,
@@ -757,7 +779,7 @@ export const appTemplates: (AppItemType & {
outputs: [
{
key: 'answerText',
label: '模型回复',
label: 'AI回复',
description: '直接响应,无需配置',
type: 'hidden',
targets: []
@@ -1094,7 +1116,7 @@ export const appTemplates: (AppItemType & {
outputs: [
{
key: 'answerText',
label: '模型回复',
label: 'AI回复',
description: '直接响应,无需配置',
type: 'hidden',
targets: []
@@ -1401,7 +1423,7 @@ export const appTemplates: (AppItemType & {
outputs: [
{
key: 'answerText',
label: '模型回复',
label: 'AI回复',
description: '将在 stream 回复完毕后触发',
valueType: 'string',
type: 'source',
@@ -1863,7 +1885,7 @@ export const appTemplates: (AppItemType & {
outputs: [
{
key: 'answerText',
label: '模型回复',
label: 'AI回复',
description: '将在 stream 回复完毕后触发',
valueType: 'string',
type: 'source',

View File

@@ -13,7 +13,7 @@ export enum FlowInputItemTypeEnum {
chatInput = 'chatInput',
selectApp = 'selectApp',
// chat special input
quoteList = 'quoteList',
aiSettings = 'aiSettings',
maxToken = 'maxToken',
selectChatModel = 'selectChatModel',
// dataset special input

View File

@@ -1,5 +1,98 @@
import type { AppSchema } from '@/types/mongoSchema';
import type { OutLinkEditType } from '@fastgpt/support/outLink/type.d';
import type {
LLMModelItemType,
ChatModelItemType,
FunctionModelItemType,
VectorModelItemType
} from '@/types/model';
// Built-in fallback model catalogs. getInitConfig() falls back to these
// whenever data/config.json omits the matching section, e.g.
// `global.qaModels = res.QAModels || defaultQAModels`.

// Default chat (conversation) models.
export const defaultChatModels: ChatModelItemType[] = [
  {
    model: 'gpt-3.5-turbo',
    name: 'GPT35-4k',
    price: 0,
    maxToken: 4000,
    quoteMaxToken: 2000,
    maxTemperature: 1.2,
    censor: false,
    defaultSystemChatPrompt: ''
  },
  {
    model: 'gpt-3.5-turbo-16k',
    name: 'GPT35-16k',
    maxToken: 16000,
    price: 0,
    quoteMaxToken: 8000,
    maxTemperature: 1.2,
    censor: false,
    defaultSystemChatPrompt: ''
  },
  {
    model: 'gpt-4',
    name: 'GPT4-8k',
    maxToken: 8000,
    price: 0,
    quoteMaxToken: 4000,
    maxTemperature: 1.2,
    censor: false,
    defaultSystemChatPrompt: ''
  }
];
// Default models for file QA splitting.
export const defaultQAModels: LLMModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-16k',
    name: 'GPT35-16k',
    maxToken: 16000,
    price: 0
  }
];
// Default question-classification (CQ) models; function-call capable.
export const defaultCQModels: FunctionModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-16k',
    name: 'GPT35-16k',
    maxToken: 16000,
    price: 0,
    functionCall: true,
    functionPrompt: ''
  },
  {
    model: 'gpt-4',
    name: 'GPT4-8k',
    maxToken: 8000,
    price: 0,
    functionCall: true,
    functionPrompt: ''
  }
];
// Default content-extraction models; function-call capable.
export const defaultExtractModels: FunctionModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-16k',
    name: 'GPT35-16k',
    maxToken: 16000,
    price: 0,
    functionCall: true,
    functionPrompt: ''
  }
];
// Default question-guide (next-question suggestion) models.
export const defaultQGModels: LLMModelItemType[] = [
  {
    model: 'gpt-3.5-turbo',
    name: 'GPT35-4K',
    maxToken: 4000,
    price: 0
  }
];
// Default embedding models for vector search.
export const defaultVectorModels: VectorModelItemType[] = [
  {
    model: 'text-embedding-ada-002',
    name: 'Embedding-2',
    price: 0,
    defaultToken: 500,
    maxToken: 3000
  }
];
export const defaultApp: AppSchema = {
_id: '',

View File

@@ -1,14 +1,17 @@
import {
type QAModelItemType,
type ChatModelItemType,
type VectorModelItemType,
FunctionModelItemType
import type {
ChatModelItemType,
FunctionModelItemType,
LLMModelItemType,
VectorModelItemType
} from '@/types/model';
import type { FeConfigsType } from '@fastgpt/common/type/index.d';
export type InitDateResponse = {
chatModels: ChatModelItemType[];
qaModel: QAModelItemType;
qaModels: LLMModelItemType[];
cqModels: FunctionModelItemType[];
extractModels: FunctionModelItemType[];
qgModels: LLMModelItemType[];
vectorModels: VectorModelItemType[];
feConfigs: FeConfigsType;
priceMd: string;

View File

@@ -1,5 +1,23 @@
export const defaultQuoteTemplate = `{instruction:"{{q}}",output:"{{a}}"}`;
export const defaultQuotePrompt = `你的背景知识:
import { PromptTemplateItem } from '@fastgpt/core/ai/type.d';
// Quote-template presets shown in the template picker. Each `value` is a
// per-record template interpolated with dataset fields ({{q}}, {{a}}, ...).
export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
  {
    title: '标准模板',
    desc: '包含 q 和 a 两个变量的标准模板',
    value: `{instruction:"{{q}}",output:"{{a}}"}`
  },
  {
    title: '全部变量',
    // fixed: desc was copy-pasted from the standard template although this
    // template interpolates q, a, source, file_id and index
    desc: '包含 q、a、source、file_id、index 全部变量的模板',
    value: `{instruction:"{{q}}",output:"{{a}}",source:"{{source}}",file_id:"{{file_id}}",index:"{{index}}"}`
  }
];
export const Prompt_QuotePromptList: PromptTemplateItem[] = [
{
title: '标准模式',
desc: '',
value: `你的背景知识:
"""
{{quote}}
"""
@@ -7,4 +25,19 @@ export const defaultQuotePrompt = `你的背景知识:
1. 背景知识是最新的,其中 instruction 是相关介绍output 是预期回答或补充。
2. 使用背景知识回答问题。
3. 背景知识无法满足问题时,你需严谨的回答问题。
我的问题是:"{{question}}"`;
我的问题是:"{{question}}"`
},
{
title: '严格模式',
desc: '',
value: `你的背景知识:
"""
{{quote}}
"""
对话要求:
1. 背景知识是最新的,其中 instruction 是相关介绍output 是预期回答或补充。
2. 使用背景知识回答问题。
3. 背景知识无法满足问题时你需要回答我不清楚关于xxx的内容。
我的问题是:"{{question}}"`
}
];

View File

@@ -32,8 +32,6 @@ function Error() {
}
export async function getServerSideProps(context: any) {
console.log('[render error]: ', context);
return {
props: { ...(await serviceSideProps(context)) }
};

View File

@@ -3,7 +3,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import { sseErrRes } from '@/service/response';
import { sseResponseEventEnum } from '@/constants/chat';
import { sseResponse } from '@/service/utils/tools';
import { responseWrite } from '@fastgpt/common/tools/stream';
import { AppModuleItemType } from '@/types/app';
import { dispatchModules } from '@/pages/api/v1/chat/completions';
import { pushChatBill } from '@/service/common/bill/push';
@@ -59,12 +59,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
detail: true
});
sseResponse({
responseWrite({
res,
event: sseResponseEventEnum.answer,
data: '[DONE]'
});
sseResponse({
responseWrite({
res,
event: sseResponseEventEnum.appStreamResponse,
data: JSON.stringify(responseData)

View File

@@ -6,7 +6,8 @@ import { authUser } from '@fastgpt/support/user/auth';
import { ChatItemType } from '@/types/chat';
import { authApp } from '@/service/utils/auth';
import type { ChatSchema } from '@/types/mongoSchema';
import { getChatModelNameList, getGuideModule } from '@/components/ChatBox/utils';
import { getGuideModule } from '@/components/ChatBox/utils';
import { getChatModelNameListByModules } from '@/service/core/app/module';
import { TaskResponseKeyEnum } from '@/constants/chat';
/* 初始化我的聊天框,需要身份验证 */
@@ -83,7 +84,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
appId,
app: {
userGuideModule: getGuideModule(app.modules),
chatModels: getChatModelNameList(app.modules),
chatModels: getChatModelNameListByModules(app.modules),
name: app.name,
avatar: app.avatar,
intro: app.intro,

View File

@@ -12,6 +12,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { userId } = await authUser({ req, authToken: true, authApiKey: true });
const qaModel = global.qaModels[0];
const { _id } = await Bill.create({
userId,
appName: name,
@@ -25,7 +27,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
},
{
moduleName: 'QA 拆分',
model: global.qaModel.name,
model: qaModel?.name,
amount: 0,
tokenLen: 0
}

View File

@@ -4,7 +4,6 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import type { CreateQuestionGuideParams } from '@/global/core/api/aiReq.d';
import { pushQuestionGuideBill } from '@/service/common/bill/push';
import { defaultQGModel } from '@/pages/api/system/getInitData';
import { createQuestionGuide } from '@fastgpt/core/ai/functions/createQuestionGuide';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -23,9 +22,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error('user not found');
}
const qgModel = global.qgModels[0];
const { result, tokens } = await createQuestionGuide({
messages,
model: (global.qgModel || defaultQGModel).model
model: qgModel.model
});
jsonRes(res, {

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { authUser } from '@fastgpt/support/user/auth';
import { getVectorModel } from '@/service/utils/data';
import { getVectorModel } from '@/service/core/ai/model';
import type { DatasetsItemType } from '@/types/core/dataset';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { PgDatasetTableName } from '@/constants/plugin';
import { connectToDatabase } from '@/service/mongo';

View File

@@ -8,7 +8,7 @@ import { findAllChildrenIds } from '../delete';
import QueryStream from 'pg-query-stream';
import { PgClient } from '@/service/pg';
import { addLog } from '@/service/utils/tools';
import { responseWriteController } from '@/service/common/stream';
import { responseWriteController } from '@fastgpt/common/tools/stream';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {

View File

@@ -7,10 +7,10 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authDataset } from '@/service/utils/auth';
import { authUser } from '@fastgpt/support/user/auth';
import { withNextCors } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { PgDatasetTableName } from '@/constants/plugin';
import { insertData2Dataset, PgClient } from '@/service/pg';
import { getVectorModel } from '@/service/utils/data';
import { getVectorModel } from '@/service/core/ai/model';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';

View File

@@ -5,15 +5,15 @@ import { connectToDatabase, TrainingData } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { authUser } from '@fastgpt/support/user/auth';
import { authDataset } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools';
import { getVectorModel } from '@/service/utils/data';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';
import type { PushDataResponse } from '@/global/core/api/datasetRes.d';
import type { PushDataProps } from '@/global/core/api/datasetReq.d';
import { authFileIdValid } from '@/service/dataset/auth';
import { getVectorModel } from '@/service/core/ai/model';
const modeMap = {
[TrainingModeEnum.index]: true,
@@ -71,7 +71,7 @@ export async function pushDataToKb({
if (mode === TrainingModeEnum.index) {
const vectorModel = (await MongoDataset.findById(kbId, 'vectorModel'))?.vectorModel;
return getVectorModel(vectorModel || global.vectorModels[0].model);
return getVectorModel(vectorModel);
}
return global.vectorModels[0];
})()
@@ -79,7 +79,7 @@ export async function pushDataToKb({
const modeMaxToken = {
[TrainingModeEnum.index]: vectorModel.maxToken * 1.5,
[TrainingModeEnum.qa]: global.qaModel.maxToken * 0.8
[TrainingModeEnum.qa]: global.qaModels[0].maxToken * 0.8
};
// filter repeat or equal content

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { getVector } from '@/pages/api/openapi/plugin/vector';

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import { getVectorModel } from '@/service/utils/data';
import { getVectorModel } from '@/service/core/ai/model';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import { getVectorModel } from '@/service/utils/data';
import { getVectorModel } from '@/service/core/ai/model';
import type { DatasetsItemType } from '@/types/core/dataset';
import { DatasetTypeEnum } from '@fastgpt/core/dataset/constant';
import { MongoDataset } from '@fastgpt/core/dataset/schema';

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { getVector } from '../../openapi/plugin/vector';
import { PgDatasetTableName } from '@/constants/plugin';
import { MongoDataset } from '@fastgpt/core/dataset/schema';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authBalanceByUid, authUser } from '@fastgpt/support/user/auth';
import { withNextCors } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { getAIApi } from '@fastgpt/core/ai/config';
import { pushGenerateVectorBill } from '@/service/common/bill/push';
import { connectToDatabase } from '@/service/mongo';

View File

@@ -1,5 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { withNextCors } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import ChatCompletion from '@/pages/api/v1/chat/completions';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -6,8 +6,9 @@ import { MongoUser } from '@fastgpt/support/user/schema';
import type { InitShareChatResponse } from '@/global/support/api/outLinkRes.d';
import { authApp } from '@/service/utils/auth';
import { HUMAN_ICON } from '@/constants/chat';
import { getChatModelNameList, getGuideModule } from '@/components/ChatBox/utils';
import { getGuideModule } from '@/components/ChatBox/utils';
import { authShareChatInit } from '@fastgpt/support/outLink/auth';
import { getChatModelNameListByModules } from '@/service/core/app/module';
/* init share chat window */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -51,7 +52,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
userAvatar: user?.avatar || HUMAN_ICON,
app: {
userGuideModule: getGuideModule(app.modules),
chatModels: getChatModelNameList(app.modules),
chatModels: getChatModelNameListByModules(app.modules),
name: app.name,
avatar: app.avatar,
intro: app.intro

View File

@@ -4,10 +4,23 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { readFileSync } from 'fs';
import type { InitDateResponse } from '@/global/common/api/systemRes';
import type { VectorModelItemType, FunctionModelItemType } from '@/types/model';
import { formatPrice } from '@fastgpt/common/bill';
import { getTikTokenEnc } from '@/utils/common/tiktoken';
import { initHttpAgent } from '@fastgpt/core/init';
import {
defaultChatModels,
defaultQAModels,
defaultCQModels,
defaultExtractModels,
defaultQGModels,
defaultVectorModels
} from '@/constants/model';
import {
ChatModelItemType,
FunctionModelItemType,
LLMModelItemType,
VectorModelItemType
} from '@/types/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
getInitConfig();
@@ -17,7 +30,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
data: {
feConfigs: global.feConfigs,
chatModels: global.chatModels,
qaModel: global.qaModel,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
qgModels: global.qgModels,
vectorModels: global.vectorModels,
priceMd: global.priceMd,
systemVersion: global.systemVersion || '0.0.0'
@@ -42,72 +58,6 @@ const defaultFeConfigs: FeConfigsType = {
},
scripts: []
};
const defaultChatModels = [
{
model: 'gpt-3.5-turbo',
name: 'GPT35-4k',
contextMaxToken: 4000,
quoteMaxToken: 2400,
maxTemperature: 1.2,
price: 0
},
{
model: 'gpt-3.5-turbo-16k',
name: 'GPT35-16k',
contextMaxToken: 16000,
quoteMaxToken: 8000,
maxTemperature: 1.2,
price: 0
},
{
model: 'gpt-4',
name: 'GPT4-8k',
contextMaxToken: 8000,
quoteMaxToken: 4000,
maxTemperature: 1.2,
price: 0
}
];
const defaultQAModel = {
model: 'gpt-3.5-turbo-16k',
name: 'GPT35-16k',
maxToken: 16000,
price: 0
};
export const defaultExtractModel: FunctionModelItemType = {
model: 'gpt-3.5-turbo-16k',
name: 'GPT35-16k',
maxToken: 16000,
price: 0,
prompt: '',
functionCall: true
};
export const defaultCQModel: FunctionModelItemType = {
model: 'gpt-3.5-turbo-16k',
name: 'GPT35-16k',
maxToken: 16000,
price: 0,
prompt: '',
functionCall: true
};
export const defaultQGModel: FunctionModelItemType = {
model: 'gpt-3.5-turbo',
name: 'FastAI-4k',
maxToken: 4000,
price: 1.5,
prompt: '',
functionCall: false
};
const defaultVectorModels: VectorModelItemType[] = [
{
model: 'text-embedding-ada-002',
name: 'Embedding-2',
price: 0,
defaultToken: 500,
maxToken: 3000
}
];
export function initGlobal() {
// init tikToken
@@ -127,7 +77,16 @@ export function getInitConfig() {
const filename =
process.env.NODE_ENV === 'development' ? 'data/config.local.json' : '/app/data/config.json';
const res = JSON.parse(readFileSync(filename, 'utf-8'));
const res = JSON.parse(readFileSync(filename, 'utf-8')) as {
FeConfig: FeConfigsType;
SystemParams: SystemEnvType;
ChatModels: ChatModelItemType[];
QAModels: LLMModelItemType[];
CQModels: FunctionModelItemType[];
ExtractModels: FunctionModelItemType[];
QGModels: LLMModelItemType[];
VectorModels: VectorModelItemType[];
};
console.log(`System Version: ${global.systemVersion}`);
@@ -137,11 +96,13 @@ export function getInitConfig() {
? { ...defaultSystemEnv, ...res.SystemParams }
: defaultSystemEnv;
global.feConfigs = res.FeConfig ? { ...defaultFeConfigs, ...res.FeConfig } : defaultFeConfigs;
global.chatModels = res.ChatModels || defaultChatModels;
global.qaModel = res.QAModel || defaultQAModel;
global.extractModel = res.ExtractModel || defaultExtractModel;
global.cqModel = res.CQModel || defaultCQModel;
global.qgModel = res.QGModel || defaultQGModel;
global.qaModels = res.QAModels || defaultQAModels;
global.cqModels = res.CQModels || defaultCQModels;
global.extractModels = res.ExtractModels || defaultExtractModels;
global.qgModels = res.QGModels || defaultQGModels;
global.vectorModels = res.VectorModels || defaultVectorModels;
} catch (error) {
setDefaultData();
@@ -152,13 +113,27 @@ export function getInitConfig() {
export function setDefaultData() {
global.systemEnv = defaultSystemEnv;
global.feConfigs = defaultFeConfigs;
global.chatModels = defaultChatModels;
global.qaModel = defaultQAModel;
global.qaModels = defaultQAModels;
global.cqModels = defaultCQModels;
global.extractModels = defaultExtractModels;
global.qgModels = defaultQGModels;
global.vectorModels = defaultVectorModels;
global.extractModel = defaultExtractModel;
global.cqModel = defaultCQModel;
global.qgModel = defaultQGModel;
global.priceMd = '';
console.log('use default config');
console.log({
feConfigs: defaultFeConfigs,
systemEnv: defaultSystemEnv,
chatModels: defaultChatModels,
qaModels: defaultQAModels,
cqModels: defaultCQModels,
extractModels: defaultExtractModels,
qgModels: defaultQGModels,
vectorModels: defaultVectorModels
});
}
export function getSystemVersion() {
@@ -187,10 +162,18 @@ ${global.vectorModels
${global.chatModels
?.map((item) => `| 对话-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
| 文件QA拆分 | ${formatPrice(global.qaModel?.price, 1000)} |
| 高级编排 - 问题分类 | ${formatPrice(global.cqModel?.price, 1000)} |
| 高级编排 - 内容提取 | ${formatPrice(global.extractModel?.price, 1000)} |
| 下一步指引 | ${formatPrice(global.qgModel?.price, 1000)} |
${global.qaModels
?.map((item) => `| 文件QA拆分-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.cqModels
?.map((item) => `| 问题分类-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.extractModels
?.map((item) => `| 内容提取-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
${global.qgModels
?.map((item) => `| 下一步指引-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
`;
console.log(global.priceMd);
}

View File

@@ -2,8 +2,8 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { MongoUser } from '@fastgpt/support/user/schema';
import { setCookie } from '@/service/utils/tools';
import { generateToken } from '@fastgpt/support/user/tools';
import { setCookie } from '@fastgpt/support/user/auth';
import { generateToken } from '@fastgpt/support/user/auth';
import { connectToDatabase } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -1,7 +1,7 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { clearCookie } from '@/service/utils/tools';
import { clearCookie } from '@fastgpt/support/user/auth';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {

View File

@@ -3,7 +3,8 @@ import { authApp } from '@/service/utils/auth';
import { authUser } from '@fastgpt/support/user/auth';
import { AuthUserTypeEnum } from '@fastgpt/support/user/auth';
import { sseErrRes, jsonRes } from '@/service/response';
import { addLog, withNextCors } from '@/service/utils/tools';
import { addLog } from '@/service/utils/tools';
import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { ChatRoleEnum, ChatSourceEnum, sseResponseEventEnum } from '@/constants/chat';
import {
dispatchHistory,
@@ -21,7 +22,7 @@ import type { MessageItemType } from '@/types/core/chat/type';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/service/utils/chat/saveChat';
import { sseResponse } from '@/service/utils/tools';
import { responseWrite } from '@fastgpt/common/tools/stream';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { FlowModuleTypeEnum, initModuleType } from '@/constants/flow';
import { AppModuleItemType, RunningModuleItemType } from '@/types/app';
@@ -217,7 +218,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const feResponseData = isOwner ? responseData : selectShareResponse({ responseData });
if (stream) {
sseResponse({
responseWrite({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({
@@ -225,14 +226,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
finish_reason: 'stop'
})
});
sseResponse({
responseWrite({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: '[DONE]'
});
if (responseDetail && detail) {
sseResponse({
responseWrite({
res,
event: sseResponseEventEnum.appStreamResponse,
data: JSON.stringify(feResponseData)
@@ -323,13 +324,16 @@ export async function dispatchModules({
let chatAnswerText = ''; // AI answer
let runningTime = Date.now();
function pushStore({
answerText = '',
responseData
}: {
answerText?: string;
responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
}) {
function pushStore(
{ inputs = [] }: RunningModuleItemType,
{
answerText = '',
responseData
}: {
answerText?: string;
responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
}
) {
const time = Date.now();
if (responseData) {
if (Array.isArray(responseData)) {
@@ -342,7 +346,12 @@ export async function dispatchModules({
}
}
runningTime = time;
chatAnswerText += answerText;
const isResponseAnswerText =
inputs.find((item) => item.key === SystemInputEnum.isResponseAnswerText)?.value ?? true;
if (isResponseAnswerText) {
chatAnswerText += answerText;
}
}
function moduleInput(
module: RunningModuleItemType,
@@ -376,7 +385,7 @@ export async function dispatchModules({
module: RunningModuleItemType,
result: Record<string, any> = {}
): Promise<any> {
pushStore(result);
pushStore(module, result);
return Promise.all(
module.outputs.map((outputItem) => {
if (result[outputItem.key] === undefined) return;
@@ -505,7 +514,7 @@ export function responseStatus({
name?: string;
}) {
if (!name) return;
sseResponse({
responseWrite({
res,
event: sseResponseEventEnum.moduleStatus,
data: JSON.stringify({

View File

@@ -1,4 +1,4 @@
import React from 'react';
import React, { useMemo, useState } from 'react';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'react-i18next';
import { EditFormType } from '@/utils/app';
@@ -11,43 +11,65 @@ import {
Link,
ModalBody,
ModalFooter,
Switch,
Textarea
} from '@chakra-ui/react';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/global/core/prompt/AIChat';
import { feConfigs } from '@/web/common/store/static';
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
import { chatModelList, feConfigs } from '@/web/common/store/static';
import MySlider from '@/components/Slider';
import { SystemInputEnum } from '@/constants/app';
import dynamic from 'next/dynamic';
import { PromptTemplateItem } from '@fastgpt/core/ai/type';
const PromptTemplate = dynamic(() => import('@/components/PromptTemplate'));
const AIChatSettingsModal = ({
isAdEdit,
onClose,
onSuccess,
defaultData
}: {
isAdEdit?: boolean;
onClose: () => void;
onSuccess: (e: EditFormType['chatModel']) => void;
defaultData: EditFormType['chatModel'];
}) => {
const { t } = useTranslation();
const [refresh, setRefresh] = useState(false);
const { register, handleSubmit } = useForm({
const { register, handleSubmit, getValues, setValue } = useForm({
defaultValues: defaultData
});
const [selectTemplateData, setSelectTemplateData] = useState<{
title: string;
key: 'quoteTemplate' | 'quotePrompt';
templates: PromptTemplateItem[];
}>();
const tokenLimit = useMemo(() => {
return chatModelList.find((item) => item.model === getValues('model'))?.maxToken || 4000;
}, [getValues, refresh]);
const LabelStyles: BoxProps = {
fontWeight: 'bold',
mb: 1,
fontSize: ['sm', 'md']
};
const selectTemplateBtn: BoxProps = {
color: 'myBlue.600',
cursor: 'pointer'
};
return (
<MyModal
isOpen
title={
<Flex alignItems={'flex-end'}>
{t('app.Quote Prompt Settings')}
{t('app.AI Settings')}
{feConfigs?.show_doc && (
<Link
href={'https://doc.fastgpt.run/docs/use-cases/prompt/'}
href={'https://doc.fastgpt.run/docs/use-cases/ai_settings/'}
target={'_blank'}
ml={1}
textDecoration={'underline'}
@@ -59,39 +81,134 @@ const AIChatSettingsModal = ({
)}
</Flex>
}
isCentered
w={'700px'}
h={['90vh', 'auto']}
>
<ModalBody>
<ModalBody flex={['1 0 0', 'auto']} overflowY={'auto'}>
{isAdEdit && (
<Flex alignItems={'center'}>
<Box {...LabelStyles} w={'80px'}>
AI内容
</Box>
<Box flex={1} ml={'10px'}>
<Switch
isChecked={getValues(SystemInputEnum.isResponseAnswerText)}
size={'lg'}
onChange={(e) => {
const value = e.target.checked;
setValue(SystemInputEnum.isResponseAnswerText, value);
setRefresh((state) => !state);
}}
/>
</Box>
</Flex>
)}
<Flex alignItems={'center'} mb={10} mt={isAdEdit ? 8 : 5}>
<Box {...LabelStyles} mr={2} w={'80px'}>
</Box>
<Box flex={1} ml={'10px'}>
<MySlider
markList={[
{ label: '严谨', value: 0 },
{ label: '发散', value: 10 }
]}
width={'95%'}
min={0}
max={10}
value={getValues('temperature')}
onChange={(e) => {
setValue('temperature', e);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
<Flex alignItems={'center'} mt={12} mb={10}>
<Box {...LabelStyles} mr={2} w={'80px'}>
</Box>
<Box flex={1} ml={'10px'}>
<MySlider
markList={[
{ label: '100', value: 100 },
{ label: `${tokenLimit}`, value: tokenLimit }
]}
width={'95%'}
min={100}
max={tokenLimit}
step={50}
value={getValues('maxToken')}
onChange={(val) => {
setValue('maxToken', val);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
<Box>
<Box {...LabelStyles}>
<Flex {...LabelStyles} mb={1}>
<MyTooltip
label={t('template.Quote Content Tip', { default: defaultQuoteTemplate })}
label={t('template.Quote Content Tip', {
default: Prompt_QuoteTemplateList[0].value
})}
forceShow
>
<QuestionOutlineIcon display={['none', 'inline']} ml={1} />
</MyTooltip>
</Box>
<Box flex={1} />
<Box
{...selectTemplateBtn}
onClick={() =>
setSelectTemplateData({
title: '选择引用内容模板',
key: 'quoteTemplate',
templates: Prompt_QuoteTemplateList
})
}
>
</Box>
</Flex>
<Textarea
rows={4}
placeholder={t('template.Quote Content Tip', { default: defaultQuoteTemplate }) || ''}
rows={6}
placeholder={
t('template.Quote Content Tip', { default: Prompt_QuoteTemplateList[0].value }) || ''
}
borderColor={'myGray.100'}
{...register('quoteTemplate')}
/>
</Box>
<Box mt={4}>
<Box {...LabelStyles}>
<Flex {...LabelStyles} mb={1}>
<MyTooltip
label={t('template.Quote Prompt Tip', { default: defaultQuotePrompt })}
label={t('template.Quote Prompt Tip', { default: Prompt_QuotePromptList[0].value })}
forceShow
>
<QuestionOutlineIcon display={['none', 'inline']} ml={1} />
</MyTooltip>
</Box>
<Box flex={1} />
<Box
{...selectTemplateBtn}
onClick={() =>
setSelectTemplateData({
title: '选择引用提示词模板',
key: 'quotePrompt',
templates: Prompt_QuotePromptList
})
}
>
</Box>
</Flex>
<Textarea
rows={6}
placeholder={t('template.Quote Prompt Tip', { default: defaultQuotePrompt }) || ''}
rows={11}
placeholder={
t('template.Quote Prompt Tip', { default: Prompt_QuotePromptList[0].value }) || ''
}
borderColor={'myGray.100'}
{...register('quotePrompt')}
/>
@@ -105,6 +222,14 @@ const AIChatSettingsModal = ({
{t('Confirm')}
</Button>
</ModalFooter>
{!!selectTemplateData && (
<PromptTemplate
title={selectTemplateData.title}
templates={selectTemplateData.templates}
onClose={() => setSelectTemplateData(undefined)}
onSuccess={(e) => setValue(selectTemplateData.key, e)}
/>
)}
</MyModal>
);
};

View File

@@ -0,0 +1,229 @@
import React, { useCallback, useRef, useState } from 'react';
import { Box, Flex, IconButton, useTheme, useDisclosure } from '@chakra-ui/react';
import { SmallCloseIcon } from '@chakra-ui/icons';
import { FlowInputItemTypeEnum } from '@/constants/flow';
import { FlowOutputTargetItemType } from '@/types/core/app/flow';
import { AppModuleItemType } from '@/types/app';
import { useRequest } from '@/web/common/hooks/useRequest';
import type { AppSchema } from '@/types/mongoSchema';
import { useUserStore } from '@/web/support/store/user';
import { useTranslation } from 'next-i18next';
import { useCopyData } from '@/web/common/hooks/useCopyData';
import { AppTypeEnum } from '@/constants/app';
import dynamic from 'next/dynamic';
import MyIcon from '@/components/Icon';
import MyTooltip from '@/components/MyTooltip';
import ChatTest, { type ChatTestComponentRef } from './ChatTest';
import { useFlowStore } from './Provider';
const ImportSettings = dynamic(() => import('./ImportSettings'));
type Props = { app: AppSchema; onCloseSettings: () => void };
const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
app,
ChatTestRef,
testModules,
setTestModules,
onCloseSettings
}: Props & {
ChatTestRef: React.RefObject<ChatTestComponentRef>;
testModules?: AppModuleItemType[];
setTestModules: React.Dispatch<AppModuleItemType[] | undefined>;
}) {
const theme = useTheme();
const { t } = useTranslation();
const { copyData } = useCopyData();
const { isOpen: isOpenImport, onOpen: onOpenImport, onClose: onCloseImport } = useDisclosure();
const { updateAppDetail } = useUserStore();
const { nodes, edges, onFixView } = useFlowStore();
const flow2AppModules = useCallback(() => {
const modules: AppModuleItemType[] = nodes.map((item) => ({
moduleId: item.data.moduleId,
name: item.data.name,
flowType: item.data.flowType,
showStatus: item.data.showStatus,
position: item.position,
inputs: item.data.inputs.map((item) => ({
...item,
connected: item.connected ?? item.type !== FlowInputItemTypeEnum.target
})),
outputs: item.data.outputs.map((item) => ({
...item,
targets: [] as FlowOutputTargetItemType[]
}))
}));
// update inputs and outputs
modules.forEach((module) => {
module.inputs.forEach((input) => {
input.connected =
input.connected ||
!!edges.find(
(edge) => edge.target === module.moduleId && edge.targetHandle === input.key
);
});
module.outputs.forEach((output) => {
output.targets = edges
.filter(
(edge) =>
edge.source === module.moduleId &&
edge.sourceHandle === output.key &&
edge.targetHandle
)
.map((edge) => ({
moduleId: edge.target,
key: edge.targetHandle || ''
}));
});
});
return modules;
}, [edges, nodes]);
const { mutate: onclickSave, isLoading } = useRequest({
mutationFn: () => {
const modules = flow2AppModules();
// check required connect
for (let i = 0; i < modules.length; i++) {
const item = modules[i];
if (item.inputs.find((input) => input.required && !input.connected)) {
return Promise.reject(`${item.name}】存在未连接的必填输入`);
}
if (item.inputs.find((input) => input.valueCheck && !input.valueCheck(input.value))) {
return Promise.reject(`${item.name}】存在为填写的必填项`);
}
}
return updateAppDetail(app._id, {
modules: modules,
type: AppTypeEnum.advanced
});
},
successToast: '保存配置成功',
errorToast: '保存配置异常',
onSuccess() {
ChatTestRef.current?.resetChatTest();
}
});
return (
<>
<Flex
py={3}
px={[2, 5, 8]}
borderBottom={theme.borders.base}
alignItems={'center'}
userSelect={'none'}
>
<MyTooltip label={'返回'} offset={[10, 10]}>
<IconButton
size={'sm'}
icon={<MyIcon name={'back'} w={'14px'} />}
borderRadius={'md'}
borderColor={'myGray.300'}
variant={'base'}
aria-label={''}
onClick={() => {
onCloseSettings();
onFixView();
}}
/>
</MyTooltip>
<Box ml={[3, 6]} fontSize={['md', '2xl']} flex={1}>
{app.name}
</Box>
<MyTooltip label={t('app.Import Configs')}>
<IconButton
mr={[3, 6]}
icon={<MyIcon name={'importLight'} w={['14px', '16px']} />}
borderRadius={'lg'}
variant={'base'}
aria-label={'save'}
onClick={onOpenImport}
/>
</MyTooltip>
<MyTooltip label={t('app.Export Configs')}>
<IconButton
mr={[3, 6]}
icon={<MyIcon name={'export'} w={['14px', '16px']} />}
borderRadius={'lg'}
variant={'base'}
aria-label={'save'}
onClick={() =>
copyData(
JSON.stringify(flow2AppModules(), null, 2),
t('app.Export Config Successful')
)
}
/>
</MyTooltip>
{testModules ? (
<IconButton
mr={[3, 6]}
icon={<SmallCloseIcon fontSize={'25px'} />}
variant={'base'}
color={'myGray.600'}
borderRadius={'lg'}
aria-label={''}
onClick={() => setTestModules(undefined)}
/>
) : (
<MyTooltip label={'测试对话'}>
<IconButton
mr={[3, 6]}
icon={<MyIcon name={'chat'} w={['14px', '16px']} />}
borderRadius={'lg'}
aria-label={'save'}
variant={'base'}
onClick={() => {
setTestModules(flow2AppModules());
}}
/>
</MyTooltip>
)}
<MyTooltip label={'保存配置'}>
<IconButton
icon={<MyIcon name={'save'} w={['14px', '16px']} />}
borderRadius={'lg'}
isLoading={isLoading}
aria-label={'save'}
onClick={onclickSave}
/>
</MyTooltip>
</Flex>
{isOpenImport && <ImportSettings onClose={onCloseImport} />}
</>
);
});
const Header = (props: Props) => {
const { app } = props;
const ChatTestRef = useRef<ChatTestComponentRef>(null);
const [testModules, setTestModules] = useState<AppModuleItemType[]>();
return (
<>
<RenderHeaderContainer
{...props}
ChatTestRef={ChatTestRef}
testModules={testModules}
setTestModules={setTestModules}
/>
<ChatTest
ref={ChatTestRef}
modules={testModules}
app={app}
onClose={() => setTestModules(undefined)}
/>
</>
);
};
export default React.memo(Header);

View File

@@ -1,4 +1,4 @@
import React, { useMemo } from 'react';
import React from 'react';
import { NodeProps } from 'reactflow';
import NodeCard from '../modules/NodeCard';
import { FlowModuleItemType } from '@/types/core/app/flow';
@@ -7,11 +7,8 @@ import Container from '../modules/Container';
import RenderInput from '../render/RenderInput';
import RenderOutput from '../render/RenderOutput';
import { useFlowStore } from '../Provider';
const NodeChat = ({ data }: NodeProps<FlowModuleItemType>) => {
const { moduleId, inputs, outputs } = data;
const { onChangeNode } = useFlowStore();
return (
<NodeCard minW={'400px'} {...data}>

View File

@@ -5,14 +5,11 @@ import {
type EdgeChange,
useNodesState,
useEdgesState,
XYPosition,
useViewport,
Connection,
addEdge
} from 'reactflow';
import type {
FlowModuleItemType,
FlowModuleTemplateType,
FlowOutputTargetItemType,
FlowModuleItemChangeProps
} from '@/types/core/app/flow';
@@ -44,7 +41,6 @@ export type useFlowStoreType = {
setEdges: Dispatch<SetStateAction<Edge<any>[]>>;
onEdgesChange: OnChange<EdgeChange>;
onFixView: () => void;
onAddNode: (e: { template: FlowModuleTemplateType; position: XYPosition }) => void;
onDelNode: (nodeId: string) => void;
onChangeNode: (e: FlowModuleItemChangeProps) => void;
onCopyNode: (nodeId: string) => void;
@@ -80,9 +76,7 @@ const StateContext = createContext<useFlowStoreType>({
onFixView: function (): void {
return;
},
onAddNode: function (e: { template: FlowModuleTemplateType; position: XYPosition }): void {
return;
},
onDelNode: function (nodeId: string): void {
return;
},
@@ -117,7 +111,6 @@ export const FlowProvider = ({ appId, children }: { appId: string; children: Rea
const { toast } = useToast();
const [nodes = [], setNodes, onNodesChange] = useNodesState<FlowModuleItemType>([]);
const [edges, setEdges, onEdgesChange] = useEdgesState([]);
const { x, y, zoom } = useViewport();
const onFixView = useCallback(() => {
const btn = document.querySelector('.react-flow__controls-fitview') as HTMLButtonElement;
@@ -205,27 +198,6 @@ export const FlowProvider = ({ appId, children }: { appId: string; children: Rea
[nodes, onDelConnect, setEdges, t, toast]
);
const onAddNode = useCallback(
({ template, position }: { template: FlowModuleTemplateType; position: XYPosition }) => {
if (!reactFlowWrapper.current) return;
const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect();
const mouseX = (position.x - reactFlowBounds.left - x) / zoom - 100;
const mouseY = (position.y - reactFlowBounds.top - y) / zoom;
setNodes((state) =>
state.concat(
appModule2FlowNode({
item: {
...template,
moduleId: nanoid(),
position: { x: mouseX, y: mouseY }
}
})
)
);
},
[setNodes, x, y, zoom]
);
const onDelNode = useCallback(
(nodeId: string) => {
setNodes((state) => state.filter((item) => item.id !== nodeId));
@@ -338,7 +310,6 @@ export const FlowProvider = ({ appId, children }: { appId: string; children: Rea
setEdges,
onEdgesChange,
onFixView,
onAddNode,
onDelNode,
onChangeNode,
onCopyNode,

View File

@@ -1,24 +1,20 @@
import React, { useMemo } from 'react';
import React, { useCallback, useMemo } from 'react';
import { Box, Flex } from '@chakra-ui/react';
import { ModuleTemplates } from '@/constants/flow/ModuleTemplate';
import { FlowModuleItemType, FlowModuleTemplateType } from '@/types/core/app/flow';
import type { Node } from 'reactflow';
import { FlowModuleTemplateType } from '@/types/core/app/flow';
import { useViewport, XYPosition } from 'reactflow';
import { useGlobalStore } from '@/web/common/store/global';
import Avatar from '@/components/Avatar';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { useFlowStore } from './Provider';
import { customAlphabet } from 'nanoid';
import { appModule2FlowNode } from '@/utils/adapt';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
const ModuleTemplateList = ({
nodes,
isOpen,
onClose
}: {
nodes?: Node<FlowModuleItemType>[];
isOpen: boolean;
onClose: () => void;
}) => {
const { onAddNode } = useFlowStore();
const ModuleTemplateList = ({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) => {
const { nodes, setNodes, reactFlowWrapper } = useFlowStore();
const { isPc } = useGlobalStore();
const { x, y, zoom } = useViewport();
const filterTemplates = useMemo(() => {
const guideModulesIndex = ModuleTemplates.findIndex((item) => item.label === '引导模块');
@@ -47,6 +43,28 @@ const ModuleTemplateList = ({
];
}, [nodes]);
const onAddNode = useCallback(
({ template, position }: { template: FlowModuleTemplateType; position: XYPosition }) => {
if (!reactFlowWrapper?.current) return;
const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect();
const mouseX = (position.x - reactFlowBounds.left - x) / zoom - 100;
const mouseY = (position.y - reactFlowBounds.top - y) / zoom;
setNodes((state) =>
state.concat(
appModule2FlowNode({
item: {
...template,
moduleId: nanoid(),
position: { x: mouseX, y: mouseY }
}
})
)
);
},
[reactFlowWrapper, setNodes, x, y, zoom]
);
return (
<>
<Box

View File

@@ -32,6 +32,7 @@ import { formatPrice } from '@fastgpt/common/bill';
import { useDatasetStore } from '@/web/core/store/dataset';
import { SelectedDatasetType } from '@/types/core/dataset';
import { useQuery } from '@tanstack/react-query';
import { LLMModelItemType } from '@/types/model';
const SetInputFieldModal = dynamic(() => import('../modules/SetInputFieldModal'));
const SelectAppModal = dynamic(() => import('../../../SelectAppModal'));
@@ -186,8 +187,8 @@ const RenderInput = ({
{item.type === FlowInputItemTypeEnum.selectApp && (
<SelectAppRender item={item} moduleId={moduleId} />
)}
{item.type === FlowInputItemTypeEnum.quoteList && (
<QuoteListRender inputs={sortInputs} item={item} moduleId={moduleId} />
{item.type === FlowInputItemTypeEnum.aiSettings && (
<AISetting inputs={sortInputs} item={item} moduleId={moduleId} />
)}
{item.type === FlowInputItemTypeEnum.maxToken && (
<MaxTokenRender inputs={sortInputs} item={item} moduleId={moduleId} />
@@ -343,7 +344,7 @@ var SliderRender = React.memo(function SliderRender({ item, moduleId }: RenderPr
);
});
var QuoteListRender = React.memo(function QuoteListRender({ inputs = [], moduleId }: RenderProps) {
var AISetting = React.memo(function AISetting({ inputs = [], moduleId }: RenderProps) {
const { onChangeNode } = useFlowStore();
const { t } = useTranslation();
const chatModulesData = useMemo(() => {
@@ -367,10 +368,11 @@ var QuoteListRender = React.memo(function QuoteListRender({ inputs = [], moduleI
leftIcon={<MyIcon name={'settingLight'} w={'14px'} />}
onClick={onOpenAIChatSetting}
>
{t('app.Quote Prompt Settings')}
{t('app.AI Settings')}
</Button>
{isOpenAIChatSetting && (
<AIChatSettingsModal
isAdEdit
onClose={onCloseAIChatSetting}
onSuccess={(e) => {
for (let key in e) {
@@ -404,7 +406,7 @@ var MaxTokenRender = React.memo(function MaxTokenRender({
const { onChangeNode } = useFlowStore();
const model = inputs.find((item) => item.key === 'model')?.value;
const modelData = chatModelList.find((item) => item.model === model);
const maxToken = modelData ? modelData.contextMaxToken : 4000;
const maxToken = modelData ? modelData.maxToken : 4000;
const markList = [
{ label: '100', value: 100 },
{ label: `${maxToken}`, value: maxToken }
@@ -441,8 +443,42 @@ var SelectChatModelRender = React.memo(function SelectChatModelRender({
moduleId
}: RenderProps) {
const { onChangeNode } = useFlowStore();
const modelList = (item.customData?.() as LLMModelItemType[]) || chatModelList || [];
const list = chatModelList.map((item) => {
function onChangeModel(e: string) {
{
onChangeNode({
moduleId,
type: 'inputs',
key: item.key,
value: {
...item,
value: e
}
});
// update max tokens
const model = modelList.find((item) => item.model === e) || modelList[0];
if (!model) return;
onChangeNode({
moduleId,
type: 'inputs',
key: 'maxToken',
value: {
...inputs.find((input) => input.key === 'maxToken'),
markList: [
{ label: '100', value: 100 },
{ label: `${model.maxToken}`, value: model.maxToken }
],
max: model.maxToken,
value: model.maxToken / 2
}
});
}
}
const list = modelList.map((item) => {
const priceStr = `(${formatPrice(item.price, 1000)}元/1k Tokens)`;
return {
@@ -451,43 +487,11 @@ var SelectChatModelRender = React.memo(function SelectChatModelRender({
};
});
return (
<MySelect
width={'100%'}
value={item.value}
list={list}
onchange={(e) => {
onChangeNode({
moduleId,
type: 'inputs',
key: item.key,
value: {
...item,
value: e
}
});
if (!item.value && list.length > 0) {
onChangeModel(list[0].value);
}
// update max tokens
const model = chatModelList.find((item) => item.model === e) || chatModelList[0];
if (!model) return;
onChangeNode({
moduleId,
type: 'inputs',
key: 'maxToken',
value: {
...inputs.find((input) => input.key === 'maxToken'),
markList: [
{ label: '100', value: 100 },
{ label: `${model.contextMaxToken}`, value: model.contextMaxToken }
],
max: model.contextMaxToken,
value: model.contextMaxToken / 2
}
});
}}
/>
);
return <MySelect width={'100%'} value={item.value} list={list} onchange={onChangeModel} />;
});
var SelectDatasetRender = React.memo(function SelectDatasetRender({ item, moduleId }: RenderProps) {

View File

@@ -25,6 +25,7 @@ import MyTooltip from '@/components/MyTooltip';
import TemplateList from './components/TemplateList';
import ChatTest, { type ChatTestComponentRef } from './components/ChatTest';
import FlowProvider, { useFlowStore } from './components/Provider';
import Header from './components/Header';
const ImportSettings = dynamic(() => import('./components/ImportSettings'));
const NodeChat = dynamic(() => import('./components/Nodes/NodeChat'));
@@ -62,187 +63,7 @@ const edgeTypes = {
};
type Props = { app: AppSchema; onCloseSettings: () => void };
function FlowHeader({ app, onCloseSettings }: Props & {}) {
const theme = useTheme();
const { t } = useTranslation();
const { copyData } = useCopyData();
const ChatTestRef = useRef<ChatTestComponentRef>(null);
const { isOpen: isOpenImport, onOpen: onOpenImport, onClose: onCloseImport } = useDisclosure();
const { updateAppDetail } = useUserStore();
const { nodes, edges, onFixView } = useFlowStore();
const [testModules, setTestModules] = useState<AppModuleItemType[]>();
const flow2AppModules = useCallback(() => {
const modules: AppModuleItemType[] = nodes.map((item) => ({
moduleId: item.data.moduleId,
name: item.data.name,
flowType: item.data.flowType,
showStatus: item.data.showStatus,
position: item.position,
inputs: item.data.inputs.map((item) => ({
...item,
connected: item.connected ?? item.type !== FlowInputItemTypeEnum.target
})),
outputs: item.data.outputs.map((item) => ({
...item,
targets: [] as FlowOutputTargetItemType[]
}))
}));
// update inputs and outputs
modules.forEach((module) => {
module.inputs.forEach((input) => {
input.connected =
input.connected ||
!!edges.find(
(edge) => edge.target === module.moduleId && edge.targetHandle === input.key
);
});
module.outputs.forEach((output) => {
output.targets = edges
.filter(
(edge) =>
edge.source === module.moduleId &&
edge.sourceHandle === output.key &&
edge.targetHandle
)
.map((edge) => ({
moduleId: edge.target,
key: edge.targetHandle || ''
}));
});
});
return modules;
}, [edges, nodes]);
const { mutate: onclickSave, isLoading } = useRequest({
mutationFn: () => {
const modules = flow2AppModules();
// check required connect
for (let i = 0; i < modules.length; i++) {
const item = modules[i];
if (item.inputs.find((input) => input.required && !input.connected)) {
return Promise.reject(`${item.name}】存在未连接的必填输入`);
}
if (item.inputs.find((input) => input.valueCheck && !input.valueCheck(input.value))) {
return Promise.reject(`${item.name}】存在为填写的必填项`);
}
}
return updateAppDetail(app._id, {
modules: modules,
type: AppTypeEnum.advanced
});
},
successToast: '保存配置成功',
errorToast: '保存配置异常',
onSuccess() {
ChatTestRef.current?.resetChatTest();
}
});
return (
<>
<Flex
py={3}
px={[2, 5, 8]}
borderBottom={theme.borders.base}
alignItems={'center'}
userSelect={'none'}
>
<MyTooltip label={'返回'} offset={[10, 10]}>
<IconButton
size={'sm'}
icon={<MyIcon name={'back'} w={'14px'} />}
borderRadius={'md'}
borderColor={'myGray.300'}
variant={'base'}
aria-label={''}
onClick={() => {
onCloseSettings();
onFixView();
}}
/>
</MyTooltip>
<Box ml={[3, 6]} fontSize={['md', '2xl']} flex={1}>
{app.name}
</Box>
<MyTooltip label={t('app.Import Configs')}>
<IconButton
mr={[3, 6]}
icon={<MyIcon name={'importLight'} w={['14px', '16px']} />}
borderRadius={'lg'}
variant={'base'}
aria-label={'save'}
onClick={onOpenImport}
/>
</MyTooltip>
<MyTooltip label={t('app.Export Configs')}>
<IconButton
mr={[3, 6]}
icon={<MyIcon name={'export'} w={['14px', '16px']} />}
borderRadius={'lg'}
variant={'base'}
aria-label={'save'}
onClick={() =>
copyData(
JSON.stringify(flow2AppModules(), null, 2),
t('app.Export Config Successful')
)
}
/>
</MyTooltip>
{testModules ? (
<IconButton
mr={[3, 6]}
icon={<SmallCloseIcon fontSize={'25px'} />}
variant={'base'}
color={'myGray.600'}
borderRadius={'lg'}
aria-label={''}
onClick={() => setTestModules(undefined)}
/>
) : (
<MyTooltip label={'测试对话'}>
<IconButton
mr={[3, 6]}
icon={<MyIcon name={'chat'} w={['14px', '16px']} />}
borderRadius={'lg'}
aria-label={'save'}
variant={'base'}
onClick={() => {
setTestModules(flow2AppModules());
}}
/>
</MyTooltip>
)}
<MyTooltip label={'保存配置'}>
<IconButton
icon={<MyIcon name={'save'} w={['14px', '16px']} />}
borderRadius={'lg'}
isLoading={isLoading}
aria-label={'save'}
onClick={onclickSave}
/>
</MyTooltip>
</Flex>
{isOpenImport && <ImportSettings onClose={onCloseImport} />}
<ChatTest
ref={ChatTestRef}
modules={testModules}
app={app}
onClose={() => setTestModules(undefined)}
/>
</>
);
}
const Header = React.memo(FlowHeader);
const AppEdit = (props: Props) => {
const AppEdit = React.memo(function AppEdit(props: Props) {
const { app } = props;
const {
@@ -261,7 +82,7 @@ const AppEdit = (props: Props) => {
return (
<>
{/* header */}
<Header {...props} />
<Header app={app} onCloseSettings={props.onCloseSettings} />
<Box
minH={'400px'}
flex={'1 0 0'}
@@ -318,11 +139,11 @@ const AppEdit = (props: Props) => {
<Controls position={'bottom-right'} style={{ display: 'flex' }} showInteractive={false} />
</ReactFlow>
<TemplateList isOpen={isOpenTemplate} nodes={nodes} onClose={onCloseTemplate} />
<TemplateList isOpen={isOpenTemplate} onClose={onCloseTemplate} />
</Box>
</>
);
};
});
const Flow = (data: Props) => {
return (

View File

@@ -34,7 +34,6 @@ import { chatModelList } from '@/web/common/store/static';
import { formatPrice } from '@fastgpt/common/bill/index';
import {
ChatModelSystemTip,
ChatModelLimitTip,
welcomeTextTip,
questionGuideTip
} from '@/constants/flow/ModuleTemplate';
@@ -128,12 +127,7 @@ const Settings = ({ appId }: { appId: string }) => {
label: `${item.name} (${formatPrice(item.price, 1000)} 元/1k tokens)`
}));
}, [refresh]);
const tokenLimit = useMemo(() => {
return (
chatModelList.find((item) => item.model === getValues('chatModel.model'))?.contextMaxToken ||
4000
);
}, [getValues, refresh]);
const selectedKbList = useMemo(
() => allDatasets.filter((item) => kbList.find((kb) => kb.kbId === item._id)),
[allDatasets, kbList]
@@ -411,6 +405,10 @@ const Settings = ({ appId }: { appId: string }) => {
<Box ml={2} flex={1}>
AI
</Box>
<Flex {...BoxBtnStyles} onClick={onOpenAIChatSetting}>
<MyIcon mr={1} name={'settingLight'} w={'14px'} />
</Flex>
</Flex>
<Flex alignItems={'center'} mt={5}>
@@ -424,7 +422,7 @@ const Settings = ({ appId }: { appId: string }) => {
setValue('chatModel.model', val);
const maxToken =
chatModelList.find((item) => item.model === getValues('chatModel.model'))
?.contextMaxToken || 4000;
?.maxToken || 4000;
const token = maxToken / 2;
setValue('chatModel.maxToken', token);
setRefresh(!refresh);
@@ -432,45 +430,6 @@ const Settings = ({ appId }: { appId: string }) => {
/>
</Box>
</Flex>
<Flex alignItems={'center'} my={10}>
<Box {...LabelStyles}></Box>
<Box flex={1} ml={'10px'}>
<MySlider
markList={[
{ label: '严谨', value: 0 },
{ label: '发散', value: 10 }
]}
width={'95%'}
min={0}
max={10}
value={getValues('chatModel.temperature')}
onChange={(e) => {
setValue('chatModel.temperature', e);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
<Flex alignItems={'center'} mt={12} mb={10}>
<Box {...LabelStyles}></Box>
<Box flex={1} ml={'10px'}>
<MySlider
markList={[
{ label: '100', value: 100 },
{ label: `${tokenLimit}`, value: tokenLimit }
]}
width={'95%'}
min={100}
max={tokenLimit}
step={50}
value={getValues('chatModel.maxToken')}
onChange={(val) => {
setValue('chatModel.maxToken', val);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
<Flex mt={10} alignItems={'flex-start'}>
<Box {...LabelStyles}>
@@ -502,10 +461,6 @@ const Settings = ({ appId }: { appId: string }) => {
<MyIcon name={'edit'} w={'14px'} mr={1} />
</Flex>
<Flex {...BoxBtnStyles} onClick={onOpenAIChatSetting}>
<MyIcon mr={1} name={'settingLight'} w={'14px'} />
</Flex>
</Flex>
<Flex mt={1} color={'myGray.600'} fontSize={['sm', 'md']}>
: {getValues('kb.searchSimilarity')}, : {getValues('kb.searchLimit')},

View File

@@ -6,7 +6,7 @@ import { useMutation } from '@tanstack/react-query';
import { splitText2Chunks } from '@/utils/file';
import { getErrText } from '@/utils/tools';
import { formatPrice } from '@fastgpt/common/bill/index';
import { qaModel } from '@/web/common/store/static';
import { qaModelList } from '@/web/common/store/static';
import MyIcon from '@/components/Icon';
import CloseIcon from '@/components/Icon/close';
import DeleteIcon, { hoverDeleteStyles } from '@/components/Icon/delete';
@@ -23,8 +23,9 @@ import { chunksUpload } from '@/web/core/utils/dataset';
const fileExtension = '.txt, .doc, .docx, .pdf, .md';
const QAImport = ({ kbId }: { kbId: string }) => {
const unitPrice = qaModel.price || 3;
const chunkLen = qaModel.maxToken * 0.45;
const qaModel = qaModelList[0];
const unitPrice = qaModel?.price || 3;
const chunkLen = qaModel?.maxToken * 0.45;
const theme = useTheme();
const router = useRouter();
const { toast } = useToast();

View File

@@ -13,9 +13,9 @@ import MyTooltip from '@/components/MyTooltip';
import MyModal from '@/components/MyModal';
import { postCreateDataset } from '@/web/core/api/dataset';
import type { CreateDatasetParams } from '@/global/core/api/datasetReq.d';
import { vectorModelList } from '@/web/common/store/static';
import MySelect from '@/components/Select';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { vectorModelList } from '@/web/common/store/static';
import Tag from '@/components/Tag';
const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {

View File

@@ -1,12 +1,12 @@
import { Bill } from '@/service/mongo';
import { MongoUser } from '@fastgpt/support/user/schema';
import { BillSourceEnum } from '@/constants/user';
import { getModel } from '@/service/utils/data';
import { getModelMap, ModelTypeEnum } from '@/service/core/ai/model';
import { ChatHistoryItemResType } from '@/types/chat';
import { formatPrice } from '@fastgpt/common/bill/index';
import { addLog } from '@/service/utils/tools';
import type { CreateBillType } from '@/types/common/bill';
import { defaultQGModel } from '@/pages/api/system/getInitData';
import { defaultQGModels } from '@/constants/model';
async function createBill(data: CreateBillType) {
try {
@@ -106,7 +106,7 @@ export const pushQABill = async ({
addLog.info('splitData generate success', { totalTokens });
// 获取模型单价格, 都是用 gpt35 拆分
const unitPrice = global.qaModel.price || 3;
const unitPrice = global.qaModels?.[0]?.price || 3;
// 计算价格
const total = unitPrice * totalTokens;
@@ -158,7 +158,7 @@ export const pushGenerateVectorBill = async ({
{
moduleName: '索引生成',
amount: total,
model: vectorModel.model,
model: vectorModel.name,
tokenLen
}
]
@@ -167,14 +167,22 @@ export const pushGenerateVectorBill = async ({
return { total };
};
export const countModelPrice = ({ model, tokens }: { model: string; tokens: number }) => {
const modelData = getModel(model);
export const countModelPrice = ({
model,
tokens,
type
}: {
model: string;
tokens: number;
type: `${ModelTypeEnum}`;
}) => {
const modelData = getModelMap?.[type]?.(model);
if (!modelData) return 0;
return modelData.price * tokens;
};
export const pushQuestionGuideBill = ({ tokens, userId }: { tokens: number; userId: string }) => {
const qgModel = global.qgModel || defaultQGModel;
const qgModel = global.qgModels?.[0] || defaultQGModels[0];
const total = qgModel.price * tokens;
createBill({
userId,

View File

@@ -1,39 +0,0 @@
import type { NextApiResponse } from 'next';
export function responseWriteController({
res,
readStream
}: {
res: NextApiResponse;
readStream: any;
}) {
res.on('drain', () => {
readStream.resume();
});
return (text: string | Buffer) => {
const writeResult = res.write(text);
if (!writeResult) {
readStream.pause();
}
};
}
export function responseWrite({
res,
write,
event,
data
}: {
res?: NextApiResponse;
write?: (text: string) => void;
event?: string;
data: string;
}) {
const Write = write || res?.write;
if (!Write) return;
event && Write(`event: ${event}\n`);
Write(`data: ${data}\n\n`);
}

View File

@@ -0,0 +1,68 @@
import {
defaultChatModels,
defaultCQModels,
defaultExtractModels,
defaultQAModels,
defaultQGModels,
defaultVectorModels
} from '@/constants/model';
/**
 * Resolves a chat model record by its model id.
 *
 * Falls back to the first runtime-configured chat model, then to the
 * built-in default — mirroring the fallback chain used by the sibling
 * get*Model helpers below (the original skipped the configured-list
 * fallback, inconsistently with them).
 */
export const getChatModel = (model?: string) => {
  return (
    (global.chatModels || defaultChatModels).find((item) => item.model === model) ||
    global.chatModels?.[0] ||
    defaultChatModels[0]
  );
};
/**
 * Resolves a QA model record by its model id.
 *
 * Search order: runtime-configured list (or built-in defaults when the
 * global list is absent), then the first configured model, then the
 * built-in default.
 */
export const getQAModel = (model?: string) => {
  const candidates = global.qaModels || defaultQAModels;
  const matched = candidates.find((candidate) => candidate.model === model);
  return matched || global.qaModels?.[0] || defaultQAModels[0];
};
/**
 * Resolves a classify-question (CQ) model record by its model id.
 *
 * Search order: runtime-configured list (or built-in defaults when the
 * global list is absent), then the first configured model, then the
 * built-in default.
 */
export const getCQModel = (model?: string) => {
  const candidates = global.cqModels || defaultCQModels;
  const matched = candidates.find((candidate) => candidate.model === model);
  return matched || global.cqModels?.[0] || defaultCQModels[0];
};
/**
 * Resolves a content-extraction model record by its model id.
 *
 * Search order: runtime-configured list (or built-in defaults when the
 * global list is absent), then the first configured model, then the
 * built-in default.
 */
export const getExtractModel = (model?: string) => {
  const candidates = global.extractModels || defaultExtractModels;
  const matched = candidates.find((candidate) => candidate.model === model);
  return matched || global.extractModels?.[0] || defaultExtractModels[0];
};
/**
 * Resolves a question-guide (QG) model record by its model id.
 *
 * Search order: runtime-configured list (or built-in defaults when the
 * global list is absent), then the first configured model, then the
 * built-in default.
 */
export const getQGModel = (model?: string) => {
  const candidates = global.qgModels || defaultQGModels;
  const matched = candidates.find((candidate) => candidate.model === model);
  return matched || global.qgModels?.[0] || defaultQGModels[0];
};
/**
 * Resolves a vector (embedding) model record by its model id.
 *
 * Guards against an uninitialised `global.vectorModels` (the original
 * called `.find` on it unconditionally, which throws before system init
 * completes), matching the defensive fallback chain of the other
 * get*Model helpers.
 */
export const getVectorModel = (model?: string) => {
  return (
    (global.vectorModels || defaultVectorModels).find((item) => item.model === model) ||
    global.vectorModels?.[0] ||
    defaultVectorModels[0]
  );
};
/**
 * Categories of AI models the system configures. Used as the discriminator
 * when resolving a model record generically (see `getModelMap` below) and
 * when attributing usage/billing to a model type.
 */
export enum ModelTypeEnum {
  chat = 'chat',
  qa = 'qa',
  cq = 'cq',
  extract = 'extract',
  qg = 'qg',
  vector = 'vector'
}
/**
 * Lookup table from model category to its resolver, so callers can fetch a
 * model record generically: `getModelMap[type]?.(model)`.
 */
export const getModelMap = {
  [ModelTypeEnum.chat]: getChatModel,
  [ModelTypeEnum.qa]: getQAModel,
  [ModelTypeEnum.cq]: getCQModel,
  [ModelTypeEnum.extract]: getExtractModel,
  [ModelTypeEnum.qg]: getQGModel,
  [ModelTypeEnum.vector]: getVectorModel
};

View File

@@ -0,0 +1,12 @@
import { FlowModuleTypeEnum } from '@/constants/flow';
import { AppModuleItemType } from '@/types/app';
/**
 * Returns the display names of the chat models referenced by an app's
 * chat-node modules.
 *
 * For each module of type `chatNode`, reads its 'model' input (the model id)
 * and maps it to the configured model's display name; unresolvable or empty
 * names are dropped.
 *
 * Fixes over the original: the inner `.find` callback no longer shadows the
 * outer `item` variable, and `global.chatModels` is guarded so an
 * uninitialised global yields an empty result instead of throwing.
 */
export const getChatModelNameListByModules = (modules: AppModuleItemType[]): string[] => {
  const chatModules = modules.filter((module) => module.flowType === FlowModuleTypeEnum.chatNode);

  return chatModules
    .map((module) => {
      const modelId = module.inputs.find((input) => input.key === 'model')?.value;
      return (global.chatModels || []).find((chatModel) => chatModel.model === modelId)?.name || '';
    })
    .filter((name) => name);
};

View File

@@ -73,7 +73,7 @@ export async function generateQA(): Promise<any> {
];
const ai = getAIApi(undefined, 480000);
const chatResponse = await ai.chat.completions.create({
model: global.qaModel.model,
model: global.qaModels[0].model,
temperature: 0.01,
messages,
stream: false

View File

@@ -10,9 +10,11 @@ import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { replaceVariable } from '@/utils/common/tools/text';
import { Prompt_CQJson } from '@/global/core/prompt/agent';
import { defaultCQModel } from '@/pages/api/system/getInitData';
import { FunctionModelItemType } from '@/types/model';
import { getCQModel } from '@/service/core/ai/model';
type Props = ModuleDispatchProps<{
model: string;
systemPrompt?: string;
history?: ChatItemType[];
[SystemInputEnum.userChatInput]: string;
@@ -30,20 +32,26 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const {
moduleName,
user,
inputs: { agents, userChatInput }
inputs: { model, agents, userChatInput }
} = props as Props;
if (!userChatInput) {
return Promise.reject('Input is empty');
}
const cqModel = global.cqModel || defaultCQModel;
const cqModel = getCQModel(model);
const { arg, tokens } = await (async () => {
if (cqModel.functionCall) {
return functionCall(props);
return functionCall({
...props,
cqModel
});
}
return completions(props);
return completions({
...props,
cqModel
});
})();
const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
@@ -64,45 +72,45 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
async function functionCall({
user,
cqModel,
inputs: { agents, systemPrompt, history = [], userChatInput }
}: Props) {
const cqModel = global.cqModel;
}: Props & { cqModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
...(systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: systemPrompt
}
]
: []),
...history,
{
obj: ChatRoleEnum.Human,
value: userChatInput
value: systemPrompt
? `补充的背景知识:
"""
${systemPrompt}
"""
我的问题: ${userChatInput}
`
: userChatInput
}
];
const filterMessages = ChatContextFilter({
messages,
maxTokens: cqModel.maxToken
});
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
// function body
// function body
const agentFunction = {
name: agentFunName,
description: '判断用户问题类型属于哪方面,返回对应的字段',
description: '请根据对话记录及补充的背景知识,判断用户问题类型,返回对应的字段',
parameters: {
type: 'object',
properties: {
type: {
type: 'string',
description: agents.map((item) => `${item.value},返回:'${item.key}'`).join(''),
description: `判断用户的问题类型,并返回对应的字段。下面是几种问题类型: ${agents
.map((item) => `${item.value},返回:'${item.key}'`)
.join('')}`,
enum: agents.map((item) => item.key)
}
},
required: ['type']
}
}
};
const ai = getAIApi(user.openaiAccount, 48000);
@@ -133,15 +141,14 @@ async function functionCall({
}
async function completions({
cqModel,
user,
inputs: { agents, systemPrompt = '', history = [], userChatInput }
}: Props) {
const extractModel = global.extractModel;
}: Props & { cqModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
{
obj: ChatRoleEnum.Human,
value: replaceVariable(extractModel.prompt || Prompt_CQJson, {
value: replaceVariable(cqModel.functionPrompt || Prompt_CQJson, {
systemPrompt,
typeList: agents.map((item) => `ID: "${item.key}", 问题类型:${item.value}`).join('\n'),
text: `${history.map((item) => `${item.obj}:${item.value}`).join('\n')}
@@ -153,7 +160,7 @@ Human:${userChatInput}`
const ai = getAIApi(user.openaiAccount, 480000);
const data = await ai.chat.completions.create({
model: extractModel.model,
model: cqModel.model,
temperature: 0.01,
messages: adaptChat2GptMessages({ messages, reserveId: false }),
stream: false

View File

@@ -9,7 +9,7 @@ import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
import { replaceVariable } from '@/utils/common/tools/text';
import { defaultExtractModel } from '@/pages/api/system/getInitData';
import { FunctionModelItemType } from '@/types/model';
type Props = ModuleDispatchProps<{
history?: ChatItemType[];
@@ -37,13 +37,19 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
return Promise.reject('Input is empty');
}
const extractModel = global.extractModel || defaultExtractModel;
const extractModel = global.extractModels[0];
const { arg, tokens } = await (async () => {
if (extractModel.functionCall) {
return functionCall(props);
return functionCall({
...props,
extractModel
});
}
return completions(props);
return completions({
...props,
extractModel
});
})();
// remove invalid key
@@ -83,11 +89,10 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
}
async function functionCall({
extractModel,
user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
}: Props & { extractModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
...history,
{
@@ -152,15 +157,14 @@ async function functionCall({
}
async function completions({
extractModel,
user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
}: Props & { extractModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
{
obj: ChatRoleEnum.Human,
value: replaceVariable(extractModel.prompt || Prompt_ExtractJson, {
value: replaceVariable(extractModel.functionPrompt || Prompt_ExtractJson, {
description,
json: extractKeys
.map(

View File

@@ -7,7 +7,6 @@ import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIApi } from '@fastgpt/core/ai/config';
import type { ChatCompletion, StreamChatType } from '@fastgpt/core/ai/type';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/common/bill/push';
import { ChatModelItemType } from '@/types/model';
import { postTextCensor } from '@fastgpt/common/plusApi/censor';
@@ -15,12 +14,13 @@ import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant'
import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/global/core/prompt/AIChat';
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
import type { AIChatProps } from '@/types/core/aiChat';
import { replaceVariable } from '@/utils/common/tools/text';
import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { responseWrite, responseWriteController } from '@/service/common/stream';
import { responseWrite, responseWriteController } from '@fastgpt/common/tools/stream';
import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
export type ChatProps = ModuleDispatchProps<
AIChatProps & {
@@ -47,12 +47,13 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
user,
outputs,
inputs: {
model = global.chatModels[0]?.model,
model,
temperature = 0,
maxToken = 4000,
history = [],
quoteQA = [],
userChatInput,
isResponseAnswerText = true,
systemPrompt = '',
limitPrompt,
quoteTemplate,
@@ -63,6 +64,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return Promise.reject('Question is empty');
}
stream = stream && isResponseAnswerText;
// temperature adapt
const modelConstantsData = getChatModel(model);
@@ -110,18 +113,18 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
model,
temperature,
max_tokens,
stream,
messages: [
...(modelConstantsData.defaultSystem
...(modelConstantsData.defaultSystemChatPrompt
? [
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: modelConstantsData.defaultSystem
content: modelConstantsData.defaultSystemChatPrompt
}
]
: []),
...messages
],
stream
]
});
const { answerText, totalTokens, completeMessages } = await (async () => {
@@ -172,7 +175,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.chatNode,
moduleName,
price: user.openaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
price: user.openaiAccount?.key
? 0
: countModelPrice({ model, tokens: totalTokens, type: ModelTypeEnum.chat }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
@@ -198,7 +203,7 @@ function filterQuote({
maxTokens: model.quoteMaxToken,
messages: quoteQA.map((item, index) => ({
obj: ChatRoleEnum.System,
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, {
value: replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
...item,
index: index + 1
})
@@ -212,7 +217,7 @@ function filterQuote({
filterQuoteQA.length > 0
? `${filterQuoteQA
.map((item, index) =>
replaceVariable(quoteTemplate || defaultQuoteTemplate, {
replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
...item,
index: `${index + 1}`
})
@@ -243,7 +248,7 @@ function getChatMessages({
model: ChatModelItemType;
}) {
const question = quoteText
? replaceVariable(quotePrompt || defaultQuotePrompt, {
? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
quote: quoteText,
question: userChatInput
})
@@ -275,7 +280,7 @@ function getChatMessages({
const filterMessages = ChatContextFilter({
messages,
maxTokens: Math.ceil(model.contextMaxToken - 300) // filter token. not response maxToken
maxTokens: Math.ceil(model.maxToken - 300) // filter token. not response maxToken
});
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
@@ -294,7 +299,7 @@ function getMaxTokens({
model: ChatModelItemType;
filterMessages: ChatProps['inputs']['history'];
}) {
const tokensLimit = model.contextMaxToken;
const tokensLimit = model.maxToken;
/* count response max token */
const promptsToken = countMessagesTokens({
@@ -349,7 +354,7 @@ async function streamResponse({
stream.controller?.abort();
break;
}
const content = part.choices[0]?.delta?.content || '';
const content = part.choices?.[0]?.delta?.content || '';
answer += content;
responseWrite({

View File

@@ -8,6 +8,7 @@ import type { QuoteItemType } from '@/types/chat';
import { PgDatasetTableName } from '@/constants/plugin';
import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { ModelTypeEnum } from '@/service/core/ai/model';
type KBSearchProps = ModuleDispatchProps<{
kbList: SelectedDatasetType;
similarity: number;
@@ -66,7 +67,11 @@ export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSe
responseData: {
moduleType: FlowModuleTypeEnum.kbSearchNode,
moduleName,
price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
price: countModelPrice({
model: vectorModel.model,
tokens: tokenLen,
type: ModelTypeEnum.vector
}),
model: vectorModel.name,
tokens: tokenLen,
similarity,

View File

@@ -1,5 +1,5 @@
import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { sseResponse } from '@/service/utils/tools';
import { responseWrite } from '@fastgpt/common/tools/stream';
import { textAdaptGptResponse } from '@/utils/adapt';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
export type AnswerProps = ModuleDispatchProps<{
@@ -21,7 +21,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
const formatText = typeof text === 'string' ? text : JSON.stringify(text, null, 2);
if (stream) {
sseResponse({
responseWrite({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({

View File

@@ -3,7 +3,7 @@ import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { SelectAppItemType } from '@/types/core/app/flow';
import { dispatchModules } from '@/pages/api/v1/chat/completions';
import { App } from '@/service/mongo';
import { responseWrite } from '@/service/common/stream';
import { responseWrite } from '@fastgpt/common/tools/stream';
import { ChatRoleEnum, TaskResponseKeyEnum, sseResponseEventEnum } from '@/constants/chat';
import { textAdaptGptResponse } from '@/utils/adapt';

View File

@@ -232,6 +232,6 @@ export async function initPg() {
`);
console.log('init pg successful');
} catch (error) {
addLog.error('init pg error', error);
console.log('init pg error', error);
}
}

View File

@@ -1,7 +1,9 @@
import { sseResponseEventEnum } from '@/constants/chat';
import { NextApiResponse } from 'next';
import { proxyError, ERROR_RESPONSE, ERROR_ENUM } from '@fastgpt/common/constant/errorCode';
import { clearCookie, sseResponse, addLog } from './utils/tools';
import { addLog } from './utils/tools';
import { clearCookie } from '@fastgpt/support/user/auth';
import { responseWrite } from '@fastgpt/common/tools/stream';
export interface ResponseType<T = any> {
code: number;
@@ -66,7 +68,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
clearCookie(res);
}
return sseResponse({
return responseWrite({
res,
event: sseResponseEventEnum.error,
data: JSON.stringify(ERROR_RESPONSE[errResponseKey])
@@ -86,7 +88,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
addLog.error(`sse error: ${msg}`, error);
sseResponse({
responseWrite({
res,
event: sseResponseEventEnum.error,
data: JSON.stringify({ message: msg })

View File

@@ -1,24 +0,0 @@
export const getChatModel = (model?: string) => {
return global.chatModels.find((item) => item.model === model);
};
export const getVectorModel = (model?: string) => {
return (
global.vectorModels.find((item) => item.model === model) || {
model: 'UnKnow',
name: 'UnKnow',
defaultToken: 500,
price: 0,
maxToken: 3000
}
);
};
export const getModel = (model?: string) => {
return [
...global.chatModels,
...global.vectorModels,
global.qaModel,
global.extractModel,
global.cqModel
].find((item) => item.model === model);
};

View File

@@ -1,37 +1,7 @@
import type { NextApiResponse, NextApiHandler, NextApiRequest } from 'next';
import NextCors from 'nextjs-cors';
import type { NextApiResponse } from 'next';
import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
/* set cookie */
export const setCookie = (res: NextApiResponse, token: string) => {
res.setHeader(
'Set-Cookie',
`token=${token}; Path=/; HttpOnly; Max-Age=604800; Samesite=None; Secure;`
);
};
/* clear cookie */
export const clearCookie = (res: NextApiResponse) => {
res.setHeader('Set-Cookie', 'token=; Path=/; Max-Age=0');
};
export function withNextCors(handler: NextApiHandler): NextApiHandler {
return async function nextApiHandlerWrappedWithNextCors(
req: NextApiRequest,
res: NextApiResponse
) {
const methods = ['GET', 'eHEAD', 'PUT', 'PATCH', 'POST', 'DELETE'];
const origin = req.headers.origin;
await NextCors(req, res, {
methods,
origin: origin,
optionsSuccessStatus: 200
});
return handler(req, res);
};
}
/* start task */
export const startQueue = () => {
if (!global.systemEnv) return;
@@ -43,20 +13,6 @@ export const startQueue = () => {
}
};
export const sseResponse = ({
res,
event,
data
}: {
res: NextApiResponse;
event?: string;
data: string;
}) => {
if (res.closed) return;
event && res.write(`event: ${event}\n`);
res.write(`data: ${data}\n\n`);
};
/* add logger */
export const addLog = {
info: (msg: string, obj?: Record<string, any>) => {

View File

@@ -1,9 +1,12 @@
import { SystemInputEnum } from '@/constants/app';
/* ai chat modules props */
export type AIChatProps = {
model: string;
systemPrompt?: string;
temperature: number;
maxToken: number;
[SystemInputEnum.isResponseAnswerText]: boolean;
quoteTemplate?: string;
quotePrompt?: string;
frequency: number;

View File

@@ -31,6 +31,7 @@ export type FlowInputItemType = {
required?: boolean;
list?: { label: string; value: any }[];
markList?: { label: string; value: any }[];
customData?: () => any;
valueCheck?: (value: any) => boolean;
};

View File

@@ -3,7 +3,7 @@ import type { Tiktoken } from 'js-tiktoken';
import {
ChatModelItemType,
FunctionModelItemType,
QAModelItemType,
LLMModelItemType,
VectorModelItemType
} from './model';
import { TrackEventName } from '@/constants/common';
@@ -36,10 +36,10 @@ declare global {
var vectorModels: VectorModelItemType[];
var chatModels: ChatModelItemType[];
var qaModel: QAModelItemType;
var extractModel: FunctionModelItemType;
var cqModel: FunctionModelItemType;
var qgModel: FunctionModelItemType;
var qaModels: LLMModelItemType[];
var cqModels: FunctionModelItemType[];
var extractModels: FunctionModelItemType[];
var qgModels: LLMModelItemType[];
var priceMd: string;
var systemVersion: string;

View File

@@ -1,19 +1,23 @@
export type ChatModelItemType = {
model: string;
name: string;
contextMaxToken: number;
quoteMaxToken: number;
maxTemperature: number;
price: number;
censor?: boolean;
defaultSystem?: string;
};
export type QAModelItemType = {
import { LLMModelUsageEnum } from '@/constants/model';
export type LLMModelItemType = {
model: string;
name: string;
maxToken: number;
price: number;
};
export type ChatModelItemType = LLMModelItemType & {
quoteMaxToken: number;
maxTemperature: number;
censor?: boolean;
defaultSystemChatPrompt?: string;
};
export type FunctionModelItemType = LLMModelItemType & {
functionCall: boolean;
functionPrompt: string;
};
export type VectorModelItemType = {
model: string;
name: string;
@@ -21,11 +25,3 @@ export type VectorModelItemType = {
price: number;
maxToken: number;
};
export type FunctionModelItemType = {
model: string;
name: string;
maxToken: number;
price: number;
prompt: string;
functionCall: boolean;
};

View File

@@ -36,9 +36,10 @@ export const getDefaultAppForm = (): EditFormType => {
model: defaultChatModel?.model,
systemPrompt: '',
temperature: 0,
[SystemInputEnum.isResponseAnswerText]: true,
quotePrompt: '',
quoteTemplate: '',
maxToken: defaultChatModel ? defaultChatModel.contextMaxToken / 2 : 4000,
maxToken: defaultChatModel ? defaultChatModel.maxToken / 2 : 4000,
frequency: 0.5,
presence: -0.5
},
@@ -185,6 +186,13 @@ const chatModelInput = (formData: EditFormType): FlowInputItemType[] => [
label: '系统提示词',
connected: true
},
{
key: SystemInputEnum.isResponseAnswerText,
value: true,
type: 'hidden',
label: '返回AI内容',
connected: true
},
{
key: 'quoteTemplate',
value: formData.chatModel.quoteTemplate || '',
@@ -328,7 +336,7 @@ const simpleChatTemplate = (formData: EditFormType): AppModuleItemType[] => [
outputs: [
{
key: 'answerText',
label: '模型回复',
label: 'AI回复',
description: '直接响应,无需配置',
type: 'hidden',
targets: []
@@ -533,7 +541,7 @@ const kbTemplate = (formData: EditFormType): AppModuleItemType[] => [
outputs: [
{
key: 'answerText',
label: '模型回复',
label: 'AI回复',
description: '直接响应,无需配置',
type: 'hidden',
targets: []

View File

@@ -12,11 +12,12 @@ export const splitText2Chunks = ({ text = '', maxLen }: { text: string; maxLen:
const tempMarker = 'SPLIT_HERE_SPLIT_HERE';
const stepReg: Record<number, RegExp> = {
0: /(\n)/g,
1: /([。]|\.\s)/g,
2: /([]|!\s|\?\s)/g,
3: /([]|;\s)/g,
4: /([]|,\s)/g
0: /(\n\n)/g,
1: /([\n])/g,
2: /([。]|\.\s)/g,
3: /([]|!\s|\?\s)/g,
4: /([]|;\s)/g,
5: /([]|,\s)/g
};
const splitTextRecursively = ({ text = '', step }: { text: string; step: number }) => {
@@ -43,7 +44,6 @@ export const splitText2Chunks = ({ text = '', maxLen }: { text: string; maxLen:
.filter((part) => part);
let chunks: string[] = [];
let preChunk = '';
let chunk = '';
for (let i = 0; i < splitTexts.length; i++) {

View File

@@ -1,34 +1,41 @@
import {
type QAModelItemType,
type ChatModelItemType,
type VectorModelItemType
} from '@/types/model';
import type { InitDateResponse } from '@/global/common/api/systemRes';
import { getSystemInitData } from '@/web/common/api/system';
import { delay } from '@/utils/tools';
import type { FeConfigsType } from '@fastgpt/common/type/index.d';
import {
defaultChatModels,
defaultQAModels,
defaultCQModels,
defaultExtractModels,
defaultQGModels,
defaultVectorModels
} from '@/constants/model';
export let chatModelList: ChatModelItemType[] = [];
export let qaModel: QAModelItemType = {
model: 'gpt-3.5-turbo-16k',
name: 'GPT35-16k',
maxToken: 16000,
price: 0
};
export let vectorModelList: VectorModelItemType[] = [];
export let feConfigs: FeConfigsType = {};
export let priceMd = '';
export let systemVersion = '0.0.0';
export let vectorModelList = defaultVectorModels;
export let chatModelList = defaultChatModels;
export let qaModelList = defaultQAModels;
export let cqModelList = defaultCQModels;
export let extractModelList = defaultExtractModels;
export let qgModelList = defaultQGModels;
let retryTimes = 3;
export const clientInitData = async (): Promise<InitDateResponse> => {
try {
const res = await getSystemInitData();
chatModelList = res.chatModels;
qaModel = res.qaModel;
vectorModelList = res.vectorModels;
chatModelList = res.chatModels || [];
qaModelList = res.qaModels || [];
cqModelList = res.cqModels || [];
extractModelList = res.extractModels || [];
qgModelList = res.qgModels || [];
vectorModelList = res.vectorModels || [];
feConfigs = res.feConfigs;
priceMd = res.priceMd;
systemVersion = res.systemVersion;