* model config

* feat: model config ui

* perf: rename variable

* feat: custom request url

* perf: model buffer

* perf: init model

* feat: json model config

* auto login

* fix: ts

* update packages

* package

* fix: dockerfile
This commit is contained in:
Archer
2025-01-22 22:59:28 +08:00
committed by archer
parent c393002f1d
commit 12c6ecb987
93 changed files with 2361 additions and 564 deletions

View File

@@ -25,7 +25,6 @@
"usedInClassify": true, // 是否用于问题分类务必保证至少有一个为true
"usedInExtractFields": true, // 是否用于内容提取务必保证至少有一个为true
"usedInToolCall": true, // 是否用于工具调用务必保证至少有一个为true
"usedInQueryExtension": true, // 是否用于问题优化务必保证至少有一个为true
"toolChoice": true, // 是否支持工具选择(分类,内容提取,工具调用会用到。)
"functionCall": false, // 是否支持函数调用(分类,内容提取,工具调用会用到。会优先使用 toolChoice如果为false则使用 functionCall如果仍为 false则使用提示词模式
"customCQPrompt": "", // 自定义文本分类提示词(不支持工具和函数调用的模型
@@ -49,7 +48,6 @@
"usedInClassify": true,
"usedInExtractFields": true,
"usedInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
@@ -73,7 +71,6 @@
"usedInClassify": true,
"usedInExtractFields": true,
"usedInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": false,
"functionCall": false,
"customCQPrompt": "",
@@ -100,7 +97,6 @@
"usedInClassify": true,
"usedInExtractFields": true,
"usedInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": false,
"functionCall": false,
"customCQPrompt": "",

View File

@@ -22,7 +22,7 @@ type Props = SelectProps & {
const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
const { t } = useTranslation();
const { feConfigs, llmModelList, vectorModelList } = useSystemStore();
const { feConfigs, llmModelList, embeddingModelList } = useSystemStore();
const avatarSize = useMemo(() => {
const size = {
@@ -35,7 +35,7 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
}, [props.size]);
const avatarList = list.map((item) => {
const modelData = getModelFromList([...llmModelList, ...vectorModelList], item.value);
const modelData = getModelFromList([...llmModelList, ...embeddingModelList], item.value);
return {
value: item.value,
@@ -100,7 +100,7 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
};
const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
const { t } = useTranslation();
const { llmModelList, vectorModelList } = useSystemStore();
const { llmModelList, embeddingModelList } = useSystemStore();
const [value, setValue] = useState<string[]>([]);
const avatarSize = useMemo(() => {
@@ -136,7 +136,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
}));
for (const item of list) {
const modelData = getModelFromList([...llmModelList, ...vectorModelList], item.value);
const modelData = getModelFromList([...llmModelList, ...embeddingModelList], item.value);
const provider =
renderList.find((item) => item.value === (modelData?.provider || 'Other')) ??
renderList[renderList.length - 1];
@@ -148,7 +148,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
}
return renderList.filter((item) => item.children.length > 0);
}, [avatarSize, list, llmModelList, t, vectorModelList]);
}, [avatarSize, list, llmModelList, t, embeddingModelList]);
const onSelect = useCallback(
(e: string[]) => {
@@ -158,7 +158,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
);
const SelectedModel = useMemo(() => {
const modelData = getModelFromList([...llmModelList, ...vectorModelList], props.value);
const modelData = getModelFromList([...llmModelList, ...embeddingModelList], props.value);
setValue([modelData.provider, props.value]);
@@ -174,7 +174,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
<Box>{modelData?.name}</Box>
</HStack>
);
}, [avatarSize, llmModelList, props.value, vectorModelList]);
}, [avatarSize, llmModelList, props.value, embeddingModelList]);
return (
<Box

View File

@@ -53,7 +53,8 @@ const ModelTable = () => {
const [search, setSearch] = useState('');
const { llmModelList, audioSpeechModelList, vectorModelList, whisperModel } = useSystemStore();
const { llmModelList, ttsModelList, embeddingModelList, sttModelList, reRankModelList } =
useSystemStore();
const modelList = useMemo(() => {
const formatLLMModelList = llmModelList.map((item) => ({
@@ -87,7 +88,7 @@ const ModelTable = () => {
),
tagColor: 'blue'
}));
const formatVectorModelList = vectorModelList.map((item) => ({
const formatVectorModelList = embeddingModelList.map((item) => ({
...item,
typeLabel: t('common:model.type.embedding'),
priceLabel: (
@@ -100,7 +101,7 @@ const ModelTable = () => {
),
tagColor: 'yellow'
}));
const formatAudioSpeechModelList = audioSpeechModelList.map((item) => ({
const formatAudioSpeechModelList = ttsModelList.map((item) => ({
...item,
typeLabel: t('common:model.type.tts'),
priceLabel: (
@@ -113,31 +114,39 @@ const ModelTable = () => {
),
tagColor: 'green'
}));
const formatWhisperModel = {
...whisperModel,
const formatWhisperModelList = sttModelList.map((item) => ({
...item,
typeLabel: t('common:model.type.stt'),
priceLabel: (
<Flex color={'myGray.700'}>
<Box fontWeight={'bold'} color={'myGray.900'} mr={0.5}>
{whisperModel.charsPointsPrice}
{item.charsPointsPrice}
</Box>
{` ${t('common:support.wallet.subscription.point')} / 60${t('common:unit.seconds')}`}
</Flex>
),
tagColor: 'purple'
};
}));
const formatRerankModelList = reRankModelList.map((item) => ({
...item,
typeLabel: t('common:model.type.reRank'),
priceLabel: <Flex color={'myGray.700'}>- </Flex>,
tagColor: 'red'
}));
const list = (() => {
if (modelType === ModelTypeEnum.chat) return formatLLMModelList;
if (modelType === ModelTypeEnum.llm) return formatLLMModelList;
if (modelType === ModelTypeEnum.embedding) return formatVectorModelList;
if (modelType === ModelTypeEnum.tts) return formatAudioSpeechModelList;
if (modelType === ModelTypeEnum.stt) return [formatWhisperModel];
if (modelType === ModelTypeEnum.stt) return formatWhisperModelList;
if (modelType === ModelTypeEnum.rerank) return formatRerankModelList;
return [
...formatLLMModelList,
...formatVectorModelList,
...formatAudioSpeechModelList,
formatWhisperModel
...formatWhisperModelList,
...formatRerankModelList
];
})();
const formatList = list.map((item) => {
@@ -167,9 +176,10 @@ const ModelTable = () => {
return filterList;
}, [
llmModelList,
vectorModelList,
audioSpeechModelList,
whisperModel,
embeddingModelList,
ttsModelList,
sttModelList,
reRankModelList,
t,
modelType,
provider,
@@ -179,15 +189,16 @@ const ModelTable = () => {
const filterProviderList = useMemo(() => {
const allProviderIds: string[] = [
...llmModelList,
...vectorModelList,
...audioSpeechModelList,
whisperModel
...embeddingModelList,
...ttsModelList,
...sttModelList,
...reRankModelList
].map((model) => model.provider);
return providerList.current.filter(
(item) => allProviderIds.includes(item.value) || item.value === ''
);
}, [audioSpeechModelList, llmModelList, vectorModelList, whisperModel]);
}, [ttsModelList, llmModelList, embeddingModelList, sttModelList, reRankModelList]);
return (
<Flex flexDirection={'column'} h={'100%'}>

View File

@@ -70,12 +70,10 @@ const DatasetParamsModal = ({
const [currentTabType, setCurrentTabType] = useState(SearchSettingTabEnum.searchMode);
const chatModelSelectList = (() =>
llmModelList
.filter((model) => model.usedInQueryExtension)
.map((item) => ({
value: item.model,
label: item.name
})))();
llmModelList.map((item) => ({
value: item.model,
label: item.name
})))();
const { register, setValue, getValues, handleSubmit, watch } = useForm<DatasetParamsProps>({
defaultValues: {

View File

@@ -25,7 +25,7 @@ const TTSSelect = ({
onChange: (e: AppTTSConfigType) => void;
}) => {
const { t } = useTranslation();
const { audioSpeechModelList } = useSystemStore();
const { ttsModelList } = useSystemStore();
const { isOpen, onOpen, onClose } = useDisclosure();
const appId = useContextSelector(AppContext, (v) => v.appId);
@@ -34,9 +34,9 @@ const TTSSelect = ({
() => [
{ label: t('common:core.app.tts.Close'), value: TTSTypeEnum.none },
{ label: t('common:core.app.tts.Web'), value: TTSTypeEnum.web },
...audioSpeechModelList.map((item) => item?.voices || []).flat()
...ttsModelList.map((item) => item?.voices || []).flat()
],
[audioSpeechModelList, t]
[ttsModelList, t]
);
const formatValue = useMemo(() => {
@@ -63,7 +63,7 @@ const TTSSelect = ({
if (e === TTSTypeEnum.none || e === TTSTypeEnum.web) {
onChange({ type: e as `${TTSTypeEnum}` });
} else {
const audioModel = audioSpeechModelList.find((item) =>
const audioModel = ttsModelList.find((item) =>
item.voices?.find((voice) => voice.value === e)
);
if (!audioModel) {
@@ -77,7 +77,7 @@ const TTSSelect = ({
});
}
},
[audioSpeechModelList, onChange, value]
[ttsModelList, onChange, value]
);
const onCloseTTSModal = useCallback(() => {

View File

@@ -107,7 +107,7 @@ const ChatInput = ({
);
/* whisper init */
const { whisperModel } = useSystemStore();
const { sttModelList } = useSystemStore();
const canvasRef = useRef<HTMLCanvasElement>(null);
const {
isSpeaking,
@@ -293,7 +293,7 @@ const ChatInput = ({
/>
<Flex alignItems={'center'} position={'absolute'} right={[2, 4]} bottom={['10px', '12px']}>
{/* voice-input */}
{whisperConfig.open && !inputValue && !isChatting && !!whisperModel && (
{whisperConfig.open && !inputValue && !isChatting && sttModelList.length > 0 && (
<>
<canvas
ref={canvasRef}
@@ -431,7 +431,7 @@ const ChatInput = ({
stopSpeak,
t,
whisperConfig.open,
whisperModel
sttModelList
]
);

View File

@@ -1,6 +1,6 @@
import type {
LLMModelItemType,
VectorModelItemType,
EmbeddingModelItemType,
AudioSpeechModels,
STTModelType,
ReRankModelItemType
@@ -8,15 +8,14 @@ import type {
import type { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types/index.d';
import { SubPlanType } from '@fastgpt/global/support/wallet/sub/type';
import { SystemModelItemType } from '@fastgpt/service/core/ai/type';
export type InitDateResponse = {
bufferId?: string;
llmModels: LLMModelItemType[];
vectorModels: VectorModelItemType[];
audioSpeechModels: AudioSpeechModels[];
reRankModels: ReRankModelItemType[];
whisperModel: STTModelType;
feConfigs: FastGPTFeConfigsType;
subPlans?: SubPlanType;
systemVersion: string;
activeModelList?: SystemModelItemType[];
};

View File

@@ -1,72 +1,43 @@
import React, { useMemo, useState } from 'react';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { useTranslation } from 'next-i18next';
import { Box, Flex, ModalBody } from '@chakra-ui/react';
import { MultipleRowArraySelect } from '@fastgpt/web/components/common/MySelect/MultipleRowSelect';
import { ModalBody } from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { ModelProviderList } from '@fastgpt/global/core/ai/provider';
import Avatar from '@fastgpt/web/components/common/Avatar';
import { HUGGING_FACE_ICON } from '@fastgpt/global/common/system/constants';
import { getModelFromList } from '@fastgpt/global/core/ai/model';
import { getSystemModelList } from '@/web/core/ai/config';
import MultipleSelect from '@fastgpt/web/components/common/MySelect/MultipleSelect';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
const DefaultModal = ({ onClose }: { onClose: () => void }) => {
const { t } = useTranslation();
const { llmModelList, vectorModelList, whisperModel, audioSpeechModelList, reRankModelList } =
const { data: systemModelList = [] } = useRequest2(getSystemModelList, {
manual: false
});
const selectorList = useMemo(() => {
return systemModelList.map((item) => ({
icon: item.avatar,
label: item.name,
value: item.model
}));
}, [systemModelList]);
const { llmModelList, embeddingModelList, sttModelList, ttsModelList, reRankModelList } =
useSystemStore();
const [value, setValue] = useState<string[]>([]);
const modelList = useMemo(() => {
return [
...llmModelList,
...vectorModelList,
...audioSpeechModelList,
...embeddingModelList,
...ttsModelList,
...reRankModelList,
whisperModel
...sttModelList
].map((item) => ({
provider: item.provider,
name: item.name,
model: item.model
}));
}, [llmModelList, vectorModelList, whisperModel, audioSpeechModelList, reRankModelList]);
const selectorList = useMemo(() => {
const renderList = ModelProviderList.map<{
label: React.JSX.Element;
value: string;
children: { label: string | React.ReactNode; value: string }[];
}>((provider) => ({
label: (
<Flex alignItems={'center'} py={1}>
<Avatar
borderRadius={'0'}
mr={2}
src={provider?.avatar || HUGGING_FACE_ICON}
fallbackSrc={HUGGING_FACE_ICON}
w={'1rem'}
/>
<Box>{t(provider.name as any)}</Box>
</Flex>
),
value: provider.id,
children: []
}));
for (const item of modelList) {
const modelData = getModelFromList(modelList, item.model);
const provider =
renderList.find((item) => item.value === (modelData?.provider || 'Other')) ??
renderList[renderList.length - 1];
provider.children.push({
label: modelData.name,
value: modelData.model
});
}
return renderList.filter((item) => item.children.length > 0);
}, [modelList, t]);
console.log(selectorList);
}, [llmModelList, embeddingModelList, sttModelList, ttsModelList, reRankModelList]);
return (
<MyModal
@@ -76,7 +47,15 @@ const DefaultModal = ({ onClose }: { onClose: () => void }) => {
iconColor="primary.600"
onClose={onClose}
>
<ModalBody>11</ModalBody>
<ModalBody>
<MultipleSelect<string>
list={selectorList}
value={value}
onSelect={(e) => {
setValue(e);
}}
/>
</ModalBody>
</MyModal>
);
};

File diff suppressed because it is too large Load Diff

View File

@@ -1,72 +1,43 @@
import { serviceSideProps } from '@fastgpt/web/common/system/nextjs';
import React, { useState } from 'react';
import React, { useMemo, useState } from 'react';
import AccountContainer from '../components/AccountContainer';
import { Box, Button, Flex, useDisclosure } from '@chakra-ui/react';
import { Box, Flex } from '@chakra-ui/react';
import ModelTable from '@/components/core/ai/ModelTable';
import { useUserStore } from '@/web/support/user/useUserStore';
import FillRowTabs from '@fastgpt/web/components/common/Tabs/FillRowTabs';
import { useTranslation } from 'next-i18next';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import dynamic from 'next/dynamic';
const DefaultModal = dynamic(() => import('./components/DefaultModal'), {
ssr: false
});
const ModelConfigTable = dynamic(() => import('./components/ModelConfigTable'));
type TabType = 'model' | 'config' | 'channel';
const ModelProvider = () => {
const { t } = useTranslation();
const { userInfo } = useUserStore();
const isRoot = userInfo?.username === 'root';
const [tab, setTab] = useState<'model' | 'channel'>('model');
const [tab, setTab] = useState<TabType>('model');
const { isOpen: isOpenDefault, onOpen: onOpenDefault, onClose: onCloseDefault } = useDisclosure();
const Tab = useMemo(() => {
return (
<FillRowTabs<TabType>
list={[
{ label: t('account:active_model'), value: 'model' },
{ label: t('account:config_model'), value: 'config' }
// { label: t('account:channel'), value: 'channel' }
]}
value={tab}
py={1}
onChange={setTab}
/>
);
}, [t, tab]);
return (
<AccountContainer>
<Flex h={'100%'} flexDirection={'column'} gap={4} py={4} px={6}>
{/* Header */}
{/* <Flex justifyContent={'space-between'}>
<FillRowTabs<'model' | 'channel'>
list={[
{ label: t('account:active_model'), value: 'model' },
{ label: t('account:channel'), value: 'channel' }
]}
value={tab}
px={8}
py={1}
onChange={setTab}
/>
{tab === 'model' && (
<MyMenu
trigger="hover"
size="mini"
Button={<Button>{t('account:create_model')}</Button>}
menuList={[
{
children: [
{
label: t('account:default_model'),
onClick: onOpenDefault
},
{
label: t('account:custom_model')
}
]
}
]}
/>
)}
{tab === 'channel' && <Button>{t('account:create_channel')}</Button>}
</Flex> */}
<Box flex={'1 0 0'}>
{tab === 'model' && <ModelTable />}
{/* {tab === 'channel' && <ChannelTable />} */}
</Box>
{tab === 'model' && <ValidModelTable Tab={Tab} />}
{tab === 'config' && <ModelConfigTable Tab={Tab} />}
</Flex>
{isOpenDefault && <DefaultModal onClose={onCloseDefault} />}
</AccountContainer>
);
};
@@ -80,3 +51,16 @@ export async function getServerSideProps(content: any) {
}
export default ModelProvider;
const ValidModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
const { userInfo } = useUserStore();
const isRoot = userInfo?.username === 'root';
return (
<>
{isRoot && <Flex justifyContent={'space-between'}>{Tab}</Flex>}
<Box flex={'1 0 0'}>
<ModelTable />
</Box>
</>
);
};

View File

@@ -0,0 +1,77 @@
import { readConfigData } from '@/service/common/system';
import { NextAPI } from '@/service/middleware/entry';
import {
getFastGPTConfigFromDB,
updateFastGPTConfigBuffer
} from '@fastgpt/service/common/system/config/controller';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { NextApiRequest, NextApiResponse } from 'next';
import json5 from 'json5';
import { FastGPTConfigFileType } from '@fastgpt/global/common/system/types';
import { MongoSystemModel } from '@fastgpt/service/core/ai/config/schema';
import { loadSystemModels } from '@fastgpt/service/core/ai/config/utils';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
/*
  Migration endpoint: copy model configs (DB config first, config.json as
  fallback) into the MongoSystemModel collection, then reload the in-memory
  model list and refresh the client init buffer.

  NOTE(review): the previous Chinese header comment described a dataset-index
  and User.avatar migration — apparently stale copy-paste; it did not describe
  this handler.
*/
async function handler(req: NextApiRequest, res: NextApiResponse) {
  // Root-only: this rewrites global model configuration.
  await authCert({ req, authRoot: true });

  // Load config: values stored in the DB take precedence over config.json.
  const [{ config: dbConfig }, fileConfig] = await Promise.all([
    getFastGPTConfigFromDB(),
    readConfigData('config.json')
  ]);
  const fileRes = json5.parse(fileConfig) as FastGPTConfigFileType;

  const llmModels = dbConfig.llmModels || fileRes.llmModels || [];
  const vectorModels = dbConfig.vectorModels || fileRes.vectorModels || [];
  const reRankModels = dbConfig.reRankModels || fileRes.reRankModels || [];
  const audioSpeechModels = dbConfig.audioSpeechModels || fileRes.audioSpeechModels || [];
  const whisperModel = dbConfig.whisperModel || fileRes.whisperModel;

  // Tag each model with its type. Guard the single whisper model: it may be
  // undefined, and spreading undefined would otherwise upsert a record with
  // type 'stt' but no model id.
  const list = [
    ...llmModels.map((item) => ({
      ...item,
      type: ModelTypeEnum.llm
    })),
    ...vectorModels.map((item) => ({
      ...item,
      type: ModelTypeEnum.embedding
    })),
    ...reRankModels.map((item) => ({
      ...item,
      type: ModelTypeEnum.rerank
    })),
    ...audioSpeechModels.map((item) => ({
      ...item,
      type: ModelTypeEnum.tts
    })),
    ...(whisperModel
      ? [
          {
            ...whisperModel,
            type: ModelTypeEnum.stt
          }
        ]
      : [])
  ];

  // Upsert one-by-one; a single bad record must not abort the whole migration.
  for (const item of list) {
    try {
      await MongoSystemModel.updateOne(
        { model: item.model },
        { $set: { model: item.model, metadata: { ...item, isActive: true } } },
        { upsert: true }
      );
    } catch (error) {
      console.log(error);
    }
  }

  // Reload in-memory models and bump the init buffer so clients re-fetch.
  await loadSystemModels(true);
  await updateFastGPTConfigBuffer();

  return { success: true };
}

export default NextAPI(handler);

View File

@@ -5,6 +5,20 @@ import { NextAPI } from '@/service/middleware/entry';
async function handler(req: ApiRequestProps<{}, { bufferId?: string }>, res: NextApiResponse) {
const { bufferId } = req.query;
const activeModelList = global.systemActiveModelList.map((model) => ({
...model,
customCQPrompt: undefined,
customExtractPrompt: undefined,
defaultSystemChatPrompt: undefined,
fieldMap: undefined,
defaultConfig: undefined,
weight: undefined,
dbConfig: undefined,
queryConfig: undefined,
requestUrl: undefined,
requestAuth: undefined
}));
// If bufferId is the same as the current bufferId, return directly
if (bufferId && global.systemInitBufferId && global.systemInitBufferId === bufferId) {
return {
@@ -17,21 +31,7 @@ async function handler(req: ApiRequestProps<{}, { bufferId?: string }>, res: Nex
bufferId: global.systemInitBufferId,
feConfigs: global.feConfigs,
subPlans: global.subPlans,
llmModels: global.llmModels.map((model) => ({
...model,
customCQPrompt: '',
customExtractPrompt: '',
defaultSystemChatPrompt: ''
})),
vectorModels: global.vectorModels,
reRankModels:
global.reRankModels?.map((item) => ({
...item,
requestUrl: '',
requestAuth: ''
})) || [],
whisperModel: global.whisperModel,
audioSpeechModels: global.audioSpeechModels,
activeModelList,
systemVersion: global.systemVersion || '0.0.0'
};
}

View File

@@ -16,6 +16,7 @@ import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSc
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getFirstLLMModel } from '@fastgpt/service/core/ai/model';
async function handler(
req: ApiRequestProps<
@@ -35,7 +36,7 @@ async function handler(
authApiKey: true
});
const qgModel = global.llmModels[0];
const qgModel = getFirstLLMModel();
const { result, inputTokens, outputTokens } = await createQuestionGuide({
messages,

View File

@@ -9,6 +9,7 @@ import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
import { getFirstLLMModel } from '@fastgpt/service/core/ai/model';
export type CreateQuestionGuideParams = OutLinkChatAuthProps & {
appId: string;
@@ -50,7 +51,7 @@ async function handler(req: ApiRequestProps<CreateQuestionGuideParams>, res: Nex
});
const messages = chats2GPTMessages({ messages: histories, reserveId: false });
const qgModel = questionGuide?.model || global.llmModels[0].model;
const qgModel = questionGuide?.model || getFirstLLMModel().model;
const { result, inputTokens, outputTokens } = await createQuestionGuide({
messages,

View File

@@ -0,0 +1,43 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { MongoSystemModel } from '@fastgpt/service/core/ai/config/schema';
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
import { findModelFromAlldata } from '@fastgpt/service/core/ai/model';
import { updateFastGPTConfigBuffer } from '@fastgpt/service/common/system/config/controller';
import { loadSystemModels } from '@fastgpt/service/core/ai/config/utils';
export type deleteQuery = {
  model: string; // id of the model to delete
};
export type deleteBody = {};
export type deleteResponse = {};

/**
 * Delete a custom system model. Admin-only.
 * Built-in (non-custom) models are protected and cannot be deleted.
 */
async function handler(
  req: ApiRequestProps<deleteBody, deleteQuery>,
  res: ApiResponseType<any>
): Promise<deleteResponse> {
  await authSystemAdmin({ req });

  const { model } = req.query;

  const modelData = findModelFromAlldata(model);
  if (!modelData) {
    return Promise.reject('Model not found');
  }
  // Only user-created models may be removed; system defaults stay.
  if (!modelData.isCustom) {
    return Promise.reject('System model cannot be deleted');
  }

  await MongoSystemModel.deleteOne({ model });

  // Reload the in-memory model list and refresh the client init buffer.
  await loadSystemModels(true);
  await updateFastGPTConfigBuffer();

  return {};
}

export default NextAPI(handler);

View File

@@ -0,0 +1,29 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { SystemModelItemType } from '@fastgpt/service/core/ai/type';
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
import { findModelFromAlldata } from '@fastgpt/service/core/ai/model';
export type detailQuery = {
  model: string; // id of the model to look up
};
export type detailBody = {};
export type detailResponse = SystemModelItemType;

/**
 * Return the full configuration of a single system model. Admin-only.
 */
async function handler(
  req: ApiRequestProps<detailBody, detailQuery>,
  res: ApiResponseType<any>
): Promise<detailResponse> {
  await authSystemAdmin({ req });

  const { model } = req.query;
  const found = findModelFromAlldata(model);

  // Unknown model ids get a readable rejection instead of an empty response.
  if (!found) {
    return Promise.reject('Model not found');
  }

  return found;
}

export default NextAPI(handler);

View File

@@ -0,0 +1,29 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
import { MongoSystemModel } from '@fastgpt/service/core/ai/config/schema';
export type getConfigJsonQuery = {};
export type getConfigJsonBody = {};
// Fixed: the handler returns a JSON string, not an object — the previous
// `{}` type compiled (string is assignable to {}) but misled API consumers.
export type getConfigJsonResponse = string;

/**
 * Export every stored system model config as a pretty-printed JSON string
 * (model id + metadata only). Admin-only.
 */
async function handler(
  req: ApiRequestProps<getConfigJsonBody, getConfigJsonQuery>,
  res: ApiResponseType<any>
): Promise<getConfigJsonResponse> {
  await authSystemAdmin({ req });

  const data = await MongoSystemModel.find({}).lean();

  return JSON.stringify(
    data.map((item) => ({
      model: item.model,
      metadata: item.metadata
    })),
    null,
    2
  );
}

export default NextAPI(handler);

View File

@@ -0,0 +1,46 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { ModelProviderIdType } from '@fastgpt/global/core/ai/provider';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
export type listQuery = {};
export type listBody = {};
export type listResponse = {
  type: `${ModelTypeEnum}`;
  name: string;
  avatar: string | undefined;
  provider: ModelProviderIdType;
  model: string;
  charsPointsPrice?: number;
  inputPrice?: number;
  outputPrice?: number;
  isActive: boolean;
  isCustom: boolean;
}[];

/**
 * List every system model as a flat summary (pricing + status fields only).
 * Admin-only. Reads the in-memory list, not the database.
 */
async function handler(
  req: ApiRequestProps<listBody, listQuery>,
  res: ApiResponseType<any>
): Promise<listResponse> {
  await authSystemAdmin({ req });

  // Project each in-memory model down to the public summary shape;
  // missing flags default to false.
  return global.systemModelList.map((item) => ({
    type: item.type,
    provider: item.provider,
    model: item.model,
    name: item.name,
    avatar: item.avatar,
    charsPointsPrice: item.charsPointsPrice,
    inputPrice: item.inputPrice,
    outputPrice: item.outputPrice,
    isActive: item.isActive ?? false,
    isCustom: item.isCustom ?? false
  }));
}

export default NextAPI(handler);

View File

@@ -0,0 +1,67 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
import { MongoSystemModel } from '@fastgpt/service/core/ai/config/schema';
import { delay } from '@fastgpt/global/common/system/utils';
import { updateFastGPTConfigBuffer } from '@fastgpt/service/common/system/config/controller';
import { findModelFromAlldata } from '@fastgpt/service/core/ai/model';
import { loadSystemModels } from '@fastgpt/service/core/ai/config/utils';
export type updateQuery = {};
export type updateBody = {
  model: string; // model id to create or update
  metadata?: Record<string, any>; // partial user config, merged over existing config
};
export type updateResponse = {};

/**
 * Create or update one system model's configuration. Admin-only.
 * Merge precedence (later wins): system config < stored DB config < request
 * metadata. The merged result is upserted and the in-memory models reloaded.
 */
async function handler(
  req: ApiRequestProps<updateBody, updateQuery>,
  res: ApiResponseType<any>
): Promise<updateResponse> {
  await authSystemAdmin({ req });

  let { model, metadata } = req.body;

  if (!model) return Promise.reject(new Error('model is required'));
  model = model.trim();

  const dbModel = await MongoSystemModel.findOne({ model }).lean();
  const modelData = findModelFromAlldata(model);

  // Layered merge — request metadata overrides DB config, which overrides
  // the system default config.
  const metadataConcat: Record<string, any> = {
    ...modelData, // system config
    ...dbModel?.metadata, // db config
    ...metadata // user config
  };

  // Derived/display fields must not be persisted into metadata.
  delete metadataConcat.avatar;
  delete metadataConcat.isCustom;

  // Force-assign the model id so stale metadata cannot override the real one.
  metadataConcat.model = model;
  metadataConcat.name = metadataConcat?.name?.trim();

  // Drop null/undefined values so they don't shadow defaults downstream.
  Object.keys(metadataConcat).forEach((key) => {
    if (metadataConcat[key] === null || metadataConcat[key] === undefined) {
      delete metadataConcat[key];
    }
  });

  await MongoSystemModel.updateOne(
    { model },
    {
      model,
      metadata: metadataConcat
    },
    {
      upsert: true
    }
  );

  // Reload in-memory models and refresh the client init buffer.
  await loadSystemModels(true);
  await updateFastGPTConfigBuffer();

  return {};
}

export default NextAPI(handler);

View File

@@ -0,0 +1,61 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { SystemModelSchemaType } from '@fastgpt/service/core/ai/type';
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { MongoSystemModel } from '@fastgpt/service/core/ai/config/schema';
import { updateFastGPTConfigBuffer } from '@fastgpt/service/common/system/config/controller';
import { loadSystemModels } from '@fastgpt/service/core/ai/config/utils';
export type updateWithJsonQuery = {};
export type updateWithJsonBody = {
  config: string; // JSON text: array of { model, metadata } entries
};
export type updateWithJsonResponse = {};

/**
 * Replace the entire system model configuration from a JSON payload.
 * Validates every entry first, then rewrites the collection inside one
 * mongo session and reloads the in-memory model list. Admin-only.
 */
async function handler(
  req: ApiRequestProps<updateWithJsonBody, updateWithJsonQuery>,
  res: ApiResponseType<any>
): Promise<updateWithJsonResponse> {
  await authSystemAdmin({ req });

  const { config } = req.body;

  // Guard the parse: user-supplied text would otherwise surface as a raw
  // SyntaxError. Also verify the top-level shape is an array.
  const data = (() => {
    try {
      return JSON.parse(config) as SystemModelSchemaType[];
    } catch (error) {
      return undefined;
    }
  })();
  if (!Array.isArray(data)) {
    return Promise.reject('Config must be a valid JSON array');
  }

  // Validate all entries before mutating anything.
  for (const item of data) {
    if (!item.model || !item.metadata || typeof item.metadata !== 'object') {
      return Promise.reject('Invalid model or metadata');
    }
    if (!item.metadata.type) {
      return Promise.reject(`${item.model} metadata.type is required`);
    }
    if (!item.metadata.model) {
      return Promise.reject(`${item.model} metadata.model is required`);
    }
    if (!item.metadata.provider) {
      return Promise.reject(`${item.model} metadata.provider is required`);
    }
    // Keep the outer model id authoritative over a stale metadata.model.
    item.metadata.model = item.model.trim();
  }

  // Full replace within one session so a mid-write failure leaves no
  // half-rewritten collection.
  await mongoSessionRun(async (session) => {
    await MongoSystemModel.deleteMany({}, { session });
    for (const item of data) {
      await MongoSystemModel.updateOne(
        { model: item.model },
        { $set: { model: item.model, metadata: item.metadata } },
        { upsert: true, session }
      );
    }
  });

  // Reload in-memory models and refresh the client init buffer.
  await loadSystemModels(true);
  await updateFastGPTConfigBuffer();

  return {};
}

export default NextAPI(handler);

View File

@@ -6,7 +6,7 @@ import { text2Speech } from '@fastgpt/service/core/ai/audio/speech';
import { pushAudioSpeechUsage } from '@/service/support/wallet/usage/push';
import { authChatCrud } from '@/service/support/permission/auth/chat';
import { authType2UsageSource } from '@/service/support/wallet/usage/utils';
import { getAudioSpeechModel } from '@fastgpt/service/core/ai/model';
import { getTTSModel } from '@fastgpt/service/core/ai/model';
import { MongoTTSBuffer } from '@fastgpt/service/common/buffer/tts/schema';
import { ApiRequestProps } from '@fastgpt/service/type/next';
@@ -31,17 +31,19 @@ async function handler(req: ApiRequestProps<GetChatSpeechProps>, res: NextApiRes
...req.body
});
const ttsModel = getAudioSpeechModel(ttsConfig.model);
const ttsModel = getTTSModel(ttsConfig.model);
const voiceData = ttsModel.voices?.find((item) => item.value === ttsConfig.voice);
if (!voiceData) {
throw new Error('voice not found');
}
const bufferId = `${ttsModel.model}-${ttsConfig.voice}`;
/* get audio from buffer */
const ttsBuffer = await MongoTTSBuffer.findOne(
{
bufferId: voiceData.bufferId,
bufferId,
text: JSON.stringify({ text: input, speed: ttsConfig.speed })
},
'buffer'
@@ -70,11 +72,21 @@ async function handler(req: ApiRequestProps<GetChatSpeechProps>, res: NextApiRes
});
/* create buffer */
await MongoTTSBuffer.create({
bufferId: voiceData.bufferId,
text: JSON.stringify({ text: input, speed: ttsConfig.speed }),
buffer
});
await MongoTTSBuffer.create(
{
bufferId,
text: JSON.stringify({ text: input, speed: ttsConfig.speed }),
buffer
},
ttsModel.requestUrl && ttsModel.requestAuth
? {
path: ttsModel.requestUrl,
headers: {
Authorization: `Bearer ${ttsModel.requestAuth}`
}
}
: {}
);
} catch (error) {}
},
onError: (err) => {

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
@@ -31,7 +31,7 @@ async function handler(req: NextApiRequest): Promise<DatasetSimpleItemType[]> {
_id: item._id,
avatar: item.avatar,
name: item.name,
vectorModel: getVectorModel(item.vectorModel)
vectorModel: getEmbeddingModel(item.vectorModel)
}));
}

View File

@@ -2,7 +2,12 @@ import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getLLMModel, getVectorModel, getDatasetModel } from '@fastgpt/service/core/ai/model';
import {
getLLMModel,
getEmbeddingModel,
getDatasetModel,
getFirstEmbeddingModel
} from '@fastgpt/service/core/ai/model';
import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
@@ -27,7 +32,7 @@ async function handler(
intro,
type = DatasetTypeEnum.dataset,
avatar,
vectorModel = global.vectorModels[0].model,
vectorModel = getFirstEmbeddingModel().model,
agentModel = getDatasetModel().model,
apiServer,
feishuServer,
@@ -56,7 +61,7 @@ async function handler(
]);
// check model valid
const vectorModelStore = getVectorModel(vectorModel);
const vectorModelStore = getEmbeddingModel(vectorModel);
const agentModelStore = getLLMModel(agentModel);
if (!vectorModelStore || !agentModelStore) {
return Promise.reject(DatasetErrEnum.invalidVectorModelOrQAModel);

View File

@@ -4,7 +4,7 @@
*/
import type { NextApiRequest } from 'next';
import { countPromptTokens } from '@fastgpt/service/common/string/tiktoken/index';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import { hasSameValue } from '@/service/core/dataset/data/utils';
import { insertData2Dataset } from '@/service/core/dataset/data/controller';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
@@ -59,7 +59,7 @@ async function handler(req: NextApiRequest) {
// token check
const token = await countPromptTokens(formatQ + formatA, '');
const vectorModelData = getVectorModel(vectorModel);
const vectorModelData = getEmbeddingModel(vectorModel);
if (token > vectorModelData.maxToken) {
return Promise.reject('Q Over Tokens');

View File

@@ -1,4 +1,4 @@
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { getLLMModel, getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
@@ -50,7 +50,7 @@ async function handler(req: ApiRequestProps<Query>): Promise<DatasetItemType> {
}
: undefined,
permission,
vectorModel: getVectorModel(dataset.vectorModel),
vectorModel: getEmbeddingModel(dataset.vectorModel),
agentModel: getLLMModel(dataset.agentModel)
};
}

View File

@@ -18,7 +18,7 @@ import { getGroupsByTmbId } from '@fastgpt/service/support/permission/memberGrou
import { concatPer } from '@fastgpt/service/support/permission/controller';
import { getOrgIdSetWithParentByTmbId } from '@fastgpt/service/support/permission/org/controllers';
import { addSourceMember } from '@fastgpt/service/support/user/utils';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { getEmbeddingModel } from '@fastgpt/service/core/ai/model';
export type GetDatasetListBody = {
parentId: ParentIdType;
@@ -172,7 +172,7 @@ async function handler(req: ApiRequestProps<GetDatasetListBody>) {
name: dataset.name,
intro: dataset.intro,
type: dataset.type,
vectorModel: getVectorModel(dataset.vectorModel),
vectorModel: getEmbeddingModel(dataset.vectorModel),
inheritPermission: dataset.inheritPermission,
tmbId: dataset.tmbId,
updateTime: dataset.updateTime,

View File

@@ -6,7 +6,7 @@ import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { getLLMModel, getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
@@ -49,7 +49,7 @@ async function handler(req: ApiRequestProps<rebuildEmbeddingBody>): Promise<Resp
tmbId,
appName: '切换索引模型',
billSource: UsageSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel)?.name,
vectorModel: getEmbeddingModel(dataset.vectorModel)?.name,
agentModel: getLLMModel(dataset.agentModel)?.name
});

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { CreateTrainingUsageProps } from '@fastgpt/global/support/wallet/usage/api.d';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { getLLMModel, getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
@@ -23,7 +23,7 @@ async function handler(req: NextApiRequest) {
tmbId,
appName: name,
billSource: UsageSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
vectorModel: getEmbeddingModel(dataset.vectorModel).name,
agentModel: getLLMModel(dataset.agentModel).name
});

View File

@@ -9,6 +9,7 @@ import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { NextAPI } from '@/service/middleware/entry';
import { aiTranscriptions } from '@fastgpt/service/core/ai/audio/transcriptions';
import { useReqFrequencyLimit } from '@fastgpt/service/common/middle/reqFrequencyLimit';
import { getFirstSTTModel } from '@fastgpt/service/core/ai/model';
const upload = getUploadModel({
maxSize: 5
@@ -36,7 +37,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
filePaths = [file.path];
if (!global.whisperModel) {
if (!getFirstSTTModel()) {
throw new Error('whisper model not found');
}
@@ -65,7 +66,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
// }
const result = await aiTranscriptions({
model: global.whisperModel.model,
model: getFirstSTTModel().model,
fileStream: fs.createReadStream(file.path)
});

View File

@@ -4,7 +4,7 @@ import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
import { getUsageSourceByAuthType } from '@fastgpt/global/support/wallet/usage/tools';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import { checkTeamAIPoints } from '@fastgpt/service/support/permission/teamLimit';
import { EmbeddingTypeEnm } from '@fastgpt/global/core/ai/constants';
import { NextAPI } from '@/service/middleware/entry';
@@ -36,7 +36,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const { tokens, vectors } = await getVectorsByText({
input: query,
model: getVectorModel(model),
model: getEmbeddingModel(model),
type
});

View File

@@ -9,7 +9,7 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import AIModelSelector from '@/components/Select/AIModelSelector';
import { postRebuildEmbedding } from '@/web/core/dataset/api';
import type { VectorModelItemType } from '@fastgpt/global/core/ai/model.d';
import type { EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import MyDivider from '@fastgpt/web/components/common/MyDivider/index';
@@ -50,7 +50,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
const vectorModel = watch('vectorModel');
const agentModel = watch('agentModel');
const { feConfigs, datasetModelList, vectorModelList } = useSystemStore();
const { feConfigs, datasetModelList, embeddingModelList } = useSystemStore();
const { ConfirmModal: ConfirmDelModal } = useConfirm({
content: t('common:core.dataset.Delete Confirm'),
type: 'delete'
@@ -80,7 +80,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
);
const { runAsync: onRebuilding } = useRequest2(
(vectorModel: VectorModelItemType) => {
(vectorModel: EmbeddingModelItemType) => {
return postRebuildEmbedding({
datasetId,
vectorModel: vectorModel.model
@@ -186,12 +186,12 @@ const Info = ({ datasetId }: { datasetId: string }) => {
)
: undefined
}
list={vectorModelList.map((item) => ({
list={embeddingModelList.map((item) => ({
label: item.name,
value: item.model
}))}
onchange={(e) => {
const vectorModel = vectorModelList.find((item) => item.model === e);
const vectorModel = embeddingModelList.find((item) => item.model === e);
if (!vectorModel) return;
return onOpenConfirmRebuild(async () => {
await onRebuilding(vectorModel);

View File

@@ -67,7 +67,7 @@ const InputDataModal = ({
const theme = useTheme();
const { toast } = useToast();
const [currentTab, setCurrentTab] = useState(TabEnum.content);
const { vectorModelList } = useSystemStore();
const { embeddingModelList } = useSystemStore();
const { isPc } = useSystem();
const { register, handleSubmit, reset, control } = useForm<InputDataType>();
const {
@@ -158,11 +158,11 @@ const InputDataModal = ({
const maxToken = useMemo(() => {
const vectorModel =
vectorModelList.find((item) => item.model === collection.dataset.vectorModel) ||
vectorModelList[0];
embeddingModelList.find((item) => item.model === collection.dataset.vectorModel) ||
embeddingModelList[0];
return vectorModel?.maxToken || 3000;
}, [collection.dataset.vectorModel, vectorModelList]);
}, [collection.dataset.vectorModel, embeddingModelList]);
// import new data
const { mutate: sureImportData, isLoading: isImporting } = useRequest({

View File

@@ -41,7 +41,7 @@ const CreateModal = ({
const { t } = useTranslation();
const { toast } = useToast();
const router = useRouter();
const { vectorModelList, datasetModelList } = useSystemStore();
const { embeddingModelList, datasetModelList } = useSystemStore();
const { isPc } = useSystem();
const datasetTypeMap = useMemo(() => {
@@ -69,7 +69,7 @@ const CreateModal = ({
};
}, [t]);
const filterNotHiddenVectorModelList = vectorModelList.filter((item) => !item.hidden);
const filterNotHiddenVectorModelList = embeddingModelList.filter((item) => !item.hidden);
const form = useForm<CreateDatasetParams>({
defaultValues: {
@@ -172,73 +172,69 @@ const CreateModal = ({
/>
</Flex>
</Box>
{filterNotHiddenVectorModelList.length > 1 && (
<Flex
mt={6}
alignItems={['flex-start', 'center']}
justify={'space-between'}
flexDir={['column', 'row']}
<Flex
mt={6}
alignItems={['flex-start', 'center']}
justify={'space-between'}
flexDir={['column', 'row']}
>
<HStack
spacing={1}
alignItems={'center'}
flex={['', '0 0 110px']}
fontSize={'sm'}
color={'myGray.900'}
fontWeight={500}
pb={['12px', '0']}
>
<HStack
spacing={1}
alignItems={'center'}
flex={['', '0 0 110px']}
fontSize={'sm'}
color={'myGray.900'}
fontWeight={500}
pb={['12px', '0']}
>
<Box>{t('common:core.ai.model.Vector Model')}</Box>
<QuestionTip label={t('common:core.dataset.embedding model tip')} />
</HStack>
<Box w={['100%', '300px']}>
<AIModelSelector
w={['100%', '300px']}
value={vectorModel}
list={filterNotHiddenVectorModelList.map((item) => ({
label: item.name,
value: item.model
}))}
onchange={(e) => {
setValue('vectorModel' as const, e);
}}
/>
</Box>
</Flex>
)}
{datasetModelList.length > 1 && (
<Flex
mt={6}
alignItems={['flex-start', 'center']}
justify={'space-between'}
flexDir={['column', 'row']}
<Box>{t('common:core.ai.model.Vector Model')}</Box>
<QuestionTip label={t('common:core.dataset.embedding model tip')} />
</HStack>
<Box w={['100%', '300px']}>
<AIModelSelector
w={['100%', '300px']}
value={vectorModel}
list={filterNotHiddenVectorModelList.map((item) => ({
label: item.name,
value: item.model
}))}
onchange={(e) => {
setValue('vectorModel' as const, e);
}}
/>
</Box>
</Flex>
<Flex
mt={6}
alignItems={['flex-start', 'center']}
justify={'space-between'}
flexDir={['column', 'row']}
>
<HStack
spacing={1}
flex={['', '0 0 110px']}
fontSize={'sm'}
color={'myGray.900'}
fontWeight={500}
pb={['12px', '0']}
>
<HStack
spacing={1}
flex={['', '0 0 110px']}
fontSize={'sm'}
color={'myGray.900'}
fontWeight={500}
pb={['12px', '0']}
>
<Box>{t('common:core.ai.model.Dataset Agent Model')}</Box>
<QuestionTip label={t('dataset:file_model_function_tip')} />
</HStack>
<Box w={['100%', '300px']}>
<AIModelSelector
w={['100%', '300px']}
value={agentModel}
list={datasetModelList.map((item) => ({
label: item.name,
value: item.model
}))}
onchange={(e) => {
setValue('agentModel' as const, e);
}}
/>
</Box>
</Flex>
)}
<Box>{t('common:core.ai.model.Dataset Agent Model')}</Box>
<QuestionTip label={t('dataset:file_model_function_tip')} />
</HStack>
<Box w={['100%', '300px']}>
<AIModelSelector
w={['100%', '300px']}
value={agentModel}
list={datasetModelList.map((item) => ({
label: item.name,
value: item.model
}))}
onchange={(e) => {
setValue('agentModel' as const, e);
}}
/>
</Box>
</Flex>
{/* @ts-ignore */}
<ApiDatasetForm type={type} form={form} />
</ModalBody>

View File

@@ -31,6 +31,7 @@ type OAuthItem = {
const FormLayout = ({ children, setPageType, pageType }: Props) => {
const { t } = useTranslation();
const router = useRouter();
const rootLogin = router.query.rootLogin === '1';
const { setLoginStore, feConfigs } = useSystemStore();
const { isPc } = useSystem();
@@ -147,7 +148,9 @@ const FormLayout = ({ children, setPageType, pageType }: Props) => {
[lastRoute, router, setLoginStore, setPageType]
);
// Auto login
useEffect(() => {
if (rootLogin) return;
const sso = oAuthList.find((item) => item.provider === OAuthEnum.sso);
const wecom = oAuthList.find((item) => item.provider === OAuthEnum.wecom);
if (feConfigs?.sso?.autoLogin && sso) {
@@ -157,7 +160,7 @@ const FormLayout = ({ children, setPageType, pageType }: Props) => {
// Auto wecom login
onClickOauth(wecom);
}
}, [feConfigs?.sso?.autoLogin, isWecomWorkTerminal, onClickOauth]);
}, [rootLogin, feConfigs?.sso?.autoLogin, isWecomWorkTerminal, onClickOauth]);
return (
<Flex flexDirection={'column'} h={'100%'}>

View File

@@ -31,7 +31,7 @@ export default React.memo(Points);
export const AiPointsTable = () => {
const { t } = useTranslation();
const { llmModelList, audioSpeechModelList, vectorModelList, whisperModel } = useSystemStore();
const { llmModelList, ttsModelList, embeddingModelList, sttModelList } = useSystemStore();
return (
<Grid gap={6} w={'100%'} color={'myGray.900'}>
@@ -85,7 +85,7 @@ export const AiPointsTable = () => {
</Box>
</Box>
<Box flex={4} textAlign={'center'}>
{vectorModelList?.map((item, i) => (
{embeddingModelList?.map((item, i) => (
<Flex key={item.model} py={4} bg={i % 2 !== 0 ? 'myGray.100' : ''}>
<Box flex={'1 0 0'}>{item.name}</Box>
<Box flex={'1 0 0'}>
@@ -111,7 +111,7 @@ export const AiPointsTable = () => {
</Box>
</Box>
<Box flex={4} textAlign={'center'}>
{audioSpeechModelList?.map((item, i) => (
{ttsModelList?.map((item, i) => (
<Flex key={item.model} py={4} bg={i % 2 !== 0 ? 'myGray.50' : ''}>
<Box flex={'1 0 0'}>{item.name}</Box>
<Box flex={'1 0 0'}>
@@ -138,15 +138,17 @@ export const AiPointsTable = () => {
</Box>
</Box>
<Box flex={4} textAlign={'center'} h={'100%'}>
<Flex py={4}>
<Box flex={'1 0 0'}>{whisperModel?.name}</Box>
<Box flex={'1 0 0'}>
{whisperModel?.charsPointsPrice +
t('common:support.wallet.subscription.point') +
' / 60' +
t('common:unit.seconds')}
</Box>
</Flex>
{sttModelList.map((item) => (
<Flex key={item.model} py={4}>
<Box flex={'1 0 0'}>{item.name}</Box>
<Box flex={'1 0 0'}>
{item.charsPointsPrice +
t('common:support.wallet.subscription.point') +
' / 60' +
t('common:unit.seconds')}
</Box>
</Flex>
))}
</Box>
</Box>
</Grid>

View File

@@ -12,6 +12,7 @@ import { SystemPluginTemplateItemType } from '@fastgpt/global/core/workflow/type
import { defaultGroup, defaultTemplateTypes } from '@fastgpt/web/core/workflow/constants';
import { MongoPluginGroups } from '@fastgpt/service/core/app/plugin/pluginGroupSchema';
import { MongoTemplateTypes } from '@fastgpt/service/core/app/templates/templateTypeSchema';
import { loadSystemModels } from '@fastgpt/service/core/ai/config/utils';
export const readConfigData = async (name: string) => {
const splitName = name.split('.');
@@ -50,6 +51,7 @@ export async function getInitConfig() {
return Promise.all([
initSystemConfig(),
getSystemVersion(),
loadSystemModels(),
// abandon
getSystemPlugin()
@@ -78,7 +80,7 @@ const defaultFeConfigs: FastGPTFeConfigsType = {
export async function initSystemConfig() {
// load config
const [{ config: dbConfig, configId }, fileConfig] = await Promise.all([
const [{ config: dbConfig }, fileConfig] = await Promise.all([
getFastGPTConfigFromDB(),
readConfigData('config.json')
]);
@@ -96,27 +98,16 @@ export async function initSystemConfig() {
...fileRes.systemEnv,
...(dbConfig.systemEnv || {})
},
subPlans: dbConfig.subPlans || fileRes.subPlans,
llmModels: dbConfig.llmModels || fileRes.llmModels || [],
vectorModels: dbConfig.vectorModels || fileRes.vectorModels || [],
reRankModels: dbConfig.reRankModels || fileRes.reRankModels || [],
audioSpeechModels: dbConfig.audioSpeechModels || fileRes.audioSpeechModels || [],
whisperModel: dbConfig.whisperModel || fileRes.whisperModel
subPlans: dbConfig.subPlans || fileRes.subPlans
};
// set config
global.systemInitBufferId = configId;
initFastGPTConfig(config);
console.log({
feConfigs: global.feConfigs,
systemEnv: global.systemEnv,
subPlans: global.subPlans,
llmModels: global.llmModels,
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel
subPlans: global.subPlans
});
}

View File

@@ -6,12 +6,14 @@ import { MongoSystemPlugin } from '@fastgpt/service/core/app/plugin/systemPlugin
import { debounce } from 'lodash';
import { MongoAppTemplate } from '@fastgpt/service/core/app/templates/templateSchema';
import { getAppTemplatesAndLoadThem } from '@fastgpt/templates/register';
import { watchSystemModelUpdate } from '@fastgpt/service/core/ai/config/utils';
// Entry point that registers the app's MongoDB change-stream watchers and
// refetch hooks at server startup. Each registrar installs its own listener;
// note the function is declared async but contains no awaits, so callers do
// not need to await it.
export const startMongoWatch = async () => {
  reloadConfigWatch();
  refetchSystemPlugins();
  createDatasetTrainingMongoWatch();
  refetchAppTemplates();
  // Added in this commit: reacts to system-model config changes in the DB
  // (see watchSystemModelUpdate in @fastgpt/service/core/ai/config/utils).
  watchSystemModelUpdate();
};
const reloadConfigWatch = () => {

View File

@@ -1,11 +1,12 @@
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
import { getLLMModel } from '@fastgpt/service/core/ai/model';
export const getChatModelNameListByModules = (nodes: StoreNodeItemType[]): string[] => {
const modelList = nodes
.map((item) => {
const model = item.inputs.find((input) => input.key === NodeInputKeyEnum.aiModel)?.value;
return global.llmModels.find((item) => item.model === model)?.name || '';
return getLLMModel(model)?.name || '';
})
.filter(Boolean);

View File

@@ -9,7 +9,7 @@ import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
import { jiebaSplit } from '@fastgpt/service/common/string/jieba';
import { deleteDatasetDataVector } from '@fastgpt/service/common/vectorStore/controller';
import { DatasetDataItemType } from '@fastgpt/global/core/dataset/type';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { ClientSession } from '@fastgpt/service/common/mongo';
import { MongoDatasetDataText } from '@fastgpt/service/core/dataset/data/dataTextSchema';
@@ -71,7 +71,7 @@ export async function insertData2Dataset({
indexes.map((item) =>
insertDatasetDataVector({
query: item.text,
model: getVectorModel(model),
model: getEmbeddingModel(model),
teamId,
datasetId,
collectionId
@@ -219,7 +219,7 @@ export async function updateData2Dataset({
if (item.type === 'create' || item.type === 'update') {
const result = await insertDatasetDataVector({
query: item.index.text,
model: getVectorModel(model),
model: getEmbeddingModel(model),
teamId: mongoData.teamId,
datasetId: mongoData.datasetId,
collectionId: mongoData.collectionId

View File

@@ -11,7 +11,7 @@ import {
deleteDatasetDataVector,
insertDatasetDataVector
} from '@fastgpt/service/common/vectorStore/controller';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { getEmbeddingModel } from '@fastgpt/service/core/ai/model';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { DatasetTrainingSchemaType } from '@fastgpt/global/core/dataset/type';
import { Document } from '@fastgpt/service/common/mongo';
@@ -207,7 +207,7 @@ const rebuildData = async ({
mongoData.indexes.map(async (index, i) => {
const result = await insertDatasetDataVector({
query: index.text,
model: getVectorModel(trainingData.model),
model: getEmbeddingModel(trainingData.model),
teamId: mongoData.teamId,
datasetId: mongoData.datasetId,
collectionId: mongoData.collectionId

View File

@@ -1,10 +1,11 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { ModelTypeEnum } from '@fastgpt/service/core/ai/model';
import { addLog } from '@fastgpt/service/common/system/log';
import { createUsage, concatUsage } from './controller';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { i18nT } from '@fastgpt/web/i18n/utils';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { getFirstLLMModel, getFirstSTTModel } from '@fastgpt/service/core/ai/model';
export const pushChatUsage = ({
appName,
@@ -108,7 +109,7 @@ export const pushGenerateVectorUsage = ({
extensionOutputTokens?: number;
}) => {
const { totalPoints: totalVector, modelName: vectorModelName } = formatModelChars2Points({
modelType: ModelTypeEnum.vector,
modelType: ModelTypeEnum.embedding,
model,
inputTokens
});
@@ -185,7 +186,7 @@ export const pushQuestionGuideUsage = ({
teamId: string;
tmbId: string;
}) => {
const qgModel = global.llmModels[0];
const qgModel = getFirstLLMModel();
const { totalPoints, modelName } = formatModelChars2Points({
inputTokens,
outputTokens,
@@ -229,7 +230,7 @@ export function pushAudioSpeechUsage({
const { totalPoints, modelName } = formatModelChars2Points({
model,
inputTokens: charsLength,
modelType: ModelTypeEnum.audioSpeech
modelType: ModelTypeEnum.tts
});
createUsage({
@@ -258,14 +259,14 @@ export function pushWhisperUsage({
tmbId: string;
duration: number;
}) {
const whisperModel = global.whisperModel;
const whisperModel = getFirstSTTModel();
if (!whisperModel) return;
const { totalPoints, modelName } = formatModelChars2Points({
model: whisperModel.model,
inputTokens: duration,
modelType: ModelTypeEnum.whisper,
modelType: ModelTypeEnum.stt,
multiple: 60
});

View File

@@ -1,10 +1,10 @@
import {
AudioSpeechModelType,
TTSModelType,
ChatModelItemType,
FunctionModelItemType,
LLMModelItemType,
ReRankModelItemType,
VectorModelItemType,
EmbeddingModelItemType,
STTModelType
} from '@fastgpt/global/core/ai/model.d';
import { TrackEventName } from '@/web/common/system/constants';

View File

@@ -4,16 +4,16 @@ import { immer } from 'zustand/middleware/immer';
import axios from 'axios';
import { OAuthEnum } from '@fastgpt/global/support/user/constant';
import type {
AudioSpeechModelType,
TTSModelType,
LLMModelItemType,
ReRankModelItemType,
VectorModelItemType,
EmbeddingModelItemType,
STTModelType
} from '@fastgpt/global/core/ai/model.d';
import { InitDateResponse } from '@/global/common/api/systemRes';
import { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types';
import { SubPlanType } from '@fastgpt/global/support/wallet/sub/type';
import { defaultWhisperModel } from '@fastgpt/global/core/ai/model';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
type LoginStoreType = { provider: `${OAuthEnum}`; lastRoute: string; state: string };
@@ -51,10 +51,10 @@ type State = {
systemVersion: string;
llmModelList: LLMModelItemType[];
datasetModelList: LLMModelItemType[];
vectorModelList: VectorModelItemType[];
audioSpeechModelList: AudioSpeechModelType[];
embeddingModelList: EmbeddingModelItemType[];
ttsModelList: TTSModelType[];
reRankModelList: ReRankModelItemType[];
whisperModel: STTModelType;
sttModelList: STTModelType[];
initStaticData: (e: InitDateResponse) => void;
appType?: string;
setAppType: (e?: string) => void;
@@ -127,10 +127,10 @@ export const useSystemStore = create<State>()(
systemVersion: '0.0.0',
llmModelList: [],
datasetModelList: [],
vectorModelList: [],
audioSpeechModelList: [],
embeddingModelList: [],
ttsModelList: [],
reRankModelList: [],
whisperModel: defaultWhisperModel,
sttModelList: [],
initStaticData(res) {
set((state) => {
state.initDataBufferId = res.bufferId;
@@ -139,12 +139,22 @@ export const useSystemStore = create<State>()(
state.subPlans = res.subPlans ?? state.subPlans;
state.systemVersion = res.systemVersion ?? state.systemVersion;
state.llmModelList = res.llmModels ?? state.llmModelList;
state.llmModelList =
res.activeModelList?.filter((item) => item.type === ModelTypeEnum.llm) ??
state.llmModelList;
state.datasetModelList = state.llmModelList.filter((item) => item.datasetProcess);
state.vectorModelList = res.vectorModels ?? state.vectorModelList;
state.audioSpeechModelList = res.audioSpeechModels ?? state.audioSpeechModelList;
state.reRankModelList = res.reRankModels ?? state.reRankModelList;
state.whisperModel = res.whisperModel ?? state.whisperModel;
state.embeddingModelList =
res.activeModelList?.filter((item) => item.type === ModelTypeEnum.embedding) ??
state.embeddingModelList;
state.ttsModelList =
res.activeModelList?.filter((item) => item.type === ModelTypeEnum.tts) ??
state.ttsModelList;
state.reRankModelList =
res.activeModelList?.filter((item) => item.type === ModelTypeEnum.rerank) ??
state.reRankModelList;
state.sttModelList =
res.activeModelList?.filter((item) => item.type === ModelTypeEnum.stt) ??
state.sttModelList;
});
}
})),
@@ -158,10 +168,10 @@ export const useSystemStore = create<State>()(
systemVersion: state.systemVersion,
llmModelList: state.llmModelList,
datasetModelList: state.datasetModelList,
vectorModelList: state.vectorModelList,
audioSpeechModelList: state.audioSpeechModelList,
embeddingModelList: state.embeddingModelList,
ttsModelList: state.ttsModelList,
reRankModelList: state.reRankModelList,
whisperModel: state.whisperModel
sttModelList: state.sttModelList
})
}
)

View File

@@ -0,0 +1,18 @@
import { GET, PUT, DELETE } from '@/web/common/api/request';
import type { listResponse } from '@/pages/api/core/ai/model/list';
import type { updateBody } from '@/pages/api/core/ai/model/update';
import type { deleteQuery } from '@/pages/api/core/ai/model/delete';
import type { SystemModelItemType } from '@fastgpt/service/core/ai/type';
import type { updateWithJsonBody } from '@/pages/api/core/ai/model/updateWithJson';
/* Browser-side API client for the system model-config admin endpoints.
 * Request/response types are imported from the corresponding API route
 * handlers (see imports above) so client and server stay in sync. */

// List every configured system model.
export const getSystemModelList = () => GET<listResponse>('/core/ai/model/list');
// Fetch the full configuration of one model, addressed by its model id.
export const getSystemModelDetail = (model: string) =>
  GET<SystemModelItemType>('/core/ai/model/detail', { model });
// Update a single model's configuration.
export const putSystemModel = (data: updateBody) => PUT('/core/ai/model/update', data);
// Delete a model configuration.
export const deleteSystemModel = (data: deleteQuery) => DELETE('/core/ai/model/delete', data);
// Fetch the current model configuration serialized as a JSON string.
export const getModelConfigJson = () => GET<string>('/core/ai/model/getConfigJson');
// Replace the model configuration wholesale from a JSON payload.
export const putUpdateWithJson = (data: updateWithJsonBody) =>
  PUT('/core/ai/model/updateWithJson', data);