feat: default model (#3662)

* move model config

* feat: default model
Archer authored 2025-01-24 18:44:43 +08:00, committed by GitHub
parent 5ce889942a
commit 38efa3e050
167 changed files with 2999 additions and 2899 deletions

View File

@@ -3,8 +3,8 @@ import React, { useCallback, useMemo, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MySelect, { SelectProps } from '@fastgpt/web/components/common/MySelect';
import { HUGGING_FACE_ICON, LOGO_ICON } from '@fastgpt/global/common/system/constants';
import { Box, Flex, HStack, useDisclosure } from '@chakra-ui/react';
import { HUGGING_FACE_ICON } from '@fastgpt/global/common/system/constants';
import { Box, Flex, HStack } from '@chakra-ui/react';
import Avatar from '@fastgpt/web/components/common/Avatar';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import dynamic from 'next/dynamic';
@@ -22,7 +22,8 @@ type Props = SelectProps & {
const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
const { t } = useTranslation();
const { feConfigs, llmModelList, embeddingModelList } = useSystemStore();
const { llmModelList, embeddingModelList, ttsModelList, sttModelList, reRankModelList } =
useSystemStore();
const avatarSize = useMemo(() => {
const size = {
@@ -35,7 +36,16 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
}, [props.size]);
const avatarList = list.map((item) => {
const modelData = getModelFromList([...llmModelList, ...embeddingModelList], item.value);
const modelData = getModelFromList(
[
...llmModelList,
...embeddingModelList,
...ttsModelList,
...sttModelList,
...reRankModelList
],
item.value
);
return {
value: item.value,
@@ -54,20 +64,6 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
};
});
const expandList = useMemo(() => {
return feConfigs?.show_pay
? avatarList.concat({
label: (
<Flex alignItems={'center'}>
<Avatar borderRadius={'0'} mr={2} src={LOGO_ICON} w={avatarSize} />
<Box>{t('common:support.user.Price')}</Box>
</Flex>
),
value: 'price'
})
: avatarList;
}, [feConfigs.show_pay, avatarList, avatarSize, t]);
return (
<Box
css={{
@@ -82,7 +78,8 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
<MySelect
className="nowheel"
isDisabled={!!disableTip}
list={expandList}
list={avatarList}
h={'40px'}
{...props}
onchange={(e) => {
if (e === 'price') {
@@ -100,7 +97,8 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
};
const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
const { t } = useTranslation();
const { llmModelList, embeddingModelList } = useSystemStore();
const { llmModelList, embeddingModelList, ttsModelList, sttModelList, reRankModelList } =
useSystemStore();
const [value, setValue] = useState<string[]>([]);
const avatarSize = useMemo(() => {
@@ -158,7 +156,16 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
);
const SelectedModel = useMemo(() => {
const modelData = getModelFromList([...llmModelList, ...embeddingModelList], props.value);
const modelData = getModelFromList(
[
...llmModelList,
...embeddingModelList,
...ttsModelList,
...sttModelList,
...reRankModelList
],
props.value
);
setValue([modelData.provider, props.value]);
@@ -174,7 +181,15 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
<Box>{modelData?.name}</Box>
</HStack>
);
}, [avatarSize, llmModelList, props.value, embeddingModelList]);
}, [
llmModelList,
embeddingModelList,
ttsModelList,
sttModelList,
reRankModelList,
props.value,
avatarSize
]);
return (
<Box
@@ -192,7 +207,9 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
value={value}
rowMinWidth="160px"
ButtonProps={{
isDisabled: !!disableTip
isDisabled: !!disableTip,
h: '40px',
...props
}}
/>
</MyTooltip>
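
getModelFromList itself is not part of this diff. Judging from the call sites above, a plausible sketch is the following (the fallback behavior is an assumption, not the verbatim FastGPT implementation):

type ModelLike = { model: string; name: string; provider: string };

// Sketch: look a model up by id across the merged lists; falling back to the
// first entry keeps the selector renderable when a configured model was removed.
const getModelFromList = (modelList: ModelLike[], value: string): ModelLike | undefined =>
  modelList.find((item) => item.model === value) ?? modelList[0];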

View File

@@ -1,4 +1,4 @@
import React, { useMemo } from 'react';
import React, { useEffect, useMemo } from 'react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { LLMModelTypeEnum, llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
import { Box, css, HStack, IconButton, useDisclosure } from '@chakra-ui/react';
@@ -24,7 +24,7 @@ const SettingLLMModel = ({
...props
}: AIChatSettingsModalProps & Props) => {
const { t } = useTranslation();
const { llmModelList } = useSystemStore();
const { llmModelList, defaultModels } = useSystemStore();
const model = defaultData.model;
@@ -39,15 +39,16 @@ const SettingLLMModel = ({
}),
[llmModelList, llmModelType]
);
// Set default model
useMount(() => {
if (!model && modelList.length > 0) {
useEffect(() => {
if (!llmModelList.find((item) => item.model === model) && !!defaultModels.llm) {
onChange({
...defaultData,
model: modelList[0].model
model: defaultModels.llm.model
});
}
});
}, [model, defaultData, llmModelList, defaultModels.llm, onChange]);
const {
isOpen: isOpenAIChatSetting,

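Switching from useMount to useEffect means the default now kicks in whenever the configured model disappears from llmModelList, not only on first mount. The rule, extracted as a pure helper (a sketch; resolveModel is a hypothetical name, not part of this commit):

// Sketch: keep the current model while it is still available, else fall back to the default.
const resolveModel = <T extends { model: string }>(
  current: string | undefined,
  available: T[],
  fallback?: T
): string | undefined =>
  available.some((item) => item.model === current) ? current : fallback?.model;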
View File

@@ -8,14 +8,15 @@ import type {
import type { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types/index.d';
import { SubPlanType } from '@fastgpt/global/support/wallet/sub/type';
import { SystemModelItemType } from '@fastgpt/service/core/ai/type';
import { SystemDefaultModelType, SystemModelItemType } from '@fastgpt/service/core/ai/type';
export type InitDateResponse = {
bufferId?: string;
feConfigs: FastGPTFeConfigsType;
feConfigs?: FastGPTFeConfigsType;
subPlans?: SubPlanType;
systemVersion: string;
activeModelList?: SystemModelItemType[];
defaultModels?: SystemDefaultModelType;
};
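
SystemDefaultModelType is imported but not defined in this view. From how defaultModels is consumed below (defaultModels.llm?.model, defaultModels[type], and so on), a plausible shape is (an inference, not the verbatim type):

// Sketch of the inferred shape; the real definition lives in @fastgpt/service/core/ai/type.
type SystemDefaultModelType = {
  llm?: SystemModelItemType;
  embedding?: SystemModelItemType;
  tts?: SystemModelItemType;
  stt?: SystemModelItemType;
  rerank?: SystemModelItemType;
};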

View File

@@ -36,7 +36,8 @@ import {
getSystemModelDetail,
getSystemModelList,
getTestModel,
putSystemModel
putSystemModel,
putUpdateDefaultModels
} from '@/web/core/ai/config';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { SystemModelItemType } from '@fastgpt/service/core/ai/type';
@@ -54,20 +55,14 @@ import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import { putUpdateWithJson } from '@/web/core/ai/config';
import CopyBox from '@fastgpt/web/components/common/String/CopyBox';
import MyIcon from '@fastgpt/web/components/common/Icon';
import AIModelSelector from '@/components/Select/AIModelSelector';
const MyModal = dynamic(() => import('@fastgpt/web/components/common/MyModal'));
const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
const { t } = useTranslation();
const { userInfo } = useUserStore();
const {
llmModelList,
embeddingModelList,
ttsModelList,
sttModelList,
reRankModelList,
feConfigs
} = useSystemStore();
const { defaultModels, feConfigs } = useSystemStore();
const isRoot = userInfo?.username === 'root';
@@ -273,14 +268,7 @@ const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
}
);
const onCreateModel = (type: ModelTypeEnum) => {
const defaultModel = (() => {
if (type === ModelTypeEnum.llm) return llmModelList[0];
if (type === ModelTypeEnum.embedding) return embeddingModelList[0];
if (type === ModelTypeEnum.tts) return ttsModelList[0];
if (type === ModelTypeEnum.stt) return sttModelList[0];
if (type === ModelTypeEnum.rerank) return reRankModelList[0];
return llmModelList[0];
})();
const defaultModel = defaultModels[type];
setEditModelData({
...defaultModel,
@@ -302,6 +290,11 @@ const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
onOpen: onOpenJsonConfig,
onClose: onCloseJsonConfig
} = useDisclosure();
const {
onOpen: onOpenDefaultModel,
onClose: onCloseDefaultModel,
isOpen: isOpenDefaultModel
} = useDisclosure();
const isLoading = loadingModels || loadingData || updatingModel || testingModel;
@@ -313,6 +306,9 @@ const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
<Flex alignItems={'center'}>
{Tab}
<Box flex={1} />
<Button variant={'whiteBase'} mr={2} onClick={onOpenDefaultModel}>
{t('account:model.default_model')}
</Button>
<Button variant={'whiteBase'} mr={2} onClick={onOpenJsonConfig}>
{t('account:model.json_config')}
</Button>
@@ -505,6 +501,9 @@ const ModelTable = ({ Tab }: { Tab: React.ReactNode }) => {
{isOpenJsonConfig && (
<JsonConfigModal onClose={onCloseJsonConfig} onSuccess={refreshModels} />
)}
{isOpenDefaultModel && (
<DefaultModelModal onClose={onCloseDefaultModel} onSuccess={refreshModels} />
)}
</>
);
};
@@ -1084,4 +1083,164 @@ const JsonConfigModal = ({
);
};
const labelStyles = {
fontSize: 'sm',
color: 'myGray.900',
mb: 0.5
};
const DefaultModelModal = ({
onSuccess,
onClose
}: {
onSuccess: () => void;
onClose: () => void;
}) => {
const { t } = useTranslation();
const {
defaultModels,
llmModelList,
embeddingModelList,
ttsModelList,
sttModelList,
reRankModelList
} = useSystemStore();
// Create a copy of defaultModels for local state management
const [defaultData, setDefaultData] = useState(defaultModels);
const { runAsync, loading } = useRequest2(putUpdateDefaultModels, {
onSuccess: () => {
onSuccess();
onClose();
},
successToast: t('common:common.Update Success')
});
return (
<MyModal
isOpen
onClose={onClose}
title={t('account:default_model_config')}
iconSrc="modal/edit"
>
<ModalBody>
<Box>
<Box {...labelStyles}>{t('common:model.type.chat')}</Box>
<Box flex={1}>
<AIModelSelector
bg="myGray.50"
value={defaultData.llm?.model}
list={llmModelList.map((item) => ({
value: item.model,
label: item.name
}))}
onchange={(e) => {
setDefaultData((state) => ({
...state,
llm: llmModelList.find((item) => item.model === e)
}));
}}
/>
</Box>
</Box>
<Box mt={4}>
<Box {...labelStyles}>{t('common:model.type.embedding')}</Box>
<Box flex={1}>
<AIModelSelector
bg="myGray.50"
value={defaultData.embedding?.model}
list={embeddingModelList.map((item) => ({
value: item.model,
label: item.name
}))}
onchange={(e) => {
setDefaultData((state) => ({
...state,
embedding: embeddingModelList.find((item) => item.model === e)
}));
}}
/>
</Box>
</Box>
<Box mt={4}>
<Box {...labelStyles}>{t('common:model.type.tts')}</Box>
<Box flex={1}>
<AIModelSelector
bg="myGray.50"
value={defaultData.tts?.model}
list={ttsModelList.map((item) => ({
value: item.model,
label: item.name
}))}
onchange={(e) => {
setDefaultData((state) => ({
...state,
tts: ttsModelList.find((item) => item.model === e)
}));
}}
/>
</Box>
</Box>
<Box mt={4}>
<Box {...labelStyles}>{t('common:model.type.stt')}</Box>
<Box flex={1}>
<AIModelSelector
bg="myGray.50"
value={defaultData.stt?.model}
list={sttModelList.map((item) => ({
value: item.model,
label: item.name
}))}
onchange={(e) => {
setDefaultData((state) => ({
...state,
stt: sttModelList.find((item) => item.model === e)
}));
}}
/>
</Box>
</Box>
<Box mt={4}>
<Box {...labelStyles}>{t('common:model.type.reRank')}</Box>
<Box flex={1}>
<AIModelSelector
bg="myGray.50"
value={defaultData.rerank?.model}
list={reRankModelList.map((item) => ({
value: item.model,
label: item.name
}))}
onchange={(e) => {
setDefaultData((state) => ({
...state,
rerank: reRankModelList.find((item) => item.model === e)
}));
}}
/>
</Box>
</Box>
</ModalBody>
<ModalFooter>
<Button variant={'whiteBase'} mr={4} onClick={onClose}>
{t('common:common.Cancel')}
</Button>
<Button
isLoading={loading}
onClick={() =>
runAsync({
[ModelTypeEnum.llm]: defaultData.llm?.model,
[ModelTypeEnum.embedding]: defaultData.embedding?.model,
[ModelTypeEnum.tts]: defaultData.tts?.model,
[ModelTypeEnum.stt]: defaultData.stt?.model,
[ModelTypeEnum.rerank]: defaultData.rerank?.model
})
}
>
{t('common:common.Confirm')}
</Button>
</ModalFooter>
</MyModal>
);
};
export default ModelTable;
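
For reference, the Confirm button above ends up issuing a PUT with the selected model ids. A minimal call through the web client (the model ids below are placeholders, not defaults shipped by FastGPT):

// Hypothetical ids, for illustration only.
await putUpdateDefaultModels({
  llm: 'gpt-4o-mini',
  embedding: 'text-embedding-3-small'
});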

View File

@@ -233,7 +233,7 @@ const Share = ({ appId }: { appId: string; type: PublishChannelEnum }) => {
onEdit={() => {
toast({
status: 'success',
title: t('common:common.Update Successful')
title: t('common:common.Update Success')
});
refetchShareChatList();
setEditLinkData(undefined);

View File

@@ -19,7 +19,7 @@ const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
const nodeList = useContextSelector(WorkflowContext, (v) => v.nodeList);
const { t } = useTranslation();
const { llmModelList } = useSystemStore();
const { defaultModels } = useSystemStore();
const [data, setData] = useState<DatasetParamsProps>({
searchMode: DatasetSearchModeEnum.embedding,
@@ -27,7 +27,7 @@ const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
similarity: 0.5,
usingReRank: false,
datasetSearchUsingExtensionQuery: true,
datasetSearchExtensionModel: llmModelList[0]?.model,
datasetSearchExtensionModel: defaultModels.llm?.model,
datasetSearchExtensionBg: ''
});
@@ -45,7 +45,7 @@ const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
});
return maxTokens;
}, [nodeList, llmModelList]);
}, [nodeList]);
const { isOpen, onOpen, onClose } = useDisclosure();

View File

@@ -67,7 +67,7 @@ const InputDataModal = ({
const theme = useTheme();
const { toast } = useToast();
const [currentTab, setCurrentTab] = useState(TabEnum.content);
const { embeddingModelList } = useSystemStore();
const { embeddingModelList, defaultModels } = useSystemStore();
const { isPc } = useSystem();
const { register, handleSubmit, reset, control } = useForm<InputDataType>();
const {
@@ -159,10 +159,10 @@ const InputDataModal = ({
const maxToken = useMemo(() => {
const vectorModel =
embeddingModelList.find((item) => item.model === collection.dataset.vectorModel) ||
embeddingModelList[0];
defaultModels.embedding;
return vectorModel?.maxToken || 3000;
}, [collection.dataset.vectorModel, embeddingModelList]);
}, [collection.dataset.vectorModel, defaultModels.embedding, embeddingModelList]);
// import new data
const { mutate: sureImportData, isLoading: isImporting } = useRequest({

View File

@@ -48,7 +48,7 @@ type FormType = {
const Test = ({ datasetId }: { datasetId: string }) => {
const { t } = useTranslation();
const { toast } = useToast();
const { llmModelList } = useSystemStore();
const { defaultModels } = useSystemStore();
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const { pushDatasetTestItem } = useSearchTestStore();
const [inputType, setInputType] = useState<'text' | 'file'>('text');
@@ -70,7 +70,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
limit: 5000,
similarity: 0,
datasetSearchUsingExtensionQuery: true,
datasetSearchExtensionModel: llmModelList[0].model,
datasetSearchExtensionModel: defaultModels.llm?.model,
datasetSearchExtensionBg: ''
}
}

View File

@@ -41,7 +41,7 @@ const CreateModal = ({
const { t } = useTranslation();
const { toast } = useToast();
const router = useRouter();
const { embeddingModelList, datasetModelList } = useSystemStore();
const { defaultModels, embeddingModelList, datasetModelList } = useSystemStore();
const { isPc } = useSystem();
const datasetTypeMap = useMemo(() => {
@@ -78,8 +78,8 @@ const CreateModal = ({
avatar: datasetTypeMap[type].icon,
name: '',
intro: '',
vectorModel: filterNotHiddenVectorModelList[0].model,
agentModel: datasetModelList[0].model
vectorModel: defaultModels.embedding?.model,
agentModel: defaultModels.llm?.model
}
});
const { register, setValue, handleSubmit, watch } = form;
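
On a fresh deployment defaultModels.embedding or defaultModels.llm may still be unset, which would leave vectorModel/agentModel undefined in these form defaults. A defensive variant (a sketch, not what this commit ships) keeps the previous first-item fallback:

// Sketch: prefer the configured default model, else the first available one.
const pickModel = <T extends { model: string }>(preferred?: T, list: T[] = []) =>
  (preferred ?? list[0])?.model;

// vectorModel: pickModel(defaultModels.embedding, filterNotHiddenVectorModelList),
// agentModel: pickModel(defaultModels.llm, datasetModelList)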

View File

@@ -1,8 +1,13 @@
import type { NextApiResponse } from 'next';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { InitDateResponse } from '@/global/common/api/systemRes';
import { SystemModelItemType } from '@fastgpt/service/core/ai/type';
async function handler(req: ApiRequestProps<{}, { bufferId?: string }>, res: NextApiResponse) {
async function handler(
req: ApiRequestProps<{}, { bufferId?: string }>,
res: NextApiResponse
): Promise<InitDateResponse> {
const { bufferId } = req.query;
const activeModelList = global.systemActiveModelList.map((model) => ({
@@ -17,7 +22,7 @@ async function handler(req: ApiRequestProps<{}, { bufferId?: string }>, res: Nex
queryConfig: undefined,
requestUrl: undefined,
requestAuth: undefined
}));
})) as SystemModelItemType[];
// If bufferId is the same as the current bufferId, return directly
if (bufferId && global.systemInitBufferId && global.systemInitBufferId === bufferId) {
@@ -31,8 +36,9 @@ async function handler(req: ApiRequestProps<{}, { bufferId?: string }>, res: Nex
bufferId: global.systemInitBufferId,
feConfigs: global.feConfigs,
subPlans: global.subPlans,
systemVersion: global.systemVersion || '0.0.0',
activeModelList,
systemVersion: global.systemVersion || '0.0.0',
defaultModels: global.systemDefaultModel
};
}
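
The bufferId check above short-circuits the heavy payload: when the client already holds the current buffer, only the id needs to come back. The early return is not visible in this hunk; a sketch of it, based on the comment:

// Sketch; assumption based on the comment in the handler.
if (bufferId && global.systemInitBufferId === bufferId) {
  return { bufferId: global.systemInitBufferId };
}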

View File

@@ -16,7 +16,7 @@ import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSc
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getFirstLLMModel } from '@fastgpt/service/core/ai/model';
import { getDefaultLLMModel } from '@fastgpt/service/core/ai/model';
async function handler(
req: ApiRequestProps<
@@ -36,7 +36,7 @@ async function handler(
authApiKey: true
});
const qgModel = getFirstLLMModel();
const qgModel = getDefaultLLMModel();
const { result, inputTokens, outputTokens } = await createQuestionGuide({
messages,
@@ -48,6 +48,7 @@ async function handler(
});
pushQuestionGuideUsage({
model: qgModel.model,
inputTokens,
outputTokens,
teamId,

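getFirstLLMModel is renamed to getDefaultLLMModel across these handlers. The implementation is not shown here, but given the metadata.isDefault flag written by the updateDefault endpoint below, a plausible sketch is (an assumption about @fastgpt/service/core/ai/model, not the verbatim source):

// Sketch: prefer the configured default, else the first active LLM.
// global.systemDefaultModel is real (see the init handler above);
// activeLlmModels stands in for however the service layer stores active LLMs.
const getDefaultLLMModel = () => global.systemDefaultModel.llm ?? activeLlmModels[0];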
View File

@@ -9,7 +9,7 @@ import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
import { getFirstLLMModel } from '@fastgpt/service/core/ai/model';
import { getDefaultLLMModel } from '@fastgpt/service/core/ai/model';
export type CreateQuestionGuideParams = OutLinkChatAuthProps & {
appId: string;
@@ -51,7 +51,7 @@ async function handler(req: ApiRequestProps<CreateQuestionGuideParams>, res: Nex
});
const messages = chats2GPTMessages({ messages: histories, reserveId: false });
const qgModel = questionGuide?.model || getFirstLLMModel().model;
const qgModel = questionGuide?.model || getDefaultLLMModel().model;
const { result, inputTokens, outputTokens } = await createQuestionGuide({
messages,
@@ -60,6 +60,7 @@ async function handler(req: ApiRequestProps<CreateQuestionGuideParams>, res: Nex
});
pushQuestionGuideUsage({
model: qgModel,
inputTokens,
outputTokens,
teamId,

View File

@@ -0,0 +1,73 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { MongoSystemModel } from '@fastgpt/service/core/ai/config/schema';
import { loadSystemModels } from '@fastgpt/service/core/ai/config/utils';
import { updateFastGPTConfigBuffer } from '@fastgpt/service/common/system/config/controller';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
export type updateDefaultQuery = {};
export type updateDefaultBody = {
[ModelTypeEnum.llm]?: string;
[ModelTypeEnum.embedding]?: string;
[ModelTypeEnum.tts]?: string;
[ModelTypeEnum.stt]?: string;
[ModelTypeEnum.rerank]?: string;
};
export type updateDefaultResponse = {};
async function handler(
req: ApiRequestProps<updateDefaultBody, updateDefaultQuery>,
res: ApiResponseType<any>
): Promise<updateDefaultResponse> {
const { llm, embedding, tts, stt, rerank } = req.body;
await mongoSessionRun(async (session) => {
await MongoSystemModel.updateMany({}, { $set: { 'metadata.isDefault': false } }, { session });
if (llm) {
await MongoSystemModel.updateOne(
{ model: llm },
{ $set: { 'metadata.isDefault': true } },
{ session }
);
}
if (embedding) {
await MongoSystemModel.updateOne(
{ model: embedding },
{ $set: { 'metadata.isDefault': true } },
{ session }
);
}
if (tts) {
await MongoSystemModel.updateOne(
{ model: tts },
{ $set: { 'metadata.isDefault': true } },
{ session }
);
}
if (stt) {
await MongoSystemModel.updateOne(
{ model: stt },
{ $set: { 'metadata.isDefault': true } },
{ session }
);
}
if (rerank) {
await MongoSystemModel.updateOne(
{ model: rerank },
{ $set: { 'metadata.isDefault': true } },
{ session }
);
}
});
await loadSystemModels(true);
await updateFastGPTConfigBuffer();
return {};
}
export default NextAPI(handler);
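
The five branches differ only in the model id; an equivalent, tighter version of the session body (a sketch, not what the commit ships) would be:

// Sketch: same behavior as the five if-blocks above.
await mongoSessionRun(async (session) => {
  await MongoSystemModel.updateMany({}, { $set: { 'metadata.isDefault': false } }, { session });
  for (const model of [llm, embedding, tts, stt, rerank]) {
    if (!model) continue;
    await MongoSystemModel.updateOne(
      { model },
      { $set: { 'metadata.isDefault': true } },
      { session }
    );
  }
});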

View File

@@ -6,7 +6,7 @@ import {
getLLMModel,
getEmbeddingModel,
getDatasetModel,
getFirstEmbeddingModel
getDefaultEmbeddingModel
} from '@fastgpt/service/core/ai/model';
import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
@@ -32,7 +32,7 @@ async function handler(
intro,
type = DatasetTypeEnum.dataset,
avatar,
vectorModel = getFirstEmbeddingModel().model,
vectorModel = getDefaultEmbeddingModel().model,
agentModel = getDatasetModel().model,
apiServer,
feishuServer,

View File

@@ -9,7 +9,7 @@ import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { NextAPI } from '@/service/middleware/entry';
import { aiTranscriptions } from '@fastgpt/service/core/ai/audio/transcriptions';
import { useIPFrequencyLimit } from '@fastgpt/service/common/middle/reqFrequencyLimit';
import { getFirstSTTModel } from '@fastgpt/service/core/ai/model';
import { getDefaultSTTModel } from '@fastgpt/service/core/ai/model';
const upload = getUploadModel({
maxSize: 5
@@ -37,7 +37,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
filePaths = [file.path];
if (!getFirstSTTModel()) {
if (!getDefaultSTTModel()) {
throw new Error('whisper model not found');
}
@@ -66,7 +66,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
// }
const result = await aiTranscriptions({
model: getFirstSTTModel().model,
model: getDefaultSTTModel().model,
fileStream: fs.createReadStream(file.path)
});
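
getDefaultSTTModel() is resolved twice in this handler, once for the existence check and once for the request. A variant that resolves it once (sketch):

// Sketch: resolve the model once and reuse it.
const sttModel = getDefaultSTTModel();
if (!sttModel) {
  throw new Error('whisper model not found');
}
const result = await aiTranscriptions({
  model: sttModel.model,
  fileStream: fs.createReadStream(file.path)
});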

View File

@@ -5,7 +5,7 @@ import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/u
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { i18nT } from '@fastgpt/web/i18n/utils';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { getFirstLLMModel, getFirstSTTModel } from '@fastgpt/service/core/ai/model';
import { getDefaultSTTModel } from '@fastgpt/service/core/ai/model';
export const pushChatUsage = ({
appName,
@@ -176,21 +176,22 @@ export const pushGenerateVectorUsage = ({
};
export const pushQuestionGuideUsage = ({
model,
inputTokens,
outputTokens,
teamId,
tmbId
}: {
model: string;
inputTokens: number;
outputTokens: number;
teamId: string;
tmbId: string;
}) => {
const qgModel = getFirstLLMModel();
const { totalPoints, modelName } = formatModelChars2Points({
inputTokens,
outputTokens,
model: qgModel.model,
model,
modelType: ModelTypeEnum.llm
});
@@ -259,7 +260,7 @@ export function pushWhisperUsage({
tmbId: string;
duration: number;
}) {
const whisperModel = getFirstSTTModel();
const whisperModel = getDefaultSTTModel();
if (!whisperModel) return;

View File

@@ -15,6 +15,7 @@ import { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types';
import { SubPlanType } from '@fastgpt/global/support/wallet/sub/type';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
import { SystemDefaultModelType } from '@fastgpt/service/core/ai/type';
type LoginStoreType = { provider: `${OAuthEnum}`; lastRoute: string; state: string };
@@ -49,6 +50,7 @@ type State = {
feConfigs: FastGPTFeConfigsType;
subPlans?: SubPlanType;
systemVersion: string;
defaultModels: SystemDefaultModelType;
llmModelList: LLMModelItemType[];
datasetModelList: LLMModelItemType[];
embeddingModelList: EmbeddingModelItemType[];
@@ -125,6 +127,7 @@ export const useSystemStore = create<State>()(
feConfigs: {},
subPlans: undefined,
systemVersion: '0.0.0',
defaultModels: {},
llmModelList: [],
datasetModelList: [],
embeddingModelList: [],
@@ -155,6 +158,8 @@ export const useSystemStore = create<State>()(
state.sttModelList =
res.activeModelList?.filter((item) => item.type === ModelTypeEnum.stt) ??
state.sttModelList;
state.defaultModels = res.defaultModels ?? state.defaultModels;
});
}
})),
@@ -166,6 +171,7 @@ export const useSystemStore = create<State>()(
feConfigs: state.feConfigs,
subPlans: state.subPlans,
systemVersion: state.systemVersion,
defaultModels: state.defaultModels,
llmModelList: state.llmModelList,
datasetModelList: state.datasetModelList,
embeddingModelList: state.embeddingModelList,

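With defaultModels persisted in the store, consumers can prefer the configured default and still degrade gracefully. A typical read (sketch, mirroring the pattern used in SelectDatasetParam above):

// Sketch of a consumer component's read path.
const { defaultModels, llmModelList } = useSystemStore();
const extensionModel = defaultModels.llm?.model ?? llmModelList[0]?.model;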
View File

@@ -4,6 +4,7 @@ import type { updateBody } from '@/pages/api/core/ai/model/update';
import type { deleteQuery } from '@/pages/api/core/ai/model/delete';
import type { SystemModelItemType } from '@fastgpt/service/core/ai/type';
import type { updateWithJsonBody } from '@/pages/api/core/ai/model/updateWithJson';
import type { updateDefaultBody } from '@/pages/api/core/ai/model/updateDefault';
export const getSystemModelList = () => GET<listResponse>('/core/ai/model/list');
export const getSystemModelDetail = (model: string) =>
@@ -18,3 +19,6 @@ export const putUpdateWithJson = (data: updateWithJsonBody) =>
PUT('/core/ai/model/updateWithJson', data);
export const getTestModel = (model: String) => GET('/core/ai/model/test', { model });
export const putUpdateDefaultModels = (data: updateDefaultBody) =>
PUT('/core/ai/model/updateDefault', data);