perf: model provider show; perf: get init data buffer (#3459)

* pr code

* perf: model table show

* perf: model provider show

* perf: get init data buffer

* perf: get init data buffer

* perf: icon
This commit is contained in:
Archer
2024-12-24 15:12:07 +08:00
committed by GitHub
parent f646ef8595
commit 108e1b92ef
59 changed files with 558 additions and 329 deletions

View File

@@ -1,4 +1,7 @@
import type { InitDateResponse } from '@/global/common/api/systemRes';
import { GET } from '@/web/common/api/request';
/**
 * Fetch system initialization data.
 *
 * @param bufferId - optional id of the init-data snapshot already cached on the
 *   client (persisted as `initDataBufferId` in the system store); sending it
 *   lets the server skip re-sending data the client already has.
 * @returns the (possibly partial) init data for this client.
 */
export const getSystemInitData = (bufferId?: string) =>
  GET<InitDateResponse>('/common/system/getInitData', {
    bufferId
  });

View File

@@ -10,7 +10,7 @@ export const clientInitData = async (
feConfigs: FastGPTFeConfigsType;
}> => {
try {
const res = await getSystemInitData();
const res = await getSystemInitData(useSystemStore.getState().initDataBufferId);
useSystemStore.getState().initStaticData(res);
return {

View File

@@ -8,11 +8,12 @@ import type {
LLMModelItemType,
ReRankModelItemType,
VectorModelItemType,
WhisperModelType
STTModelType
} from '@fastgpt/global/core/ai/model.d';
import { InitDateResponse } from '@/global/common/api/systemRes';
import { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types';
import { SubPlanType } from '@fastgpt/global/support/wallet/sub/type';
import { defaultWhisperModel } from '@fastgpt/global/core/ai/model';
type LoginStoreType = { provider: `${OAuthEnum}`; lastRoute: string; state: string };
@@ -35,6 +36,7 @@ type State = {
isNotSufficientModal: boolean;
setIsNotSufficientModal: (val: boolean) => void;
initDataBufferId?: string;
feConfigs: FastGPTFeConfigsType;
subPlans?: SubPlanType;
systemVersion: string;
@@ -43,7 +45,7 @@ type State = {
vectorModelList: VectorModelItemType[];
audioSpeechModelList: AudioSpeechModelType[];
reRankModelList: ReRankModelItemType[];
whisperModel?: WhisperModelType;
whisperModel: STTModelType;
initStaticData: (e: InitDateResponse) => void;
appType?: string;
setAppType: (e?: string) => void;
@@ -110,6 +112,7 @@ export const useSystemStore = create<State>()(
});
},
initDataBufferId: undefined,
feConfigs: {},
subPlans: undefined,
systemVersion: '0.0.0',
@@ -118,26 +121,38 @@ export const useSystemStore = create<State>()(
vectorModelList: [],
audioSpeechModelList: [],
reRankModelList: [],
whisperModel: undefined,
whisperModel: defaultWhisperModel,
initStaticData(res) {
set((state) => {
state.feConfigs = res.feConfigs || {};
state.subPlans = res.subPlans;
state.systemVersion = res.systemVersion;
state.initDataBufferId = res.bufferId;
state.feConfigs = res.feConfigs ?? state.feConfigs;
state.subPlans = res.subPlans ?? state.subPlans;
state.systemVersion = res.systemVersion ?? state.systemVersion;
state.llmModelList = res.llmModels ?? state.llmModelList;
state.datasetModelList = state.llmModelList.filter((item) => item.datasetProcess);
state.vectorModelList = res.vectorModels ?? state.vectorModelList;
state.audioSpeechModelList = res.audioSpeechModels ?? state.audioSpeechModelList;
state.reRankModelList = res.reRankModels ?? state.reRankModelList;
state.whisperModel = res.whisperModel;
state.whisperModel = res.whisperModel ?? state.whisperModel;
});
}
})),
{
name: 'globalStore',
partialize: (state) => ({
loginStore: state.loginStore
loginStore: state.loginStore,
initDataBufferId: state.initDataBufferId,
feConfigs: state.feConfigs,
subPlans: state.subPlans,
systemVersion: state.systemVersion,
llmModelList: state.llmModelList,
datasetModelList: state.datasetModelList,
vectorModelList: state.vectorModelList,
audioSpeechModelList: state.audioSpeechModelList,
reRankModelList: state.reRankModelList,
whisperModel: state.whisperModel
})
}
)

View File

@@ -5,6 +5,7 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
import type { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types/index.d';
import { useMemoizedFn, useMount } from 'ahooks';
import { TrackEventName } from '../common/system/constants';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
export const useInitApp = () => {
const router = useRouter();
@@ -41,8 +42,6 @@ export const useInitApp = () => {
});
useMount(() => {
initFetch();
const errorTrack = (event: ErrorEvent) => {
window.umami?.track(TrackEventName.windowError, {
device: {
@@ -62,6 +61,11 @@ export const useInitApp = () => {
};
});
useRequest2(initFetch, {
manual: false,
pollingInterval: 300000
});
useEffect(() => {
hiId && localStorage.setItem('inviterId', hiId);
bd_vid && sessionStorage.setItem('bd_vid', bd_vid);