4.6.8-alpha (#804)

* perf: redirect request and err log replace

perf: dataset openapi

feat: session

fix: retry input error

feat: 468 doc

sub page

feat: standard sub

perf: rerank tip

perf: rerank tip

perf: api sdk

perf: openapi

sub plan

perf: sub ui

fix: ts

* perf: init log

* fix: variable select

* sub page

* icon

* perf: llm model config

* perf: menu ux

* perf: system store

* perf: publish app name

* fix: init data

* perf: flow edit ux

* fix: value type format and ux

* fix prompt editor default value (#13)

* fix prompt editor default value

* fix prompt editor update when not focus

* add key with variable

---------

Co-authored-by: Archer <545436317@qq.com>

* fix: value type

* doc

* i18n

* import path

* home page

* perf: mongo session running

* fix: ts

* perf: use toast

* perf: flow edit

* perf: sse response

* slider ui

* fetch error

* fix prompt editor rerender when not focus by key defaultvalue (#14)

* perf: prompt editor

* feat: dataset search concat

* perf: doc

* fix: ts

* perf: doc

* fix json editor onblur value (#15)

* faq

* vector model default config

* ipv6

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer
2024-02-01 21:57:41 +08:00
committed by GitHub
parent fc19c4cf09
commit 34602b25df
285 changed files with 10345 additions and 11223 deletions

View File

@@ -1,16 +1,21 @@
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { parseStreamChunk, SSEParseData } from '@/utils/sse';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { StartChatFnProps } from '@/components/ChatBox';
import { getToken } from '@/web/support/user/auth';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import dayjs from 'dayjs';
import {
// refer to https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
EventStreamContentType,
fetchEventSource
} from '@fortaine/fetch-event-source';
type StreamFetchProps = {
url?: string;
data: Record<string, any>;
onMessage: StartChatFnProps['generatingMessage'];
abortSignal: AbortController;
abortCtrl: AbortController;
};
type StreamResponseType = {
responseText: string;
@@ -20,105 +25,158 @@ export const streamFetch = ({
url = '/api/v1/chat/completions',
data,
onMessage,
abortSignal
abortCtrl
}: StreamFetchProps) =>
new Promise<StreamResponseType>(async (resolve, reject) => {
const timeoutId = setTimeout(() => {
abortCtrl.abort('Time out');
}, 60000);
// response data
let responseText = '';
let remainText = '';
let errMsg = '';
let responseData: ChatHistoryItemResType[] = [];
let finished = false;
const finish = () => {
if (errMsg) {
return failedFinish();
}
return resolve({
responseText,
responseData
});
};
const failedFinish = (err?: any) => {
finished = true;
reject({
message: getErrText(err, errMsg || '响应过程出现异常~'),
responseText
});
};
// animate response to make it looks smooth
function animateResponseText() {
// abort message
if (abortCtrl.signal.aborted) {
onMessage({ text: remainText });
responseText += remainText;
return finish();
}
if (remainText) {
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
const fetchText = remainText.slice(0, fetchCount);
onMessage({ text: fetchText });
responseText += fetchText;
remainText = remainText.slice(fetchCount);
}
if (finished && !remainText) {
return finish();
}
requestAnimationFrame(animateResponseText);
}
// start animation
animateResponseText();
try {
const response = await window.fetch(url, {
// auto complete variables
const variables = data?.variables || {};
variables.cTime = dayjs().format('YYYY-MM-DD HH:mm:ss');
const requestData = {
method: 'POST',
headers: {
'Content-Type': 'application/json',
token: getToken()
},
signal: abortSignal.signal,
signal: abortCtrl.signal,
body: JSON.stringify({
...data,
variables,
detail: true,
stream: true
})
});
if (!response?.body || !response?.ok) {
throw new Error('Request Error');
}
const reader = response.body?.getReader();
// response data
let responseText = '';
let errMsg = '';
let responseData: ChatHistoryItemResType[] = [];
const parseData = new SSEParseData();
const read = async () => {
try {
const { done, value } = await reader.read();
if (done) {
if (response.status === 200 && !errMsg) {
return resolve({
responseText,
responseData
});
} else {
return reject({
message: errMsg || '响应过程出现异常~',
responseText
});
}
}
const chunkResponse = parseStreamChunk(value);
chunkResponse.forEach((item) => {
// parse json data
const { eventName, data } = parseData.parse(item);
if (!eventName || !data) return;
if (eventName === sseResponseEventEnum.answer && data !== '[DONE]') {
const answer: string = data?.choices?.[0]?.delta?.content || '';
onMessage({ text: answer });
responseText += answer;
} else if (
eventName === sseResponseEventEnum.moduleStatus &&
data?.name &&
data?.status
) {
onMessage(data);
} else if (
eventName === sseResponseEventEnum.appStreamResponse &&
Array.isArray(data)
) {
responseData = data;
} else if (eventName === sseResponseEventEnum.error) {
errMsg = getErrText(data, '流响应错误');
}
});
read();
} catch (err: any) {
if (abortSignal.signal.aborted) {
return resolve({
responseText,
responseData
});
}
reject({
responseText,
message: getErrText(err, '请求异常')
});
}
};
read();
// send request
await fetchEventSource(url, {
...requestData,
async onopen(res) {
clearTimeout(timeoutId);
const contentType = res.headers.get('content-type');
// not stream
if (contentType?.startsWith('text/plain')) {
return failedFinish(await res.clone().text());
}
// failed stream
if (
!res.ok ||
!res.headers.get('content-type')?.startsWith(EventStreamContentType) ||
res.status !== 200
) {
try {
failedFinish(await res.clone().json());
} catch {
failedFinish(await res.clone().text());
}
}
},
onmessage({ event, data }) {
if (data === '[DONE]') {
return;
}
// parse text to json
const parseJson = (() => {
try {
return JSON.parse(data);
} catch (error) {
return {};
}
})();
if (event === sseResponseEventEnum.answer) {
const answer: string = parseJson?.choices?.[0]?.delta?.content || '';
remainText += answer;
} else if (
event === sseResponseEventEnum.moduleStatus &&
parseJson?.name &&
parseJson?.status
) {
onMessage(parseJson);
} else if (event === sseResponseEventEnum.appStreamResponse && Array.isArray(parseJson)) {
responseData = parseJson;
} else if (event === sseResponseEventEnum.error) {
errMsg = getErrText(parseJson, '流响应错误');
}
},
onclose() {
finished = true;
},
onerror(e) {
clearTimeout(timeoutId);
failedFinish(getErrText(e));
},
openWhenHidden: true
});
} catch (err: any) {
if (abortSignal.signal.aborted) {
return resolve({
responseText: '',
responseData: []
});
clearTimeout(timeoutId);
if (abortCtrl.signal.aborted) {
finished = true;
return;
}
console.log(err, 'fetch error');
reject(getErrText(err, '请求异常'));
failedFinish(err);
}
});

View File

@@ -1,6 +1,6 @@
import React, { useRef, useCallback } from 'react';
import { Box } from '@chakra-ui/react';
import { useToast } from '@/web/common/hooks/useToast';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useTranslation } from 'next-i18next';
export const useSelectFile = (props?: {

View File

@@ -9,6 +9,7 @@ export const useConfirm = (props?: {
content?: string;
showCancel?: boolean;
type?: 'common' | 'delete';
hideFooter?: boolean;
}) => {
const { t } = useTranslation();
@@ -33,7 +34,8 @@ export const useConfirm = (props?: {
title = map?.title || t('Warning'),
iconSrc = map?.iconSrc,
content,
showCancel = true
showCancel = true,
hideFooter = false
} = props || {};
const [customContent, setCustomContent] = useState<string | React.ReactNode>(content);
@@ -54,6 +56,7 @@ export const useConfirm = (props?: {
},
[onOpen]
),
onClose,
ConfirmModal: useCallback(
({
closeText = t('common.Close'),
@@ -91,36 +94,38 @@ export const useConfirm = (props?: {
maxW={['90vw', '500px']}
>
<ModalBody pt={5}>{customContent}</ModalBody>
<ModalFooter>
{showCancel && (
{!hideFooter && (
<ModalFooter>
{showCancel && (
<Button
variant={'whiteBase'}
onClick={() => {
onClose();
typeof cancelCb.current === 'function' && cancelCb.current();
}}
>
{closeText}
</Button>
)}
<Button
variant={'whiteBase'}
bg={bg ? bg : map.bg}
isDisabled={countDownAmount > 0}
ml={4}
isLoading={isLoading}
onClick={() => {
onClose();
typeof cancelCb.current === 'function' && cancelCb.current();
typeof confirmCb.current === 'function' && confirmCb.current();
}}
>
{closeText}
{countDownAmount > 0 ? `${countDownAmount}s` : confirmText}
</Button>
)}
<Button
bg={bg ? bg : map.bg}
isDisabled={countDownAmount > 0}
ml={4}
isLoading={isLoading}
onClick={() => {
onClose();
typeof confirmCb.current === 'function' && confirmCb.current();
}}
>
{countDownAmount > 0 ? `${countDownAmount}s` : confirmText}
</Button>
</ModalFooter>
</ModalFooter>
)}
</MyModal>
);
},
[customContent, iconSrc, isOpen, map.bg, onClose, showCancel, t, title]
[customContent, hideFooter, iconSrc, isOpen, map.bg, onClose, showCancel, t, title]
)
};
};

View File

@@ -1,5 +1,5 @@
import { useTranslation } from 'next-i18next';
import { useToast } from './useToast';
import { useToast } from '@fastgpt/web/hooks/useToast';
/**
* copy text data

View File

@@ -1,7 +1,7 @@
import React, { useCallback, useRef } from 'react';
import { ModalFooter, ModalBody, Input, useDisclosure, Button, Box } from '@chakra-ui/react';
import MyModal from '@/components/MyModal';
import { useToast } from './useToast';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useTranslation } from 'next-i18next';
export const useEditTitle = ({

View File

@@ -3,7 +3,7 @@ import type { PagingData } from '@/types/index.d';
import { IconButton, Flex, Box, Input } from '@chakra-ui/react';
import { ArrowBackIcon, ArrowForwardIcon } from '@chakra-ui/icons';
import { useMutation } from '@tanstack/react-query';
import { useToast } from './useToast';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { throttle } from 'lodash';
const thresholdVal = 100;

View File

@@ -1,4 +1,4 @@
import { useToast } from '@/web/common/hooks/useToast';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useMutation } from '@tanstack/react-query';
import type { UseMutationOptions } from '@tanstack/react-query';
import { getErrText } from '@fastgpt/global/common/error/utils';

View File

@@ -1,6 +1,6 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { POST } from '../api/request';
import { useToast } from './useToast';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useTranslation } from 'next-i18next';
import { getErrText } from '@fastgpt/global/common/error/utils';

View File

@@ -1,13 +0,0 @@
import { useToast as uToast, UseToastOptions } from '@chakra-ui/react';
export const useToast = (props?: UseToastOptions) => {
const toast = uToast({
position: 'top',
duration: 2000,
...(props && props)
});
return {
toast
};
};

View File

@@ -1,6 +1,7 @@
import { feConfigs } from './staticData';
import { useSystemStore } from './useSystemStore';
export const getDocPath = (path: string) => {
const feConfigs = useSystemStore.getState().feConfigs;
if (!feConfigs?.docUrl) return '';
if (feConfigs.docUrl.endsWith('/')) return feConfigs.docUrl;
return feConfigs.docUrl + path;

View File

@@ -1,64 +1,26 @@
import type { InitDateResponse } from '@/global/common/api/systemRes';
import { getSystemInitData } from '@/web/common/system/api';
import { delay } from '@fastgpt/global/common/system/utils';
import type { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types/index.d';
import { AppSimpleEditConfigTemplateType } from '@fastgpt/global/core/app/type';
import type {
ChatModelItemType,
FunctionModelItemType,
LLMModelItemType,
ReRankModelItemType,
VectorModelItemType,
AudioSpeechModelType,
WhisperModelType
} from '@fastgpt/global/core/ai/model.d';
export let feConfigs: FastGPTFeConfigsType = {};
export let systemVersion = '0.0.0';
import { useSystemStore } from './useSystemStore';
export let chatModelList: ChatModelItemType[] = [];
export let vectorModelList: VectorModelItemType[] = [];
export let qaModelList: LLMModelItemType[] = [];
export let cqModelList: FunctionModelItemType[] = [];
export let qgModelList: LLMModelItemType[] = [];
export let extractModelList: FunctionModelItemType[] = [];
export let audioSpeechModelList: AudioSpeechModelType[] = [];
export let reRankModelList: ReRankModelItemType[] = [];
export let whisperModel: WhisperModelType;
export let simpleModeTemplates: AppSimpleEditConfigTemplateType[] = [];
let retryTimes = 3;
export const clientInitData = async (): Promise<{
export const clientInitData = async (
retry = 3
): Promise<{
feConfigs: FastGPTFeConfigsType;
}> => {
try {
const res = await getSystemInitData();
feConfigs = res.feConfigs || {};
chatModelList = res.chatModels ?? chatModelList;
vectorModelList = res.vectorModels ?? vectorModelList;
qaModelList = res.qaModels ?? qaModelList;
cqModelList = res.cqModels ?? cqModelList;
extractModelList = res.extractModels ?? extractModelList;
qgModelList = res.qgModes ?? qgModelList;
audioSpeechModelList = res.audioSpeechModels ?? audioSpeechModelList;
reRankModelList = res.reRankModels ?? reRankModelList;
whisperModel = res.whisperModel;
systemVersion = res.systemVersion;
simpleModeTemplates = res.simpleModeTemplates;
useSystemStore.getState().initStaticData(res);
return {
feConfigs
feConfigs: res.feConfigs
};
} catch (error) {
retryTimes--;
await delay(500);
return clientInitData();
if (retry > 0) {
await delay(500);
return clientInitData(retry - 1);
}
return Promise.reject(error);
}
};

View File

@@ -3,10 +3,23 @@ import { devtools, persist } from 'zustand/middleware';
import { immer } from 'zustand/middleware/immer';
import axios from 'axios';
import { OAuthEnum } from '@fastgpt/global/support/user/constant';
import type {
AudioSpeechModelType,
LLMModelItemType,
ReRankModelItemType,
VectorModelItemType,
WhisperModelType
} from '@fastgpt/global/core/ai/model.d';
import { InitDateResponse } from '@/global/common/api/systemRes';
import { FastGPTFeConfigsType } from '@fastgpt/global/common/system/types';
import { SubPlanType } from '@fastgpt/global/support/wallet/sub/type';
import { AppSimpleEditConfigTemplateType } from '@fastgpt/global/core/app/type';
type LoginStoreType = { provider: `${OAuthEnum}`; lastRoute: string; state: string };
type State = {
initd: boolean;
setInitd: () => void;
lastRoute: string;
setLastRoute: (e: string) => void;
loginStore?: LoginStoreType;
@@ -19,12 +32,30 @@ type State = {
initIsPc(val: boolean): void;
gitStar: number;
loadGitStar: () => Promise<void>;
feConfigs: FastGPTFeConfigsType;
subPlans?: SubPlanType;
systemVersion: string;
llmModelList: LLMModelItemType[];
datasetModelList: LLMModelItemType[];
vectorModelList: VectorModelItemType[];
audioSpeechModelList: AudioSpeechModelType[];
reRankModelList: ReRankModelItemType[];
whisperModel?: WhisperModelType;
simpleModeTemplates: AppSimpleEditConfigTemplateType[];
initStaticData: (e: InitDateResponse) => void;
};
export const useSystemStore = create<State>()(
devtools(
persist(
immer((set, get) => ({
initd: false,
setInitd() {
set((state) => {
state.initd = true;
});
},
lastRoute: '/app/list',
setLastRoute(e) {
set((state) => {
@@ -59,7 +90,7 @@ export const useSystemStore = create<State>()(
state.isPc = val;
});
},
gitStar: 6100,
gitStar: 9300,
async loadGitStar() {
try {
const { data: git } = await axios.get('https://api.github.com/repos/labring/FastGPT');
@@ -68,6 +99,33 @@ export const useSystemStore = create<State>()(
state.gitStar = git.stargazers_count;
});
} catch (error) {}
},
feConfigs: {},
subPlans: undefined,
systemVersion: '0.0.0',
llmModelList: [],
datasetModelList: [],
vectorModelList: [],
audioSpeechModelList: [],
reRankModelList: [],
whisperModel: undefined,
simpleModeTemplates: [],
initStaticData(res) {
set((state) => {
state.feConfigs = res.feConfigs || {};
state.subPlans = res.subPlans;
state.systemVersion = res.systemVersion;
state.llmModelList = res.llmModels ?? state.llmModelList;
state.datasetModelList = state.llmModelList.filter((item) => item.datasetProcess);
state.vectorModelList = res.vectorModels ?? state.vectorModelList;
state.audioSpeechModelList = res.audioSpeechModels ?? state.audioSpeechModelList;
state.reRankModelList = res.reRankModels ?? state.reRankModelList;
state.whisperModel = res.whisperModel;
state.simpleModeTemplates = res.simpleModeTemplates;
});
}
})),
{

View File

@@ -1,5 +1,5 @@
import { useState, useCallback, useEffect, useMemo, useRef } from 'react';
import { useToast } from '@/web/common/hooks/useToast';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import type { AppTTSConfigType } from '@fastgpt/global/core/module/type.d';
import { TTSTypeEnum } from '@/constants/app';