Release update (#1580)

* release doc

* fix: regex match

* perf: tool call arg

* fix: stream update variables

* remove status

* update prompt

* rename embedding model
This commit is contained in:
Archer
2024-05-24 11:07:03 +08:00
committed by GitHub
parent 92a3d6d268
commit 9c7c74050b
23 changed files with 119 additions and 93 deletions

View File

@@ -59,6 +59,7 @@ import ChatItem from './components/ChatItem';
import dynamic from 'next/dynamic';
import { useCreation } from 'ahooks';
import { AppChatConfigType } from '@fastgpt/global/core/app/type';
import type { StreamResponseType } from '@/web/common/api/fetch';
const ResponseTags = dynamic(() => import('./ResponseTags'));
const FeedbackModal = dynamic(() => import('./FeedbackModal'));
@@ -90,12 +91,11 @@ type Props = OutLinkChatAuthProps & {
chatId?: string;
onUpdateVariable?: (e: Record<string, any>) => void;
onStartChat?: (e: StartChatFnProps) => Promise<{
responseText: string;
[DispatchNodeResponseKeyEnum.nodeResponse]: ChatHistoryItemResType[];
newVariables?: Record<string, any>;
isNewChat?: boolean;
}>;
onStartChat?: (e: StartChatFnProps) => Promise<
StreamResponseType & {
isNewChat?: boolean;
}
>;
onDelMessage?: (e: { contentId: string }) => void;
};
@@ -207,7 +207,8 @@ const ChatBox = (
status,
name,
tool,
autoTTSResponse
autoTTSResponse,
variables
}: generatingMessageProps & { autoTTSResponse?: boolean }) => {
setChatHistories((state) =>
state.map((item, index) => {
@@ -290,6 +291,8 @@ const ChatBox = (
return val;
})
};
} else if (event === SseResponseEventEnum.updateVariables && variables) {
setValue('variables', variables);
}
return item;
@@ -297,7 +300,7 @@ const ChatBox = (
);
generatingScroll();
},
[generatingScroll, setChatHistories, splitText2Audio]
[generatingScroll, setChatHistories, setValue, splitText2Audio]
);
// 重置输入内容
@@ -466,7 +469,6 @@ const ChatBox = (
const {
responseData,
responseText,
newVariables,
isNewChat = false
} = await onStartChat({
chatList: newChatList,
@@ -476,8 +478,6 @@ const ChatBox = (
variables: requestVariables
});
newVariables && setValue('variables', newVariables);
isNewChatReplace.current = isNewChat;
// set finish status
@@ -561,7 +561,6 @@ const ChatBox = (
resetInputVal,
setAudioPlayingChatId,
setChatHistories,
setValue,
splitText2Audio,
startSegmentedAudio,
t,
@@ -696,7 +695,7 @@ const ChatBox = (
} catch (error) {}
};
},
[appId, chatId, feedbackType, outLinkUid, shareId, teamId, teamToken]
[appId, chatId, feedbackType, outLinkUid, setChatHistories, shareId, teamId, teamToken]
);
const onCloseUserLike = useCallback(
(chat: ChatSiteItemType) => {

View File

@@ -8,11 +8,12 @@ import {
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
export type generatingMessageProps = {
event: `${SseResponseEventEnum}`;
event: SseResponseEventEnum;
text?: string;
name?: string;
status?: 'running' | 'finish';
tool?: ToolModuleResponseItemType;
variables?: Record<string, any>;
};
export type UserInputFileItemType = {

View File

@@ -62,7 +62,7 @@ const ChatTest = (
const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
url: '/api/core/chat/chatTest',
data: {
history,
@@ -78,7 +78,7 @@ const ChatTest = (
abortCtrl: controller
});
return { responseText, responseData, newVariables };
return { responseText, responseData };
},
[appDetail._id, appDetail.name, edges, nodes]
);