perf: response tag; feat: history quote

archer
2023-08-15 09:55:00 +08:00
parent b8a65e1742
commit cc57a7e27e
9 changed files with 39 additions and 32 deletions

View File

@@ -26,6 +26,12 @@ const PayRecordTable = () => {
   const [payOrders, setPayOrders] = useState<PaySchema[]>([]);
   const { toast } = useToast();
+  const { isInitialLoading, refetch } = useQuery(['initPayOrder'], getPayOrders, {
+    onSuccess(res) {
+      setPayOrders(res);
+    }
+  });
   const handleRefreshPayOrder = useCallback(
     async (payId: string) => {
       setIsLoading(true);
@@ -36,8 +42,6 @@ const PayRecordTable = () => {
           title: data,
           status: 'success'
         });
-        const res = await getPayOrders();
-        setPayOrders(res);
       } catch (error: any) {
         toast({
           title: error?.message,
@@ -45,18 +49,15 @@ const PayRecordTable = () => {
         });
         console.log(error);
       }
+      try {
+        refetch();
+      } catch (error) {}
       setIsLoading(false);
     },
-    [setIsLoading, toast]
+    [refetch, setIsLoading, toast]
   );
-  const { isInitialLoading } = useQuery(['initPayOrder'], getPayOrders, {
-    onSuccess(res) {
-      setPayOrders(res);
-    }
-  });
   return (
     <Box position={'relative'} h={'100%'}>
       {!isInitialLoading && payOrders.length === 0 ? (
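The net effect of this first file is deduplication: the success path no longer re-fetches by hand, and the `useQuery` declaration moves above the callback so its `refetch` handle is in scope, gets invoked after the refresh action, and joins the dependency array. A minimal sketch of the same pattern, assuming react-query v4's three-argument `useQuery(key, fn, options)` signature; the endpoint, type, and hook names are placeholders rather than code from the diff:

```ts
import { useCallback, useState } from 'react';
import { useQuery } from '@tanstack/react-query';

// Hypothetical row type and fetcher, standing in for the project's PaySchema/getPayOrders.
type PayOrder = { _id: string; status: string };
async function getPayOrders(): Promise<PayOrder[]> {
  const res = await fetch('/api/user/getPayOrders'); // assumed endpoint, not from the diff
  return res.json();
}

export function usePayOrders() {
  const [payOrders, setPayOrders] = useState<PayOrder[]>([]);

  // Declare the query before any callback that needs `refetch`,
  // so the handle exists for the dependency array below.
  const { isInitialLoading, refetch } = useQuery(['initPayOrder'], getPayOrders, {
    onSuccess: (res) => setPayOrders(res)
  });

  // Refresh by re-running the query instead of duplicating fetch + setState by hand.
  const refresh = useCallback(async () => {
    try {
      await refetch();
    } catch (error) {
      // errors also surface through the query state; the commit swallows them the same way
    }
  }, [refetch]);

  return { payOrders, isInitialLoading, refresh };
}
```

Routing the refresh through `refetch()` keeps the cached query and the local state in sync from a single code path.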

View File

@@ -4,16 +4,14 @@ import { authUser } from '@/service/utils/auth';
 import { sseErrRes } from '@/service/response';
 import { sseResponseEventEnum } from '@/constants/chat';
 import { sseResponse } from '@/service/utils/tools';
-import { type ChatCompletionRequestMessage } from 'openai';
 import { AppModuleItemType } from '@/types/app';
 import { dispatchModules } from '../openapi/v1/chat/completions';
-import { gptMessage2ChatType } from '@/utils/adapt';
 import { pushTaskBill } from '@/service/events/pushBill';
 import { BillSourceEnum } from '@/constants/user';
+import { ChatItemType } from '@/types/chat';

-export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
 export type Props = {
-  history: MessageItemType[];
+  history: ChatItemType[];
   prompt: string;
   modules: AppModuleItemType[];
   variables: Record<string, any>;
@@ -51,7 +49,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     variables,
     user,
     params: {
-      history: gptMessage2ChatType(history),
+      history,
       userChatInput: prompt
     },
     stream: true,
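Here the chat-test endpoint stops accepting OpenAI's wire format: `Props.history` becomes the app-native `ChatItemType[]`, so the `gptMessage2ChatType(history)` adaptation (and the now-unused `MessageItemType`) can go, and the history is handed to `dispatchModules` as-is. A rough before/after sketch of the two shapes; everything beyond `obj` and `value` (both visible in the history projection later in this commit) is an assumption for illustration:

```ts
// Simplified OpenAI-style message, previously accepted by the endpoint.
type ChatCompletionRequestMessage = {
  role: 'system' | 'user' | 'assistant';
  content: string;
};

// App-native chat item; obj/value match the fields projected from Mongo
// in this commit, _id is an assumption carried over from MessageItemType.
type ChatItemType = {
  _id?: string;
  obj: 'Human' | 'AI' | 'System'; // assumed variants, not confirmed by the diff
  value: string;
};

// Before: the handler adapted the wire format on every request.
//   params: { history: gptMessage2ChatType(history), userChatInput: prompt }
// After: the client sends ChatItemType[] directly.
//   params: { history, userChatInput: prompt }
```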

View File

@@ -5,6 +5,7 @@ import { authUser } from '@/service/utils/auth';
 import { connectToDatabase, Chat } from '@/service/mongo';
 import { Types } from 'mongoose';
 import type { ChatItemType } from '@/types/chat';
+import { TaskResponseKeyEnum } from '@/constants/chat';

 export type Props = {
   chatId?: string;
@@ -55,10 +56,12 @@ export async function getChatHistory({
     {
       $project: {
         obj: '$content.obj',
-        value: '$content.value'
+        value: '$content.value',
+        [TaskResponseKeyEnum.responseData]: `$content.responseData`
       }
     }
   ]);
+  console.log(history);
   return { history };
 }
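The history loader now also projects `content.responseData`, under a key taken from `TaskResponseKeyEnum` so the field name stays in sync with the rest of the codebase. Each restored history item therefore carries its per-module response details, which is presumably what the commit title's "response tag" and "history quote" refer to. A small sketch of the computed-property projection, with an assumed enum value:

```ts
// Assumed to mirror the project's enum; only the key name matters for the projection.
enum TaskResponseKeyEnum {
  responseData = 'responseData'
}

// Sketch of the aggregation stage: a computed property name keeps the
// projected field identical to the enum value used when reading it back.
const projectStage = {
  $project: {
    obj: '$content.obj',
    value: '$content.value',
    [TaskResponseKeyEnum.responseData]: '$content.responseData'
  }
};
```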

View File

@@ -38,19 +38,19 @@ const ChatTest = (
   const isOpen = useMemo(() => modules && modules.length > 0, [modules]);

   const startChat = useCallback(
-    async ({ messages, controller, generatingMessage, variables }: StartChatFnProps) => {
+    async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
       const historyMaxLen =
         modules
           ?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode)
           ?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
-      const history = messages.slice(-historyMaxLen - 2, -2);
+      const history = chatList.slice(-historyMaxLen - 2, -2);

       // Stream the request and fetch the data
       const { responseText, responseData } = await streamFetch({
         url: '/api/chat/chatTest',
         data: {
           history,
-          prompt: messages[messages.length - 2].content,
+          prompt: chatList[chatList.length - 2].value,
           modules,
           variables,
           appId: app._id,
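The test-chat component makes the matching client-side switch: `StartChatFnProps` now supplies an app-native `chatList` instead of OpenAI-style `messages`, so the prompt is read from `.value` rather than `.content`. The slice window is unchanged: the last two items are the just-sent user prompt and the pending assistant reply, so history is the `historyMaxLen` items before them. A small worked example of that indexing, with hypothetical data:

```ts
type ChatItem = { obj: 'Human' | 'AI'; value: string };

// Six items: two completed rounds, then the in-flight round (prompt + empty reply).
const chatList: ChatItem[] = [
  { obj: 'Human', value: 'q1' },
  { obj: 'AI', value: 'a1' },
  { obj: 'Human', value: 'q2' },
  { obj: 'AI', value: 'a2' },
  { obj: 'Human', value: 'q3' }, // current prompt, at index length - 2
  { obj: 'AI', value: '' } // placeholder for the streaming answer
];

const historyMaxLen = 2;
// slice(-4, -2) -> [q2, a2]: the two items before the in-flight round.
const history = chatList.slice(-historyMaxLen - 2, -2);
const prompt = chatList[chatList.length - 2].value; // 'q3'
```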

View File

@@ -572,19 +572,19 @@ const ChatTest = ({ appId }: { appId: string }) => {
   const [modules, setModules] = useState<AppModuleItemType[]>([]);

   const startChat = useCallback(
-    async ({ messages, controller, generatingMessage, variables }: StartChatFnProps) => {
+    async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
       const historyMaxLen =
         modules
           ?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode)
           ?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
-      const history = messages.slice(-historyMaxLen - 2, -2);
+      const history = chatList.slice(-historyMaxLen - 2, -2);

       // Stream the request and fetch the data
       const { responseText, responseData } = await streamFetch({
         url: '/api/chat/chatTest',
         data: {
           history,
-          prompt: messages[messages.length - 2].content,
+          prompt: chatList[chatList.length - 2].value,
           modules,
           variables,
           appId,
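The identical rename lands in the app-detail `ChatTest`. One detail worth noting in the shared slice logic: when no history module is configured, `historyMaxLen` falls back to `0`, and `slice(-2, -2)` selects nothing, so the request simply carries no history. A one-line check, under the same assumptions as the previous sketch:

```ts
// historyMaxLen = 0 -> slice(-0 - 2, -2) === slice(-2, -2): history is disabled cleanly.
const noHistory = ['q1', 'a1', 'q2', ''].slice(-0 - 2, -2);
console.log(noHistory.length); // 0
```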