Record scroll test (#2783)

* perf: history add scrollList (#2696)

* perf: chatHistorySlider add virtualList

* perf: chat records add scrollList

* delete console

* perf: ScrollData add ref props

* 优化代码 (optimize code)

* optimize code && add line breaks

* add total records display

* finish test

* perf: ScrollComponent load data

* perf: Scroll components load

* perf: scroll code

---------

Co-authored-by: papapatrick <109422393+Patrickill@users.noreply.github.com>
This commit is contained in:
Archer
2024-09-24 17:13:32 +08:00
committed by GitHub
parent f4d4d6516c
commit 434c03c955
46 changed files with 827 additions and 422 deletions

View File

@@ -10,7 +10,6 @@ import { UploadChunkItemType } from '@fastgpt/global/core/dataset/type';
import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { PaginationProps } from '@fastgpt/web/common/fetch/type';
/* ===== dataset ===== */

View File

@@ -2,7 +2,8 @@ import type { AppChatConfigType, AppTTSConfigType } from '@fastgpt/global/core/a
import { AdminFbkType, ChatItemType } from '@fastgpt/global/core/chat/type';
import type { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat.d';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { RequestPaging } from '@/types';
import { GetChatTypeEnum } from '@/global/core/chat/constants';
export type GetChatSpeechProps = {
ttsConfig: AppTTSConfigType;
input: string;
@@ -15,6 +16,14 @@ export type InitChatProps = {
chatId?: string;
loadCustomFeedbacks?: boolean;
};
/**
 * Request payload for paging through a chat's message records.
 * Extends OutLinkChatAuthProps so the same endpoint can authorize
 * shared-link (out-link) access as well as normal authenticated access.
 */
export type GetChatRecordsProps = OutLinkChatAuthProps & {
appId: string;
// Optional: the chat may not exist yet when records are first requested.
chatId?: string;
// NOTE(review): presumably toggles inclusion of custom feedback entries
// in the returned records — confirm against the API handler.
loadCustomFeedbacks?: boolean;
// Template-literal type over the enum: accepts the raw string values
// 'normal' | 'outLink' | 'team' without requiring an enum member reference.
type: `${GetChatTypeEnum}`;
};
export type InitOutLinkChatProps = {
chatId?: string;
shareId: string;
@@ -32,7 +41,6 @@ export type InitChatResponse = {
userAvatar?: string;
title?: string;
variables: Record<string, any>;
history: ChatItemType[];
app: {
chatConfig?: AppChatConfigType;
chatModels?: string[];

View File

@@ -13,6 +13,11 @@ export const defaultChatData: InitChatResponse = {
pluginInputs: []
},
title: '',
variables: {},
history: []
variables: {}
};
/**
 * Discriminates which access path a chat request comes from.
 * String values deliberately mirror the member names so that
 * `${GetChatTypeEnum}` template-literal types accept the plain strings.
 * NOTE(review): 'outLink' appears to mean shared-link access and 'team'
 * a team workspace — confirm against the consuming API routes.
 */
export enum GetChatTypeEnum {
normal = 'normal',
outLink = 'outLink',
team = 'team'
}

View File

@@ -6,7 +6,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
const isLLMNode = (item: ChatHistoryItemResType) =>
item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;
export function transformPreviewHistories(histories: ChatItemType[]) {
export function transformPreviewHistories(histories: ChatItemType[]): ChatItemType[] {
return histories.map((item) => {
return {
...addStatisticalDataToHistoryItem(item),