File input (#2270)

* doc

* feat: file upload config

* perf: chat box file params

* feat: markdown show file

* feat: chat file store and clear

* perf: read file contentType

* feat: llm vision config

* feat: file url output

* perf: plugin error text

* perf: image load

* feat: ai chat document

* perf: file block ui

* feat: read file node

* feat: file read response field

* feat: simple mode support read files

* feat: tool call

* feat: read file histories

* perf: select file

* perf: select file config

* i18n

* i18n

* fix: ts; feat: tool response preview result
This commit is contained in:
Archer
2024-08-06 10:00:22 +08:00
committed by GitHub
parent 10dcdb5491
commit e36d9d794f
121 changed files with 2600 additions and 1142 deletions

View File

@@ -1,9 +1,9 @@
import React, { useState } from 'react';
import { Skeleton } from '@chakra-ui/react';
import { ImageProps, Skeleton } from '@chakra-ui/react';
import MyPhotoView from '@fastgpt/web/components/common/Image/PhotoView';
import { useBoolean } from 'ahooks';
const MdImage = ({ src }: { src?: string }) => {
const MdImage = ({ src, ...props }: { src?: string } & ImageProps) => {
const [isLoaded, { setTrue }] = useBoolean(false);
const [renderSrc, setRenderSrc] = useState(src);
@@ -31,6 +31,7 @@ const MdImage = ({ src }: { src?: string }) => {
setRenderSrc('/imgs/errImg.png');
setTrue();
}}
{...props}
/>
</Skeleton>
);

View File

@@ -6,8 +6,8 @@ import { useTranslation } from 'next-i18next';
const VariableTip = (props: StackProps) => {
const { t } = useTranslation();
return (
<HStack fontSize={'xs'} spacing={1} {...props}>
<MyIcon name={'common/info'} w={'0.9rem'} transform={'translateY(1px)'} />
<HStack fontSize={'11px'} spacing={1} {...props}>
<MyIcon name={'common/info'} w={'0.8rem'} />
<Box>{t('common:textarea_variable_picker_tip')}</Box>
</HStack>
);

View File

@@ -41,8 +41,11 @@ const AIChatSettingsModal = ({
});
const model = watch('model');
const showResponseAnswerText = watch(NodeInputKeyEnum.aiChatIsResponseText) !== undefined;
const showVisionSwitch = watch(NodeInputKeyEnum.aiChatVision) !== undefined;
const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
const useVision = watch('aiChatVision');
const selectedModel = llmModelList.find((item) => item.model === model) || llmModelList[0];
const llmSupportVision = !!selectedModel?.vision;
const tokenLimit = useMemo(() => {
return llmModelList.find((item) => item.model === model)?.maxResponse || 4096;
@@ -65,7 +68,7 @@ const AIChatSettingsModal = ({
alignItems: 'center',
fontSize: 'sm',
color: 'myGray.900',
width: ['80px', '90px']
width: ['6rem', '8rem']
};
return (
@@ -110,26 +113,24 @@ const AIChatSettingsModal = ({
</Box>
</Flex>
{feConfigs && (
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.ai.Ai point price')}
</Box>
<Box flex={1} ml={'10px'}>
{t('support.wallet.Ai point every thousand tokens', {
<Box flex={1}>
{t('common:support.wallet.Ai point every thousand tokens', {
points: selectedModel?.charsPointsPrice || 0
})}
</Box>
</Flex>
)}
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.ai.Max context')}
</Box>
<Box flex={1} ml={'10px'}>
{selectedModel?.maxContext || 4096}Tokens
</Box>
<Box flex={1}>{selectedModel?.maxContext || 4096}Tokens</Box>
</Flex>
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.ai.Support tool')}
<QuestionTip ml={1} label={t('common:core.module.template.AI support tool tip')} />
@@ -140,11 +141,11 @@ const AIChatSettingsModal = ({
: t('common:common.not_support')}
</Box>
</Flex>
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Temperature')}
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1} ml={1}>
<MySlider
markList={[
{ label: t('common:core.app.deterministic'), value: 0 },
@@ -161,11 +162,11 @@ const AIChatSettingsModal = ({
/>
</Box>
</Flex>
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Max tokens')}
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1}>
<MySlider
markList={[
{ label: '100', value: 100 },
@@ -184,11 +185,11 @@ const AIChatSettingsModal = ({
</Box>
</Flex>
{showMaxHistoriesSlider && (
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Max histories')}
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1}>
<MySlider
markList={[
{ label: 0, value: 0 },
@@ -207,7 +208,7 @@ const AIChatSettingsModal = ({
</Flex>
)}
{showResponseAnswerText && (
<Flex mt={8} alignItems={'center'}>
<Flex mt={6} alignItems={'center'}>
<Box {...LabelStyles}>
{t('common:core.app.Ai response')}
<QuestionTip
@@ -215,7 +216,7 @@ const AIChatSettingsModal = ({
label={t('common:core.module.template.AI response switch tip')}
></QuestionTip>
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1}>
<Switch
isChecked={getValues(NodeInputKeyEnum.aiChatIsResponseText)}
onChange={(e) => {
@@ -227,6 +228,29 @@ const AIChatSettingsModal = ({
</Box>
</Flex>
)}
{showVisionSwitch && (
<Flex mt={6} alignItems={'center'}>
<Box {...LabelStyles}>
{t('app:llm_use_vision')}
<QuestionTip ml={1} label={t('app:llm_use_vision_tip')}></QuestionTip>
</Box>
<Box flex={1}>
{llmSupportVision ? (
<Switch
isChecked={useVision}
onChange={(e) => {
const value = e.target.checked;
setValue(NodeInputKeyEnum.aiChatVision, value);
}}
/>
) : (
<Box fontSize={'sm'} color={'myGray.500'}>
{t('app:llm_not_support_vision')}
</Box>
)}
</Box>
</Flex>
)}
</ModalBody>
<ModalFooter>
<Button variant={'whiteBase'} onClick={onClose}>

View File

@@ -1,13 +1,15 @@
import React, { useEffect } from 'react';
import React from 'react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { LLMModelTypeEnum, llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
import { Box, Button, Flex, css, useDisclosure } from '@chakra-ui/react';
import { Box, Button, css, useDisclosure } from '@chakra-ui/react';
import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
import AISettingModal from '@/components/core/ai/AISettingModal';
import Avatar from '@fastgpt/web/components/common/Avatar';
import { HUGGING_FACE_ICON } from '@fastgpt/global/common/system/constants';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useMount } from 'ahooks';
type Props = {
llmModelType?: `${LLMModelTypeEnum}`;
@@ -37,14 +39,15 @@ const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onC
onClose: onCloseAIChatSetting
} = useDisclosure();
useEffect(() => {
// Set default model
useMount(() => {
if (!model && modelList.length > 0) {
onChange({
...defaultData,
model: modelList[0].model
});
}
}, []);
});
return (
<Box
@@ -71,10 +74,13 @@ const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onC
w={'18px'}
/>
}
rightIcon={<MyIcon name={'common/select'} w={'1rem'} />}
pl={4}
onClick={onOpenAIChatSetting}
>
{selectedModel?.name}
<Box flex={1} textAlign={'left'}>
{selectedModel?.name}
</Box>
</Button>
</MyTooltip>
{isOpenAIChatSetting && (

View File

@@ -0,0 +1,147 @@
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import {
Box,
Button,
Flex,
ModalBody,
useDisclosure,
Image,
HStack,
Switch,
ModalFooter
} from '@chakra-ui/react';
import React, { useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import type { AppFileSelectConfigType } from '@fastgpt/global/core/app/type.d';
import MyModal from '@fastgpt/web/components/common/MyModal';
import MySlider from '@/components/Slider';
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';
import ChatFunctionTip from './Tip';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import { useMount } from 'ahooks';
/**
 * Config row + modal for the app's chat file-upload feature.
 *
 * Shows an "Open/Close" summary button; clicking it opens a modal where the
 * user can toggle document upload, toggle image upload (blocked when the
 * selected LLM has no vision support), and pick the max file count (1-20).
 *
 * @param forbidVision - when true, image upload is force-disabled on mount
 *   and its switch is replaced with an explanatory label.
 * @param value - current file-select config; falls back to the shared default.
 * @param onChange - receives the full, updated config object on every change.
 */
const FileSelect = ({
  forbidVision = false,
  value = defaultAppSelectFileConfig,
  onChange
}: {
  forbidVision?: boolean;
  value?: AppFileSelectConfigType;
  onChange: (e: AppFileSelectConfigType) => void;
}) => {
  const { t } = useTranslation();
  const { isOpen, onOpen, onClose } = useDisclosure();

  // Merge a partial change into the current config and notify the parent.
  const patchConfig = (patch: Partial<AppFileSelectConfigType>) => {
    onChange({
      ...value,
      ...patch
    });
  };

  // Summary label: "Open" if either upload channel is enabled, else "Close".
  const statusLabel = useMemo(
    () =>
      value.canSelectFile || value.canSelectImg
        ? t('common:core.app.whisper.Open')
        : t('common:core.app.whisper.Close'),
    [t, value.canSelectFile, value.canSelectImg]
  );

  // Close select img switch when vision is forbidden
  useMount(() => {
    if (!forbidVision) return;
    patchConfig({ canSelectImg: false });
  });

  return (
    <Flex alignItems={'center'}>
      <MyIcon name={'core/app/simpleMode/file'} mr={2} w={'20px'} />
      <FormLabel>{t('app:file_upload')}</FormLabel>
      <ChatFunctionTip type={'file'} />
      <Box flex={1} />
      <MyTooltip label={t('app:config_file_upload')}>
        <Button
          variant={'transparentBase'}
          iconSpacing={1}
          size={'sm'}
          mr={'-5px'}
          onClick={onOpen}
        >
          {statusLabel}
        </Button>
      </MyTooltip>

      <MyModal
        iconSrc="core/app/simpleMode/file"
        title={t('app:file_upload')}
        isOpen={isOpen}
        onClose={onClose}
      >
        <ModalBody>
          {/* Document upload toggle */}
          <HStack>
            <FormLabel flex={'1 0 0'}>{t('app:document_upload')}</FormLabel>
            <Switch
              isChecked={value.canSelectFile}
              onChange={(e) => patchConfig({ canSelectFile: e.target.checked })}
            />
          </HStack>

          {/* Image upload toggle — replaced by a hint when vision is unavailable */}
          <HStack mt={6}>
            <FormLabel flex={'1 0 0'}>{t('app:image_upload')}</FormLabel>
            {forbidVision ? (
              <Box fontSize={'sm'} color={'myGray.500'}>
                {t('app:llm_not_support_vision')}
              </Box>
            ) : (
              <Switch
                isChecked={value.canSelectImg}
                onChange={(e) => patchConfig({ canSelectImg: e.target.checked })}
              />
            )}
          </HStack>
          {!forbidVision && (
            <Box mt={2} color={'myGray.500'} fontSize={'xs'}>
              {t('app:image_upload_tip')}
            </Box>
          )}

          {/* Max file count (defaults to 5 when unset) */}
          <Box mt={6}>
            <FormLabel>{t('app:upload_file_max_amount')}</FormLabel>
            <Box mt={5}>
              <MySlider
                markList={[
                  { label: '1', value: 1 },
                  { label: '20', value: 20 }
                ]}
                width={'100%'}
                min={1}
                max={20}
                step={1}
                value={value.maxFiles ?? 5}
                onChange={(maxFiles) => patchConfig({ maxFiles })}
              />
            </Box>
          </Box>
        </ModalBody>
        <ModalFooter>
          <Button onClick={onClose} px={8}>
            {t('common:common.Confirm')}
          </Button>
        </ModalFooter>
      </MyModal>
    </Flex>
  );
};
export default FileSelect;

View File

@@ -9,7 +9,8 @@ enum FnTypeEnum {
nextQuestion = 'nextQuestion',
tts = 'tts',
variable = 'variable',
welcome = 'welcome'
welcome = 'welcome',
file = 'file'
}
const ChatFunctionTip = ({ type }: { type: `${FnTypeEnum}` }) => {
@@ -46,6 +47,12 @@ const ChatFunctionTip = ({ type }: { type: `${FnTypeEnum}` }) => {
title: t('common:core.app.Welcome Text'),
desc: t('common:core.app.tip.welcomeTextTip'),
imgUrl: '/imgs/app/welcome.svg'
},
[FnTypeEnum.file]: {
icon: '/imgs/app/welcome-icon.svg',
title: t('app:file_upload'),
desc: t('app:file_upload_tip'),
imgUrl: '/imgs/app/fileUploadPlaceholder.svg'
}
});
const data = map.current[type];

View File

@@ -1,16 +1,14 @@
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback } from 'react';
import { Box, Flex, HStack, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { uploadFile2DB } from '@/web/common/file/controller';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from '../type';
import { textareaMinH } from '../constants';
import { UseFormReturn, useFieldArray } from 'react-hook-form';
@@ -19,103 +17,167 @@ import dynamic from 'next/dynamic';
import { useContextSelector } from 'use-context-selector';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { documentFileType } from '@fastgpt/global/common/file/constants';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { clone } from 'lodash';
import { formatFileSize } from '@fastgpt/global/common/file/tools';
const InputGuideBox = dynamic(() => import('./InputGuideBox'));
/**
 * Accepts a pasted/selected file only if it is an image (any MIME subtype)
 * or its filename ends with one of the configured document extensions.
 */
const fileTypeFilter = (file: File) => {
  // Images pass straight through regardless of extension.
  if (file.type.includes('image')) return true;

  // Otherwise match the name against the comma-separated document type list.
  const documentExtensions = documentFileType.split(',');
  return documentExtensions.some((ext) => file.name.endsWith(ext.trim()));
};
const ChatInput = ({
onSendMessage,
onStop,
TextareaDom,
showFileSelector = false,
resetInputVal,
chatForm,
appId
}: {
onSendMessage: (val: ChatBoxInputType & { autoTTSResponse?: boolean }) => void;
onStop: () => void;
showFileSelector?: boolean;
TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
resetInputVal: (val: ChatBoxInputType) => void;
chatForm: UseFormReturn<ChatBoxInputFormType>;
appId: string;
}) => {
const { isPc } = useSystem();
const { toast } = useToast();
const { t } = useTranslation();
const { feConfigs } = useSystemStore();
const { setValue, watch, control } = chatForm;
const inputValue = watch('input');
const {
update: updateFile,
remove: removeFile,
update: updateFiles,
remove: removeFiles,
fields: fileList,
append: appendFile,
replace: replaceFile
replace: replaceFiles
} = useFieldArray({
control,
name: 'files'
});
const { isChatting, whisperConfig, autoTTSResponse, chatInputGuide, outLinkAuthData } =
useContextSelector(ChatBoxContext, (v) => v);
const { whisperModel } = useSystemStore();
const { isPc } = useSystem();
const canvasRef = useRef<HTMLCanvasElement>(null);
const { t } = useTranslation();
const {
chatId,
isChatting,
whisperConfig,
autoTTSResponse,
chatInputGuide,
outLinkAuthData,
fileSelectConfig
} = useContextSelector(ChatBoxContext, (v) => v);
const havInput = !!inputValue || fileList.length > 0;
const hasFileUploading = fileList.some((item) => !item.url);
const canSendMessage = havInput && !hasFileUploading;
const showSelectFile = fileSelectConfig.canSelectFile;
const showSelectImg = fileSelectConfig.canSelectImg;
const maxSelectFiles = fileSelectConfig.maxFiles ?? 10;
const maxSize = (feConfigs?.uploadFileMaxSize || 1024) * 1024 * 1024; // nkb
const { icon: selectFileIcon, tooltip: selectFileTip } = useMemo(() => {
if (showSelectFile) {
return {
icon: 'core/chat/fileSelect',
tooltip: t('chat:select_file')
};
} else if (showSelectImg) {
return {
icon: 'core/chat/fileSelect',
tooltip: t('chat:select_img')
};
}
return {};
}, [showSelectFile, showSelectImg, t]);
/* file selector and upload */
const { File, onOpen: onOpenSelectFile } = useSelectFile({
fileType: 'image/*',
fileType: `${showSelectImg ? 'image/*,' : ''} ${showSelectFile ? documentFileType : ''}`,
multiple: true,
maxCount: 10
maxCount: maxSelectFiles
});
const { mutate: uploadFile } = useRequest({
mutationFn: async ({ file, fileIndex }: { file: UserInputFileItemType; fileIndex: number }) => {
if (file.type === ChatFileTypeEnum.image && file.rawFile) {
useRequest2(
async () => {
const filterFiles = fileList.filter((item) => item.status === 0);
if (filterFiles.length === 0) return;
replaceFiles(fileList.map((item) => ({ ...item, status: 1 })));
for (const file of filterFiles) {
if (!file.rawFile) continue;
try {
const url = await compressImgFileAndUpload({
type: MongoImageTypeEnum.chatImage,
const { fileId, previewUrl } = await uploadFile2DB({
file: file.rawFile,
maxW: 4320,
maxH: 4320,
maxSize: 1024 * 1024 * 16,
// 7 day expired.
expiredTime: addDays(new Date(), 7),
...outLinkAuthData
bucketName: 'chat',
metadata: {
chatId
}
});
updateFile(fileIndex, {
updateFiles(fileList.findIndex((item) => item.id === file.id)!, {
...file,
url
status: 1,
url: `${location.origin}${previewUrl}`
});
} catch (error) {
removeFile(fileIndex);
removeFiles(fileList.findIndex((item) => item.id === file.id)!);
console.log(error);
return Promise.reject(error);
}
}
},
errorToast: t('common:common.Upload File Failed')
});
{
manual: false,
errorToast: t('common:upload_file_error'),
refreshDeps: [fileList]
}
);
const onSelectFile = useCallback(
async (files: File[]) => {
if (!files || files.length === 0) {
return;
}
// filter max files
if (fileList.length + files.length > maxSelectFiles) {
files = files.slice(0, maxSelectFiles - fileList.length);
toast({
status: 'warning',
title: t('chat:file_amount_over', { max: maxSelectFiles })
});
}
const filterFilesByMaxSize = files.filter((file) => file.size <= maxSize);
if (filterFilesByMaxSize.length < files.length) {
toast({
status: 'warning',
title: t('file:some_file_size_exceeds_limit', { maxSize: formatFileSize(maxSize) })
});
}
const loadFiles = await Promise.all(
files.map(
filterFilesByMaxSize.map(
(file) =>
new Promise<UserInputFileItemType>((resolve, reject) => {
if (file.type.includes('image')) {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => {
const item = {
const item: UserInputFileItemType = {
id: getNanoid(6),
rawFile: file,
type: ChatFileTypeEnum.image,
name: file.name,
icon: reader.result as string
icon: reader.result as string,
status: 0
};
resolve(item);
};
@@ -128,22 +190,28 @@ const ChatInput = ({
rawFile: file,
type: ChatFileTypeEnum.file,
name: file.name,
icon: 'file/pdf'
icon: getFileIcon(file.name),
status: 0
});
}
})
)
);
appendFile(loadFiles);
loadFiles.forEach((file, i) =>
uploadFile({
file,
fileIndex: i + fileList.length
// Document, image
const concatFileList = clone(
fileList.concat(loadFiles).sort((a, b) => {
if (a.type === ChatFileTypeEnum.image && b.type === ChatFileTypeEnum.file) {
return 1;
} else if (a.type === ChatFileTypeEnum.file && b.type === ChatFileTypeEnum.image) {
return -1;
}
return 0;
})
);
replaceFiles(concatFileList);
},
[appendFile, fileList.length, uploadFile]
[fileList, maxSelectFiles, replaceFiles, toast, t]
);
/* on send */
@@ -155,10 +223,12 @@ const ChatInput = ({
text: textareaValue.trim(),
files: fileList
});
replaceFile([]);
replaceFiles([]);
};
/* whisper init */
const { whisperModel } = useSystemStore();
const canvasRef = useRef<HTMLCanvasElement>(null);
const {
isSpeaking,
isTransCription,
@@ -194,12 +264,12 @@ const ChatInput = ({
files: fileList,
autoTTSResponse
});
replaceFile([]);
replaceFiles([]);
} else {
resetInputVal({ text });
}
},
[autoTTSResponse, fileList, onSendMessage, replaceFile, resetInputVal, whisperConfig?.autoSend]
[autoTTSResponse, fileList, onSendMessage, replaceFiles, resetInputVal, whisperConfig?.autoSend]
);
const onWhisperRecord = useCallback(() => {
if (isSpeaking) {
@@ -261,13 +331,20 @@ const ChatInput = ({
</Flex>
{/* file preview */}
<Flex wrap={'wrap'} px={[2, 4]} userSelect={'none'}>
<Flex
wrap={'wrap'}
px={[2, 4]}
userSelect={'none'}
gap={2}
mb={fileList.length > 0 ? 2 : 0}
>
{fileList.map((item, index) => (
<Box
key={item.id}
border={'1px solid rgba(0,0,0,0.12)'}
mr={2}
mb={2}
border={'1px solid #E8EBF0'}
boxShadow={
'0px 2.571px 6.429px 0px rgba(19, 51, 107, 0.08), 0px 0px 0.643px 0px rgba(19, 51, 107, 0.08)'
}
rounded={'md'}
position={'relative'}
_hover={{
@@ -297,13 +374,13 @@ const ChatInput = ({
h={'16px'}
color={'myGray.700'}
cursor={'pointer'}
_hover={{ color: 'primary.500' }}
_hover={{ color: 'red.500' }}
position={'absolute'}
bg={'white'}
right={'-8px'}
top={'-8px'}
onClick={() => {
removeFile(index);
removeFiles(index);
}}
className="close-icon"
display={['', 'none']}
@@ -312,19 +389,27 @@ const ChatInput = ({
<Image
alt={'img'}
src={item.icon}
w={['50px', '70px']}
h={['50px', '70px']}
w={['2rem', '3rem']}
h={['2rem', '3rem']}
borderRadius={'md'}
objectFit={'contain'}
/>
)}
{item.type === ChatFileTypeEnum.file && (
<HStack minW={['100px', '150px']} maxW={'250px'} p={2}>
<MyIcon name={item.icon as any} w={['1.5rem', '2rem']} h={['1.5rem', '2rem']} />
<Box flex={'1 0 0'} className="textEllipsis" fontSize={'xs'}>
{item.name}
</Box>
</HStack>
)}
</Box>
))}
</Flex>
<Flex alignItems={'flex-end'} mt={fileList.length > 0 ? 1 : 0} pl={[2, 4]}>
{/* file selector */}
{showFileSelector && (
{(showSelectFile || showSelectImg) && (
<Flex
h={'22px'}
alignItems={'center'}
@@ -336,8 +421,8 @@ const ChatInput = ({
onOpenSelectFile();
}}
>
<MyTooltip label={t('common:core.chat.Select Image')}>
<MyIcon name={'core/chat/fileSelect'} w={'18px'} color={'myGray.600'} />
<MyTooltip label={selectFileTip}>
<MyIcon name={selectFileIcon as any} w={'18px'} color={'myGray.600'} />
</MyTooltip>
<File onSelect={onSelectFile} />
</Flex>
@@ -404,12 +489,19 @@ const ChatInput = ({
}}
onPaste={(e) => {
const clipboardData = e.clipboardData;
if (clipboardData && showFileSelector) {
if (clipboardData && (showSelectFile || showSelectImg)) {
const items = clipboardData.items;
const files = Array.from(items)
.map((item) => (item.kind === 'file' ? item.getAsFile() : undefined))
.filter(Boolean) as File[];
.filter((file) => {
console.log(file);
return file && fileTypeFilter(file);
}) as File[];
onSelectFile(files);
if (files.length > 0) {
e.stopPropagation();
}
}
}}
/>

View File

@@ -3,6 +3,7 @@ import { useAudioPlay } from '@/web/common/utils/voice';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import {
AppChatConfigType,
AppFileSelectConfigType,
AppTTSConfigType,
AppWhisperConfigType,
ChatInputGuideConfigType,
@@ -10,6 +11,7 @@ import {
} from '@fastgpt/global/core/app/type';
import { ChatHistoryItemResType, ChatSiteItemType } from '@fastgpt/global/core/chat/type';
import {
defaultAppSelectFileConfig,
defaultChatInputGuideConfig,
defaultTTSConfig,
defaultWhisperConfig
@@ -64,6 +66,7 @@ type useChatStoreType = OutLinkChatAuthProps &
chatInputGuide: ChatInputGuideConfigType;
outLinkAuthData: OutLinkChatAuthProps;
getHistoryResponseData: ({ dataId }: { dataId: string }) => Promise<ChatHistoryItemResType[]>;
fileSelectConfig: AppFileSelectConfigType;
};
export const ChatBoxContext = createContext<useChatStoreType>({
@@ -146,7 +149,8 @@ const Provider = ({
questionGuide = false,
ttsConfig = defaultTTSConfig,
whisperConfig = defaultWhisperConfig,
chatInputGuide = defaultChatInputGuideConfig
chatInputGuide = defaultChatInputGuideConfig,
fileSelectConfig = defaultAppSelectFileConfig
} = useMemo(() => chatConfig, [chatConfig]);
const outLinkAuthData = useMemo(
@@ -215,6 +219,7 @@ const Provider = ({
allVariableList: variables,
questionGuide,
ttsConfig,
fileSelectConfig,
whisperConfig,
autoTTSResponse,
startSegmentedAudio,

View File

@@ -73,12 +73,11 @@ const ChatItem = ({
const ContentCard = useMemo(() => {
if (type === 'Human') {
const { text, files = [] } = formatChatValue2InputType(chat.value);
return (
<>
<Flex flexDirection={'column'} gap={4}>
{files.length > 0 && <FilesBlock files={files} />}
<Markdown source={text} />
</>
{text && <Markdown source={text} />}
</Flex>
);
}

View File

@@ -1,22 +1,89 @@
import { Box, Flex, Grid } from '@chakra-ui/react';
import { Box, Flex, Grid, Text } from '@chakra-ui/react';
import MdImage from '@/components/Markdown/img/Image';
import { UserInputFileItemType } from '@/components/core/chat/ChatContainer/ChatBox/type';
import MyIcon from '@fastgpt/web/components/common/Icon';
import React, { useCallback, useLayoutEffect, useMemo, useRef, useState } from 'react';
import { clone } from 'lodash';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { useWidthVariable } from '@fastgpt/web/hooks/useWidthVariable';
const FilesBlock = ({ files }: { files: UserInputFileItemType[] }) => {
const chartRef = useRef<HTMLDivElement>(null);
const [width, setWidth] = useState(400);
const { isPc } = useSystem();
const gridColumns = useWidthVariable({
width,
widthList: [300, 500, 700],
list: ['1fr', 'repeat(2, 1fr)', 'repeat(3, 1fr)']
});
// sort files, file->image
const sortFiles = useMemo(() => {
return clone(files).sort((a, b) => {
if (a.type === ChatFileTypeEnum.image && b.type === ChatFileTypeEnum.file) {
return 1;
} else if (a.type === ChatFileTypeEnum.file && b.type === ChatFileTypeEnum.image) {
return -1;
}
return 0;
});
}, [files]);
const computedChatItemWidth = useCallback(() => {
if (!chartRef.current) return;
// Walk up the ancestors until we reach the chat-box-card container
let parent = chartRef.current?.parentElement;
while (parent && !parent.className.includes('chat-box-card')) {
parent = parent.parentElement;
}
const clientWidth = parent?.clientWidth ?? 400;
setWidth(clientWidth);
return parent;
}, [isPc]);
useLayoutEffect(() => {
computedChatItemWidth();
}, [computedChatItemWidth]);
return (
<Grid gridTemplateColumns={['1fr', '1fr 1fr']} gap={4}>
{files.map(({ id, type, name, url }, i) => {
if (type === 'image') {
return (
<Box key={i} rounded={'md'} flex={'1 0 0'} minW={'120px'}>
<MdImage src={url} />
</Box>
);
}
return null;
})}
<Grid ref={chartRef} gridTemplateColumns={gridColumns} gap={4} alignItems={'flex-start'}>
{sortFiles.map(({ id, type, name, url, icon }, i) => (
<Box key={i} bg={'white'} borderRadius={'md'} overflow="hidden">
{type === 'image' && <MdImage src={url} minH={'100px'} my={0} />}
{type === 'file' && (
<Flex
p={2}
w={'100%'}
alignItems="center"
cursor={'pointer'}
onClick={() => {
window.open(url);
}}
>
<MyIcon
name={icon as any}
flexShrink={0}
w={['1.5rem', '2rem']}
h={['1.5rem', '2rem']}
/>
<Text
ml={2}
fontSize={'xs'}
overflow="hidden"
textOverflow="ellipsis"
whiteSpace="nowrap"
>
{name || url}
</Text>
</Flex>
)}
</Box>
))}
</Grid>
);
};
export default FilesBlock;
export default React.memo(FilesBlock);

View File

@@ -75,7 +75,6 @@ type Props = OutLinkChatAuthProps &
showVoiceIcon?: boolean;
showEmptyIntro?: boolean;
userAvatar?: string;
showFileSelector?: boolean;
active?: boolean; // can use
appId: string;
@@ -105,7 +104,6 @@ const ChatBox = (
showEmptyIntro = false,
appAvatar,
userAvatar,
showFileSelector,
active = true,
appId,
chatId,
@@ -378,7 +376,9 @@ const ChatBox = (
return;
}
// Abort the previous request
abortRequest();
questionGuideController.current?.abort('stop');
text = text.trim();
@@ -390,14 +390,13 @@ const ChatBox = (
return;
}
// delete invalid variables 只保留在 variableList 中的变量
// Only declared variables are kept
const requestVariables: Record<string, any> = {};
allVariableList?.forEach((item) => {
requestVariables[item.key] = variables[item.key] || '';
});
const responseChatId = getNanoid(24);
questionGuideController.current?.abort('stop');
// set auto audio playing
if (autoTTSResponse) {
@@ -980,7 +979,6 @@ const ChatBox = (
onStop={() => chatController.current?.abort('stop')}
TextareaDom={TextareaDom}
resetInputVal={resetInputVal}
showFileSelector={showFileSelector}
chatForm={chatForm}
appId={appId}
/>

View File

@@ -13,6 +13,7 @@ export type UserInputFileItemType = {
type: `${ChatFileTypeEnum}`;
name: string;
icon: string; // img is base64
status: 0 | 1; // 0: uploading, 1: success
url?: string;
};

View File

@@ -1,6 +1,7 @@
import { ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { ChatBoxInputType, UserInputFileItemType } from './type';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): ChatBoxInputType => {
if (!value) {
@@ -15,15 +16,16 @@ export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): Chat
.filter((item) => item.text?.content)
.map((item) => item.text?.content || '')
.join('');
const files =
(value
.map((item) =>
?.map((item) =>
item.type === 'file' && item.file
? {
id: getNanoid(),
id: item.file.url,
type: item.file.type,
name: item.file.name,
icon: '',
icon: getFileIcon(item.file.name),
url: item.file.url
}
: undefined

View File

@@ -105,19 +105,19 @@ ${JSON.stringify(questionGuides)}`;
overflowY={'auto'}
>
{toolParams && toolParams !== '{}' && (
<Markdown
source={`~~~json#Input
${toolParams}`}
/>
)}
{toolResponse && (
<Box mt={3}>
<Box mb={3}>
<Markdown
source={`~~~json#Response
${toolResponse}`}
source={`~~~json#Input
${toolParams}`}
/>
</Box>
)}
{toolResponse && (
<Markdown
source={`~~~json#Response
${toolResponse}`}
/>
)}
</AccordionPanel>
</AccordionItem>
</Accordion>

View File

@@ -1,5 +1,5 @@
import React, { useMemo, useState } from 'react';
import { Box, Flex, BoxProps, useDisclosure } from '@chakra-ui/react';
import { Box, Flex, BoxProps, useDisclosure, HStack } from '@chakra-ui/react';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { useTranslation } from 'next-i18next';
import { moduleTemplatesFlat } from '@fastgpt/global/core/workflow/template/constants';
@@ -16,6 +16,7 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import { useContextSelector } from 'use-context-selector';
import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
type sideTabItemType = {
moduleLogo?: string;
@@ -34,7 +35,7 @@ function RowRender({
}: { children: React.ReactNode; label: string } & BoxProps) {
return (
<Box mb={3}>
<Box fontSize={'sm'} mb={mb} flex={'0 0 90px'}>
<Box fontSize={'sm'} mb={mb} color={'myGray.800'} flex={'0 0 90px'}>
{label}:
</Box>
<Box borderRadius={'sm'} fontSize={['xs', 'sm']} bg={'myGray.50'} {...props}>
@@ -435,9 +436,50 @@ export const WholeResponseContent = ({
value={activeModule?.textOutput}
/>
{/* code */}
<Row label={workflowT('response.Custom outputs')} value={activeModule?.customOutputs} />
<Row label={workflowT('response.Custom inputs')} value={activeModule?.customInputs} />
<Row label={workflowT('response.Code log')} value={activeModule?.codeLog} />
<>
<Row
label={t('workflow:response.Custom outputs')}
value={activeModule?.customOutputs}
/>
<Row label={t('workflow:response.Custom inputs')} value={activeModule?.customInputs} />
<Row label={t('workflow:response.Code log')} value={activeModule?.codeLog} />
</>
{/* read files */}
<>
{activeModule?.readFiles && activeModule?.readFiles.length > 0 && (
<Row
label={t('workflow:response.read files')}
rawDom={
<Flex flexWrap={'wrap'} gap={3} px={4} py={2}>
{activeModule?.readFiles.map((file, i) => (
<HStack
key={i}
bg={'white'}
boxShadow={'base'}
borderRadius={'sm'}
py={1}
px={2}
{...(file.url
? {
cursor: 'pointer',
onClick: () => window.open(file.url)
}
: {})}
>
<MyIcon name={getFileIcon(file.name) as any} w={'1rem'} />
<Box>{file.name}</Box>
</HStack>
))}
</Flex>
}
/>
)}
<Row
label={t('workflow:response.Read file result')}
value={activeModule?.readFilesResult}
/>
</>
</Box>
)}
</>