// Chat message input bar: text entry, image-file attachments, and voice (speech-to-text) input.
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '../MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { customAlphabet } from 'nanoid';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
import { useRequest } from '@/web/common/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from './type';
import { textareaMinH } from './constants';
import { UseFormReturn, useFieldArray } from 'react-hook-form';

// 6-char lowercase-alphanumeric id generator for client-side attachment items.
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);

/**
 * Chat message input component.
 *
 * Combines a text area (managed via `chatForm`), an optional image-file
 * selector (`showFileSelector`), and a speech-to-text recorder (`useSpeech`).
 * Sending is delegated to the caller through `onSendMessage`; `onStop`
 * interrupts an in-flight chat response while `isChatting` is true.
 *
 * NOTE(review): several generic type arguments in this file appear to have
 * been stripped by tooling that removed `<...>` spans (see the hedged notes
 * below); restore them from version control.
 */
const MessageInput = ({
  onSendMessage,
  onStop,
  isChatting,
  TextareaDom,
  showFileSelector = false,
  resetInputVal,
  shareId,
  outLinkUid,
  teamId,
  teamToken,
  chatForm
}: OutLinkChatAuthProps & {
  onSendMessage: (val: ChatBoxInputType) => void;
  onStop: () => void;
  isChatting: boolean;
  showFileSelector?: boolean;
  // NOTE(review): generic argument stripped — presumably React.MutableRefObject<HTMLTextAreaElement>; confirm.
  TextareaDom: React.MutableRefObject;
  resetInputVal: (val: ChatBoxInputType) => void;
  // NOTE(review): generic argument stripped — presumably UseFormReturn<ChatBoxInputFormType>; confirm.
  chatForm: UseFormReturn;
}) => {
  const { setValue, watch, control } = chatForm;
  const inputValue = watch('input');
  // 'files' field array on the chat form: the attachments queued for this message.
  const {
    update: updateFile,
    remove: removeFile,
    fields: fileList,
    append: appendFile,
    replace: replaceFile
  } = useFieldArray({
    control,
    name: 'files'
  });
  // Speech-to-text: recording/transcription state, live waveform renderer, and the mic MediaStream.
  const {
    isSpeaking,
    isTransCription,
    stopSpeak,
    startSpeak,
    speakingTimeString,
    renderAudioGraph,
    stream
  } = useSpeech({ shareId, outLinkUid, teamId, teamToken });
  const { isPc } = useSystemStore();
  // Canvas the audio waveform is drawn onto while recording.
  // NOTE(review): generic argument likely stripped — presumably useRef<HTMLCanvasElement>(null); confirm.
  const canvasRef = useRef(null);
  const { t } = useTranslation();
  // Sending is possible when there is text or at least one attached file.
  const havInput = !!inputValue || fileList.length > 0;

  /* file selector and upload */
  const { File, onOpen: onOpenSelectFile } = useSelectFile({
    fileType: 'image/*',
    multiple: true,
    maxCount: 10
  });
  // Compress + upload a single image attachment. On success, writes the
  // resulting absolute URL back into the field array entry; on failure,
  // removes the entry and rejects so useRequest shows `errorToast`.
  const { mutate: uploadFile } = useRequest({
    mutationFn: async ({ file, fileIndex }: { file: UserInputFileItemType; fileIndex: number }) => {
      if (file.type === ChatFileTypeEnum.image && file.rawFile) {
        try {
          const url = await compressImgFileAndUpload({
            type: MongoImageTypeEnum.chatImage,
            file: file.rawFile,
            maxW: 4329,
            maxH: 4329,
            maxSize: 1024 * 1024 * 5,
            // 7 day expired.
            expiredTime: addDays(new Date(), 7),
            shareId,
            outLinkUid,
            teamId,
            teamToken
          });
          updateFile(fileIndex, {
            ...file,
            url: `${location.origin}${url}`
          });
        } catch (error) {
          removeFile(fileIndex);
          console.log(error);
          return Promise.reject(error);
        }
      }
    },
    errorToast: t('common.Upload File Failed')
  });
  // Turn picked browser File objects into attachment items (images get a
  // data-URL preview as `icon`; everything else a generic pdf icon), append
  // them to the form's field array, then start one upload per item.
  // `i + fileList.length` maps each new item to its index in the post-append
  // array: `fileList` here is the pre-append snapshot captured by the callback.
  const onSelectFile = useCallback(
    async (files: File[]) => {
      if (!files || files.length === 0) {
        return;
      }
      const loadFiles = await Promise.all(
        files.map(
          (file) =>
            // NOTE(review): generic likely stripped — presumably new Promise<UserInputFileItemType>; confirm.
            new Promise((resolve, reject) => {
              if (file.type.includes('image')) {
                const reader = new FileReader();
                reader.readAsDataURL(file);
                reader.onload = () => {
                  const item = {
                    id: nanoid(),
                    rawFile: file,
                    type: ChatFileTypeEnum.image,
                    name: file.name,
                    icon: reader.result as string
                  };
                  resolve(item);
                };
                reader.onerror = () => {
                  reject(reader.error);
                };
              } else {
                resolve({
                  id: nanoid(),
                  rawFile: file,
                  type: ChatFileTypeEnum.file,
                  name: file.name,
                  icon: 'file/pdf'
                });
              }
            })
        )
      );
      appendFile(loadFiles);
      loadFiles.forEach((file, i) =>
        uploadFile({ file, fileIndex: i + fileList.length })
      );
    },
    [appendFile, fileList.length, uploadFile]
  );

  /* on send */
  // Send the trimmed textarea text plus the currently attached files, then
  // clear the attachment list.
  // NOTE(review): uploads are not awaited here — items whose upload has not
  // finished will be sent without a `url`; confirm this is intended.
  const handleSend = useCallback(async () => {
    const textareaValue = TextareaDom.current?.value || '';
    onSendMessage({
      text: textareaValue.trim(),
      files: fileList
    });
    replaceFile([]);
  }, [TextareaDom, fileList, onSendMessage, replaceFile]);

  // While a mic stream exists, pipe it through an AnalyserNode and draw the
  // live waveform onto the canvas on every animation frame.
  // NOTE(review): no effect cleanup — the AudioContext is never closed and the
  // requestAnimationFrame loop is never cancelled; confirm the stream's
  // lifetime makes this safe on unmount / re-record.
  useEffect(() => {
    if (!stream) {
      return;
    }
    const audioContext = new AudioContext();
    const analyser = audioContext.createAnalyser();
    analyser.fftSize = 4096;
    analyser.smoothingTimeConstant = 1;
    const source = audioContext.createMediaStreamSource(stream);
    source.connect(analyser);
    const renderCurve = () => {
      if (!canvasRef.current) return;
      renderAudioGraph(analyser, canvasRef.current);
      window.requestAnimationFrame(renderCurve);
    };
    renderCurve();
  }, [renderAudioGraph, stream]);

  // NOTE(review): the JSX below is corrupted — element open/close tags appear
  // to have been stripped by tooling (e.g. the root element's tag before
  // `pt={fileList.length > 0 ? ...}` is gone), and the render section is also
  // truncated mid-markup. Left byte-identical; recover it from version control.
  return ( 0 ? '10px' : ['14px', '18px']} pb={['14px', '18px']} position={'relative'} boxShadow={isSpeaking ? `0 0 10px rgba(54,111,255,0.4)` : `0 0 10px rgba(0,0,0,0.2)`} borderRadius={['none', 'md']} bg={'white'} overflow={'hidden'} {...(isPc ? { border: '1px solid', borderColor: 'rgba(0,0,0,0.12)' } : { borderTop: '1px solid', borderTopColor: 'rgba(0,0,0,0.15)' })} > {/* translate loading */} {t('core.chat.Converting to text')} {/* file preview */} {fileList.map((item, index) => ( {/* uploading */} {!item.url && ( )} { removeFile(index); }} className="close-icon" display={['', 'none']} /> {item.type === ChatFileTypeEnum.image && ( {'img'} )} ))} 0 ? 1 : 0} pl={[2, 4]}> {/* file selector */} {showFileSelector && ( { if (isSpeaking) return; onOpenSelectFile(); }} > )} {/* input area */}