import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback } from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '../../MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from '../type';
import { textareaMinH } from '../constants';
import { UseFormReturn, useFieldArray } from 'react-hook-form';
import { ChatBoxContext } from '../Provider';
import dynamic from 'next/dynamic';
import { useContextSelector } from 'use-context-selector';
import { getNanoid } from '@fastgpt/global/common/string/tools';

const InputGuideBox = dynamic(() => import('./InputGuideBox'));

const ChatInput = ({
  onSendMessage,
  onStop,
  TextareaDom,
  showFileSelector = false,
  resetInputVal,
  chatForm,
  appId
}: {
  onSendMessage: (val: ChatBoxInputType & { autoTTSResponse?: boolean }) => void;
  onStop: () => void;
  showFileSelector?: boolean;
  TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
  resetInputVal: (val: ChatBoxInputType) => void;
  chatForm: UseFormReturn<ChatBoxInputFormType>;
  appId: string;
}) => {
  const { setValue, watch, control } = chatForm;
  const inputValue = watch('input');

  const {
    update: updateFile,
    remove: removeFile,
    fields: fileList,
    append: appendFile,
    replace: replaceFile
  } = useFieldArray({
    control,
    name: 'files'
  });

  const { isChatting, whisperConfig, autoTTSResponse, chatInputGuide, outLinkAuthData } =
    useContextSelector(ChatBoxContext, (v) => v);
  const { isPc, whisperModel } = useSystemStore();
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const { t } = useTranslation();

  const havInput = !!inputValue || fileList.length > 0;
  const hasFileUploading = fileList.some((item) => !item.url);
  const canSendMessage = havInput && !hasFileUploading;

  /* file selector and upload */
  const { File, onOpen: onOpenSelectFile } = useSelectFile({
    fileType: 'image/*',
    multiple: true,
    maxCount: 10
  });
  const { mutate: uploadFile } = useRequest({
    // Compress and upload the selected image, then write the returned URL back into the form item.
    mutationFn: async ({ file, fileIndex }: { file: UserInputFileItemType; fileIndex: number }) => {
      if (file.type === ChatFileTypeEnum.image && file.rawFile) {
        try {
          const url = await compressImgFileAndUpload({
            type: MongoImageTypeEnum.chatImage,
            file: file.rawFile,
            maxW: 4320,
            maxH: 4320,
            maxSize: 1024 * 1024 * 16,
            // Expires after 7 days.
            expiredTime: addDays(new Date(), 7),
            ...outLinkAuthData
          });
          updateFile(fileIndex, {
            ...file,
            url: `${location.origin}${url}`
          });
        } catch (error) {
          removeFile(fileIndex);
          console.log(error);
          return Promise.reject(error);
        }
      }
    },
    errorToast: t('common.Upload File Failed')
  });
  const onSelectFile = useCallback(
    async (files: File[]) => {
      if (!files || files.length === 0) {
        return;
      }
      // Read images as base64 previews; other files get a generic icon.
      const loadFiles = await Promise.all(
        files.map(
          (file) =>
            new Promise<UserInputFileItemType>((resolve, reject) => {
              if (file.type.includes('image')) {
                const reader = new FileReader();
                reader.readAsDataURL(file);
                reader.onload = () => {
                  const item = {
                    id: getNanoid(6),
                    rawFile: file,
                    type: ChatFileTypeEnum.image,
                    name: file.name,
                    icon: reader.result as string
                  };
                  resolve(item);
                };
                reader.onerror = () => {
                  reject(reader.error);
                };
              } else {
                resolve({
                  id: getNanoid(6),
                  rawFile: file,
                  type: ChatFileTypeEnum.file,
                  name: file.name,
                  icon: 'file/pdf'
                });
              }
            })
        )
      );

      appendFile(loadFiles);
      loadFiles.forEach((file, i) =>
        uploadFile({
          file,
          fileIndex: i + fileList.length
        })
      );
    },
    [appendFile, fileList.length, uploadFile]
  );

  /* on send */
  const handleSend = async (val?: string) => {
    if (!canSendMessage) return;

    const textareaValue = val || TextareaDom.current?.value || '';
    onSendMessage({
      text: textareaValue.trim(),
      files: fileList
    });
    replaceFile([]);
  };

  /* whisper init */
  const {
    isSpeaking,
    isTransCription,
    stopSpeak,
    startSpeak,
    speakingTimeString,
    renderAudioGraph,
    stream
  } = useSpeech({ appId, ...outLinkAuthData });

  useEffect(() => {
    if (!stream) {
      return;
    }
    // Draw the live audio waveform onto the canvas while recording.
    const audioContext = new AudioContext();
    const analyser = audioContext.createAnalyser();
    analyser.fftSize = 4096;
    analyser.smoothingTimeConstant = 1;
    const source = audioContext.createMediaStreamSource(stream);
    source.connect(analyser);
    const renderCurve = () => {
      if (!canvasRef.current) return;
      renderAudioGraph(analyser, canvasRef.current);
      window.requestAnimationFrame(renderCurve);
    };
    renderCurve();
  }, [renderAudioGraph, stream]);

  const finishWhisperTranscription = useCallback(
    (text: string) => {
      if (!text) return;
      if (whisperConfig?.autoSend) {
        onSendMessage({
          text,
          files: fileList,
          autoTTSResponse
        });
        replaceFile([]);
      } else {
        resetInputVal({ text });
      }
    },
    [autoTTSResponse, fileList, onSendMessage, replaceFile, resetInputVal, whisperConfig?.autoSend]
  );
  const onWhisperRecord = useCallback(() => {
    if (isSpeaking) {
      return stopSpeak();
    }
    startSpeak(finishWhisperTranscription);
  }, [finishWhisperTranscription, isSpeaking, startSpeak, stopSpeak]);

  return (
    <Box
      pt={fileList.length > 0 ? '10px' : ['14px', '18px']}
      pb={['14px', '18px']}
      position={'relative'}
      boxShadow={isSpeaking ? `0 0 10px rgba(54,111,255,0.4)` : `0 0 10px rgba(0,0,0,0.2)`}
      borderRadius={['none', 'md']}
      bg={'white'}
      overflow={'display'}
      {...(isPc
        ? {
            border: '1px solid',
            borderColor: 'rgba(0,0,0,0.12)'
          }
        : {
            borderTop: '1px solid',
            borderTopColor: 'rgba(0,0,0,0.15)'
          })}
    >
      {/* Chat input guide box */}
      {chatInputGuide.open && (
        <InputGuideBox
          appId={appId}
          text={inputValue}
          onSelect={(e) => {
            setValue('input', e);
          }}
          onSend={(e) => {
            handleSend(e);
          }}
        />
      )}

      {/* translate loading */}
      <Flex
        position={'absolute'}
        top={0}
        bottom={0}
        left={0}
        right={0}
        zIndex={10}
        alignItems={'center'}
        bg={'white'}
        visibility={isSpeaking && isTransCription ? 'visible' : 'hidden'}
      >
        <Spinner size={'sm'} mr={3} />
        {t('core.chat.Converting to text')}
      </Flex>

      {/* file preview */}
      <Flex wrap={'wrap'}>
        {fileList.map((item, index) => (
          <Box
            key={item.id}
            position={'relative'}
            _hover={{
              // Reveal the close icon on hover (paired with className="close-icon" below).
              '.close-icon': { display: 'block' }
            }}
          >
            {/* uploading */}
            {!item.url && (
              <Flex
                position={'absolute'}
                top={0}
                left={0}
                bottom={0}
                right={0}
                alignItems={'center'}
                justifyContent={'center'}
                bg={'rgba(255,255,255,0.8)'}
              >
                <Spinner />
              </Flex>
            )}
            <MyIcon
              name={'closeSolid'}
              position={'absolute'}
              right={'-8px'}
              top={'-8px'}
              cursor={'pointer'}
              onClick={() => {
                removeFile(index);
              }}
              className="close-icon"
              display={['', 'none']}
            />
            {item.type === ChatFileTypeEnum.image && (
              <Image
                alt={'img'}
                src={item.icon}
                w={['50px', '70px']}
                h={['50px', '70px']}
                borderRadius={'md'}
                objectFit={'contain'}
              />
            )}
          </Box>
        ))}
      </Flex>

      <Flex mt={fileList.length > 0 ? 1 : 0} pl={[2, 4]}>
        {/* file selector */}
        {showFileSelector && (
          <Flex
            cursor={'pointer'}
            onClick={() => {
              if (isSpeaking) return;
              onOpenSelectFile();
            }}
          >
            <MyIcon name={'core/chat/fileSelect'} w={'18px'} color={'myGray.600'} />
            <File onSelect={onSelectFile} />
          </Flex>
        )}

        {/* input area */}