import { useEffect, useRef, useState } from "react";
// Assumes react-router; use next/navigation's useParams instead in a Next.js app.
import { useParams } from "react-router-dom";
import { useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import {
  Box,
  CircularProgress,
  IconButton,
  Stack,
  Tooltip,
  Typography,
} from "@mui/material";
import { Close, KeyboardArrowDown as KeyboardArrowDownIcon } from "@mui/icons-material";
// Project-specific modules (import paths depend on the project layout):
// ChatStyled, Message, FormInputText, LoadingIcon, SendIcon, SoundIcon,
// promptSchema, TPromptSchema, storage, storageKey, useCreateConversation,
// useAudioChat, useGetProjectSettings, useGetApiToken, useAuthTenantId, useAuthUserName.

// Prop shapes below are inferred from how the props are used in this component.
type ChatMessage = {
  text: string;
  sender: "bot" | "user";
  [key: string]: unknown;
};

interface ChatProps {
  messages: ChatMessage[];
  onQuestionSubmit: (prompt: string) => void;
  isLoading: boolean;
  isError: boolean;
  queryResponseId?: string;
  showSource?: boolean;
}

export default function Chat({
  messages,
  onQuestionSubmit,
  isLoading,
  isError,
  queryResponseId,
  showSource,
}: ChatProps) {
  const { projectId, ingestId } = useParams();

  const { control, handleSubmit, setValue } = useForm({
    defaultValues: {
      prompt: "",
    },
    resolver: zodResolver(promptSchema),
  });
  const { mutateAsync: conversationCreate } = useCreateConversation();
  const [conversationId, setConversationId] = useState('');
  const [promptData, setPromptData] = useState<string>("");
  const [responseTimeTaken, setResponseTimeTaken] = useState<{ promptQuestion: string; resultTime: number }[]>([]);
  const [elapsedTime, setElapsedTime] = useState<number>(0);
  const [showOverlay, setShowOverlay] = useState(false);
  const [audioQueryResponseValue, setAudioQueryResponseValue] = useState<boolean>(false);
  const [isRecording, setIsRecording] = useState(false);
  const [reachedTimeLimit, setReachedTimeLimit] = useState(false); // true once the 10-second recording limit has been hit
  const mediaStream = useRef<MediaStream | null>(null);
  const mediaRecorder = useRef<MediaRecorder | null>(null);
  const chunks = useRef<Blob[]>([]);
  const inputRef = useRef<HTMLInputElement | null>(null);
  const messagesRef = useRef<HTMLDivElement | null>(null);
  const recordingTimeout = useRef<ReturnType<typeof setTimeout> | null>(null);
  const { data } = useGetProjectSettings((projectId as string) || "");
  const knowledgeBaseIdentifier = storage.get(storageKey.PROJECT_IDENTIFIER);
  const authTenantId = useAuthTenantId();
  const [history, setHistory] = useState<[string, string][]>([]); // [prompt, response] pairs
  const { mutateAsync: audioConversation } = useAudioChat();
  const { data: apiToken } = useGetApiToken(authTenantId);
  const { clientId, clientSecret } = (apiToken as any)?.tokens?.[0] || {};
  const authUserName = useAuthUserName();

  let apiKey = "";

  if (clientId && clientSecret) {
    // Basic-auth style key: base64("clientId:clientSecret").
    apiKey = btoa(`${clientId}:${clientSecret}`);
  }
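  // Registers a new bot-chat conversation for this project/ingest and stores the
  // returned conversation_id for later audio uploads.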
  const createConversation = async (apiKey: string) => {
    try {
      const response = await conversationCreate({
        projectId,
        api_key: apiKey,
        projectIngestIdentifier: ingestId,
        username: btoa(authUserName),
        userType: 'botChat'
      });
      console.log("response", response)
      setConversationId(response.conversation_id as string);
    } catch (err: any) {
      console.log(err);
    }
  };
  console.log('conversationId', conversationId)
  useEffect(() => {
    if (apiKey) {
      createConversation(apiKey);
    }
  }, [conversationCreate, projectId, apiKey, ingestId]);
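  // Scrolls the message list to the most recent message (also used by the
  // scroll-to-bottom button below).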
  const scrollToBottom = () => {
    if (messagesRef.current) {
      messagesRef.current.scrollTop = messagesRef.current.scrollHeight;
    }
  };

  useEffect(() => {
    scrollToBottom();
  }, [messages]);

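  // Voice input is only offered when the project's "audioQueryResponse" setting is enabled.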
  useEffect(() => {
    if (!isLoading) {
      const audioQueryResponse = data?.data?.find(
        (setting: any) => setting.key === "audioQueryResponse"
      )?.value;

      setAudioQueryResponseValue(audioQueryResponse === "enabled");
    }
  }, [data, isLoading]);

  useEffect(() => {
    if (isLoading) {
      setValue("prompt", "");
    }
  }, [isLoading, setValue]);

  useEffect(() => {
    inputRef.current?.focus();
  }, [messages, showOverlay]);

  const onSubmit = (data: TPromptSchema) => {
    const trimmedPrompt = data.prompt.trim();
    if (!trimmedPrompt) return;

    setPromptData(trimmedPrompt);
    onQuestionSubmit(trimmedPrompt);
  };

  const toggleOverlay = () => setShowOverlay(!showOverlay);

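  // Tracks how long each answer takes: tick elapsedTime every second while loading,
  // then record { promptQuestion, resultTime } once the response arrives.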
  useEffect(() => {
    if (promptData && isLoading) {
      const intervalId = setInterval(() => {
        setElapsedTime((prevTime) => prevTime + 1);
      }, 1000);
      return () => clearInterval(intervalId);
    } else if (promptData && !isLoading) {
      setResponseTimeTaken((prev: any) => [
        ...prev,
        { promptQuestion: promptData, resultTime: elapsedTime },
      ]);
      setElapsedTime(0);
    }
  }, [isLoading, promptData]);

  const updateHistory = (prompt: string, response: string) => {
    const chatHistory = [...history];
    chatHistory.push([prompt, response]);
    setHistory(chatHistory);
  };


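  // Requests microphone access and records audio with MediaRecorder,
  // auto-stopping after the 10-second limit.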
  const handleStartRecording = async () => {
    if (!navigator.mediaDevices?.getUserMedia || typeof MediaRecorder === "undefined") {
      console.error("Browser does not support audio recording.");
      return;
    }

    setReachedTimeLimit(false); // Reset the time limit flag

    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      mediaStream.current = stream;
      mediaRecorder.current = new MediaRecorder(stream);

      chunks.current = []; // Reset chunks
      mediaRecorder.current.ondataavailable = (e) => {
        if (e.data.size > 0) {
          chunks.current.push(e.data);
          console.log("Chunk added, size:", e.data.size);
        }
      };

      mediaRecorder.current.start(100); // Start recording and fire ondataavailable every 100ms
      setIsRecording(true);
      setShowOverlay(true);
      console.log("Recording started");

      // Auto-stop after 10 seconds. The flag is also passed directly to
      // handleStopRecording because the state update below is not yet visible
      // inside this closure.
      recordingTimeout.current = setTimeout(() => {
        setReachedTimeLimit(true);
        handleStopRecording(true);
      }, 10000);
    } catch (error) {
      console.error("Error starting recording:", error);
      alert("Failed to access microphone. Please check permissions.");
    }
  };
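  // Releases the microphone stream, drops the recorder instance, and clears any
  // pending auto-stop timeout.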
  const stopAllTracks = () => {
    if (mediaStream.current) {
      mediaStream.current.getTracks().forEach((track) => track.stop());
      mediaStream.current = null;
    }
    if (mediaRecorder.current) {
      mediaRecorder.current = null;
    }
    if (recordingTimeout.current) {
      clearTimeout(recordingTimeout.current);
      recordingTimeout.current = null;
    }
  };

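  // Stops the recorder, waits for the final chunk, then uploads the captured audio
  // to the conversation unless the 10-second limit was the reason for stopping.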
  const handleStopRecording = async (timedOut = false) => {
    if (mediaRecorder.current?.state === "recording") {
      // Attach the listener before calling stop() so the final "stop" event cannot be missed.
      const stopped = new Promise((resolve) =>
        mediaRecorder.current!.addEventListener("stop", resolve, { once: true })
      );
      mediaRecorder.current.stop();
      await stopped;
    }

    stopAllTracks();
    setIsRecording(false);
    setShowOverlay(false);

    // `reachedTimeLimit` can be stale here when this runs from the auto-stop
    // timeout, so that path passes `timedOut` explicitly.
    if (timedOut || reachedTimeLimit) {
      console.log("Recording reached time limit. API call cancelled.");
      return;
    }

    if (chunks.current.length === 0) {
      console.error("No audio chunks were recorded.");
      return;
    }

    try {
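      // Note: MediaRecorder typically encodes audio/webm or audio/ogg; the
      // "audio/wav" type below only labels the Blob, it does not transcode it.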
      const audioBlob = new Blob(chunks.current, { type: "audio/wav" });
      console.log("Audio Blob size:", audioBlob.size);

      if (audioBlob.size === 0) {
        console.error("Audio Blob is empty.");
        return;
      }

      const fileName = `audio_${knowledgeBaseIdentifier}_${conversationId}.wav`;
      const file = new File([audioBlob], fileName, { type: "audio/wav" });
      console.log("File created:", file);

      const response = await audioConversation({
        projectId,
        file,
        api_key: apiKey,
        conversation_id: conversationId,
      });

      console.log("Audio API Response:", response);
    } catch (error) {
      console.error("Error uploading audio:", error);
      alert("Error uploading audio. Please try again.");
    }
  };


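  // Aborts an in-progress recording and discards any captured audio.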
  const handleCancel = () => {
    stopAllTracks();
    chunks.current = [];
    setIsRecording(false);
    setShowOverlay(false);
    setReachedTimeLimit(false); // Reset reachedTimeLimit state
  };

  const lastMsgIndex = messages.length - 1;

  return (
    <ChatStyled>
      <Box className="chat-styled-wrapper">
        <Stack className="chat-scrollbar" spacing={2} ref={messagesRef}>
          <Box className="bot">
            <Message
              message={{
                text: "Hi there! How can I assist you today?",
                sender: "bot",
              }}
              hideFeedback={true}
            />
          </Box>
          {messages?.map((message: any, index: number) => (
            <Box key={index} className={message.sender === "bot" ? "bot" : "user"}>
              <Message
                index={index}
                lastMsgIndex={lastMsgIndex}
                message={message}
                showSource={showSource}
                queryResponseId={queryResponseId}
                timer={elapsedTime}
                responseDataTime={responseTimeTaken}
              />
            </Box>
          ))}
        </Stack>

        {isLoading && (
          <Box sx={{ pb: 4, textAlign: "left", display: "flex", alignItems: "center", paddingLeft: "1rem" }}>
            <Box sx={{ height: "20px", width: "20px", marginRight: "0.5rem" }}>
              <LoadingIcon />
            </Box>
            <Typography color="#ccc">Generating answers, Please wait.</Typography>
          </Box>
        )}

        {isError && !isLoading && (
          <Box sx={{ pb: 2 }}>
            <Typography color="error">Something went wrong.</Typography>
          </Box>
        )}

        <Box>
          <form
            onSubmit={(e) => {
              e.preventDefault();
              if (isRecording) {
                handleStopRecording();
              } else {
                setReachedTimeLimit(false);
                handleSubmit(onSubmit)();
              }
            }}
          >
            <Box className="chatbox">
              {!isRecording ? (
                <FormInputText
                  size="large"
                  fullWidth
                  placeholder="Type a message"
                  variant="outlined"
                  sx={{ input: { width: "calc(100% - 64px)" } }}
                  name="prompt"
                  control={control}
                  type="text"
                  displayError={false}
                  disabled={isLoading}
                  autofocus={true}
                  multiline={true}
                  maxRows={4}
                  minRows={1}
                  handleMultiLineEnter={handleSubmit(onSubmit)}
                  inputRef={inputRef}
                />
              ) : (
                <Box>
                  <Box borderRadius={16} bgcolor="" borderColor="red" border={1} py={2} px={1}>
                    <Typography variant="h6">
                      {reachedTimeLimit
                        ? "Recording stopped after 10 seconds. Please try again."
                        : "Recording... (Max 10 seconds)"}
                    </Typography>
                    <IconButton className="close-button" onClick={handleCancel}>
                      <Close />
                    </IconButton>
                  </Box>
                </Box>
              )}

              <Box
                sx={{
                  position: "absolute",
                  top: 7,
                  right: 60,
                  display: "flex",
                  alignItems: "center",
                }}
              >
                {audioQueryResponseValue && !isRecording && (
                  <Tooltip title="Use Voice Mode" placement="top">
                    <IconButton
                      onClick={handleStartRecording}
                    >
                      <SoundIcon />
                    </IconButton>
                  </Tooltip>
                )}
              </Box>

              <IconButton
                type="submit"
                className={`icon-button ${!isLoading ? "icon-button-hover" : ""}`}
              >
                {isLoading ? (
                  <CircularProgress
                    sx={{ height: "20px !important", width: "20px !important" }}
                  />
                ) : (
                  <SendIcon />
                )}
              </IconButton>

              <Box className="scroll-to-bottom" component="span" onClick={scrollToBottom}>
                <Tooltip title="Scroll to Bottom">
                  <IconButton>
                    <KeyboardArrowDownIcon />
                  </IconButton>
                </Tooltip>
              </Box>
            </Box>
          </form>

        </Box>
      </Box>
    </ChatStyled>
  );
}