import { AudioRecorder, useAudioRecorder } from 'react-audio-voice-recorder';

import { Button } from '@/components/ui/button';
import { Authorization } from '@/constants/authorization';
import { cn } from '@/lib/utils';
import api from '@/utils/api';
import { getAuthorization } from '@/utils/authorization-util';
import { Loader2, Mic, Square } from 'lucide-react';
import { useEffect, useRef, useState } from 'react';
import { useIsDarkTheme } from '../theme-provider';
import { Input } from './input';
import { Popover, PopoverContent, PopoverTrigger } from './popover';

const VoiceVisualizer = ({ isRecording }: { isRecording: boolean }) => {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const animationFrameRef = useRef(0);
  const streamRef = useRef<MediaStream | null>(null);
  const isDark = useIsDarkTheme();

  const startVisualization = async () => {
    try {
      // Check if the browser supports getUserMedia
      if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
        console.error('Browser does not support getUserMedia API');
        return;
      }

      // Request microphone permission
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: true,
      });
      streamRef.current = stream;

      // Create audio context and analyser (fftSize 32 -> 16 frequency bins)
      const audioContext = new (window.AudioContext ||
        (window as any).webkitAudioContext)();
      audioContextRef.current = audioContext;
      const analyser = audioContext.createAnalyser();
      analyserRef.current = analyser;
      analyser.fftSize = 32;

      // Connect audio nodes: microphone stream -> analyser
      const source = audioContext.createMediaStreamSource(stream);
      source.connect(analyser);

      // Start drawing
      draw();
    } catch (error) {
      console.error(
        'Unable to access microphone for voice visualization:',
        error,
      );
    }
  };

  const stopVisualization = () => {
    // Stop animation frame
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
    }

    // Stop audio stream
    if (streamRef.current) {
      streamRef.current.getTracks().forEach((track) => track.stop());
    }

    // Close audio context
    if (audioContextRef.current && audioContextRef.current.state !== 'closed') {
      audioContextRef.current.close();
    }

    // Clear canvas
    const canvas = canvasRef.current;
    if (canvas) {
      const ctx = canvas.getContext('2d');
      if (ctx) {
        ctx.clearRect(0, 0, canvas.width, canvas.height);
      }
    }
  };

  useEffect(() => {
    if (isRecording) {
      startVisualization();
    } else {
      stopVisualization();
    }

    return () => {
      stopVisualization();
    };
  }, [isRecording]);

  const draw = () => {
    const canvas = canvasRef.current;
    if (!canvas) return;

    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    const analyser = analyserRef.current;
    if (!analyser) return;

    // Keep the canvas backing store in sync with its CSS size
    const width = canvas.clientWidth;
    const height = canvas.clientHeight;
    const centerY = height / 2;
    if (canvas.width !== width || canvas.height !== height) {
      canvas.width = width;
      canvas.height = height;
    }

    // Clear canvas
    ctx.clearRect(0, 0, width, height);

    // Get frequency data
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    analyser.getByteFrequencyData(dataArray);

    // Draw waveform: every other bin becomes a bar mirrored around the midline
    const barWidth = (width / bufferLength) * 1.5;
    let x = 0;
    for (let i = 0; i < bufferLength; i = i + 2) {
      const barHeight = (dataArray[i] / 255) * centerY;

      // Create gradient
      const gradient = ctx.createLinearGradient(
        0,
        centerY - barHeight,
        0,
        centerY + barHeight,
      );
      gradient.addColorStop(0, '#3ba05c'); // Green
      gradient.addColorStop(1, '#3ba05c'); // Green
      // gradient.addColorStop(0, isDark ? '#fff' : '#000');
      // gradient.addColorStop(1, isDark ? '#eee' : '#eee');

      ctx.fillStyle = gradient;
      ctx.fillRect(x, centerY - barHeight, barWidth, barHeight * 2);

      x += barWidth + 2;
    }

    animationFrameRef.current = requestAnimationFrame(draw);
  };

  // The canvas is sized via CSS; draw() resizes the backing store to match
  return <canvas ref={canvasRef} className="h-10 w-full" />;
};
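
// Recording banner rendered inside the popover: an mm:ss timer alongside a
// read-only input holding the current transcript, plus a stop control.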
const VoiceInputBox = ({
  isRecording,
  onStop,
  recordingTime,
  value,
}: {
  value: string;
  isRecording: boolean;
  onStop: () => void;
  recordingTime: number;
}) => {
  // Format recording time as mm:ss
  const formatTime = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
  };

  return (
    <div className="flex items-center gap-2">
      <span
        className={cn('text-sm tabular-nums', {
          'text-red-500': isRecording,
        })}
      >
        {formatTime(recordingTime)}
      </span>
      <Input value={value} readOnly />
      <Button variant="ghost" size="icon" onClick={onStop}>
        <Square className="size-4" />
      </Button>
    </div>
  );
};

export const AudioButton = ({
  onOk,
}: {
  onOk?: (transcript: string) => void;
}) => {
  // const [showInputBox, setShowInputBox] = useState(false);
  const [isRecording, setIsRecording] = useState(false);
  const [isProcessing, setIsProcessing] = useState(false);
  const [recordingTime, setRecordingTime] = useState(0);
  const [transcript, setTranscript] = useState('');
  const [popoverOpen, setPopoverOpen] = useState(false);
  const recorderControls = useAudioRecorder();
  const intervalRef = useRef<ReturnType<typeof setInterval> | null>(null);

  // Handle logic after recording is complete
  const handleRecordingComplete = async (blob: Blob) => {
    setIsRecording(false);
    // const url = URL.createObjectURL(blob);
    // const a = document.createElement('a');
    // a.href = url;
    // a.download = 'recording.webm';
    // document.body.appendChild(a);
    // a.click();
    setIsProcessing(true);
    if (intervalRef.current) {
      clearInterval(intervalRef.current);
      intervalRef.current = null;
    }
    try {
      const audioFile = new File([blob], 'recording.webm', {
        type: blob.type || 'audio/webm',
        // type: 'audio/mpeg',
      });
      const formData = new FormData();
      formData.append('file', audioFile);
      formData.append('stream', 'false');

      const response = await fetch(api.sequence2txt, {
        method: 'POST',
        headers: {
          [Authorization]: getAuthorization(),
          // 'Content-Type': blob.type || 'audio/webm',
        },
        body: formData,
      });
      // if (!response.ok) {
      //   throw new Error(`HTTP error! status: ${response.status}`);
      // }
      // if (!response.body) {
      //   throw new Error('ReadableStream not supported in this browser');
      // }
      console.log('Response:', response);

      // Expected payload shape: { code: number, data: { text: string } }
      const { data, code } = await response.json();
      if (code === 0 && data && data.text) {
        setTranscript(data.text);
        console.log('Transcript:', data.text);
        onOk?.(data.text);
      }
      setPopoverOpen(false);
    } catch (error) {
      console.error('Failed to process audio:', error);
      // setTranscript(t('voiceRecorder.processingError'));
    } finally {
      setIsProcessing(false);
    }
  };

  // Start recording
  const startRecording = () => {
    recorderControls.startRecording();
    setIsRecording(true);
    // setShowInputBox(true);
    setPopoverOpen(true);
    setRecordingTime(0);

    // Start timing
    if (intervalRef.current) {
      clearInterval(intervalRef.current);
    }
    intervalRef.current = setInterval(() => {
      setRecordingTime((prev) => prev + 1);
    }, 1000);
  };

  // Stop recording
  const stopRecording = () => {
    recorderControls.stopRecording();
    setIsRecording(false);
    // setShowInputBox(false);
    setPopoverOpen(false);
    setRecordingTime(0);

    // Clear timer
    if (intervalRef.current) {
      clearInterval(intervalRef.current);
      intervalRef.current = null;
    }
  };

  // Clear transcription content
  // const clearTranscript = () => {
  //   setTranscript('');
  // };

  // Clear the timer on unmount
  useEffect(() => {
    return () => {
      if (intervalRef.current) {
        clearInterval(intervalRef.current);
      }
    };
  }, []);

  return (
    <Popover open={popoverOpen} onOpenChange={setPopoverOpen}>
      {false && (
        <PopoverTrigger asChild>
          <Button
            variant="ghost"
            size="icon"
            onClick={() => {
              setPopoverOpen(true);
            }}
          >
            <Mic className="size-4" />
          </Button>
        </PopoverTrigger>
      )}
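      {/* Visible trigger: mic starts recording, square stops it, spinner while transcribing */}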
      <PopoverTrigger asChild>
        <Button
          variant="ghost"
          size="icon"
          disabled={isProcessing}
          onClick={isRecording ? stopRecording : startRecording}
        >
          {isProcessing ? (
            <Loader2 className="size-4 animate-spin" />
          ) : isRecording ? (
            <Square className="size-4" />
          ) : (
            <Mic className="size-4" />
          )}
        </Button>
      </PopoverTrigger>
      <PopoverContent className="w-64 space-y-2">
        {isRecording && (
          <VoiceVisualizer isRecording={isRecording} />
        )}
        {isRecording && (
          <VoiceInputBox
            value={transcript}
            isRecording={isRecording}
            recordingTime={recordingTime}
            onStop={stopRecording}
          />
        )}
      </PopoverContent>
      {/* Hide original component */}
      <div className="hidden">
        <AudioRecorder
          onRecordingComplete={handleRecordingComplete}
          recorderControls={recorderControls}
        />
      </div>
    </Popover>
  );
};
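
// Usage sketch (hypothetical parent; `setQuestion` is illustrative only):
//
//   <AudioButton onOk={(text) => setQuestion(text)} />
//
// `onOk` fires with the transcript once the recorded blob has been uploaded to
// the `sequence2txt` endpoint and a `{ code: 0, data: { text } }` reply arrives.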