Add Live waveform visualization
addyosmani committed Nov 7, 2024
1 parent b022af5 commit 851c7a4
Showing 3 changed files with 81 additions and 129 deletions.
10 changes: 10 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -19,6 +19,7 @@
     "@xenova/transformers": "^2.7.0",
     "axios": "^1.3.4",
     "react": "^18.2.0",
+    "react-audio-visualize": "^1.2.0",
     "react-dom": "^18.2.0",
     "react-icons": "^4.11.0"
   },
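The only dependency change is react-audio-visualize, which provides the LiveAudioVisualizer component used in AudioRecorder.tsx below. As a minimal sketch of how that component is wired up — assuming only the props that actually appear in this diff (mediaRecorder, width, height), with all other names illustrative — the key point is that the recorder must live in React state so the visualizer mounts when recording starts:

```tsx
import React, { useState } from 'react';
import { LiveAudioVisualizer } from 'react-audio-visualize';

// Minimal wiring sketch: hold the recorder in state so the component
// re-renders (and the visualizer mounts) once recording begins.
const WaveformDemo: React.FC = () => {
  const [recorder, setRecorder] = useState<MediaRecorder | null>(null);

  const start = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const mediaRecorder = new MediaRecorder(stream);
    mediaRecorder.start();
    setRecorder(mediaRecorder);
  };

  return (
    <div>
      <button onClick={start}>Start</button>
      {recorder && (
        <LiveAudioVisualizer mediaRecorder={recorder} width={400} height={80} />
      )}
    </div>
  );
};

export default WaveformDemo;
```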
199 changes: 70 additions & 129 deletions src/components/AudioRecorder.tsx
@@ -1,4 +1,5 @@
-import React, { useState, useRef, useEffect } from 'react';
+import React, { useState, useRef } from 'react';
+import { LiveAudioVisualizer } from 'react-audio-visualize';
 
 interface Props {
   onRecordingComplete: (blob: Blob) => void;
@@ -7,142 +8,56 @@ interface Props {
 const AudioRecorder: React.FC<Props> = ({ onRecordingComplete }) => {
   const [isRecording, setIsRecording] = useState(false);
   const [recordingTime, setRecordingTime] = useState(0);
-  const mediaRecorder = useRef<MediaRecorder | null>(null);
+  const [mediaRecorder, setMediaRecorder] = useState<MediaRecorder | null>(null);
   const timeInterval = useRef<number | null>(null);
-  const canvasRef = useRef<HTMLCanvasElement>(null);
-  const analyserRef = useRef<AnalyserNode | null>(null);
-  const animationFrameRef = useRef<number>();
-
-  useEffect(() => {
-    return () => {
-      if (animationFrameRef.current) {
-        cancelAnimationFrame(animationFrameRef.current);
-      }
-    };
-  }, []);
-
-  const drawWaveform = () => {
-    if (!analyserRef.current || !canvasRef.current) return;
-
-    const canvas = canvasRef.current;
-    const canvasCtx = canvas.getContext('2d');
-    if (!canvasCtx) return;
-
-    const analyser = analyserRef.current;
-    const bufferLength = analyser.frequencyBinCount;
-    const dataArray = new Uint8Array(bufferLength);
-
-    const draw = () => {
-      if (!isRecording) return;
-
-      animationFrameRef.current = requestAnimationFrame(draw);
-      analyser.getByteTimeDomainData(dataArray);
-
-      canvasCtx.fillStyle = 'rgb(255, 255, 255)';
-      canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
-      canvasCtx.lineWidth = 2;
-      canvasCtx.strokeStyle = 'rgb(59, 130, 246)';
-      canvasCtx.beginPath();
-
-      const sliceWidth = canvas.width / bufferLength;
-      let x = 0;
-
-      for (let i = 0; i < bufferLength; i++) {
-        const v = dataArray[i] / 128.0;
-        const y = v * canvas.height / 2;
-
-        if (i === 0) {
-          canvasCtx.moveTo(x, y);
-        } else {
-          canvasCtx.lineTo(x, y);
-        }
-
-        x += sliceWidth;
-      }
-
-      canvasCtx.lineTo(canvas.width, canvas.height / 2);
-      canvasCtx.stroke();
-    };
-
-    draw();
-  };
+  const streamRef = useRef<MediaStream | null>(null);
 
   const startRecording = async () => {
     try {
-      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+      const stream = await navigator.mediaDevices.getUserMedia({
+        audio: {
+          echoCancellation: true,
+          noiseSuppression: true,
+          autoGainControl: true
+        }
+      });
 
-      // Set up audio context and analyser for waveform
-      const audioContext = new AudioContext();
-      const source = audioContext.createMediaStreamSource(stream);
-      const analyser = audioContext.createAnalyser();
-      analyser.fftSize = 2048;
-      source.connect(analyser);
-      analyserRef.current = analyser;
-
-      // Try to use MP3 encoding, fall back to default if not supported
-      const options = {
-        mimeType: 'audio/mpeg',
-        audioBitsPerSecond: 256000
-      };
-
-      try {
-        mediaRecorder.current = new MediaRecorder(stream, options);
-      } catch (e) {
-        // If MP3 is not supported, try other common formats
-        const formats = [
-          'audio/mp3',
-          'audio/webm;codecs=mp3',
-          'audio/webm',
-          'audio/ogg'
-        ];
-
-        for (const format of formats) {
-          if (MediaRecorder.isTypeSupported(format)) {
-            mediaRecorder.current = new MediaRecorder(stream, { mimeType: format });
-            break;
-          }
-        }
-
-        // If none of the above worked, use default format
-        if (!mediaRecorder.current) {
-          mediaRecorder.current = new MediaRecorder(stream);
-        }
-      }
-
-      mediaRecorder.current.addEventListener('dataavailable', handleDataAvailable);
-      mediaRecorder.current.start();
+      streamRef.current = stream;
+      const recorder = new MediaRecorder(stream);
+
+      recorder.addEventListener('dataavailable', handleDataAvailable);
+      recorder.start();
+
+      setMediaRecorder(recorder);
       setIsRecording(true);
 
       timeInterval.current = window.setInterval(() => {
         setRecordingTime((prevTime) => prevTime + 1);
       }, 1000);
 
-      drawWaveform();
    } catch (err) {
      console.error('Error accessing microphone:', err);
    }
  };
 
   const stopRecording = () => {
-    if (mediaRecorder.current && isRecording) {
-      mediaRecorder.current.stop();
-      mediaRecorder.current.stream.getTracks().forEach(track => track.stop());
+    if (mediaRecorder && isRecording) {
+      mediaRecorder.stop();
+      if (streamRef.current) {
+        streamRef.current.getTracks().forEach(track => track.stop());
+      }
       setIsRecording(false);
       if (timeInterval.current) {
         clearInterval(timeInterval.current);
         timeInterval.current = null;
       }
-      if (animationFrameRef.current) {
-        cancelAnimationFrame(animationFrameRef.current);
-      }
       setRecordingTime(0);
+      setMediaRecorder(null);
     }
   };
 
   const handleDataAvailable = (event: BlobEvent) => {
     if (event.data.size > 0) {
-      // Convert to MP3 if not already in MP3 format
-      const blob = new Blob([event.data], { type: 'audio/mpeg' });
-      onRecordingComplete(blob);
+      onRecordingComplete(event.data);
     }
   };

@@ -153,25 +68,51 @@ const AudioRecorder: React.FC<Props> = ({ onRecordingComplete }) => {
   };
 
   return (
-    <div className="flex flex-col items-center gap-4 p-4 w-full">
-      <canvas
-        ref={canvasRef}
-        className="w-full h-24 bg-white rounded-lg"
-        width={800}
-        height={100}
-      />
-      <div className="text-xl font-semibold">
-        {isRecording ? `Recording: ${formatTime(recordingTime)}` : 'Ready to Record'}
+    <div className="flex flex-col items-center gap-4 p-6 w-full max-w-2xl mx-auto">
+      <div className="w-full bg-white rounded-lg p-6 shadow-lg">
+        <h2 className="text-2xl font-bold mb-4">Record Audio</h2>
+        <div className="relative w-full">
+          {mediaRecorder ? (
+            <div className="w-full h-40 rounded-lg mb-4 bg-[rgb(15,23,42)] flex items-center justify-center overflow-hidden">
+              <LiveAudioVisualizer
+                mediaRecorder={mediaRecorder}
+                width={800}
+                height={160}
+                barWidth={2}
+                gap={1}
+                barColor={'rgb(96, 165, 250)'}
+                backgroundColor={'rgb(15, 23, 42)'}
+                fftSize={1024}
+                smoothingTimeConstant={0.8}
+              />
+            </div>
+          ) : (
+            <div
+              className="w-full h-40 rounded-lg mb-4 bg-[rgb(15,23,42)] flex items-center justify-center"
+            >
+              <span className="text-white/50">
+                Click Start Recording to begin
+              </span>
+            </div>
+          )}
+        </div>
+        <div className="text-center mb-4">
+          <div className="text-xl font-semibold text-gray-700">
+            {isRecording ? `Recording: ${formatTime(recordingTime)}` : 'Ready to Record'}
+          </div>
+        </div>
+        <button
+          onClick={isRecording ? stopRecording : startRecording}
+          className={`w-full py-3 rounded-lg text-white text-lg font-semibold ${
+            isRecording
+              ? 'bg-red-500 hover:bg-red-600'
+              : 'bg-blue-500 hover:bg-blue-600'
+          } transition-all shadow-md hover:shadow-lg`}
+          aria-label={isRecording ? "Stop Recording" : "Start Recording"}
+        >
+          {isRecording ? 'Stop Recording' : 'Start Recording'}
+        </button>
       </div>
-      <button
-        onClick={isRecording ? stopRecording : startRecording}
-        className={`px-6 py-3 rounded-full text-white text-lg font-semibold ${
-          isRecording ? 'bg-red-500 hover:bg-red-600' : 'bg-blue-500 hover:bg-blue-600'
-        } transition-colors shadow-lg hover:shadow-xl transform hover:-translate-y-0.5 transition-all`}
-        aria-label={isRecording ? "Stop Recording" : "Start Recording"}
-      >
-        {isRecording ? 'Stop Recording' : 'Start Recording'}
-      </button>
     </div>
   );
 };
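For context, the component hands the finished recording to its parent through the onRecordingComplete prop as a raw Blob (the old MP3 re-wrapping is removed). A hypothetical parent might consume it like the sketch below; the default export and any parent wiring are not part of this diff, so those names are illustrative:

```tsx
import React, { useState } from 'react';
import AudioRecorder from './components/AudioRecorder'; // assumed default export

// Hypothetical parent: turn the finished recording into a playable URL.
const App: React.FC = () => {
  const [audioUrl, setAudioUrl] = useState<string | null>(null);

  return (
    <div>
      <AudioRecorder
        onRecordingComplete={(blob: Blob) => setAudioUrl(URL.createObjectURL(blob))}
      />
      {audioUrl && <audio src={audioUrl} controls />}
    </div>
  );
};

export default App;
```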
