import React, { useState, useRef, useEffect, useCallback } from 'react';
import { Upload, Play, Pause, Image as ImageIcon, Video, Settings2, Loader2, StopCircle } from 'lucide-react';
export default function App() {
// ---- DOM / Web Audio refs (persist across renders without triggering re-render) ----
const canvasRef = useRef(null); // <canvas> the visualizer draws to (native 4K backing store)
const audioRef = useRef(null); // hidden <audio> element that plays the uploaded file
const audioCtxRef = useRef(null); // lazily created AudioContext (see initAudio)
const analyserRef = useRef(null); // AnalyserNode feeding frequency/time-domain data to draw()
const sourceRef = useRef(null); // MediaElementSource for audioRef; may only be created once per element
const destRef = useRef(null); // MediaStreamDestination used as the audio sink for video recording
const reqIdRef = useRef(null); // id of the most recently scheduled requestAnimationFrame
const mediaRecorderRef = useRef(null); // active MediaRecorder during video export
const chunksRef = useRef([]); // Blob chunks accumulated by the MediaRecorder
// State
const [audioSrc, setAudioSrc] = useState(null); // object URL for the uploaded audio file
const [fileName, setFileName] = useState('');
const [isPlaying, setIsPlaying] = useState(false);
const [isExportingVideo, setIsExportingVideo] = useState(false);
const [exportProgress, setExportProgress] = useState(0); // 0-100 while a video export runs
// Settings
const [vizType, setVizType] = useState('bars'); // 'bars', 'wave', 'circle'
const [color, setColor] = useState('#00ffcc');
const [thickness, setThickness] = useState(12);
const [sensitivity, setSensitivity] = useState(128); // FFT Size divider
// Lazily build the Web Audio graph: <audio> -> analyser -> (speakers + recorder sink).
// Safe to call repeatedly; construction happens only on the first invocation.
const initAudio = useCallback(() => {
if (!audioCtxRef.current) {
const Ctor = window.AudioContext || window.webkitAudioContext;
const ctx = new Ctor();
audioCtxRef.current = ctx;
analyserRef.current = ctx.createAnalyser();
destRef.current = ctx.createMediaStreamDestination();
// A media-element source may only be created once per <audio> element.
if (!sourceRef.current && audioRef.current) {
const src = ctx.createMediaElementSource(audioRef.current);
src.connect(analyserRef.current);
analyserRef.current.connect(ctx.destination); // audible output (speakers)
analyserRef.current.connect(destRef.current); // feeds the MediaRecorder during export
sourceRef.current = src;
}
}
// Autoplay policies leave contexts suspended until a user gesture; resume here.
if (audioCtxRef.current.state === 'suspended') {
audioCtxRef.current.resume();
}
}, []);
// Accept a newly chosen audio file: swap in a fresh object URL and reset playback.
const handleFileUpload = (e) => {
const file = e.target.files[0];
if (!file) return;
// Release the previous object URL before replacing it.
if (audioSrc) URL.revokeObjectURL(audioSrc);
setAudioSrc(URL.createObjectURL(file));
setFileName(file.name);
setIsPlaying(false);
const audio = audioRef.current;
if (audio) {
audio.pause();
audio.currentTime = 0;
}
};
// Toggle audio playback, initializing the audio graph on first use.
const togglePlay = () => {
if (!audioSrc || !audioRef.current) return;
initAudio();
if (isPlaying) {
audioRef.current.pause();
} else {
// BUGFIX: play() returns a promise that can reject (autoplay policy,
// decode error); the original left it unhandled. Surface the error and
// roll the UI state back instead of showing a stuck "playing" button.
audioRef.current.play().catch((err) => {
console.error('Audio playback failed:', err);
setIsPlaying(false);
});
}
// Functional update avoids flipping based on a stale `isPlaying` closure value.
setIsPlaying((playing) => !playing);
};
// The Animation Loop: renders one frame of the selected visualization onto the
// 4K canvas, then re-schedules itself via requestAnimationFrame.
const draw = useCallback(() => {
if (!canvasRef.current || !analyserRef.current) {
// Audio graph not initialized yet - keep polling so rendering starts
// as soon as the analyser exists.
reqIdRef.current = requestAnimationFrame(draw);
return;
}
const canvas = canvasRef.current;
const ctx = canvas.getContext('2d');
const width = canvas.width; // Native 4K: 3840
const height = canvas.height; // Native 4K: 2160
// Clear canvas completely to maintain transparency
ctx.clearRect(0, 0, width, height);
// Dynamic FFT size based on sensitivity setting.
// BUGFIX: AnalyserNode.fftSize must be a power of two in [32, 32768]; the
// previous `2048 / (sensitivity / 32)` threw an IndexSizeError for slider
// values that are not powers of two. Round to the nearest power of two and
// clamp. Also only assign when the value actually changes, since setting
// fftSize resets the analyser's internal state.
const desired = sensitivity > 0 ? 65536 / sensitivity : 2048;
const fftSize = Math.min(32768, Math.max(32, 2 ** Math.round(Math.log2(desired))));
if (analyserRef.current.fftSize !== fftSize) {
analyserRef.current.fftSize = fftSize;
}
const bufferLength = analyserRef.current.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
ctx.lineWidth = thickness;
ctx.strokeStyle = color;
ctx.fillStyle = color;
ctx.lineCap = 'round';
ctx.lineJoin = 'round';
if (vizType === 'bars') {
analyserRef.current.getByteFrequencyData(dataArray);
const barWidth = (width / bufferLength) * 2.5;
let x = 0;
for (let i = 0; i < bufferLength; i++) {
// Scale bar height to 80% of the 4K canvas height for visual headroom.
const barHeight = (dataArray[i] / 255) * height * 0.8;
ctx.fillRect(x, height - barHeight, barWidth - 2, barHeight);
x += barWidth;
}
} else if (vizType === 'wave') {
analyserRef.current.getByteTimeDomainData(dataArray);
ctx.beginPath();
const sliceWidth = width / bufferLength;
let x = 0;
for (let i = 0; i < bufferLength; i++) {
const v = dataArray[i] / 128.0; // 0 to 2, centered at 1 for silence
const y = v * height / 2;
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
}
x += sliceWidth;
}
ctx.stroke();
} else if (vizType === 'circle') {
analyserRef.current.getByteFrequencyData(dataArray);
const centerX = width / 2;
const centerY = height / 2;
const radius = height / 4;
ctx.beginPath();
const bars = 180; // Limit bars to make a clean circle
const step = (Math.PI * 2) / bars;
for (let i = 0; i < bars; i++) {
// Sample only the lower half of the spectrum, where most musical energy lives.
const dataIndex = Math.floor((i / bars) * (bufferLength / 2));
const value = dataArray[dataIndex] / 255;
const barHeight = value * (height / 3);
const angle = i * step;
const x1 = centerX + Math.cos(angle) * radius;
const y1 = centerY + Math.sin(angle) * radius;
const x2 = centerX + Math.cos(angle) * (radius + barHeight);
const y2 = centerY + Math.sin(angle) * (radius + barHeight);
ctx.moveTo(x1, y1);
ctx.lineTo(x2, y2);
}
ctx.stroke();
// Inner solid circle
ctx.beginPath();
ctx.arc(centerX, centerY, radius - thickness, 0, Math.PI * 2);
ctx.lineWidth = thickness / 2;
ctx.stroke();
}
reqIdRef.current = requestAnimationFrame(draw);
}, [vizType, color, thickness, sensitivity]);
// Handle Play/Pause side effects and loop
useEffect(() => {
// Kick off the render loop; draw() re-schedules itself every frame.
reqIdRef.current = requestAnimationFrame(draw);
// The cleanup closure reads reqIdRef at teardown time, so it cancels the
// most recently scheduled frame rather than the first one.
return () => cancelAnimationFrame(reqIdRef.current);
}, [draw]);
// When the track finishes: clear the playing flag and, if a video export is
// running, finalize the recording.
const handleAudioEnded = () => {
setIsPlaying(false);
if (!isExportingVideo) return;
stopVideoExport();
};
// Export Image (PNG): snapshot the current canvas frame and trigger a download.
const exportImage = () => {
const canvas = canvasRef.current;
if (!canvas) return;
const anchor = document.createElement('a');
anchor.href = canvas.toDataURL('image/png');
anchor.download = `visualizer_${Date.now()}.png`;
anchor.click();
};
// Export 4K Transparent Video: records the canvas stream (60fps) plus the
// audio graph's recorder sink in real time via MediaRecorder, then downloads
// the resulting WebM when recording stops.
const startVideoExport = async () => {
if (!audioSrc || !canvasRef.current || !audioCtxRef.current) {
alert("Please upload an audio file and press play at least once to initialize.");
return;
}
setIsExportingVideo(true);
setExportProgress(0);
chunksRef.current = [];
// Reset audio to start so the recording covers the whole track
audioRef.current.pause();
audioRef.current.currentTime = 0;
// Capture Canvas Stream at 60fps
const canvasStream = canvasRef.current.captureStream(60);
// Get Audio Stream from destination
const audioStream = destRef.current.stream;
// Combine Streams
const combinedTracks = [...canvasStream.getTracks(), ...audioStream.getAudioTracks()];
const combinedStream = new MediaStream(combinedTracks);
// Setup MediaRecorder for Transparent Video (WebM VP9/VP8)
let options = { mimeType: 'video/webm; codecs=vp9' };
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
options = { mimeType: 'video/webm; codecs=vp8' }; // Fallback
}
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
alert("Your browser does not support transparent video export (WebM with VP8/VP9 codecs). Export will proceed but may not be transparent.");
options = {}; // Use browser default
}
try {
mediaRecorderRef.current = new MediaRecorder(combinedStream, options);
} catch (e) {
console.error(e);
alert("Error starting video recorder. See console.");
setIsExportingVideo(false);
return;
}
mediaRecorderRef.current.ondataavailable = (e) => {
if (e.data && e.data.size > 0) {
chunksRef.current.push(e.data);
}
};
// Progress timer.
// BUGFIX: the original captured `duration` once, which is NaN until the
// element's metadata has loaded, making the progress read "NaN%". Read the
// live duration each tick and only update when it is a finite positive number.
const progressInterval = setInterval(() => {
const audio = audioRef.current;
if (audio && !audio.paused) {
const duration = audio.duration;
if (Number.isFinite(duration) && duration > 0) {
setExportProgress((audio.currentTime / duration) * 100);
}
} else {
clearInterval(progressInterval);
}
}, 500);
mediaRecorderRef.current.onstop = () => {
// BUGFIX: clear the progress timer explicitly so it cannot keep running
// after the recorder has stopped.
clearInterval(progressInterval);
const blob = new Blob(chunksRef.current, { type: mediaRecorderRef.current.mimeType });
const url = URL.createObjectURL(blob);
const link = document.createElement('a');
link.download = `visualizer_4k_${Date.now()}.webm`;
link.href = url;
link.click();
URL.revokeObjectURL(url);
setIsExportingVideo(false);
setExportProgress(0);
};
// Start Recording & Playback
mediaRecorderRef.current.start(100); // collect data every 100ms
// BUGFIX: play() can reject (autoplay policy / decode error); the original
// awaited it with no handler, leaving an unhandled rejection and a recorder
// stuck in the 'recording' state. Stop the export cleanly on failure.
try {
await audioRef.current.play();
setIsPlaying(true);
} catch (err) {
console.error('Could not start playback for export:', err);
stopVideoExport();
}
};
// Finalize a video export: stopping the recorder fires its onstop handler,
// which assembles the chunks and triggers the download.
const stopVideoExport = () => {
const recorder = mediaRecorderRef.current;
if (recorder && recorder.state !== 'inactive') {
recorder.stop();
}
audioRef.current.pause();
setIsPlaying(false);
};
// NOTE(review): the JSX below appears truncated/garbled in this copy of the
// file -- element tags and attributes are missing, leaving only text fragments,
// partial event handlers, and {/* */} comment markers. The markup must be
// restored from the original source before this component can compile; the
// code is preserved byte-for-byte here rather than guessed at.
return (
{/* Header */}
{/* Left Column: Controls */}
{/* Upload Section */}
Audio Input
{/* Hidden Audio Element */}
{/* Settings Section */}
Visual Settings
{/* Type */}
{['bars', 'wave', 'circle'].map(type => (
))}
{/* Color */}
{/* Thickness */}
setThickness(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
{/* Sensitivity / Scale */}
setSensitivity(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
{/* Export Section */}
Export Options
{isExportingVideo ? (
Recording transparent 4K video... {Math.round(exportProgress)}%
) : (
)}
* Video export records in real-time. Background will be transparent. WebM VP9 format is used for alpha channel support.
{/* Right Column: Preview */}
Live Preview
Native: 3840x2160
Checkerboard denotes transparency
{/* Checkerboard Background for Transparency check */}
{/* Actual Canvas (Internal 4K resolution, scaled by CSS for preview) */}
{!audioSrc && (
)}
);
}