import React, { useState, useRef, useEffect, useCallback } from 'react';
import { Upload, Play, Pause, Image as ImageIcon, Video, Settings2, Loader2, StopCircle, Sparkles, Monitor, ImagePlus } from 'lucide-react';
export default function App() {
// ---- Refs: mutable handles that survive re-renders ----
const canvasRef = useRef(null); // visualizer drawing surface (sized from RESOLUTIONS in draw())
const audioRef = useRef(null); // hidden <audio> element driving playback
const audioCtxRef = useRef(null); // Web Audio AudioContext, created lazily in initAudio()
const analyserRef = useRef(null); // AnalyserNode feeding frequency/waveform data to draw()
const sourceRef = useRef(null); // MediaElementSource wrapping the <audio> element (created once)
const destRef = useRef(null); // MediaStreamDestination used as the audio tap for video export
const reqIdRef = useRef(null); // current requestAnimationFrame id (cancelled on cleanup)
const mediaRecorderRef = useRef(null); // active MediaRecorder during video export
const chunksRef = useRef([]); // recorded media chunks accumulated by the recorder
const bgImgRef = useRef(null); // decoded background Image, loaded by the bgImageSrc effect
// State
const [audioSrc, setAudioSrc] = useState(null); // object URL of the uploaded audio file
const [fileName, setFileName] = useState('');
const [isPlaying, setIsPlaying] = useState(false);
const [isExportingVideo, setIsExportingVideo] = useState(false);
const [exportProgress, setExportProgress] = useState(0); // 0-100 while exporting
// Settings
const [vizType, setVizType] = useState('bars'); // 'bars', 'wave', 'circle'
const [color, setColor] = useState('#00ffcc');
const [thickness, setThickness] = useState(12); // stroke width in canvas pixels
const [spacing, setSpacing] = useState(8); // NEW: gap between bars, in canvas pixels
const [sensitivity, setSensitivity] = useState(1.5); // Changed to Amplitude Multiplier
const [smoothing, setSmoothing] = useState(0.85); // NEW: analyser smoothingTimeConstant (0-1)
// Transform Settings (NEW)
const [offsetX, setOffsetX] = useState(0); // percent of canvas width, applied in draw()
const [offsetY, setOffsetY] = useState(0); // percent of canvas height
const [scale, setScale] = useState(1.0);
const [rotation, setRotation] = useState(0); // degrees
// Advanced Settings
const [colorMode, setColorMode] = useState('solid'); // 'solid' | 'gradient' | 'rainbow'
const [color2, setColor2] = useState('#b829ff'); // gradient end color
const [glow, setGlow] = useState(false); // toggles canvas shadowBlur glow
const [resolution, setResolution] = useState('4k_16_9'); // key into RESOLUTIONS
const [bgType, setBgType] = useState('transparent'); // 'transparent' | 'color' | 'image'
const [bgColor, setBgColor] = useState('#0f172a');
const [bgImageSrc, setBgImageSrc] = useState(null); // object URL of the background image
// Supported export resolutions.
// NOTE(review): this object is recreated on every render and read inside draw()
// without appearing in its dependency list; hoisting it to module scope would
// make that relationship explicit and safe.
const RESOLUTIONS = {
'4k_16_9': { w: 3840, h: 2160, label: '4K (16:9)', isVertical: false },
'1080p_16_9': { w: 1920, h: 1080, label: '1080p (16:9)', isVertical: false },
'4k_9_16': { w: 2160, h: 3840, label: '4K Vertical (9:16)', isVertical: true },
'1080p_9_16': { w: 1080, h: 1920, label: '1080p Vertical (9:16)', isVertical: true }
};
// Load (decode) the background image whenever its object URL changes.
// The decoded Image lands in bgImgRef so draw() can blit it without re-decoding.
useEffect(() => {
  if (bgImageSrc) {
    const img = new Image();
    img.onload = () => { bgImgRef.current = img; };
    // Fix: on a failed load, clear the ref so draw() does not keep painting
    // a stale image from a previously selected file.
    img.onerror = () => { bgImgRef.current = null; };
    img.src = bgImageSrc;
  } else {
    bgImgRef.current = null;
  }
}, [bgImageSrc]);
// File-input change handler for the background image: swaps the object URL
// (releasing the old one) and switches the background mode to 'image'.
const handleBgUpload = (e) => {
  const file = e.target.files[0];
  if (!file) return;
  if (bgImageSrc) URL.revokeObjectURL(bgImageSrc);
  setBgImageSrc(URL.createObjectURL(file));
  setBgType('image');
};
// Initialize Web Audio API: lazily create the AudioContext, analyser and the
// recording destination, wire the <audio> element into the graph, and resume
// the context if the browser suspended it (autoplay policy).
const initAudio = useCallback(() => {
  if (!audioCtxRef.current) {
    const AudioContext = window.AudioContext || window.webkitAudioContext;
    audioCtxRef.current = new AudioContext();
    analyserRef.current = audioCtxRef.current.createAnalyser();
    destRef.current = audioCtxRef.current.createMediaStreamDestination();
  }
  // Create the element source exactly once. Fix: this was previously nested
  // inside the context-creation branch, so if the first call happened before
  // audioRef was populated, the source was never wired up on later calls.
  if (!sourceRef.current && audioRef.current) {
    sourceRef.current = audioCtxRef.current.createMediaElementSource(audioRef.current);
    sourceRef.current.connect(analyserRef.current);
    analyserRef.current.connect(audioCtxRef.current.destination); // to speakers
    analyserRef.current.connect(destRef.current); // to recording destination
  }
  // Resume context if suspended (browser autoplay policy)
  if (audioCtxRef.current.state === 'suspended') {
    audioCtxRef.current.resume();
  }
}, []);
// File-input change handler for the audio track: releases the previous object
// URL, installs the new one, and resets playback to the beginning.
const handleFileUpload = (e) => {
  const file = e.target.files[0];
  if (!file) return;
  if (audioSrc) URL.revokeObjectURL(audioSrc);
  const url = URL.createObjectURL(file);
  setAudioSrc(url);
  setFileName(file.name);
  setIsPlaying(false);
  const audio = audioRef.current;
  if (audio) {
    audio.pause();
    audio.currentTime = 0;
  }
};
// Toggle audio playback. Ensures the Web Audio graph exists first.
// Fix: play() returns a promise that can reject (e.g. autoplay policy);
// the original ignored it and flipped isPlaying optimistically, which could
// leave the UI showing "playing" while the audio never started.
const togglePlay = () => {
  if (!audioSrc) return;
  initAudio();
  if (isPlaying) {
    audioRef.current.pause();
    setIsPlaying(false);
  } else {
    audioRef.current.play().then(
      () => setIsPlaying(true),
      (err) => {
        console.error('Audio playback failed:', err);
        setIsPlaying(false);
      }
    );
  }
};
// The Animation Loop: renders one frame of the visualizer (background,
// transforms, color/glow setup, then the selected visualization) and
// re-schedules itself via requestAnimationFrame.
const draw = useCallback(() => {
// Canvas/analyser may not exist yet (before first play); keep the loop alive.
if (!canvasRef.current || !analyserRef.current) {
reqIdRef.current = requestAnimationFrame(draw);
return;
}
const canvas = canvasRef.current;
const ctx = canvas.getContext('2d');
const res = RESOLUTIONS[resolution] || RESOLUTIONS['4k_16_9'];
// Update canvas native resolution if changed
if (canvas.width !== res.w || canvas.height !== res.h) {
canvas.width = res.w;
canvas.height = res.h;
}
const width = canvas.width;
const height = canvas.height;
// Background handling
// 'transparent' leaves the cleared canvas untouched (alpha preserved for export).
ctx.clearRect(0, 0, width, height);
if (bgType === 'color') {
ctx.fillStyle = bgColor;
ctx.fillRect(0, 0, width, height);
} else if (bgType === 'image' && bgImgRef.current) {
// Cover-fit: scale the image to fill the canvas, cropping the overflow.
const img = bgImgRef.current;
const imgRatio = img.width / img.height;
const canvasRatio = width / height;
let drawW, drawH, drawX, drawY;
if (imgRatio > canvasRatio) {
drawH = height;
drawW = height * imgRatio;
drawX = (width - drawW) / 2;
drawY = 0;
} else {
drawW = width;
drawH = width / imgRatio;
drawX = 0;
drawY = (height - drawH) / 2;
}
ctx.drawImage(img, drawX, drawY, drawW, drawH);
}
// Dynamic smoothing and fixed FFT for better reactivity control
analyserRef.current.smoothingTimeConstant = smoothing;
analyserRef.current.fftSize = 2048;
const bufferLength = analyserRef.current.frequencyBinCount;
// NOTE(review): this buffer is allocated every frame; caching it in a ref
// (re-created only when bufferLength changes) would avoid per-frame GC churn.
const dataArray = new Uint8Array(bufferLength);
ctx.save(); // Save context before applying transforms
// Apply Transformations (Position, Scale, Rotation)
// After this, (0,0) is the (offset) canvas center; all viz code draws
// relative to that origin.
const centerX = width / 2 + (width * (offsetX / 100));
const centerY = height / 2 + (height * (offsetY / 100));
ctx.translate(centerX, centerY);
ctx.scale(scale, scale);
ctx.rotate((rotation * Math.PI) / 180);
// Color and Glow logic (Coordinates adapted to centered origin)
let activeColor = color;
if (colorMode === 'gradient') {
const grad = ctx.createLinearGradient(-width/2, -height/2, width/2, height/2);
grad.addColorStop(0, color);
grad.addColorStop(1, color2);
activeColor = grad;
} else if (colorMode === 'rainbow') {
// Full-spectrum horizontal gradient wrapping back to red at the far edge.
const grad = ctx.createLinearGradient(-width/2, 0, width/2, 0);
grad.addColorStop(0, '#ff0000');
grad.addColorStop(0.16, '#ffff00');
grad.addColorStop(0.33, '#00ff00');
grad.addColorStop(0.5, '#00ffff');
grad.addColorStop(0.66, '#0000ff');
grad.addColorStop(0.83, '#ff00ff');
grad.addColorStop(1, '#ff0000');
activeColor = grad;
}
ctx.lineWidth = thickness;
ctx.strokeStyle = activeColor;
ctx.fillStyle = activeColor;
ctx.lineCap = 'round';
ctx.lineJoin = 'round';
if (glow) {
ctx.shadowBlur = thickness * 2;
ctx.shadowColor = colorMode === 'solid' ? color : (colorMode === 'gradient' ? color2 : '#ffffff');
} else {
ctx.shadowBlur = 0;
}
if (vizType === 'bars') {
// Mirrored frequency bars growing upward from the bottom, symmetric about x=0.
analyserRef.current.getByteFrequencyData(dataArray);
const step = thickness + spacing;
// Calculate exactly how many bars can fit in half the screen
const maxBars = Math.floor((width / 2) / step);
const usefulLength = Math.floor(bufferLength * 0.75); // Skip extreme silent highs
const numBars = Math.min(maxBars, usefulLength);
for (let i = 0; i < numBars; i++) {
const dataIndex = Math.floor((i / numBars) * usefulLength);
// Progressively boost higher frequencies
const boost = Math.pow(1 + (i / numBars), 1.5);
const value = dataArray[dataIndex] * boost * sensitivity;
// Minimum height ensures a nice dot/line is drawn even at complete silence
const barHeight = Math.max(thickness / 2, (value / 255) * height * 0.8);
const xOffset = i * step + (step / 2);
// Draw Right Side (Centered at 0,0 where bottom is height/2)
ctx.beginPath();
ctx.moveTo(xOffset, height / 2 - (thickness / 2));
ctx.lineTo(xOffset, height / 2 - barHeight);
ctx.stroke();
// Draw Left Side (Mirrored)
ctx.beginPath();
ctx.moveTo(-xOffset, height / 2 - (thickness / 2));
ctx.lineTo(-xOffset, height / 2 - barHeight);
ctx.stroke();
}
} else if (vizType === 'wave') {
// Time-domain oscilloscope trace across the full width, centered vertically.
analyserRef.current.getByteTimeDomainData(dataArray);
ctx.beginPath();
const sliceWidth = width / bufferLength;
let x = -width / 2; // Start from left edge relative to center
for (let i = 0; i < bufferLength; i++) {
// Apply sensitivity to the wave
// Byte samples are 0-255 with 128 = silence; normalize to [-1, 1).
const normalized = (dataArray[i] / 128.0) - 1;
const y = normalized * sensitivity * (height / 2); // Centered on Y axis
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
}
x += sliceWidth;
}
ctx.stroke();
} else if (vizType === 'circle') {
// Radial frequency bars around a ring, plus an inner reference circle.
analyserRef.current.getByteFrequencyData(dataArray);
const radius = height / 4;
ctx.beginPath();
// Calculate number of bars based on thickness and spacing
const circumference = 2 * Math.PI * radius;
const stepSize = thickness + spacing;
const bars = Math.min(180, Math.floor(circumference / stepSize));
const step = (Math.PI * 2) / bars;
for (let i = 0; i < bars; i++) {
// Only the lower half of the spectrum is mapped around the ring.
const dataIndex = Math.floor((i / bars) * (bufferLength / 2));
const value = (dataArray[dataIndex] / 255) * sensitivity;
const barHeight = Math.max(thickness / 2, value * (height / 3));
const angle = i * step;
const x1 = Math.cos(angle) * radius;
const y1 = Math.sin(angle) * radius;
const x2 = Math.cos(angle) * (radius + barHeight);
const y2 = Math.sin(angle) * (radius + barHeight);
ctx.moveTo(x1, y1);
ctx.lineTo(x2, y2);
}
ctx.stroke();
// Inner solid circle
ctx.beginPath();
ctx.arc(0, 0, radius - thickness, 0, Math.PI * 2);
ctx.lineWidth = thickness / 2;
ctx.stroke();
}
ctx.restore(); // Restore context to original state for next frame
reqIdRef.current = requestAnimationFrame(draw);
// NOTE(review): RESOLUTIONS is read above but intentionally absent from this
// dependency list (it is recreated every render); see the note at its definition.
}, [vizType, color, thickness, spacing, sensitivity, smoothing, colorMode, color2, glow, resolution, bgType, bgColor, offsetX, offsetY, scale, rotation]);
// Handle Play/Pause side effects and loop
// The loop runs continuously (even while paused) so settings changes are
// reflected immediately; draw() re-schedules itself each frame, and the
// cleanup cancels whichever frame id is pending when deps change/unmount.
useEffect(() => {
reqIdRef.current = requestAnimationFrame(draw);
return () => cancelAnimationFrame(reqIdRef.current);
}, [draw]);
// <audio> 'ended' handler: clear the playing flag and, if a video export was
// running, finalize the recording so the file downloads automatically.
const handleAudioEnded = () => {
  setIsPlaying(false);
  if (isExportingVideo) stopVideoExport();
};
// Export Image (PNG): download the current canvas frame as a PNG snapshot
// via a synthetic anchor click.
const exportImage = () => {
  const canvas = canvasRef.current;
  if (!canvas) return;
  const anchor = document.createElement('a');
  anchor.download = `visualizer_${Date.now()}.png`;
  anchor.href = canvas.toDataURL('image/png');
  anchor.click();
};
// Export 4K Transparent Video: records the canvas stream (60fps) muxed with
// the Web Audio recording tap into a WebM file, playing the track from the
// start in real time. Fixes: play() rejection is now handled (previously an
// unhandled async rejection left the recorder running), and progress is
// guarded against a NaN duration when audio metadata has not loaded yet.
const startVideoExport = async () => {
  if (!audioSrc || !canvasRef.current || !audioCtxRef.current) {
    alert("Please upload an audio file and press play at least once to initialize.");
    return;
  }
  setIsExportingVideo(true);
  setExportProgress(0);
  chunksRef.current = [];
  // Reset audio to start
  audioRef.current.pause();
  audioRef.current.currentTime = 0;
  // Capture Canvas Stream at 60fps
  const canvasStream = canvasRef.current.captureStream(60);
  // Get Audio Stream from destination
  const audioStream = destRef.current.stream;
  // Combine Streams
  const combinedTracks = [...canvasStream.getTracks(), ...audioStream.getAudioTracks()];
  const combinedStream = new MediaStream(combinedTracks);
  // Setup MediaRecorder for Transparent Video (WebM VP9/VP8)
  let options = { mimeType: 'video/webm; codecs=vp9' };
  if (!MediaRecorder.isTypeSupported(options.mimeType)) {
    options = { mimeType: 'video/webm; codecs=vp8' }; // Fallback
  }
  if (!MediaRecorder.isTypeSupported(options.mimeType)) {
    alert("Your browser does not support transparent video export (WebM with VP8/VP9 codecs). Export will proceed but may not be transparent.");
    options = {}; // Use browser default
  }
  try {
    mediaRecorderRef.current = new MediaRecorder(combinedStream, options);
  } catch (e) {
    console.error(e);
    alert("Error starting video recorder. See console.");
    setIsExportingVideo(false);
    return;
  }
  mediaRecorderRef.current.ondataavailable = (e) => {
    if (e.data && e.data.size > 0) {
      chunksRef.current.push(e.data);
    }
  };
  // On stop, assemble the chunks into a Blob and trigger the download.
  mediaRecorderRef.current.onstop = () => {
    const blob = new Blob(chunksRef.current, { type: mediaRecorderRef.current.mimeType });
    const url = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.download = `visualizer_4k_${Date.now()}.webm`;
    link.href = url;
    link.click();
    URL.revokeObjectURL(url);
    setIsExportingVideo(false);
    setExportProgress(0);
  };
  // Progress timer: polls playback position and stops itself once paused.
  const duration = audioRef.current.duration;
  const progressInterval = setInterval(() => {
    if (audioRef.current && !audioRef.current.paused) {
      // duration is NaN until metadata loads; skip the update rather than show NaN%.
      if (Number.isFinite(duration) && duration > 0) {
        setExportProgress((audioRef.current.currentTime / duration) * 100);
      }
    } else {
      clearInterval(progressInterval);
    }
  }, 500);
  // Start Recording & Playback
  mediaRecorderRef.current.start(100); // collect data every 100ms
  try {
    await audioRef.current.play();
    setIsPlaying(true);
  } catch (err) {
    // Autoplay policy or decode failure: abort the export cleanly.
    console.error('Playback failed, aborting export:', err);
    clearInterval(progressInterval);
    if (mediaRecorderRef.current.state !== 'inactive') {
      mediaRecorderRef.current.stop();
    }
    setIsExportingVideo(false);
  }
};
// Stop an in-progress video export: stopping the recorder fires its onstop
// handler (which downloads the file), then playback is paused.
// Fix: guard audioRef.current — this can be invoked from the recorder path
// while the audio element is unmounting.
const stopVideoExport = () => {
  const recorder = mediaRecorderRef.current;
  if (recorder && recorder.state !== 'inactive') {
    recorder.stop();
  }
  audioRef.current?.pause();
  setIsPlaying(false);
};
// NOTE(review): the JSX below appears to have been mangled (element tags
// stripped) by an extraction or copy step — it will not compile as written.
// Restore the original markup from version control rather than reconstructing
// it by hand; the surviving expressions and class strings are kept verbatim.
return (
{/* Header */}
{/* Left Column: Controls */}
{/* Upload Section */}
Audio Input
{fileName ? fileName : 'Click to browse audio file'}
MP3, WAV, FLAC
{/* Hidden Audio Element */}
setIsPlaying(true)}
onPause={() => setIsPlaying(false)}
/>
{/* Playback Controls */}
{audioSrc && (
{isPlaying ? : }
{isPlaying ? 'Pause' : 'Play Audio'}
)}
{/* Settings Section */}
Visual Settings
{/* Type */}
Style
{['bars', 'wave', 'circle'].map(type => (
setVizType(type)}
className={`py-2 px-3 rounded-lg text-sm font-medium capitalize transition-all ${
vizType === type
? 'bg-slate-700 text-white shadow-inner border border-slate-600'
: 'bg-slate-950 text-slate-400 border border-slate-800 hover:border-slate-600'
}`}
>
{type}
))}
{/* Color & Style */}
{/* Glow Effect */}
{/* Thickness */}
Line Thickness
{thickness}px
setThickness(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
{/* Spacing */}
Space Between Lines
{spacing}px
setSpacing(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
{/* Amplitude (Sensitivity) */}
Amplitude (Height)
{sensitivity.toFixed(1)}x
setSensitivity(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
{/* Smoothing */}
Motion Smoothing
{Math.round(smoothing * 100)}%
setSmoothing(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
{/* Transform Settings */}
{/* Advanced Layout Section */}
Output Setup
{/* Resolution */}
Aspect Ratio & Resolution
setResolution(e.target.value)}
className="w-full bg-slate-950 border border-slate-700 text-slate-300 text-sm rounded-lg px-3 py-2.5 outline-none focus:ring-1 focus:ring-cyan-500"
>
4K Landscape (3840x2160)
1080p Landscape (1920x1080)
4K Vertical / Reels (2160x3840)
1080p Vertical / Reels (1080x1920)
{/* Background */}
Background Type
{['transparent', 'color', 'image'].map(type => (
setBgType(type)}
className={`flex-1 py-2 px-2 rounded-lg text-xs font-medium capitalize transition-all ${
bgType === type
? 'bg-slate-700 text-white shadow-inner border border-slate-600'
: 'bg-slate-950 text-slate-400 border border-slate-800 hover:border-slate-600'
}`}
>
{type}
))}
{bgType === 'color' && (
setBgColor(e.target.value)} className="h-8 w-12 rounded cursor-pointer bg-slate-950 border border-slate-700" />
{bgColor}
)}
{bgType === 'image' && (
{bgImageSrc ? 'Change Image' : 'Upload Background Image'}
)}
{/* Export Section */}
Export Options
Save Snapshot (PNG)
{isExportingVideo ? (
Stop & Save
Recording transparent 4K video... {Math.round(exportProgress)}%
) : (
Export 4K Video (WebM)
)}
* Video export records in real-time. Background will be transparent. WebM VP9 format is used for alpha channel support.
{/* Right Column: Preview */}
Live Preview
{RESOLUTIONS[resolution]?.w}x{RESOLUTIONS[resolution]?.h}
{bgType === 'transparent' ? 'Checkerboard denotes transparency' : 'Background included in export'}
{/* Checkerboard Background for Transparency check */}
{/* Actual Canvas */}
{!audioSrc && (
)}
);
}