// 4kaudio / 4kaudiovis — "Create 4kaudiovis" by trysem (commit b322a42, verified)
import React, { useState, useRef, useEffect, useCallback } from 'react';
import { Upload, Play, Pause, Image as ImageIcon, Video, Settings2, Loader2, StopCircle } from 'lucide-react';
/**
 * 4K Transparent Audio Visualizer.
 * Renders an uploaded audio file as a live visualization on a native
 * 3840x2160 canvas (transparent background) and can export either a PNG
 * snapshot or a real-time-recorded WebM video with alpha.
 */
export default function App() {
// Refs: DOM nodes and Web Audio / recording objects that must survive re-renders.
const canvasRef = useRef(null); // <canvas> at native 4K resolution
const audioRef = useRef(null); // hidden <audio> element driving playback
const audioCtxRef = useRef(null); // lazily-created AudioContext
const analyserRef = useRef(null); // AnalyserNode feeding the draw loop
const sourceRef = useRef(null); // MediaElementSource (must be created only once per element)
const destRef = useRef(null); // MediaStreamDestination used as the recording's audio track
const reqIdRef = useRef(null); // current requestAnimationFrame id
const mediaRecorderRef = useRef(null);
const chunksRef = useRef([]); // recorded Blob chunks accumulated during export
// State
const [audioSrc, setAudioSrc] = useState(null); // object URL of the uploaded file
const [fileName, setFileName] = useState('');
const [isPlaying, setIsPlaying] = useState(false);
const [isExportingVideo, setIsExportingVideo] = useState(false);
const [exportProgress, setExportProgress] = useState(0); // 0..100, driven by playback position
// Settings
const [vizType, setVizType] = useState('bars'); // 'bars', 'wave', 'circle'
const [color, setColor] = useState('#00ffcc');
const [thickness, setThickness] = useState(12);
const [sensitivity, setSensitivity] = useState(128); // FFT Size divider
// Initialize Web Audio API
// Lazily builds the audio graph on first use (must happen after a user
// gesture because of browser autoplay policy):
//   <audio> element -> source -> analyser -> speakers
//                                         -> MediaStreamDestination (for recording)
// Safe to call repeatedly; it only creates each node once and resumes a
// suspended context on subsequent calls.
const initAudio = useCallback(() => {
if (!audioCtxRef.current) {
const AudioContext = window.AudioContext || window.webkitAudioContext;
audioCtxRef.current = new AudioContext();
analyserRef.current = audioCtxRef.current.createAnalyser();
destRef.current = audioCtxRef.current.createMediaStreamDestination();
// We only want to create the source once for the audio element
// (createMediaElementSource throws if called twice on the same element).
if (!sourceRef.current && audioRef.current) {
sourceRef.current = audioCtxRef.current.createMediaElementSource(audioRef.current);
sourceRef.current.connect(analyserRef.current);
analyserRef.current.connect(audioCtxRef.current.destination); // to speakers
analyserRef.current.connect(destRef.current); // to recording destination
}
}
// Resume context if suspended (browser autoplay policy)
if (audioCtxRef.current.state === 'suspended') {
audioCtxRef.current.resume();
}
}, []);
// Load a newly selected audio file: swap in a fresh object URL (releasing
// the previous one) and reset playback to the beginning.
const handleFileUpload = (e) => {
  const file = e.target.files[0];
  if (!file) return;
  // Release the old blob URL before replacing it to avoid leaking memory.
  if (audioSrc) URL.revokeObjectURL(audioSrc);
  setAudioSrc(URL.createObjectURL(file));
  setFileName(file.name);
  setIsPlaying(false);
  const player = audioRef.current;
  if (player) {
    player.pause();
    player.currentTime = 0;
  }
};
// Toggle playback of the loaded track. Initializes/resumes the audio graph
// first (this runs inside a user gesture, satisfying autoplay policy).
const togglePlay = () => {
  if (!audioSrc) return;
  initAudio();
  if (isPlaying) {
    audioRef.current.pause();
    setIsPlaying(false);
  } else {
    // play() returns a promise that can reject (autoplay policy, decode
    // failure). The original flipped isPlaying unconditionally, leaving the
    // UI in a "playing" state with no audio and an unhandled rejection —
    // only mark as playing once playback actually starts.
    audioRef.current.play()
      .then(() => setIsPlaying(true))
      .catch((err) => {
        console.error('Audio playback failed:', err);
        setIsPlaying(false);
      });
  }
};
// The Animation Loop
// Renders one frame of the selected visualization at native 4K onto a
// transparent canvas, then schedules the next frame. Re-created (via
// useCallback) whenever a visual setting changes.
const draw = useCallback(() => {
  if (!canvasRef.current || !analyserRef.current) {
    // Audio graph not initialized yet — keep the loop alive and retry.
    reqIdRef.current = requestAnimationFrame(draw);
    return;
  }
  const canvas = canvasRef.current;
  const ctx = canvas.getContext('2d');
  const width = canvas.width; // Native 4K: 3840
  const height = canvas.height; // Native 4K: 2160
  // Clear canvas completely to maintain transparency
  ctx.clearRect(0, 0, width, height);
  // Dynamic FFT size based on sensitivity setting.
  // BUG FIX: AnalyserNode.fftSize must be a power of two in [32, 32768].
  // The previous expression, 2048 / (sensitivity / 32) === 65536 / sensitivity,
  // is NOT a power of two for most slider values (e.g. sensitivity 96 ->
  // 682.67) and assigning it throws an IndexSizeError, killing this frame.
  // Round to the nearest power of two and clamp to the legal range.
  const rawSize = sensitivity > 0 ? 2048 / (sensitivity / 32) : 2048;
  const pow2Size = 2 ** Math.round(Math.log2(rawSize));
  analyserRef.current.fftSize = Math.min(32768, Math.max(32, pow2Size));
  const bufferLength = analyserRef.current.frequencyBinCount;
  const dataArray = new Uint8Array(bufferLength);
  ctx.lineWidth = thickness;
  ctx.strokeStyle = color;
  ctx.fillStyle = color;
  ctx.lineCap = 'round';
  ctx.lineJoin = 'round';
  if (vizType === 'bars') {
    // Frequency bars anchored to the bottom edge, scaled to 80% of height.
    analyserRef.current.getByteFrequencyData(dataArray);
    const barWidth = (width / bufferLength) * 2.5;
    let x = 0;
    for (let i = 0; i < bufferLength; i++) {
      const barHeight = (dataArray[i] / 255) * height * 0.8;
      ctx.fillRect(x, height - barHeight, barWidth - 2, barHeight);
      x += barWidth;
    }
  } else if (vizType === 'wave') {
    // Oscilloscope-style waveform across the full width.
    analyserRef.current.getByteTimeDomainData(dataArray);
    ctx.beginPath();
    const sliceWidth = width / bufferLength;
    let x = 0;
    for (let i = 0; i < bufferLength; i++) {
      const v = dataArray[i] / 128.0; // 0 to 2, silence at 1
      const y = v * height / 2;
      if (i === 0) {
        ctx.moveTo(x, y);
      } else {
        ctx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    ctx.stroke();
  } else if (vizType === 'circle') {
    // Radial spectrum: 180 spokes around an inner circle.
    analyserRef.current.getByteFrequencyData(dataArray);
    const centerX = width / 2;
    const centerY = height / 2;
    const radius = height / 4;
    ctx.beginPath();
    const bars = 180; // Limit bars to make a clean circle
    const step = (Math.PI * 2) / bars;
    for (let i = 0; i < bars; i++) {
      // Sample only the lower half of the spectrum (most musical energy).
      const dataIndex = Math.floor((i / bars) * (bufferLength / 2));
      const value = dataArray[dataIndex] / 255;
      const barHeight = value * (height / 3);
      const angle = i * step;
      const x1 = centerX + Math.cos(angle) * radius;
      const y1 = centerY + Math.sin(angle) * radius;
      const x2 = centerX + Math.cos(angle) * (radius + barHeight);
      const y2 = centerY + Math.sin(angle) * (radius + barHeight);
      ctx.moveTo(x1, y1);
      ctx.lineTo(x2, y2);
    }
    ctx.stroke();
    // Inner solid circle
    ctx.beginPath();
    ctx.arc(centerX, centerY, radius - thickness, 0, Math.PI * 2);
    ctx.lineWidth = thickness / 2;
    ctx.stroke();
  }
  reqIdRef.current = requestAnimationFrame(draw);
}, [vizType, color, thickness, sensitivity]);
// Handle Play/Pause side effects and loop
// (Re)start the render loop whenever `draw` is recreated (i.e. a visual
// setting changed); cancel the pending frame on cleanup so only one loop runs.
useEffect(() => {
reqIdRef.current = requestAnimationFrame(draw);
return () => cancelAnimationFrame(reqIdRef.current);
}, [draw]);
// When the track runs out, reflect the stopped state in the UI and, if a
// video export is in flight, finalize the recording.
const handleAudioEnded = () => {
  setIsPlaying(false);
  if (isExportingVideo) stopVideoExport();
};
// Export Image (PNG)
// Downloads the current canvas frame as a transparent 4K PNG via a
// synthetic anchor click.
const exportImage = () => {
  const canvas = canvasRef.current;
  if (!canvas) return;
  const anchor = document.createElement('a');
  anchor.href = canvas.toDataURL('image/png');
  anchor.download = `visualizer_${Date.now()}.png`;
  anchor.click();
};
// Export 4K Transparent Video
// Records the canvas (60fps video track) plus the analyser's audio output
// into a WebM file in real time, starting from the top of the track.
// Requires the audio graph to exist already (user must have pressed play once).
const startVideoExport = async () => {
  if (!audioSrc || !canvasRef.current || !audioCtxRef.current) {
    alert("Please upload an audio file and press play at least once to initialize.");
    return;
  }
  setIsExportingVideo(true);
  setExportProgress(0);
  chunksRef.current = [];
  // Reset audio to start
  audioRef.current.pause();
  audioRef.current.currentTime = 0;
  // Capture Canvas Stream at 60fps
  const canvasStream = canvasRef.current.captureStream(60);
  // Get Audio Stream from destination
  const audioStream = destRef.current.stream;
  // Combine Streams
  const combinedTracks = [...canvasStream.getTracks(), ...audioStream.getAudioTracks()];
  const combinedStream = new MediaStream(combinedTracks);
  // Setup MediaRecorder for Transparent Video (WebM VP9/VP8)
  let options = { mimeType: 'video/webm; codecs=vp9' };
  if (!MediaRecorder.isTypeSupported(options.mimeType)) {
    options = { mimeType: 'video/webm; codecs=vp8' }; // Fallback
  }
  if (!MediaRecorder.isTypeSupported(options.mimeType)) {
    alert("Your browser does not support transparent video export (WebM with VP8/VP9 codecs). Export will proceed but may not be transparent.");
    options = {}; // Use browser default
  }
  try {
    mediaRecorderRef.current = new MediaRecorder(combinedStream, options);
  } catch (e) {
    console.error(e);
    alert("Error starting video recorder. See console.");
    setIsExportingVideo(false);
    return;
  }
  mediaRecorderRef.current.ondataavailable = (e) => {
    if (e.data && e.data.size > 0) {
      chunksRef.current.push(e.data);
    }
  };
  // When recording stops (user stop, track end, or error cleanup below),
  // assemble the chunks and trigger the download.
  mediaRecorderRef.current.onstop = () => {
    const blob = new Blob(chunksRef.current, { type: mediaRecorderRef.current.mimeType });
    const url = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.download = `visualizer_4k_${Date.now()}.webm`;
    link.href = url;
    link.click();
    URL.revokeObjectURL(url);
    setIsExportingVideo(false);
    setExportProgress(0);
  };
  // Progress timer — polls playback position; self-clears once paused.
  const duration = audioRef.current.duration;
  const progressInterval = setInterval(() => {
    if (audioRef.current && !audioRef.current.paused) {
      // BUG FIX: duration can be NaN if metadata isn't loaded yet; the old
      // code then pushed NaN into exportProgress (breaking the progress bar).
      if (Number.isFinite(duration) && duration > 0) {
        setExportProgress((audioRef.current.currentTime / duration) * 100);
      }
    } else {
      clearInterval(progressInterval);
    }
  }, 500);
  // Start Recording & Playback
  mediaRecorderRef.current.start(100); // collect data every 100ms
  try {
    await audioRef.current.play();
    setIsPlaying(true);
  } catch (err) {
    // BUG FIX: play() can reject (autoplay policy, decode error). The old
    // code left the rejection unhandled and the recorder capturing a frozen
    // frame forever. Stop the recorder (onstop resets the export state) and
    // surface the failure.
    console.error('Could not start playback for export:', err);
    clearInterval(progressInterval);
    if (mediaRecorderRef.current.state !== 'inactive') {
      mediaRecorderRef.current.stop();
    }
    setIsPlaying(false);
  }
};
// Finalize an in-progress export: stopping the recorder fires its onstop
// handler (which downloads the file and resets export state), then pause
// playback and reflect that in the UI.
const stopVideoExport = () => {
  const recorder = mediaRecorderRef.current;
  if (recorder && recorder.state !== 'inactive') {
    recorder.stop();
  }
  audioRef.current.pause();
  setIsPlaying(false);
};
// Layout: header bar, then a 12-column grid — left (4 cols) holds upload,
// visual settings and export controls; right (8 cols) holds the live 4K
// preview over a checkerboard that reveals canvas transparency.
return (
<div className="min-h-screen bg-slate-950 text-slate-200 font-sans selection:bg-cyan-500/30">
{/* Header */}
<header className="border-b border-slate-800 bg-slate-900/50 p-6 flex items-center justify-between">
<div className="flex items-center gap-3">
<div className="bg-cyan-500/20 p-2 rounded-lg">
<Video className="w-6 h-6 text-cyan-400" />
</div>
<h1 className="text-xl font-bold tracking-tight text-white">4K Transparent Visualizer</h1>
</div>
<div className="text-sm text-slate-400 hidden sm:block">
All processing is strictly local.
</div>
</header>
<main className="container mx-auto p-6 grid lg:grid-cols-12 gap-8">
{/* Left Column: Controls */}
<div className="lg:col-span-4 space-y-6">
{/* Upload Section */}
<section className="bg-slate-900 p-6 rounded-2xl border border-slate-800 shadow-xl">
<h2 className="text-sm font-semibold uppercase tracking-wider text-slate-500 mb-4 flex items-center gap-2">
<Upload className="w-4 h-4" /> Audio Input
</h2>
<label className="block w-full cursor-pointer bg-slate-800 hover:bg-slate-700 transition-colors border-2 border-dashed border-slate-600 rounded-xl p-8 text-center group">
<input
type="file"
accept="audio/*"
onChange={handleFileUpload}
className="hidden"
disabled={isExportingVideo}
/>
<div className="mx-auto w-12 h-12 bg-slate-900 rounded-full flex items-center justify-center mb-3 group-hover:scale-110 transition-transform">
<Upload className="w-6 h-6 text-cyan-400" />
</div>
<p className="font-medium text-slate-300">
{fileName ? fileName : 'Click to browse audio file'}
</p>
<p className="text-xs text-slate-500 mt-2">MP3, WAV, FLAC</p>
</label>
{/* Hidden Audio Element — drives both playback and the analyser graph */}
<audio
ref={audioRef}
src={audioSrc}
onEnded={handleAudioEnded}
onPlay={() => setIsPlaying(true)}
onPause={() => setIsPlaying(false)}
/>
{/* Playback Controls */}
{audioSrc && (
<div className="mt-4 flex gap-3">
<button
onClick={togglePlay}
disabled={isExportingVideo}
className="flex-1 bg-cyan-500 hover:bg-cyan-400 text-slate-950 font-bold py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-colors disabled:opacity-50"
>
{isPlaying ? <Pause className="w-5 h-5" /> : <Play className="w-5 h-5" />}
{isPlaying ? 'Pause' : 'Play Audio'}
</button>
</div>
)}
</section>
{/* Settings Section */}
<section className="bg-slate-900 p-6 rounded-2xl border border-slate-800 shadow-xl">
<h2 className="text-sm font-semibold uppercase tracking-wider text-slate-500 mb-4 flex items-center gap-2">
<Settings2 className="w-4 h-4" /> Visual Settings
</h2>
<div className="space-y-5">
{/* Type */}
<div>
<label className="block text-sm font-medium text-slate-400 mb-2">Style</label>
<div className="grid grid-cols-3 gap-2">
{['bars', 'wave', 'circle'].map(type => (
<button
key={type}
onClick={() => setVizType(type)}
className={`py-2 px-3 rounded-lg text-sm font-medium capitalize transition-all ${
vizType === type
? 'bg-slate-700 text-white shadow-inner border border-slate-600'
: 'bg-slate-950 text-slate-400 border border-slate-800 hover:border-slate-600'
}`}
>
{type}
</button>
))}
</div>
</div>
{/* Color */}
<div>
<label className="block text-sm font-medium text-slate-400 mb-2">Primary Color</label>
<div className="flex items-center gap-3">
<input
type="color"
value={color}
onChange={(e) => setColor(e.target.value)}
className="h-10 w-14 rounded cursor-pointer bg-slate-950 border border-slate-700"
/>
<input
type="text"
value={color}
onChange={(e) => setColor(e.target.value)}
className="flex-1 bg-slate-950 border border-slate-800 rounded-lg px-3 py-2 text-sm focus:ring-1 focus:ring-cyan-500 outline-none uppercase font-mono"
/>
</div>
</div>
{/* Thickness */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Line Thickness</span>
<span className="text-slate-500">{thickness}px</span>
</label>
<input
type="range"
min="2" max="64"
value={thickness}
onChange={(e) => setThickness(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Sensitivity / Scale — feeds the analyser fftSize in draw() */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Reactivity (FFT)</span>
</label>
<input
type="range"
min="32" max="256" step="32"
value={sensitivity}
onChange={(e) => setSensitivity(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
</div>
</section>
{/* Export Section */}
<section className="bg-slate-900 p-6 rounded-2xl border border-slate-800 shadow-xl">
<h2 className="text-sm font-semibold uppercase tracking-wider text-slate-500 mb-4">Export Options</h2>
<div className="grid grid-cols-2 gap-3">
<button
onClick={exportImage}
disabled={isExportingVideo}
className="col-span-2 bg-slate-800 hover:bg-slate-700 text-white font-medium py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-colors disabled:opacity-50"
>
<ImageIcon className="w-4 h-4" />
Save Snapshot (PNG)
</button>
{isExportingVideo ? (
<div className="col-span-2 space-y-3">
<button
onClick={stopVideoExport}
className="w-full bg-red-500/10 hover:bg-red-500/20 text-red-500 border border-red-500/20 font-bold py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-colors"
>
<StopCircle className="w-5 h-5" />
Stop & Save
</button>
<div className="w-full bg-slate-950 rounded-full h-2.5 border border-slate-800 overflow-hidden">
<div className="bg-cyan-500 h-2.5 rounded-full transition-all duration-300" style={{ width: `${exportProgress}%` }}></div>
</div>
<p className="text-xs text-center text-slate-400">Recording transparent 4K video... {Math.round(exportProgress)}%</p>
</div>
) : (
<button
onClick={startVideoExport}
disabled={!audioSrc}
className="col-span-2 bg-gradient-to-r from-indigo-500 to-cyan-500 hover:from-indigo-400 hover:to-cyan-400 text-white font-bold py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-all shadow-lg shadow-cyan-500/20 disabled:opacity-50 disabled:shadow-none"
>
<Video className="w-5 h-5" />
Export 4K Video (WebM)
</button>
)}
</div>
<p className="text-xs text-slate-500 mt-4 leading-relaxed">
* Video export records in real-time. Background will be transparent. WebM VP9 format is used for alpha channel support.
</p>
</section>
</div>
{/* Right Column: Preview */}
<div className="lg:col-span-8 flex flex-col">
<div className="bg-slate-900 rounded-2xl border border-slate-800 shadow-xl overflow-hidden flex-1 relative flex flex-col">
<div className="p-4 border-b border-slate-800 bg-slate-900/80 flex justify-between items-center z-10">
<span className="text-sm font-semibold text-slate-300 flex items-center gap-2">
Live Preview
<span className="bg-slate-800 text-xs px-2 py-0.5 rounded text-slate-400 border border-slate-700">Native: 3840x2160</span>
</span>
<span className="text-xs text-slate-500">Checkerboard denotes transparency</span>
</div>
{/* Checkerboard Background for Transparency check */}
<div
className="flex-1 w-full relative flex items-center justify-center p-8"
style={{
backgroundImage: 'repeating-linear-gradient(45deg, #0f172a 25%, transparent 25%, transparent 75%, #0f172a 75%, #0f172a), repeating-linear-gradient(45deg, #0f172a 25%, #1e293b 25%, #1e293b 75%, #0f172a 75%, #0f172a)',
backgroundPosition: '0 0, 10px 10px',
backgroundSize: '20px 20px'
}}
>
{/* Actual Canvas (Internal 4K resolution, scaled by CSS for preview) */}
<canvas
ref={canvasRef}
width="3840"
height="2160"
className="w-full aspect-video object-contain drop-shadow-2xl rounded-lg ring-1 ring-white/10 bg-transparent"
/>
{/* Overlay shown until an audio file is loaded */}
{!audioSrc && (
<div className="absolute inset-0 flex flex-col items-center justify-center pointer-events-none bg-slate-900/60 backdrop-blur-sm">
<Loader2 className="w-12 h-12 text-slate-500 animate-spin mb-4 opacity-50" />
<p className="text-slate-400 font-medium">Awaiting Audio Input</p>
</div>
)}
</div>
</div>
</div>
</main>
</div>
);
}