// 4kaudio / OG-BEST-COPY
// Source: trysem — "Create OG-BEST-COPY" (commit d4118d0, verified)
import React, { useState, useRef, useEffect, useCallback } from 'react';
import { Upload, Play, Pause, Image as ImageIcon, Video, Settings2, Loader2, StopCircle, Sparkles, Monitor, ImagePlus } from 'lucide-react';
// Root component for the 4K audio visualizer: owns all audio / recording /
// visual-settings state, wires the Web Audio graph, and renders the control
// panel plus the live canvas preview.
export default function App() {
// --- Mutable refs (persist across renders without causing re-renders) ---
const canvasRef = useRef(null); // render-target <canvas>
const audioRef = useRef(null); // hidden <audio> element
const audioCtxRef = useRef(null); // AudioContext, lazily created in initAudio()
const analyserRef = useRef(null); // AnalyserNode feeding the draw loop
const sourceRef = useRef(null); // MediaElementSource — created once per element
const destRef = useRef(null); // MediaStreamDestination used for recording audio
const reqIdRef = useRef(null); // latest requestAnimationFrame id (for cancel)
const mediaRecorderRef = useRef(null); // active MediaRecorder during export
const chunksRef = useRef([]); // recorded WebM chunks
const bgImgRef = useRef(null); // decoded background Image, or null
// State
const [audioSrc, setAudioSrc] = useState(null); // object URL of uploaded audio
const [fileName, setFileName] = useState('');
const [isPlaying, setIsPlaying] = useState(false);
const [isExportingVideo, setIsExportingVideo] = useState(false);
const [exportProgress, setExportProgress] = useState(0); // 0..100
// Settings
const [vizType, setVizType] = useState('bars'); // 'bars', 'wave', 'circle'
const [color, setColor] = useState('#00ffcc');
const [thickness, setThickness] = useState(12); // line width, px (2..64 in UI)
const [spacing, setSpacing] = useState(8); // NEW: gap between bars, px
const [sensitivity, setSensitivity] = useState(1.5); // Changed to Amplitude Multiplier
const [smoothing, setSmoothing] = useState(0.85); // NEW: analyser smoothingTimeConstant
// Transform Settings (NEW)
const [offsetX, setOffsetX] = useState(0); // horizontal offset, % of width (-50..50)
const [offsetY, setOffsetY] = useState(0); // vertical offset, % of height (-50..50)
const [scale, setScale] = useState(1.0);
const [rotation, setRotation] = useState(0); // degrees
// Advanced Settings
const [colorMode, setColorMode] = useState('solid'); // 'solid' | 'gradient' | 'rainbow'
const [color2, setColor2] = useState('#b829ff'); // gradient end color
const [glow, setGlow] = useState(false);
const [resolution, setResolution] = useState('4k_16_9'); // key into RESOLUTIONS
const [bgType, setBgType] = useState('transparent'); // 'transparent' | 'color' | 'image'
const [bgColor, setBgColor] = useState('#0f172a');
const [bgImageSrc, setBgImageSrc] = useState(null); // object URL of bg image
// Output presets; rebuilt each render but contents are constant.
const RESOLUTIONS = {
'4k_16_9': { w: 3840, h: 2160, label: '4K (16:9)', isVertical: false },
'1080p_16_9': { w: 1920, h: 1080, label: '1080p (16:9)', isVertical: false },
'4k_9_16': { w: 2160, h: 3840, label: '4K Vertical (9:16)', isVertical: true },
'1080p_9_16': { w: 1080, h: 1920, label: '1080p Vertical (9:16)', isVertical: true }
};
// Load background image: decode it off-DOM so draw() can blit it each frame.
// Fix: the original had no cleanup, so a slow decode of a superseded
// bgImageSrc could fire onload late and overwrite bgImgRef with a stale image.
useEffect(() => {
if (!bgImageSrc) {
bgImgRef.current = null;
return;
}
let cancelled = false;
const img = new Image();
img.onload = () => { if (!cancelled) bgImgRef.current = img; };
img.src = bgImageSrc;
return () => { cancelled = true; };
}, [bgImageSrc]);
// Background image <input type="file"> handler: swap the object URL
// (revoking the previous one) and switch the background mode to 'image'.
const handleBgUpload = (e) => {
const file = e.target.files[0];
if (!file) return;
if (bgImageSrc) URL.revokeObjectURL(bgImageSrc);
setBgImageSrc(URL.createObjectURL(file));
setBgType('image');
};
// Initialize Web Audio API (idempotent). Builds the graph once:
// <audio> element -> analyser -> { speakers, MediaStreamDestination }.
// The MediaElementSource must only ever be created once per element.
const initAudio = useCallback(() => {
if (!audioCtxRef.current) {
const Ctor = window.AudioContext || window.webkitAudioContext;
const ctx = new Ctor();
audioCtxRef.current = ctx;
analyserRef.current = ctx.createAnalyser();
destRef.current = ctx.createMediaStreamDestination();
if (!sourceRef.current && audioRef.current) {
const source = ctx.createMediaElementSource(audioRef.current);
source.connect(analyserRef.current);
analyserRef.current.connect(ctx.destination); // to speakers
analyserRef.current.connect(destRef.current); // to recording destination
sourceRef.current = source;
}
}
// Browser autoplay policy may leave the context suspended until a gesture.
if (audioCtxRef.current.state === 'suspended') {
audioCtxRef.current.resume();
}
}, []);
// Audio <input type="file"> handler: replace the current track (revoking the
// old object URL), reset the player to a paused, rewound state.
const handleFileUpload = (e) => {
const file = e.target.files[0];
if (!file) return;
if (audioSrc) URL.revokeObjectURL(audioSrc);
setAudioSrc(URL.createObjectURL(file));
setFileName(file.name);
setIsPlaying(false);
const player = audioRef.current;
if (player) {
player.pause();
player.currentTime = 0;
}
};
// Play/pause button handler. Lazily initializes the audio graph on first use
// (required: AudioContext must be created/resumed from a user gesture).
// Fix: play() returns a promise that can reject (autoplay policy, decode
// error); the original ignored it, leaving the optimistic state flip below
// showing "playing" forever. Correct the state and log on failure.
const togglePlay = () => {
if (!audioSrc) return;
initAudio();
if (isPlaying) {
audioRef.current.pause();
} else {
audioRef.current.play().catch((err) => {
console.error('Audio playback failed:', err);
setIsPlaying(false);
});
}
setIsPlaying(!isPlaying);
};
// The Animation Loop
// Renders one frame: background, transforms, then the selected visualizer
// (bars / wave / circle), and reschedules itself via requestAnimationFrame.
// Runs even while audio is paused so setting changes preview live.
// NOTE(review): RESOLUTIONS is referenced but not listed in the deps array;
// safe only because its contents are constant across renders — confirm.
const draw = useCallback(() => {
// Nothing to draw until the canvas mounts and initAudio() has run.
if (!canvasRef.current || !analyserRef.current) {
reqIdRef.current = requestAnimationFrame(draw);
return;
}
const canvas = canvasRef.current;
const ctx = canvas.getContext('2d');
const res = RESOLUTIONS[resolution] || RESOLUTIONS['4k_16_9'];
// Update canvas native resolution if changed
// (assigning width/height also clears the canvas).
if (canvas.width !== res.w || canvas.height !== res.h) {
canvas.width = res.w;
canvas.height = res.h;
}
const width = canvas.width;
const height = canvas.height;
// Background handling: clearRect keeps alpha for 'transparent' mode.
ctx.clearRect(0, 0, width, height);
if (bgType === 'color') {
ctx.fillStyle = bgColor;
ctx.fillRect(0, 0, width, height);
} else if (bgType === 'image' && bgImgRef.current) {
// Cover-fit the image: scale to fill, center, crop the overflow.
const img = bgImgRef.current;
const imgRatio = img.width / img.height;
const canvasRatio = width / height;
let drawW, drawH, drawX, drawY;
if (imgRatio > canvasRatio) {
drawH = height;
drawW = height * imgRatio;
drawX = (width - drawW) / 2;
drawY = 0;
} else {
drawW = width;
drawH = width / imgRatio;
drawX = 0;
drawY = (height - drawH) / 2;
}
ctx.drawImage(img, drawX, drawY, drawW, drawH);
}
// Dynamic smoothing and fixed FFT for better reactivity control
analyserRef.current.smoothingTimeConstant = smoothing;
analyserRef.current.fftSize = 2048;
const bufferLength = analyserRef.current.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
ctx.save(); // Save context before applying transforms
// Apply Transformations (Position, Scale, Rotation)
// offsetX/offsetY are percentages of the canvas dimensions (-50..50).
const centerX = width / 2 + (width * (offsetX / 100));
const centerY = height / 2 + (height * (offsetY / 100));
ctx.translate(centerX, centerY);
ctx.scale(scale, scale);
ctx.rotate((rotation * Math.PI) / 180);
// Color and Glow logic (Coordinates adapted to centered origin)
let activeColor = color;
if (colorMode === 'gradient') {
const grad = ctx.createLinearGradient(-width/2, -height/2, width/2, height/2);
grad.addColorStop(0, color);
grad.addColorStop(1, color2);
activeColor = grad;
} else if (colorMode === 'rainbow') {
// Fixed horizontal rainbow across the full (pre-scale) width.
const grad = ctx.createLinearGradient(-width/2, 0, width/2, 0);
grad.addColorStop(0, '#ff0000');
grad.addColorStop(0.16, '#ffff00');
grad.addColorStop(0.33, '#00ff00');
grad.addColorStop(0.5, '#00ffff');
grad.addColorStop(0.66, '#0000ff');
grad.addColorStop(0.83, '#ff00ff');
grad.addColorStop(1, '#ff0000');
activeColor = grad;
}
ctx.lineWidth = thickness;
ctx.strokeStyle = activeColor;
ctx.fillStyle = activeColor;
ctx.lineCap = 'round';
ctx.lineJoin = 'round';
if (glow) {
ctx.shadowBlur = thickness * 2;
ctx.shadowColor = colorMode === 'solid' ? color : (colorMode === 'gradient' ? color2 : '#ffffff');
} else {
ctx.shadowBlur = 0;
}
if (vizType === 'bars') {
// Mirrored frequency bars growing up from the bottom edge.
analyserRef.current.getByteFrequencyData(dataArray);
const step = thickness + spacing;
// Calculate exactly how many bars can fit in half the screen
const maxBars = Math.floor((width / 2) / step);
const usefulLength = Math.floor(bufferLength * 0.75); // Skip extreme silent highs
const numBars = Math.min(maxBars, usefulLength);
for (let i = 0; i < numBars; i++) {
const dataIndex = Math.floor((i / numBars) * usefulLength);
// Progressively boost higher frequencies
const boost = Math.pow(1 + (i / numBars), 1.5);
const value = dataArray[dataIndex] * boost * sensitivity;
// Minimum height ensures a nice dot/line is drawn even at complete silence
const barHeight = Math.max(thickness / 2, (value / 255) * height * 0.8);
const xOffset = i * step + (step / 2);
// Draw Right Side (Centered at 0,0 where bottom is height/2)
ctx.beginPath();
ctx.moveTo(xOffset, height / 2 - (thickness / 2));
ctx.lineTo(xOffset, height / 2 - barHeight);
ctx.stroke();
// Draw Left Side (Mirrored)
ctx.beginPath();
ctx.moveTo(-xOffset, height / 2 - (thickness / 2));
ctx.lineTo(-xOffset, height / 2 - barHeight);
ctx.stroke();
}
} else if (vizType === 'wave') {
// Single oscilloscope trace across the full width, centered vertically.
analyserRef.current.getByteTimeDomainData(dataArray);
ctx.beginPath();
const sliceWidth = width / bufferLength;
let x = -width / 2; // Start from left edge relative to center
for (let i = 0; i < bufferLength; i++) {
// Apply sensitivity to the wave
// (time-domain bytes are 0..255 with silence at 128; normalize to -1..1).
const normalized = (dataArray[i] / 128.0) - 1;
const y = normalized * sensitivity * (height / 2); // Centered on Y axis
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
}
x += sliceWidth;
}
ctx.stroke();
} else if (vizType === 'circle') {
// Radial frequency spokes around an inner ring.
analyserRef.current.getByteFrequencyData(dataArray);
const radius = height / 4;
ctx.beginPath();
// Calculate number of bars based on thickness and spacing
const circumference = 2 * Math.PI * radius;
const stepSize = thickness + spacing;
const bars = Math.min(180, Math.floor(circumference / stepSize));
const step = (Math.PI * 2) / bars;
for (let i = 0; i < bars; i++) {
// Only the lower half of the spectrum is mapped around the circle.
const dataIndex = Math.floor((i / bars) * (bufferLength / 2));
const value = (dataArray[dataIndex] / 255) * sensitivity;
const barHeight = Math.max(thickness / 2, value * (height / 3));
const angle = i * step;
const x1 = Math.cos(angle) * radius;
const y1 = Math.sin(angle) * radius;
const x2 = Math.cos(angle) * (radius + barHeight);
const y2 = Math.sin(angle) * (radius + barHeight);
ctx.moveTo(x1, y1);
ctx.lineTo(x2, y2);
}
ctx.stroke();
// Inner solid circle
ctx.beginPath();
ctx.arc(0, 0, radius - thickness, 0, Math.PI * 2);
ctx.lineWidth = thickness / 2;
ctx.stroke();
}
ctx.restore(); // Restore context to original state for next frame
reqIdRef.current = requestAnimationFrame(draw);
}, [vizType, color, thickness, spacing, sensitivity, smoothing, colorMode, color2, glow, resolution, bgType, bgColor, offsetX, offsetY, scale, rotation]);
// Drive the render loop: kick off one frame whenever `draw` changes identity
// (i.e. any visual setting changed) and cancel the pending frame on cleanup.
// The cleanup reads the ref, not a captured id, because draw() reschedules
// itself and keeps reqIdRef.current pointing at the latest frame.
useEffect(() => {
reqIdRef.current = requestAnimationFrame(draw);
return () => {
cancelAnimationFrame(reqIdRef.current);
};
}, [draw]);
// <audio> 'ended' handler: sync UI state, and if a recording is in flight,
// finalize it so the captured video ends with the track.
const handleAudioEnded = () => {
setIsPlaying(false);
if (isExportingVideo) stopVideoExport();
};
// Export Image (PNG): snapshot the current canvas frame (alpha preserved)
// and trigger a download via a temporary anchor element.
const exportImage = () => {
const canvas = canvasRef.current;
if (!canvas) return;
const anchor = document.createElement('a');
anchor.href = canvas.toDataURL('image/png');
anchor.download = `visualizer_${Date.now()}.png`;
anchor.click();
};
// Export 4K Transparent Video
// Records the canvas (60fps) muxed with the analyser's audio stream into a
// WebM file in real time: playback restarts from 0 and the MediaRecorder
// runs until the track ends or the user presses Stop.
// Fixes vs. original: (1) `await play()` could reject (autoplay policy),
// leaving the recorder running and the UI stuck in "exporting"; now aborted
// cleanly. (2) Progress showed NaN% when audio metadata (duration) wasn't
// loaded yet; now guarded.
const startVideoExport = async () => {
if (!audioSrc || !canvasRef.current || !audioCtxRef.current) {
alert("Please upload an audio file and press play at least once to initialize.");
return;
}
setIsExportingVideo(true);
setExportProgress(0);
chunksRef.current = [];
// Reset audio to start
audioRef.current.pause();
audioRef.current.currentTime = 0;
// Capture Canvas Stream at 60fps
const canvasStream = canvasRef.current.captureStream(60);
// Get Audio Stream from destination
const audioStream = destRef.current.stream;
// Combine Streams
const combinedStream = new MediaStream([
...canvasStream.getTracks(),
...audioStream.getAudioTracks(),
]);
// Setup MediaRecorder for Transparent Video (WebM VP9/VP8)
let options = { mimeType: 'video/webm; codecs=vp9' };
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
options = { mimeType: 'video/webm; codecs=vp8' }; // Fallback
}
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
alert("Your browser does not support transparent video export (WebM with VP8/VP9 codecs). Export will proceed but may not be transparent.");
options = {}; // Use browser default
}
try {
mediaRecorderRef.current = new MediaRecorder(combinedStream, options);
} catch (e) {
console.error(e);
alert("Error starting video recorder. See console.");
setIsExportingVideo(false);
return;
}
mediaRecorderRef.current.ondataavailable = (e) => {
if (e.data && e.data.size > 0) {
chunksRef.current.push(e.data);
}
};
// On stop: assemble the chunks and trigger a download.
mediaRecorderRef.current.onstop = () => {
const blob = new Blob(chunksRef.current, { type: mediaRecorderRef.current.mimeType });
const url = URL.createObjectURL(blob);
const link = document.createElement('a');
link.download = `visualizer_4k_${Date.now()}.webm`;
link.href = url;
link.click();
URL.revokeObjectURL(url);
setIsExportingVideo(false);
setExportProgress(0);
};
// Progress timer — duration is NaN until 'loadedmetadata' fires, so guard
// the division to avoid rendering "NaN%". Interval self-clears once the
// audio is paused (either finished or user-stopped).
const duration = audioRef.current.duration;
const progressInterval = setInterval(() => {
if (audioRef.current && !audioRef.current.paused) {
if (Number.isFinite(duration) && duration > 0) {
setExportProgress((audioRef.current.currentTime / duration) * 100);
}
} else {
clearInterval(progressInterval);
}
}, 500);
// Start Recording & Playback
mediaRecorderRef.current.start(100); // collect data every 100ms
try {
await audioRef.current.play();
setIsPlaying(true);
} catch (err) {
// Playback refused (autoplay policy / decode failure): tear the export
// down without saving a junk zero-length file.
console.error('Playback failed during export:', err);
clearInterval(progressInterval);
mediaRecorderRef.current.onstop = null;
mediaRecorderRef.current.stop();
setIsExportingVideo(false);
setExportProgress(0);
alert("Could not start audio playback for export.");
}
};
// Stop an in-flight recording (triggering onstop -> download) and pause
// playback. Safe to call when no recorder is active.
const stopVideoExport = () => {
const recorder = mediaRecorderRef.current;
if (recorder && recorder.state !== 'inactive') {
recorder.stop();
}
audioRef.current.pause();
setIsPlaying(false);
};
// --- UI: header, left column (upload / visual settings / output / export),
// right column (live canvas preview with transparency checkerboard). ---
return (
<div className="min-h-screen bg-slate-950 text-slate-200 font-sans selection:bg-cyan-500/30">
{/* Header */}
<header className="border-b border-slate-800 bg-slate-900/50 p-6 flex items-center justify-between">
<div className="flex items-center gap-3">
<div className="bg-cyan-500/20 p-2 rounded-lg">
<Video className="w-6 h-6 text-cyan-400" />
</div>
<h1 className="text-xl font-bold tracking-tight text-white">4K Transparent Visualizer</h1>
</div>
<div className="text-sm text-slate-400 hidden sm:block">
All processing is strictly local.
</div>
</header>
<main className="container mx-auto p-6 grid lg:grid-cols-12 gap-8">
{/* Left Column: Controls */}
<div className="lg:col-span-4 space-y-6">
{/* Upload Section */}
<section className="bg-slate-900 p-6 rounded-2xl border border-slate-800 shadow-xl">
<h2 className="text-sm font-semibold uppercase tracking-wider text-slate-500 mb-4 flex items-center gap-2">
<Upload className="w-4 h-4" /> Audio Input
</h2>
<label className="block w-full cursor-pointer bg-slate-800 hover:bg-slate-700 transition-colors border-2 border-dashed border-slate-600 rounded-xl p-8 text-center group">
<input
type="file"
accept="audio/*"
onChange={handleFileUpload}
className="hidden"
disabled={isExportingVideo}
/>
<div className="mx-auto w-12 h-12 bg-slate-900 rounded-full flex items-center justify-center mb-3 group-hover:scale-110 transition-transform">
<Upload className="w-6 h-6 text-cyan-400" />
</div>
<p className="font-medium text-slate-300">
{fileName ? fileName : 'Click to browse audio file'}
</p>
<p className="text-xs text-slate-500 mt-2">MP3, WAV, FLAC</p>
</label>
{/* Hidden Audio Element — play/pause events keep isPlaying in sync */}
<audio
ref={audioRef}
src={audioSrc}
onEnded={handleAudioEnded}
onPlay={() => setIsPlaying(true)}
onPause={() => setIsPlaying(false)}
/>
{/* Playback Controls */}
{audioSrc && (
<div className="mt-4 flex gap-3">
<button
onClick={togglePlay}
disabled={isExportingVideo}
className="flex-1 bg-cyan-500 hover:bg-cyan-400 text-slate-950 font-bold py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-colors disabled:opacity-50"
>
{isPlaying ? <Pause className="w-5 h-5" /> : <Play className="w-5 h-5" />}
{isPlaying ? 'Pause' : 'Play Audio'}
</button>
</div>
)}
</section>
{/* Settings Section */}
<section className="bg-slate-900 p-6 rounded-2xl border border-slate-800 shadow-xl">
<h2 className="text-sm font-semibold uppercase tracking-wider text-slate-500 mb-4 flex items-center gap-2">
<Settings2 className="w-4 h-4" /> Visual Settings
</h2>
<div className="space-y-5">
{/* Type */}
<div>
<label className="block text-sm font-medium text-slate-400 mb-2">Style</label>
<div className="grid grid-cols-3 gap-2">
{['bars', 'wave', 'circle'].map(type => (
<button
key={type}
onClick={() => setVizType(type)}
className={`py-2 px-3 rounded-lg text-sm font-medium capitalize transition-all ${
vizType === type
? 'bg-slate-700 text-white shadow-inner border border-slate-600'
: 'bg-slate-950 text-slate-400 border border-slate-800 hover:border-slate-600'
}`}
>
{type}
</button>
))}
</div>
</div>
{/* Color & Style */}
<div>
<div className="flex justify-between items-center mb-2">
<label className="text-sm font-medium text-slate-400">Color Style</label>
<select
value={colorMode}
onChange={(e) => setColorMode(e.target.value)}
className="bg-slate-950 border border-slate-700 text-slate-300 text-xs rounded px-2 py-1 outline-none"
>
<option value="solid">Solid</option>
<option value="gradient">Gradient</option>
<option value="rainbow">Rainbow</option>
</select>
</div>
<div className="flex items-center gap-3">
{colorMode !== 'rainbow' && (
<input
type="color"
value={color}
onChange={(e) => setColor(e.target.value)}
className="h-10 w-14 rounded cursor-pointer bg-slate-950 border border-slate-700 shrink-0"
/>
)}
{colorMode === 'gradient' && (
<>
<span className="text-slate-500 text-xs font-medium">to</span>
<input
type="color"
value={color2}
onChange={(e) => setColor2(e.target.value)}
className="h-10 w-14 rounded cursor-pointer bg-slate-950 border border-slate-700 shrink-0"
/>
</>
)}
{colorMode === 'solid' && (
<input
type="text"
value={color}
onChange={(e) => setColor(e.target.value)}
className="flex-1 bg-slate-950 border border-slate-800 rounded-lg px-3 py-2 text-sm focus:ring-1 focus:ring-cyan-500 outline-none uppercase font-mono"
/>
)}
</div>
</div>
{/* Glow Effect */}
<div className="flex items-center justify-between bg-slate-950 p-3 rounded-xl border border-slate-800">
<div className="flex items-center gap-2">
<Sparkles className="w-4 h-4 text-amber-400" />
<span className="text-sm font-medium text-slate-300">Neon Glow Effect</span>
</div>
<label className="relative inline-flex items-center cursor-pointer">
<input type="checkbox" checked={glow} onChange={(e) => setGlow(e.target.checked)} className="sr-only peer" />
<div className="w-11 h-6 bg-slate-700 peer-focus:outline-none rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all peer-checked:bg-cyan-500"></div>
</label>
</div>
{/* Thickness */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Line Thickness</span>
<span className="text-slate-500">{thickness}px</span>
</label>
<input
type="range"
min="2" max="64"
value={thickness}
onChange={(e) => setThickness(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Spacing */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Space Between Lines</span>
<span className="text-slate-500">{spacing}px</span>
</label>
<input
type="range"
min="0" max="64"
value={spacing}
onChange={(e) => setSpacing(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Amplitude (Sensitivity) */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Amplitude (Height)</span>
<span className="text-slate-500">{sensitivity.toFixed(1)}x</span>
</label>
<input
type="range"
min="0.5" max="3.0" step="0.1"
value={sensitivity}
onChange={(e) => setSensitivity(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Smoothing */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Motion Smoothing</span>
<span className="text-slate-500">{Math.round(smoothing * 100)}%</span>
</label>
<input
type="range"
min="0.1" max="0.99" step="0.01"
value={smoothing}
onChange={(e) => setSmoothing(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Transform Settings */}
<div className="pt-4 mt-4 border-t border-slate-800 space-y-5">
<h3 className="text-xs font-semibold uppercase tracking-wider text-slate-500 mb-2">Transform & Position</h3>
{/* Scale */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Size (Scale)</span>
<span className="text-slate-500">{scale.toFixed(2)}x</span>
</label>
<input
type="range"
min="0.1" max="3.0" step="0.1"
value={scale}
onChange={(e) => setScale(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Rotation */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Rotation</span>
<span className="text-slate-500">{rotation}°</span>
</label>
<input
type="range"
min="0" max="360" step="1"
value={rotation}
onChange={(e) => setRotation(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Offset X */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Horizontal Position</span>
<span className="text-slate-500">{offsetX}%</span>
</label>
<input
type="range"
min="-50" max="50" step="1"
value={offsetX}
onChange={(e) => setOffsetX(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
{/* Offset Y */}
<div>
<label className="flex justify-between text-sm font-medium text-slate-400 mb-2">
<span>Vertical Position</span>
<span className="text-slate-500">{offsetY}%</span>
</label>
<input
type="range"
min="-50" max="50" step="1"
value={offsetY}
onChange={(e) => setOffsetY(Number(e.target.value))}
className="w-full accent-cyan-500 cursor-pointer"
/>
</div>
</div>
</div>
</section>
{/* Advanced Layout Section */}
<section className="bg-slate-900 p-6 rounded-2xl border border-slate-800 shadow-xl">
<h2 className="text-sm font-semibold uppercase tracking-wider text-slate-500 mb-4 flex items-center gap-2">
<Monitor className="w-4 h-4" /> Output Setup
</h2>
<div className="space-y-5">
{/* Resolution */}
<div>
<label className="block text-sm font-medium text-slate-400 mb-2">Aspect Ratio & Resolution</label>
<select
value={resolution}
onChange={(e) => setResolution(e.target.value)}
className="w-full bg-slate-950 border border-slate-700 text-slate-300 text-sm rounded-lg px-3 py-2.5 outline-none focus:ring-1 focus:ring-cyan-500"
>
<option value="4k_16_9">4K Landscape (3840x2160)</option>
<option value="1080p_16_9">1080p Landscape (1920x1080)</option>
<option value="4k_9_16">4K Vertical / Reels (2160x3840)</option>
<option value="1080p_9_16">1080p Vertical / Reels (1080x1920)</option>
</select>
</div>
{/* Background */}
<div>
<label className="block text-sm font-medium text-slate-400 mb-2">Background Type</label>
<div className="flex gap-2 mb-3">
{['transparent', 'color', 'image'].map(type => (
<button
key={type}
onClick={() => setBgType(type)}
className={`flex-1 py-2 px-2 rounded-lg text-xs font-medium capitalize transition-all ${
bgType === type
? 'bg-slate-700 text-white shadow-inner border border-slate-600'
: 'bg-slate-950 text-slate-400 border border-slate-800 hover:border-slate-600'
}`}
>
{type}
</button>
))}
</div>
{bgType === 'color' && (
<div className="flex items-center gap-3 mt-2 bg-slate-950 p-2 rounded-lg border border-slate-800">
<input type="color" value={bgColor} onChange={(e) => setBgColor(e.target.value)} className="h-8 w-12 rounded cursor-pointer bg-slate-950 border border-slate-700" />
<span className="text-sm font-mono text-slate-400 uppercase">{bgColor}</span>
</div>
)}
{bgType === 'image' && (
<div className="mt-2">
<label className="flex items-center justify-center gap-2 w-full cursor-pointer bg-slate-950 hover:bg-slate-800 transition-colors border border-dashed border-slate-600 rounded-lg p-3 text-center text-sm text-slate-300">
<ImagePlus className="w-4 h-4" />
{bgImageSrc ? 'Change Image' : 'Upload Background Image'}
<input type="file" accept="image/*" onChange={handleBgUpload} className="hidden" />
</label>
</div>
)}
</div>
</div>
</section>
{/* Export Section */}
<section className="bg-slate-900 p-6 rounded-2xl border border-slate-800 shadow-xl">
<h2 className="text-sm font-semibold uppercase tracking-wider text-slate-500 mb-4">Export Options</h2>
<div className="grid grid-cols-2 gap-3">
<button
onClick={exportImage}
disabled={isExportingVideo}
className="col-span-2 bg-slate-800 hover:bg-slate-700 text-white font-medium py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-colors disabled:opacity-50"
>
<ImageIcon className="w-4 h-4" />
Save Snapshot (PNG)
</button>
{isExportingVideo ? (
<div className="col-span-2 space-y-3">
<button
onClick={stopVideoExport}
className="w-full bg-red-500/10 hover:bg-red-500/20 text-red-500 border border-red-500/20 font-bold py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-colors"
>
<StopCircle className="w-5 h-5" />
Stop & Save
</button>
<div className="w-full bg-slate-950 rounded-full h-2.5 border border-slate-800 overflow-hidden">
<div className="bg-cyan-500 h-2.5 rounded-full transition-all duration-300" style={{ width: `${exportProgress}%` }}></div>
</div>
<p className="text-xs text-center text-slate-400">Recording transparent 4K video... {Math.round(exportProgress)}%</p>
</div>
) : (
<button
onClick={startVideoExport}
disabled={!audioSrc}
className="col-span-2 bg-gradient-to-r from-indigo-500 to-cyan-500 hover:from-indigo-400 hover:to-cyan-400 text-white font-bold py-3 px-4 rounded-xl flex items-center justify-center gap-2 transition-all shadow-lg shadow-cyan-500/20 disabled:opacity-50 disabled:shadow-none"
>
<Video className="w-5 h-5" />
Export 4K Video (WebM)
</button>
)}
</div>
<p className="text-xs text-slate-500 mt-4 leading-relaxed">
* Video export records in real-time. Background will be transparent. WebM VP9 format is used for alpha channel support.
</p>
</section>
</div>
{/* Right Column: Preview */}
<div className="lg:col-span-8 flex flex-col">
<div className="bg-slate-900 rounded-2xl border border-slate-800 shadow-xl overflow-hidden flex-1 relative flex flex-col">
<div className="p-4 border-b border-slate-800 bg-slate-900/80 flex justify-between items-center z-10">
<span className="text-sm font-semibold text-slate-300 flex items-center gap-2">
Live Preview
<span className="bg-slate-800 text-xs px-2 py-0.5 rounded text-slate-400 border border-slate-700">
{RESOLUTIONS[resolution]?.w}x{RESOLUTIONS[resolution]?.h}
</span>
</span>
<span className="text-xs text-slate-500">
{bgType === 'transparent' ? 'Checkerboard denotes transparency' : 'Background included in export'}
</span>
</div>
{/* Checkerboard Background for Transparency check (CSS only; never exported) */}
<div
className="flex-1 w-full relative flex items-center justify-center p-4 sm:p-8 overflow-hidden bg-black/50"
style={ bgType === 'transparent' ? {
backgroundImage: 'repeating-linear-gradient(45deg, #0f172a 25%, transparent 25%, transparent 75%, #0f172a 75%, #0f172a), repeating-linear-gradient(45deg, #0f172a 25%, #1e293b 25%, #1e293b 75%, #0f172a 75%, #0f172a)',
backgroundPosition: '0 0, 10px 10px',
backgroundSize: '20px 20px'
} : {}}
>
{/* Actual Canvas */}
<canvas
ref={canvasRef}
width={RESOLUTIONS[resolution]?.w || 3840}
height={RESOLUTIONS[resolution]?.h || 2160}
className={`max-w-full max-h-full object-contain drop-shadow-2xl rounded-lg ring-1 ring-white/10 bg-transparent ${RESOLUTIONS[resolution]?.isVertical ? 'aspect-[9/16]' : 'aspect-video'}`}
/>
{!audioSrc && (
<div className="absolute inset-0 flex flex-col items-center justify-center pointer-events-none bg-slate-900/60 backdrop-blur-sm z-20">
<Loader2 className="w-12 h-12 text-slate-500 animate-spin mb-4 opacity-50" />
<p className="text-slate-400 font-medium">Awaiting Audio Input</p>
</div>
)}
</div>
</div>
</div>
</main>
</div>
);
}