// VoicePage — chat + voice UI for the local SolVox AI assistant.
// NOTE(review): this file's formatting is collapsed onto a few very long lines,
// and the JSX inside `return ( ... )` appears to have lost its element tags
// during extraction — only expression children and comment markers remain.
// Verify the render body against the canonical source before editing it.
//
// State: msgs (chat transcript, seeded with one assistant greeting), input
// (text box value), recording/processing flags, wave (40-entry amplitude
// array driving the waveform visual). Refs: recRef (MediaRecorder), chunks
// (recorded Blob parts), endRef (scroll anchor, smooth-scrolled on every
// msgs change), analyser (Web Audio AnalyserNode), raf (animation frame id).
//
// startRec: requests a 16 kHz mono mic stream with echo cancellation and
// noise suppression, connects it to an AnalyserNode (fftSize 128) whose
// frequency data feeds setWave via a requestAnimationFrame loop, then starts
// a MediaRecorder producing 'audio/webm' chunks. On getUserMedia failure a
// system message "Microphone access denied." is added.
import React, { useState, useRef, useEffect } from 'react'; import { PipelineTrace } from '../components/ui/index'; interface Props { aiStatus: any; } interface Msg { id: string; role: 'user' | 'assistant' | 'system'; text: string; pipeline?: any[]; actions?: any[]; ts: Date; } const SUGGESTIONS = ['What is my balance?', 'Send 5 SOL to alice.sol', 'Show recent transactions', 'Help me understand gas fees']; export default function VoicePage({ aiStatus }: Props) { const [msgs, setMsgs] = useState([ { id: '0', role: 'assistant', text: "I'm your SolVox AI assistant — running 100% locally via 6 QVAC modules. Ask me to send tokens, check balances, search transactions, or explain anything about Solana.", pipeline: [], actions: [], ts: new Date() }, ]); const [input, setInput] = useState(''); const [recording, setRecording] = useState(false); const [processing, setProcessing] = useState(false); const [wave, setWave] = useState(new Array(40).fill(1)); const recRef = useRef(null); const chunks = useRef([]); const endRef = useRef(null); const analyser = useRef(null); const raf = useRef(null); useEffect(() => { endRef.current?.scrollIntoView({ behavior: 'smooth' }); }, [msgs]); const startRec = async () => { try { const stream = await navigator.mediaDevices.getUserMedia({ audio: { sampleRate: 16000, channelCount: 1, echoCancellation: true, noiseSuppression: true } }); const ctx = new AudioContext(); const src = ctx.createMediaStreamSource(stream); const a = ctx.createAnalyser(); a.fftSize = 128; src.connect(a); analyser.current = a; const tick = () => { if (!analyser.current) return; const d = new Uint8Array(analyser.current.frequencyBinCount); analyser.current.getByteFrequencyData(d); setWave(Array.from(d).slice(0, 40).map(v => Math.max(1, v / 8))); raf.current = requestAnimationFrame(tick); }; tick(); const rec = new MediaRecorder(stream, { mimeType: 'audio/webm' }); chunks.current = []; rec.ondataavailable = e => { if (e.data.size > 0) chunks.current.push(e.data); }; 
// rec.onstop: release mic tracks, cancel the waveform animation, reset the
// bars, then hand the assembled webm bytes to processVoice.
// processVoice: sends the ArrayBuffer over the window.solvox bridge
// (presumably an Electron preload API — TODO confirm); on success it appends
// the transcription as a user message plus the agent's reply (with pipeline
// steps and actions), and plays any returned WAV response audio. Without the
// bridge it falls back to a dev-mode stub message.
// handleSend: text path — same window.solvox bridge via ai.chat; trims and
// clears the input, echoes the user message, then appends the response.
// add: appends one message to msgs. NOTE(review): the id is
// Date.now().toString(36), so two messages appended within the same
// millisecond (e.g. user + assistant in processVoice) share an id — a weak
// React key; consider adding a counter or random suffix.
// NOTE(review): AudioContext created in startRec is never closed, and
// recRef/chunks/analyser/raf are untyped useRef(null) — assignments like
// `recRef.current = rec` would fail under strict TS; confirm compiler config.
rec.onstop = async () => { stream.getTracks().forEach(t => t.stop()); if (raf.current) cancelAnimationFrame(raf.current); setWave(new Array(40).fill(1)); const b = new Blob(chunks.current, { type: 'audio/webm' }); processVoice(await b.arrayBuffer()); }; recRef.current = rec; rec.start(); setRecording(true); } catch { add('system', 'Microphone access denied.'); } }; const stopRec = () => { if (recRef.current && recording) { recRef.current.stop(); setRecording(false); } }; const processVoice = async (data: ArrayBuffer) => { setProcessing(true); try { if (window.solvox) { const r = await window.solvox.ai.processVoice(data); if (r.success) { add('user', r.transcription || '[voice]'); add('assistant', r.agentResult?.response || 'Done.', r.pipelineSteps, r.agentResult?.actions); if (r.responseAudio) { const u = URL.createObjectURL(new Blob([r.responseAudio], { type: 'audio/wav' })); new Audio(u).play().catch(() => {}); } } else add('system', r.error || 'Voice processing failed'); } else { add('user', '[voice — dev]'); add('assistant', 'QVAC models needed. Type commands instead.'); } } catch (e: any) { add('system', e.message); } setProcessing(false); }; const handleSend = async () => { if (!input.trim()) return; const text = input.trim(); setInput(''); add('user', text); setProcessing(true); try { if (window.solvox) { const r = await window.solvox.ai.chat(text); if (r.success) add('assistant', r.response || '', r.pipelineSteps, r.actions); else add('assistant', r.error || 'Could not process.'); } else add('assistant', `[Dev] "${text}" — needs QVAC models.`); } catch (e: any) { add('system', e.message); } setProcessing(false); }; const add = (role: Msg['role'], text: string, pipeline?: any[], actions?: any[]) => { setMsgs(p => [...p, { id: Date.now().toString(36), role, text, pipeline, actions, ts: new Date() }]); }; const mods = aiStatus ? 
// mods: how many of the six QVAC modules aiStatus reports as truthy
// (llm, transcription, tts, embed, translation, ocr); drives the
// AI ONLINE (>= 4) / PARTIAL (> 0) / LOADING badge in the header below.
[aiStatus.llm, aiStatus.transcription, aiStatus.tts, aiStatus.embed, aiStatus.translation, aiStatus.ocr].filter(Boolean).length : 0; return (
{/* Header */}

Voice AI Assistant

Powered by 6 QVAC packages · All inference local

= 4 ? 'badge-pill-green' : mods > 0 ? 'badge-pill-blue' : ''}`}> {mods >= 4 ? 'AI ONLINE' : mods > 0 ? 'PARTIAL' : 'LOADING'}
{/* Chat */}
{msgs.map(m => (
{m.role === 'assistant' &&
SolVox AI
}

{m.text}

{m.actions && m.actions.length > 0 && (
{m.actions.map((a: any, i: number) => ( {a.tool} ))}
)} {m.pipeline && m.pipeline.length > 0 && }
{m.ts.toLocaleTimeString()}
))} {processing && (
Processing locally…
)}
{/* Suggestions */} {msgs.length <= 2 && !recording && (
{SUGGESTIONS.map(s => ( ))}
)} {/* Waveform */} {recording && (
{wave.map((h, i) => (
))}
)} {/* Input */}
setInput(e.target.value)} onKeyDown={e => e.key === 'Enter' && handleSend()} placeholder={recording ? 'Listening…' : 'Type a command or hold mic…'} className="input-field pr-20" disabled={processing || recording} />
); }