/*
 * GenerationPreview page — drives the multi-step lesson-generation pipeline:
 *   PDF parsing → optional web search → scene outlines (SSE stream) →
 *   agent profiles → first scene content + actions → blocking TTS →
 *   navigate to the generated classroom.
 *
 * NOTE(review): this file appears to have been mangled in transit — the
 * contents of many angle-bracket spans were stripped: generic type
 * arguments (e.g. `useRef(null)`, `useState(null)`, `useState>`,
 * `= >(body: T)`, `(): Promise =>`) and ALL JSX element tags in the
 * render section (only `{...}` expression children survive). The code
 * below is reproduced token-for-token; corrupted spots are flagged with
 * NOTE(review) comments rather than guessed at — restore from VCS.
 */
import { useEffect, useState, Suspense, useRef } from 'react';
import { useNavigate } from 'react-router-dom';
import { motion, AnimatePresence } from 'motion/react';
import { CheckCircle2, Sparkles, AlertCircle, AlertTriangle, ArrowLeft, Bot } from 'lucide-react';
import { Button } from '@/components/ui/button';
import { Card } from '@/components/ui/card';
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip';
import { cn } from '@/lib/utils';
import { useStageStore } from '@/lib/store/stage';
import { useSettingsStore } from '@/lib/store/settings';
import { useAgentRegistry } from '@/lib/orchestration/registry/store';
import { getAvailableProvidersWithVoices } from '@/lib/audio/voice-resolver';
import { getVoxCPMProviderOptions, useVoxCPMVoiceProfiles } from '@/lib/audio/voxcpm-voices';
import { useI18n } from '@/lib/hooks/use-i18n';
import { loadImageMapping, loadPdfBlob, cleanupOldImages, storeImages, } from '@/lib/utils/image-storage';
import { getCurrentModelConfig } from '@/lib/utils/model-config';
import { db } from '@/lib/utils/database';
import { MAX_PDF_CONTENT_CHARS, MAX_VISION_IMAGES } from '@/lib/constants/generation';
import { nanoid } from 'nanoid';
import type { Stage } from '@/lib/types/stage';
import type { SceneOutline, PdfImage, ImageMapping } from '@/lib/types/generation';
import { AgentRevealModal } from '@/components/agent/agent-reveal-modal';
import { createLogger } from '@/lib/logger';
import { type GenerationSessionState, ALL_STEPS, getActiveSteps } from './types';
import { StepVisualizer } from './components/visualizers';

const log = createLogger('GenerationPreview');

/**
 * Inner page component. Reads the pending generation request from
 * sessionStorage ('generationSession'), runs the whole pipeline once,
 * and renders step-by-step progress UI while it works.
 */
function GenerationPreviewContent() {
  const navigate = useNavigate();
  const { t } = useI18n();
  // Guards startGeneration() against re-entry (StrictMode double effects, session re-set).
  const hasStartedRef = useRef(false);
  // Holds the AbortController for the in-flight run.
  // NOTE(review): generic argument stripped — presumably useRef<AbortController | null>(null); confirm against VCS.
  const abortControllerRef = useRef(null);
  const { profiles: voxcpmProfiles } = useVoxCPMVoiceProfiles();
  // NOTE(review): generic arguments stripped on the useState calls below
  // (e.g. presumably useState<GenerationSessionState | null>(null)); confirm against VCS.
  const [session, setSession] = useState(null);
  const [sessionLoaded, setSessionLoaded] = useState(false);
  const [error, setError] = useState(null);
  const [currentStepIndex, setCurrentStepIndex] = useState(0);
  // Setter unused: completion currently navigates away instead of flipping this flag.
  const [isComplete] = useState(false);
  const [statusMessage, setStatusMessage] = useState('');
  // Outlines received so far over the SSE stream (for live preview).
  const [streamingOutlines, setStreamingOutlines] = useState(null);
  const [truncationWarnings, setTruncationWarnings] = useState([]);
  // NOTE(review): inner generic stripped — presumably useState<Array<{ title: string; url: string }>>.
  const [webSearchSources, setWebSearchSources] = useState>( [], );
  const [showAgentReveal, setShowAgentReveal] = useState(false);
  const [generatedAgents, setGeneratedAgents] = useState< Array<{ id: string; name: string; role: string; persona: string; avatar: string; color: string; priority: number; }> >([]);
  // Resolver that un-blocks the pipeline once the agent-reveal modal finishes.
  const agentRevealResolveRef = useRef<(() => void) | null>(null);

  // Compute active steps based on session state
  const activeSteps = getActiveSteps(session);

  // Load session from sessionStorage
  useEffect(() => {
    // Best-effort purge of stale stored images (older than 24h); errors only logged.
    cleanupOldImages(24).catch((e) => log.error(e));
    const saved = sessionStorage.getItem('generationSession');
    if (saved) {
      try {
        // NOTE(review): JSON.parse result is cast, not validated — assumes the writer stored a valid GenerationSessionState.
        const parsed = JSON.parse(saved) as GenerationSessionState;
        setSession(parsed);
      } catch (e) {
        log.error('Failed to parse generation session:', e);
      }
    }
    setSessionLoaded(true);
  }, []);

  // Abort all in-flight requests on unmount
  useEffect(() => {
    return () => { abortControllerRef.current?.abort(); };
  }, []);

  // Get API credentials from localStorage
  // Builds the request headers carrying model / image / video provider
  // configuration to the generation API routes.
  const getApiHeaders = () => {
    const modelConfig = getCurrentModelConfig();
    const settings = useSettingsStore.getState();
    const imageProviderConfig = settings.imageProvidersConfig?.[settings.imageProviderId];
    const videoProviderConfig = settings.videoProvidersConfig?.[settings.videoProviderId];
    return {
      'Content-Type': 'application/json',
      'x-model': modelConfig.modelString,
      'x-api-key': modelConfig.apiKey,
      'x-base-url': modelConfig.baseUrl,
      'x-provider-type': modelConfig.providerType || '',
      // Image generation provider
      'x-image-provider': settings.imageProviderId || '',
      'x-image-model': settings.imageModelId || '',
      'x-image-api-key': imageProviderConfig?.apiKey || '',
      'x-image-base-url': imageProviderConfig?.baseUrl || '',
      // Video generation provider
      'x-video-provider': settings.videoProviderId || '',
      'x-video-model': settings.videoModelId || '',
      'x-video-api-key': videoProviderConfig?.apiKey || '',
      'x-video-base-url': videoProviderConfig?.baseUrl || '',
      // Media generation toggles
      'x-image-generation-enabled': String(settings.imageGenerationEnabled ?? false),
      'x-video-generation-enabled': String(settings.videoGenerationEnabled ?? false),
    };
  };

  // Merges the current model's thinkingConfig into a request body when one is set.
  // NOTE(review): generic parameter list stripped — presumably
  // `<T extends Record<string, unknown>>(body: T)`; confirm against VCS.
  const withThinkingConfig = >(body: T) => {
    const { thinkingConfig } = getCurrentModelConfig();
    return thinkingConfig ? { ...body, thinkingConfig } : body;
  };

  // Auto-start generation when session is loaded
  useEffect(() => {
    if (session && !hasStartedRef.current) {
      hasStartedRef.current = true;
      startGeneration();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [session]);

  // Main generation flow
  // Runs the full pipeline sequentially; every network call takes the shared
  // AbortSignal so navigating away cancels everything. Any thrown error (except
  // AbortError) is surfaced through setError and the session is discarded.
  const startGeneration = async () => {
    if (!session) return;
    // Create AbortController for this generation run
    abortControllerRef.current?.abort();
    const controller = new AbortController();
    abortControllerRef.current = controller;
    const signal = controller.signal;
    // Use a local mutable copy so we can update it after PDF parsing
    let currentSession = session;
    setError(null);
    setCurrentStepIndex(0);
    try {
      // Compute active steps for this session (recomputed after session mutations)
      let activeSteps = getActiveSteps(currentSession);
      // Determine if we need the PDF analysis step
      const hasPdfToAnalyze = !!currentSession.pdfStorageKey && !currentSession.pdfText;
      // If no PDF to analyze, skip to the next available step
      if (!hasPdfToAnalyze) {
        const firstNonPdfIdx = activeSteps.findIndex((s) => s.id !== 'pdf-analysis');
        setCurrentStepIndex(Math.max(0, firstNonPdfIdx));
      }
      // Step 0: Parse PDF if needed
      if (hasPdfToAnalyze) {
        log.debug('=== Generation Preview: Parsing PDF ===');
        const pdfBlob = await loadPdfBlob(currentSession.pdfStorageKey!);
        if (!pdfBlob) {
          throw new Error(t('generation.pdfLoadFailed'));
        }
        // Ensure pdfBlob is a valid Blob with content
        if (!(pdfBlob instanceof Blob) || pdfBlob.size === 0) {
          log.error('Invalid PDF blob:', { type: typeof pdfBlob, size: pdfBlob instanceof Blob ? pdfBlob.size : 'N/A', });
          throw new Error(t('generation.pdfLoadFailed'));
        }
        // Wrap as a File to guarantee multipart/form-data with correct content-type
        const pdfFile = new File([pdfBlob], currentSession.pdfFileName || 'document.pdf', { type: 'application/pdf', });
        const parseFormData = new FormData();
        parseFormData.append('pdf', pdfFile);
        // Optional per-session PDF parser provider overrides.
        if (currentSession.pdfProviderId) { parseFormData.append('providerId', currentSession.pdfProviderId); }
        if (currentSession.pdfProviderConfig?.apiKey?.trim()) { parseFormData.append('apiKey', currentSession.pdfProviderConfig.apiKey); }
        if (currentSession.pdfProviderConfig?.baseUrl?.trim()) { parseFormData.append('baseUrl', currentSession.pdfProviderConfig.baseUrl); }
        const parseResponse = await fetch('/api/parse-pdf', { method: 'POST', body: parseFormData, signal, });
        if (!parseResponse.ok) {
          const errorData = await parseResponse.json();
          throw new Error(errorData.error || t('generation.pdfParseFailed'));
        }
        const parseResult = await parseResponse.json();
        if (!parseResult.success || !parseResult.data) {
          throw new Error(t('generation.pdfParseFailed'));
        }
        let pdfText = parseResult.data.text as string;
        // Truncate if needed
        if (pdfText.length > MAX_PDF_CONTENT_CHARS) {
          pdfText = pdfText.substring(0, MAX_PDF_CONTENT_CHARS);
        }
        // Create image metadata and store images
        // Prefer metadata.pdfImages (both parsers now return this)
        const rawPdfImages = parseResult.data.metadata?.pdfImages;
        // Fallback branch handles the legacy shape where images is a plain src array.
        const images = rawPdfImages ? rawPdfImages.map( (img: { id: string; src?: string; pageNumber?: number; description?: string; width?: number; height?: number; }) => ({ id: img.id, src: img.src || '', pageNumber: img.pageNumber || 1, description: img.description, width: img.width, height: img.height, }), ) : (parseResult.data.images as string[]).map((src: string, i: number) => ({ id: `img_${i + 1}`, src, pageNumber: 1, }));
        const imageStorageIds = await storeImages(images);
        // Session copy keeps metadata only; the heavy src data lives in IndexedDB via storageId.
        const pdfImages: PdfImage[] = images.map( ( img: { id: string; src: string; pageNumber: number; description?: string; width?: number; height?: number; }, i: number, ) => ({ id: img.id, src: '', pageNumber: img.pageNumber, description: img.description, width: img.width, height: img.height, storageId: imageStorageIds[i], }), );
        // Update session with parsed PDF data
        const updatedSession = { ...currentSession, pdfText, pdfImages, imageStorageIds, pdfStorageKey: undefined, // Clear so we don't re-parse
        };
        setSession(updatedSession);
        sessionStorage.setItem('generationSession', JSON.stringify(updatedSession));
        // Truncation warnings
        const warnings: string[] = [];
        if ((parseResult.data.text as string).length > MAX_PDF_CONTENT_CHARS) {
          warnings.push(t('generation.textTruncated', { n: MAX_PDF_CONTENT_CHARS }));
        }
        if (images.length > MAX_VISION_IMAGES) {
          warnings.push( t('generation.imageTruncated', { total: images.length, max: MAX_VISION_IMAGES }), );
        }
        if (warnings.length > 0) { setTruncationWarnings(warnings); }
        // Reassign local reference for subsequent steps
        currentSession = updatedSession;
        activeSteps = getActiveSteps(currentSession);
      }
      // Step: Web Search (if enabled)
      const webSearchStepIdx = activeSteps.findIndex((s) => s.id === 'web-search');
      if (currentSession.requirements.webSearch && webSearchStepIdx >= 0) {
        setCurrentStepIndex(webSearchStepIdx);
        setWebSearchSources([]);
        const wsSettings = useSettingsStore.getState();
        const wsApiKey = wsSettings.webSearchProvidersConfig?.[wsSettings.webSearchProviderId]?.apiKey;
        const res = await fetch('/api/web-search', { method: 'POST', headers: getApiHeaders(), body: JSON.stringify( withThinkingConfig({ query: currentSession.requirements.requirement, pdfText: currentSession.pdfText || undefined, apiKey: wsApiKey || undefined, }), ), signal, });
        if (!res.ok) {
          const data = await res.json().catch(() => ({ error: 'Web search failed' }));
          throw new Error(data.error || t('generation.webSearchFailed'));
        }
        const searchData = await res.json();
        const sources = (searchData.sources || []).map((s: { title: string; url: string }) => ({ title: s.title, url: s.url, }));
        setWebSearchSources(sources);
        // Persist research results so a reload can resume past this step.
        const updatedSessionWithSearch = { ...currentSession, researchContext: searchData.context || '', researchSources: sources, };
        setSession(updatedSessionWithSearch);
        sessionStorage.setItem('generationSession', JSON.stringify(updatedSessionWithSearch));
        currentSession = updatedSessionWithSearch;
        activeSteps = getActiveSteps(currentSession);
      }
      // Load imageMapping early (needed for both outline and scene generation)
      let imageMapping: ImageMapping = {};
      if (currentSession.imageStorageIds && currentSession.imageStorageIds.length > 0) {
        log.debug('Loading images from IndexedDB');
        imageMapping = await loadImageMapping(currentSession.imageStorageIds);
      } else if ( currentSession.imageMapping && Object.keys(currentSession.imageMapping).length > 0 ) {
        log.debug('Using imageMapping from session (old format)');
        imageMapping = currentSession.imageMapping;
      }
      // Create stage client-side
      const stageId = nanoid(10);
      const stage: Stage = { id: stageId, name: extractTopicFromRequirement(currentSession.requirements.requirement), description: '', style: 'professional', createdAt: Date.now(), updatedAt: Date.now(), interactiveMode: !!currentSession.requirements.interactiveMode, };
      // ── Generate outlines first (infers languageDirective) ──
      let outlines = currentSession.sceneOutlines;
      let languageDirective: string | undefined;
      const outlineStepIdx = activeSteps.findIndex((s) => s.id === 'outline');
      setCurrentStepIndex(outlineStepIdx >= 0 ? outlineStepIdx : 0);
      if (!outlines || outlines.length === 0) {
        log.debug('=== Generating outlines (SSE) ===');
        setStreamingOutlines([]);
        // Consumes a text/event-stream response by hand: splits on newlines,
        // parses each `data: ` payload, and resolves on the 'done' event.
        const outlineResult = await new Promise<{ outlines: SceneOutline[]; languageDirective: string; }>((resolve, reject) => {
          const collected: SceneOutline[] = [];
          let directive: string | undefined;
          fetch('/api/generate/scene-outlines-stream', { method: 'POST', headers: getApiHeaders(), body: JSON.stringify( withThinkingConfig({ requirements: currentSession.requirements, pdfText: currentSession.pdfText, pdfImages: currentSession.pdfImages, imageMapping, researchContext: currentSession.researchContext, }), ), signal, })
            .then((res) => {
              if (!res.ok) {
                return res.json().then((d) => { reject(new Error(d.error || t('generation.outlineGenerateFailed'))); });
              }
              const reader = res.body?.getReader();
              if (!reader) {
                reject(new Error(t('generation.streamNotReadable')));
                return;
              }
              const decoder = new TextDecoder();
              // Carries any partial SSE line across read() chunks.
              let sseBuffer = '';
              // NOTE(review): return type argument stripped — presumably `(): Promise<void>`.
              const pump = (): Promise => reader.read().then(({ done, value }) => {
                if (value) {
                  sseBuffer += decoder.decode(value, { stream: !done });
                  const lines = sseBuffer.split('\n');
                  // Last element may be an incomplete line; keep it buffered.
                  sseBuffer = lines.pop() || '';
                  for (const line of lines) {
                    if (!line.startsWith('data: ')) continue;
                    try {
                      const evt = JSON.parse(line.slice(6));
                      if (evt.type === 'languageDirective') {
                        directive = evt.data;
                      } else if (evt.type === 'outline') {
                        collected.push(evt.data);
                        setStreamingOutlines([...collected]);
                      } else if (evt.type === 'retry') {
                        // Server restarted outline generation: drop partial results.
                        collected.length = 0;
                        setStreamingOutlines([]);
                        setStatusMessage(t('generation.outlineRetrying'));
                      } else if (evt.type === 'done') {
                        directive = evt.languageDirective || directive;
                        resolve({ outlines: evt.outlines || collected, languageDirective: directive || 'Teach in the language that matches the user requirement.', });
                        return;
                      } else if (evt.type === 'error') {
                        reject(new Error(evt.error));
                        return;
                      }
                    } catch (e) {
                      log.error('Failed to parse outline SSE:', line, e);
                    }
                  }
                }
                if (done) {
                  // Stream ended without a 'done' event — accept what we collected, if anything.
                  if (collected.length > 0) {
                    resolve({ outlines: collected, languageDirective: directive || 'Teach in the language that matches the user requirement.', });
                  } else {
                    reject(new Error(t('generation.outlineEmptyResponse')));
                  }
                  return;
                }
                return pump();
              });
              pump().catch(reject);
            })
            .catch(reject);
        });
        outlines = outlineResult.outlines;
        languageDirective = outlineResult.languageDirective;
        // Store languageDirective on the stage
        stage.languageDirective = languageDirective;
        const updatedSession = { ...currentSession, sceneOutlines: outlines, languageDirective, };
        setSession(updatedSession);
        sessionStorage.setItem('generationSession', JSON.stringify(updatedSession));
        // Outline generation succeeded — clear homepage draft cache
        try { localStorage.removeItem('requirementDraft'); } catch { /* ignore */ }
        // Brief pause to let user see the final outline state
        await new Promise((resolve) => setTimeout(resolve, 800));
      }
      // ── Agent generation (after outlines — uses languageDirective + outlines) ──
      const settings = useSettingsStore.getState();
      let agents: Array<{ id: string; name: string; role: string; persona?: string; }> = [];
      if (settings.agentMode === 'auto') {
        const agentStepIdx = activeSteps.findIndex((s) => s.id === 'agent-generation');
        if (agentStepIdx >= 0) setCurrentStepIndex(agentStepIdx);
        try {
          // Avatar catalogue sent to the model so it can pick fitting portraits.
          const allAvatars = [
            { path: '/avatars/teacher.png', desc: 'Male teacher with glasses, holding a book, green background', },
            { path: '/avatars/teacher-2.png', desc: 'Female teacher with long dark hair, blue traditional outfit, gentle expression', },
            { path: '/avatars/assist.png', desc: 'Young female assistant with glasses, pink background, friendly smile', },
            { path: '/avatars/assist-2.png', desc: 'Young female in orange top and purple overalls, cheerful and approachable', },
            { path: '/avatars/clown.png', desc: 'Energetic girl with glasses pointing up, green shirt, lively and fun', },
            { path: '/avatars/clown-2.png', desc: 'Playful girl with curly hair doing rock gesture, blue shirt, humorous vibe', },
            { path: '/avatars/curious.png', desc: 'Surprised boy with glasses, hand on cheek, curious expression', },
            { path: '/avatars/curious-2.png', desc: 'Boy with backpack holding a book and question mark bubble, inquisitive', },
            { path: '/avatars/note-taker.png', desc: 'Studious boy with glasses, blue shirt, calm and organized', },
            { path: '/avatars/note-taker-2.png', desc: 'Active boy with yellow backpack waving, blue outfit, enthusiastic learner', },
            { path: '/avatars/thinker.png', desc: 'Thoughtful girl with hand on chin, purple background, contemplative', },
            { path: '/avatars/thinker-2.png', desc: 'Girl reading a book intently, long dark hair, intellectual and focused', },
          ];
          // Flattens configured TTS providers into one provider/voice list for the model.
          const getAvailableVoicesForGeneration = () => {
            const providers = getAvailableProvidersWithVoices( settings.ttsProvidersConfig, voxcpmProfiles, );
            return providers.flatMap((p) => p.voices.map((v) => ({ providerId: p.providerId, voiceId: v.id, voiceName: v.name, })), );
          };
          const agentResp = await fetch('/api/generate/agent-profiles', { method: 'POST', headers: getApiHeaders(), body: JSON.stringify( withThinkingConfig({ stageInfo: { name: stage.name, description: stage.description }, sceneOutlines: outlines.map((o) => ({ title: o.title, description: o.description, })), languageDirective, availableAvatars: allAvatars.map((a) => a.path), avatarDescriptions: allAvatars.map((a) => ({ path: a.path, desc: a.desc })), availableVoices: getAvailableVoicesForGeneration(), }), ), signal, });
          if (!agentResp.ok) throw new Error('Agent generation failed');
          const agentData = await agentResp.json();
          if (!agentData.success) throw new Error(agentData.error || 'Agent generation failed');
          // Save to IndexedDB and registry
          const { saveGeneratedAgents } = await import('@/lib/orchestration/registry/store');
          const savedIds = await saveGeneratedAgents(stage.id, agentData.agents);
          settings.setSelectedAgentIds(savedIds);
          stage.agentIds = savedIds;
          // Show card-reveal modal, continue generation once all cards are revealed
          setGeneratedAgents(agentData.agents);
          setShowAgentReveal(true);
          // Pipeline blocks here until onAllRevealed resolves this promise.
          await new Promise((resolve) => { agentRevealResolveRef.current = resolve; });
          agents = savedIds .map((id) => useAgentRegistry.getState().getAgent(id)) .filter(Boolean) .map((a) => ({ id: a!.id, name: a!.name, role: a!.role, persona: a!.persona, }));
        } catch (err: unknown) {
          // Auto-generation is best-effort: fall back to non-generated preset agents.
          log.warn('[Generation] Agent generation failed, falling back to presets:', err);
          const registry = useAgentRegistry.getState();
          const fallbackIds = settings.selectedAgentIds.filter((id) => { const a = registry.getAgent(id); return a && !a.isGenerated; });
          agents = fallbackIds .map((id) => registry.getAgent(id)) .filter(Boolean) .map((a) => ({ id: a!.id, name: a!.name, role: a!.role, persona: a!.persona, }));
          stage.agentIds = fallbackIds;
        }
      } else {
        // Preset mode — use selected agents (include persona)
        // Filter out stale generated agent IDs that may linger in settings
        const registry = useAgentRegistry.getState();
        const presetAgentIds = settings.selectedAgentIds.filter((id) => { const a = registry.getAgent(id); return a && !a.isGenerated; });
        agents = presetAgentIds .map((id) => registry.getAgent(id)) .filter(Boolean) .map((a) => ({ id: a!.id, name: a!.name, role: a!.role, persona: a!.persona, }));
        stage.agentIds = presetAgentIds;
      }
      // Move to scene generation step
      setStatusMessage('');
      if (!outlines || outlines.length === 0) {
        throw new Error(t('generation.outlineEmptyResponse'));
      }
      // Store stage and outlines
      const store = useStageStore.getState();
      store.setStage(stage);
      store.setOutlines(outlines);
      // Advance to slide-content step
      const contentStepIdx = activeSteps.findIndex((s) => s.id === 'slide-content');
      if (contentStepIdx >= 0) setCurrentStepIndex(contentStepIdx);
      // Build stageInfo and userProfile for API call
      const stageInfo = { name: stage.name, description: stage.description, style: stage.style, };
      const userProfile = currentSession.requirements.userNickname || currentSession.requirements.userBio ? `Student: ${currentSession.requirements.userNickname || 'Unknown'}${currentSession.requirements.userBio ? ` — ${currentSession.requirements.userBio}` : ''}` : undefined;
      // Generate ONLY the first scene
      store.setGeneratingOutlines(outlines);
      const firstOutline = outlines[0];
      // Step 2: Generate content (currentStepIndex is already 2)
      const contentResp = await fetch('/api/generate/scene-content', { method: 'POST', headers: getApiHeaders(), body: JSON.stringify( withThinkingConfig({ outline: firstOutline, allOutlines: outlines, pdfImages: currentSession.pdfImages, imageMapping, stageInfo, stageId: stage.id, agents, languageDirective, }), ), signal, });
      if (!contentResp.ok) {
        const errorData = await contentResp.json().catch(() => ({ error: 'Request failed' }));
        throw new Error(errorData.error || t('generation.sceneGenerateFailed'));
      }
      const contentData = await contentResp.json();
      if (!contentData.success || !contentData.content) {
        throw new Error(contentData.error || t('generation.sceneGenerateFailed'));
      }
      // Generate actions (activate actions step indicator)
      const actionsStepIdx = activeSteps.findIndex((s) => s.id === 'actions');
      setCurrentStepIndex(actionsStepIdx >= 0 ? actionsStepIdx : currentStepIndex + 1);
      const actionsResp = await fetch('/api/generate/scene-actions', { method: 'POST', headers: getApiHeaders(), body: JSON.stringify( withThinkingConfig({ outline: contentData.effectiveOutline || firstOutline, allOutlines: outlines, content: contentData.content, stageId: stage.id, agents, previousSpeeches: [], userProfile, languageDirective, }), ), signal, });
      if (!actionsResp.ok) {
        const errorData = await actionsResp.json().catch(() => ({ error: 'Request failed' }));
        throw new Error(errorData.error || t('generation.sceneGenerateFailed'));
      }
      const data = await actionsResp.json();
      if (!data.success || !data.scene) {
        throw new Error(data.error || t('generation.sceneGenerateFailed'));
      }
      // Generate TTS for first scene (part of actions step — blocking)
      if (settings.ttsEnabled && settings.ttsProviderId !== 'browser-native-tts') {
        const ttsProviderConfig = settings.ttsProvidersConfig?.[settings.ttsProviderId];
        // VoxCPM needs extra per-voice options resolved asynchronously.
        const providerOptions = settings.ttsProviderId === 'voxcpm-tts' ? { ...(ttsProviderConfig?.providerOptions || {}), ...(await getVoxCPMProviderOptions(settings.ttsVoice, { role: 'teacher', language: languageDirective, })), } : undefined;
        const speechActions = (data.scene.actions || []).filter( (a: { type: string; text?: string }) => a.type === 'speech' && a.text, );
        let ttsFailCount = 0;
        // Sequential on purpose: each clip is synthesized and persisted before the next.
        for (const action of speechActions) {
          const audioId = `tts_${action.id}`;
          action.audioId = audioId;
          try {
            const resp = await fetch('/api/generate/tts', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ text: action.text, audioId, ttsProviderId: settings.ttsProviderId, ttsModelId: ttsProviderConfig?.modelId, ttsVoice: settings.ttsVoice, ttsSpeed: settings.ttsSpeed, ttsApiKey: ttsProviderConfig?.apiKey || undefined, ttsBaseUrl: ttsProviderConfig?.serverBaseUrl || ttsProviderConfig?.baseUrl || ttsProviderConfig?.customDefaultBaseUrl || undefined, ttsProviderOptions: providerOptions, }), signal, });
            if (!resp.ok) { ttsFailCount++; continue; }
            const ttsData = await resp.json();
            if (!ttsData.success) { ttsFailCount++; continue; }
            // Decode base64 audio into a Blob and persist it in IndexedDB.
            const binary = atob(ttsData.base64);
            const bytes = new Uint8Array(binary.length);
            for (let i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i);
            const blob = new Blob([bytes], { type: `audio/${ttsData.format}` });
            await db.audioFiles.put({ id: audioId, blob, format: ttsData.format, createdAt: Date.now(), });
          } catch (err) {
            log.warn(`[TTS] Failed for ${audioId}:`, err);
            ttsFailCount++;
          }
        }
        // Any failed clip aborts the whole run — partial audio is treated as fatal here.
        if (ttsFailCount > 0 && speechActions.length > 0) {
          throw new Error(t('generation.speechFailed'));
        }
      }
      // Add scene to store and navigate
      store.addScene(data.scene);
      store.setCurrentSceneId(data.scene.id);
      // Set remaining outlines as skeleton placeholders
      const remaining = outlines.filter((o) => o.order !== data.scene.order);
      store.setGeneratingOutlines(remaining);
      // Store generation params for classroom to continue generation
      sessionStorage.setItem( 'generationParams', JSON.stringify({ pdfImages: currentSession.pdfImages, agents, userProfile, languageDirective, }), );
      sessionStorage.removeItem('generationSession');
      await store.saveToStorage();
      navigate(`/classroom/${stage.id}`);
    } catch (err) {
      // AbortError is expected when navigating away — don't show as error
      if (err instanceof DOMException && err.name === 'AbortError') {
        log.info('[GenerationPreview] Generation aborted');
        return;
      }
      sessionStorage.removeItem('generationSession');
      setError(err instanceof Error ? err.message : String(err));
    }
  };

  // Derives the stage name from the raw requirement text (truncated to 500 chars).
  const extractTopicFromRequirement = (requirement: string): string => {
    const trimmed = requirement.trim();
    if (trimmed.length <= 500) {
      return trimmed;
    }
    return trimmed.substring(0, 500).trim() + '...';
  };

  // Cancels the run, clears the pending session, and returns to the homepage.
  const goBackToHome = () => {
    abortControllerRef.current?.abort();
    sessionStorage.removeItem('generationSession');
    navigate('/');
  };

  // NOTE(review): from here to the end of the component the JSX element tags
  // have been stripped from the source — only expression children and JSX
  // comments survive. Reproduced verbatim; restore the markup from VCS.
  // Still loading session from sessionStorage
  if (!sessionLoaded) {
    return (
    );
  }
  // No session found
  if (!session) {
    return (

    {t('generation.sessionNotFound')}

    {t('generation.sessionNotFoundDesc')}

    );
  }
  const activeStep = activeSteps.length > 0 ? activeSteps[Math.min(currentStepIndex, activeSteps.length - 1)] : ALL_STEPS[0];
  return (
  {/* Background Decor */}
  {/* Back button */}
  {/* Progress Dots */}
  {activeSteps.map((step, idx) => (
  ))}
  {/* Central Content */}
  {/* Icon / Visualizer Container */}
  {error ? ( ) : isComplete ? ( ) : ( )}
  {/* Text Content */}

  {error ? t('generation.generationFailed') : isComplete ? t('generation.generationComplete') : t(activeStep.title)}

  {error ? error : isComplete ? t('generation.classroomReady') : statusMessage || t(activeStep.description)}

  {/* Truncation warning indicator */}
  {truncationWarnings.length > 0 && !error && !isComplete && (
  {truncationWarnings.map((w, i) => (

  {w}

  ))}
  )}
  {/* Footer Action */}
  {error ? ( ) : !isComplete ? ( {t('generation.aiWorking')} {generatedAgents.length > 0 && !showAgentReveal && ( )} ) : null}
  {/* Agent Reveal Modal */}
  setShowAgentReveal(false)} onAllRevealed={() => { agentRevealResolveRef.current?.(); agentRevealResolveRef.current = null; }} />
  );
}

/**
 * Default export: page wrapper.
 * NOTE(review): JSX stripped — given the Suspense import and the surviving
 * `} >` tokens, this presumably wrapped GenerationPreviewContent in a
 * `<Suspense fallback={...}>`; confirm against VCS.
 */
export default function GenerationPreviewPage() {
  return (
  } >
  );
}