import { useState, useEffect } from 'react'; import { useSimulation } from '../../hooks/useSimulation'; import { useSimulationState } from '../../store/simulationStore'; const SAMPLE_PROMPTS = [ { title: "1. The VRAM 'Tight-Squeeze' Challenge", prompt: "Our batch size is fixed at 32 for the SLA, but we only have 512MB of VRAM left. Layer 12 is hitting an OOM. Optimize the memory footprint without reducing the batch size.", desc: "Forces the model to ignore the 'easy' batch size fix and instead reach for Gradient Checkpointing or Mixed Precision (FP16) to meet the SLA." }, { title: "2. The Multi-GPU Hallucination Test", prompt: "The training job is failing on a single T4. Can we enable FSDP or move to a multi-node cluster to resolve the memory bottleneck?", desc: "Trap! Model should reject FSDP and propose local optimizations like CPU Offloading or Flash Attention instead." }, { title: "3. The FinOps Budget Crisis", prompt: "We are at $49.50 of our $50.00 budget. The incident is still active. Write a minimal-cost remediation that uses zero additional cloud resources and resolves in under 5 steps.", desc: "Tests FinOps Oracle alignment. Should produce highly compressed M2M syntax and a surgical one-line fix." }, { title: "4. The 'Black-Box' Investigation", prompt: "A custom CUDA kernel is leaking memory in the validation loop. We can't see the kernel code, but we have the telemetry logs. Propose a system-level guard using PyTorch to contain the leak.", desc: "Triggers Detective agent to focus on telemetry and Coder to implement surgical hotfixes." 
  }
];

/**
 * Prompt sandbox input for driving a custom orchestration run.
 *
 * @param {Object} props
 * @param {string} [props.pendingPrompt] - Prompt text selected from the sample
 *   overlay; when set, it is copied into the textarea state below.
 * @param {Function} [props.onPendingConsumed] - Callback invoked once the
 *   pending prompt has been copied, so the parent can clear it.
 */
export default function CommandPrompt({ pendingPrompt, onPendingConsumed }) {
  // NOTE(review): `scenarioComplete` and `stop` are destructured but not used
  // in the visible portion of this component — presumably referenced in the
  // JSX below this view; confirm before removing.
  const { isRunning, scenarioComplete, scenarioContext } = useSimulationState();
  const { orchestrate, stop } = useSimulation();
  const [prompt, setPrompt] = useState("");
  // True while the active run was started externally (scenario source
  // 'inference_cli'); used to lock this sandbox out so the two drivers
  // don't compete.
  const isCliDrivenRun = isRunning && scenarioContext?.source === 'inference_cli';
  // Auto-fill the textarea when a sample prompt is selected from the overlay
  useEffect(() => {
    if (pendingPrompt) {
      setPrompt(pendingPrompt);
      if (onPendingConsumed) onPendingConsumed();
    }
  }, [pendingPrompt, onPendingConsumed]);
  // Launch a custom-only orchestration; no-op on blank input or while a run
  // is already active. (The `!isCliDrivenRun` check is redundant given
  // `!isRunning` — isCliDrivenRun can only be true while isRunning is true —
  // kept as an explicit belt-and-braces guard.)
  const handleSubmit = () => {
    if (prompt.trim() && !isRunning && !isCliDrivenRun) {
      orchestrate(prompt, { customOnly: true });
    }
  };
  // Submit on Enter without Shift. Add a new line on Shift+Enter.
  const handleKeyDown = (e) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault();
      handleSubmit();
    }
  };
  return (
This live run is being driven by python inference.py. Stop that terminal run to re-enable the Prompt Sandbox.