/* NOTE(review): This file has been mangled in transit and CANNOT compile as-is.
 * Two distinct kinds of damage are visible:
 *   1. Every JSX element tag (<input>, <select>, <option>, <datalist>, <div>,
 *      wrapper components, …) has been stripped, leaving only attributes,
 *      handlers, and children as loose expression fragments.
 *   2. The source has been collapsed onto a few physical mega-lines, so `//`
 *      line comments now lexically swallow the code that originally followed
 *      them on later lines (see e.g. the "Three states to handle" comment in
 *      OllamaInstalledSelect).
 * The non-JSX TypeScript (imports, props interfaces, hooks, setters,
 * accentClass) is intact and readable, but the markup is unrecoverable from
 * this copy — restore the file from version control rather than hand-repairing
 * it. Code below is preserved byte-for-byte for reference.
 */
/** "Connect an LLM" panel — four endpoints, one model picker. * * Decomposed UX: * - **Endpoint**: flat select over { Ollama, HF Router, OpenAI, Custom }. * - **Model**: render adapts per endpoint. * * Ollama → hard select over locally-installed tags * (server tells us which ones via /interactive/models). * * HF / OpenAI / Custom → free-form text input with a * curated `<datalist>` of suggestions. Users can type * anything — the suggestions are shortcut clicks, not * a whitelist. * - **Custom base URL**: only shown when endpoint = Custom. * - **API key**: always shown, optional, persisted per base URL. */ import { useEffect, useState } from "react"; import { PhysixInferStatus } from "@/components/PhysixInferStatus"; import { cn } from "@/lib/cn"; import type { LlmModelInfo } from "@/lib/interactiveClient"; import { type Endpoint, type EndpointId, type LlmConnection, ENDPOINTS, connectionForEndpoint, findEndpoint, loadApiKey, saveApiKey, } from "@/lib/llmPresets"; interface LlmConnectionPanelProps { /** Side label ("A", "B", …). */ title: string; /** Sub-label that gives the slot semantic meaning. */ subtitle?: string; value: LlmConnection; onChange: (next: LlmConnection) => void; /** Disable all fields while an episode is running. */ disabled?: boolean; /** Subtle accent colour for the title strip. */ accent?: "primary" | "blue" | "muted"; /** Locally-installed Ollama tags from /interactive/models. Only used * when the user picks the Ollama endpoint; pass [] otherwise. */ installedOllamaModels?: LlmModelInfo[]; /** True while the catalogue is loading. */ installedOllamaLoading?: boolean; /** Optional error string from the lister (Ollama unreachable, etc.). */ installedOllamaError?: string | null; /** Refetch handler so the user can retry without reloading the page. 
*/ onRefreshOllama?: () => void; } export function LlmConnectionPanel({ title, subtitle, value, onChange, disabled, accent = "muted", installedOllamaModels = [], installedOllamaLoading = false, installedOllamaError = null, onRefreshOllama, }: LlmConnectionPanelProps): JSX.Element { const endpoint = findEndpoint(value.endpointId); const isCustom = endpoint.id === "custom"; const [revealKey, setRevealKey] = useState(false); // Hydrate the API key from per-URL storage whenever the base URL the // panel is pointing at changes (endpoint switch, custom URL edit). useEffect(() => { if (!value.baseUrl) return; const stored = loadApiKey(value.baseUrl); if (stored && stored !== value.apiKey) { onChange({ ...value, apiKey: stored }); } // Run only when the base URL changes — keying on `value` would loop. // eslint-disable-next-line react-hooks/exhaustive-deps }, [value.baseUrl]); function setEndpoint(id: EndpointId): void { const next = findEndpoint(id); onChange({ ...connectionForEndpoint(next), apiKey: loadApiKey(next.baseUrl), }); } function setBaseUrl(url: string): void { onChange({ ...value, baseUrl: url, apiKey: loadApiKey(url) }); } function setModel(model: string): void { onChange({ ...value, model }); } function setApiKey(key: string): void { saveApiKey(value.baseUrl, key); onChange({ ...value, apiKey: key }); } return (

Connect an LLM · {title}

{subtitle ? (

{subtitle}

) : (

Point at any OpenAI-compatible{" "} /v1/chat/completions {" "} endpoint.

)}
{isCustom ? ( setBaseUrl(event.target.value)} disabled={disabled} placeholder="https://your-endpoint.example/v1" className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50" /> ) : null} API key {endpoint.needsKey ? "" : "(optional)"} } > setApiKey(event.target.value)} disabled={disabled} placeholder={ endpoint.needsKey ? "Bearer " : "(server will use env var if set)" } className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50" />

{endpoint.hint}

{/* Live status banner for the GPU Space. Only renders for the physix endpoint — every other endpoint either has no sleep cycle (HF Router, OpenAI, Custom) or is local-only (Ollama), so there's nothing to surface. */} {endpoint.id === "physix" ? : null} {endpoint.id === "ollama" && installedOllamaError ? ( ) : null}
); } // --------------------------------------------------------------------- // Model picker — Ollama dropdown vs. free-form input with datalist. // --------------------------------------------------------------------- interface ModelInputProps { endpoint: Endpoint; value: string; onChange: (next: string) => void; disabled?: boolean | undefined; installedOllamaModels: LlmModelInfo[]; installedOllamaLoading: boolean; installedOllamaError: string | null; } function ModelInput({ endpoint, value, onChange, disabled, installedOllamaModels, installedOllamaLoading, installedOllamaError, }: ModelInputProps): JSX.Element { if (endpoint.modelInputMode === "ollama-installed") { return ( s.id)} /> ); } // Free-form input + curated datalist. The user can type anything; the // datalist is just a click-to-fill convenience. const datalistId = `model-suggestions-${endpoint.id}`; return ( <> onChange(event.target.value)} disabled={disabled} placeholder={endpoint.modelSuggestions[0]?.id ?? "model-id"} list={datalistId} className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50" /> {endpoint.modelSuggestions.map((suggestion) => ( ))} ); } interface OllamaInstalledSelectProps { value: string; onChange: (next: string) => void; disabled?: boolean | undefined; models: LlmModelInfo[]; loading: boolean; error: string | null; fallbackSuggestions: string[]; } function OllamaInstalledSelect({ value, onChange, disabled, models, loading, error, fallbackSuggestions, }: OllamaInstalledSelectProps): JSX.Element { // Three states to handle: // 1. Daemon up + ≥1 tags pulled — render a real ); } if (error) { // Fallback: typed input with the canonical Qwen tag suggestions. const datalistId = "ollama-typed-fallback"; return ( <> onChange(event.target.value)} disabled={disabled} placeholder={fallbackSuggestions[0] ?? 
"qwen2.5:3b-instruct"} list={datalistId} className="w-full rounded-lg border border-accentAmber/40 bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-accentAmber disabled:opacity-50" /> {fallbackSuggestions.map((id) => ( ); } if (models.length === 0) { return ( ); } // Make sure the currently-selected value is renderable: if the user // had a tag selected and then uninstalled it, pull it into the list // as a stale entry rather than silently swapping selection. const selectableNames = models.map((m) => m.name); const includesValue = !value || selectableNames.includes(value); return ( ); } // --------------------------------------------------------------------- // Misc small subcomponents // --------------------------------------------------------------------- function OllamaTroubleshooter({ message, onRetry, }: { message: string; onRetry?: (() => void) | undefined; }): JSX.Element { return (
{message} {onRetry ? ( ) : null}
); } function Field({ label, children, }: { label: React.ReactNode; children: React.ReactNode; }): JSX.Element { return ( ); } function EndpointBadge({ endpoint }: { endpoint: Endpoint }): JSX.Element { const COPY: Record = { physix: { label: "PhysiX-3B", tone: "border-accentGreen/40 text-accentGreen", }, ollama: { label: "local", tone: "border-textMuted/40 text-textMuted" }, hf: { label: "HF router", tone: "border-accentBlue/40 text-accentBlue" }, openai: { label: "OpenAI", tone: "border-primary/40 text-primary" }, custom: { label: "custom", tone: "border-accentAmber/40 text-accentAmber" }, }; const entry = COPY[endpoint.id]; return ( {entry.label} ); } function accentClass(accent: "primary" | "blue" | "muted"): string { if (accent === "primary") return "text-primary"; if (accent === "blue") return "text-accentBlue"; return "text-textMuted"; }