// physix/frontend/src/components/LlmConnectionPanel.tsx
// Uploaded by Pratyush-01 using huggingface_hub (commit 0e24aff, verified).
/** "Connect an LLM" panel — four endpoints, one model picker.
*
* Decomposed UX:
* - **Endpoint**: flat select over { Ollama, HF Router, OpenAI, Custom }.
* - **Model**: render adapts per endpoint.
* * Ollama → hard select over locally-installed tags
* (server tells us which ones via /interactive/models).
* * HF / OpenAI / Custom → free-form text input with a
* curated `<datalist>` of suggestions. Users can type
* anything — the suggestions are shortcut clicks, not
* a whitelist.
* - **Custom base URL**: only shown when endpoint = Custom.
* - **API key**: always shown, optional, persisted per base URL. */
import { useEffect, useId, useState } from "react";
import { PhysixInferStatus } from "@/components/PhysixInferStatus";
import { cn } from "@/lib/cn";
import type { LlmModelInfo } from "@/lib/interactiveClient";
import {
  type Endpoint,
  type EndpointId,
  type LlmConnection,
  ENDPOINTS,
  connectionForEndpoint,
  findEndpoint,
  loadApiKey,
  saveApiKey,
} from "@/lib/llmPresets";
/** Props for one "Connect an LLM" slot. */
interface LlmConnectionPanelProps {
/** Side label ("A", "B", …). */
title: string;
/** Sub-label that gives the slot semantic meaning. */
subtitle?: string;
/** Current connection (endpoint id, model, base URL, API key). */
value: LlmConnection;
/** Controlled-component change handler; receives the full next value. */
onChange: (next: LlmConnection) => void;
/** Disable all fields while an episode is running. */
disabled?: boolean;
/** Subtle accent colour for the title strip. */
accent?: "primary" | "blue" | "muted";
/** Locally-installed Ollama tags from /interactive/models. Only used
* when the user picks the Ollama endpoint; pass [] otherwise. */
installedOllamaModels?: LlmModelInfo[];
/** True while the catalogue is loading. */
installedOllamaLoading?: boolean;
/** Optional error string from the lister (Ollama unreachable, etc.). */
installedOllamaError?: string | null;
/** Refetch handler so the user can retry without reloading the page. */
onRefreshOllama?: () => void;
}
/** One "Connect an LLM" slot (side A, B, …): endpoint select, model
 * picker, conditional custom-base-URL field, and an API key input that
 * is persisted per base URL via loadApiKey/saveApiKey. Fully controlled:
 * every field change flows out through `onChange`; the only local state
 * is the show/hide toggle on the key input. */
export function LlmConnectionPanel({
title,
subtitle,
value,
onChange,
disabled,
accent = "muted",
installedOllamaModels = [],
installedOllamaLoading = false,
installedOllamaError = null,
onRefreshOllama,
}: LlmConnectionPanelProps): JSX.Element {
const endpoint = findEndpoint(value.endpointId);
const isCustom = endpoint.id === "custom";
// Local-only UI state: whether the key input renders as plain text.
const [revealKey, setRevealKey] = useState(false);
// Hydrate the API key from per-URL storage whenever the base URL the
// panel is pointing at changes (endpoint switch, custom URL edit).
// NOTE(review): the callback closes over the `value` from the render
// that scheduled it; the spread assumes no sibling field changed in
// between — presumably fine at UI cadence, but worth confirming.
useEffect(() => {
if (!value.baseUrl) return;
const stored = loadApiKey(value.baseUrl);
if (stored && stored !== value.apiKey) {
onChange({ ...value, apiKey: stored });
}
// Run only when the base URL changes — keying on `value` would loop.
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [value.baseUrl]);
// Switching endpoints resets the connection to that endpoint's defaults
// and pulls in any key previously saved for its base URL.
function setEndpoint(id: EndpointId): void {
const next = findEndpoint(id);
onChange({
...connectionForEndpoint(next),
apiKey: loadApiKey(next.baseUrl),
});
}
// Custom-endpoint URL edit; re-hydrates the key saved for that URL.
function setBaseUrl(url: string): void {
onChange({ ...value, baseUrl: url, apiKey: loadApiKey(url) });
}
function setModel(model: string): void {
onChange({ ...value, model });
}
// Persists the key per base URL on every keystroke, then propagates.
function setApiKey(key: string): void {
saveApiKey(value.baseUrl, key);
onChange({ ...value, apiKey: key });
}
return (
<section className="panel flex flex-col gap-3">
<header className="flex items-baseline justify-between gap-3">
<div>
<p className={cn("heading-eyebrow", accentClass(accent))}>
Connect an LLM · {title}
</p>
{subtitle ? (
<p className="mt-1 text-xs text-textMuted">{subtitle}</p>
) : (
<p className="mt-1 text-xs text-textMuted">
Point at any OpenAI-compatible{" "}
<code className="font-mono text-textPrimary">
/v1/chat/completions
</code>{" "}
endpoint.
</p>
)}
</div>
<EndpointBadge endpoint={endpoint} />
</header>
<div className="grid grid-cols-1 gap-3 sm:grid-cols-2">
<Field label="Endpoint">
<select
value={endpoint.id}
onChange={(event) => setEndpoint(event.target.value as EndpointId)}
disabled={disabled}
className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 text-sm text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50"
>
{ENDPOINTS.map((ep) => (
<option key={ep.id} value={ep.id}>
{ep.label}
</option>
))}
</select>
</Field>
<Field label="Model">
<ModelInput
endpoint={endpoint}
value={value.model}
onChange={setModel}
disabled={disabled}
installedOllamaModels={installedOllamaModels}
installedOllamaLoading={installedOllamaLoading}
installedOllamaError={installedOllamaError}
/>
</Field>
</div>
{isCustom ? (
<Field label="Custom base URL">
<input
type="url"
value={value.baseUrl}
onChange={(event) => setBaseUrl(event.target.value)}
disabled={disabled}
placeholder="https://your-endpoint.example/v1"
className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50"
/>
</Field>
) : null}
<Field
label={
<span className="flex items-baseline justify-between gap-2">
<span>API key {endpoint.needsKey ? "" : "(optional)"}</span>
<button
type="button"
onClick={() => setRevealKey((v) => !v)}
className="text-[10px] uppercase tracking-wider text-textMuted underline hover:text-textPrimary"
>
{revealKey ? "hide" : "show"}
</button>
</span>
}
>
<input
type={revealKey ? "text" : "password"}
value={value.apiKey}
onChange={(event) => setApiKey(event.target.value)}
disabled={disabled}
placeholder={
endpoint.needsKey
? "Bearer <key>"
: "(server will use env var if set)"
}
className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50"
/>
</Field>
<p className="text-[11px] leading-relaxed text-textMuted">
{endpoint.hint}
</p>
{/*
Live status banner for the GPU Space. Only renders for the
physix endpoint — every other endpoint either has no sleep
cycle (HF Router, OpenAI, Custom) or is local-only (Ollama),
so there's nothing to surface.
*/}
{endpoint.id === "physix" ? <PhysixInferStatus /> : null}
{/* Troubleshooter only matters when the Ollama lister failed. */}
{endpoint.id === "ollama" && installedOllamaError ? (
<OllamaTroubleshooter
message={installedOllamaError}
onRetry={onRefreshOllama}
/>
) : null}
</section>
);
}
// ---------------------------------------------------------------------
// Model picker — Ollama dropdown vs. free-form input with datalist.
// ---------------------------------------------------------------------
/** Props for the endpoint-aware model picker. */
interface ModelInputProps {
/** Active endpoint; drives which input variant renders. */
endpoint: Endpoint;
/** Current model id/tag. */
value: string;
/** Called with the new model string on every edit. */
onChange: (next: string) => void;
disabled?: boolean | undefined;
/** Installed tags from the server (Ollama endpoint only). */
installedOllamaModels: LlmModelInfo[];
/** True while the tag catalogue is loading. */
installedOllamaLoading: boolean;
/** Lister error string, e.g. daemon unreachable. */
installedOllamaError: string | null;
}
/** Model picker whose rendering adapts to the endpoint:
 * - Ollama → strict select over locally-installed tags.
 * - everything else → free-form input backed by a suggestion datalist.
 */
function ModelInput({
  endpoint,
  value,
  onChange,
  disabled,
  installedOllamaModels,
  installedOllamaLoading,
  installedOllamaError,
}: ModelInputProps): JSX.Element {
  // Instance-unique datalist id. The panel renders once per LLM slot
  // ("A", "B", …), so an id derived from endpoint.id alone would collide
  // whenever two slots point at the same endpoint — duplicate DOM ids
  // are invalid HTML and make `list` resolve to the first match only.
  // Hooks must run unconditionally, so call useId before the early return.
  const datalistId = useId();
  if (endpoint.modelInputMode === "ollama-installed") {
    return (
      <OllamaInstalledSelect
        value={value}
        onChange={onChange}
        disabled={disabled}
        models={installedOllamaModels}
        loading={installedOllamaLoading}
        error={installedOllamaError}
        fallbackSuggestions={endpoint.modelSuggestions.map((s) => s.id)}
      />
    );
  }
  // Free-form input + curated datalist. The user can type anything; the
  // datalist is just a click-to-fill convenience.
  return (
    <>
      <input
        type="text"
        value={value}
        onChange={(event) => onChange(event.target.value)}
        disabled={disabled}
        placeholder={endpoint.modelSuggestions[0]?.id ?? "model-id"}
        list={datalistId}
        className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50"
      />
      <datalist id={datalistId}>
        {endpoint.modelSuggestions.map((suggestion) => (
          <option key={suggestion.id} value={suggestion.id}>
            {suggestion.tag ?? ""}
          </option>
        ))}
      </datalist>
    </>
  );
}
/** Props for the installed-tags select (Ollama endpoint). */
interface OllamaInstalledSelectProps {
/** Currently selected tag (may be empty or no longer installed). */
value: string;
/** Called with the newly chosen tag. */
onChange: (next: string) => void;
disabled?: boolean | undefined;
/** Tags reported by the server. */
models: LlmModelInfo[];
/** True while the catalogue request is in flight. */
loading: boolean;
/** Lister error; triggers the typed-input fallback. */
error: string | null;
/** Suggestions for the fallback input when the daemon is unreachable. */
fallbackSuggestions: string[];
}
/** Select bound to the locally-installed Ollama tags.
 *
 * Three states to handle:
 * 1. Daemon up + ≥1 tags pulled — render a real <select>.
 * 2. Daemon up + nothing pulled — render an empty disabled select
 *    with a hint (the panel hint already covers the install command).
 * 3. Daemon unreachable — fall back to a typed input so the user
 *    can still set the model and hit Run after starting the daemon.
 */
function OllamaInstalledSelect({
  value,
  onChange,
  disabled,
  models,
  loading,
  error,
  fallbackSuggestions,
}: OllamaInstalledSelectProps): JSX.Element {
  // Instance-unique id for the error-fallback datalist. A hard-coded id
  // would duplicate in the DOM whenever both LLM slots hit the error
  // state at once. Hooks must run unconditionally, so call useId before
  // any early return.
  const datalistId = useId();
  if (loading) {
    return (
      <select
        disabled
        className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textMuted outline-none disabled:opacity-50"
      >
        <option>Loading installed models…</option>
      </select>
    );
  }
  if (error) {
    // Fallback: typed input with the canonical Qwen tag suggestions.
    return (
      <>
        <input
          type="text"
          value={value}
          onChange={(event) => onChange(event.target.value)}
          disabled={disabled}
          placeholder={fallbackSuggestions[0] ?? "qwen2.5:3b-instruct"}
          list={datalistId}
          className="w-full rounded-lg border border-accentAmber/40 bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-accentAmber disabled:opacity-50"
        />
        <datalist id={datalistId}>
          {fallbackSuggestions.map((id) => (
            <option key={id} value={id} />
          ))}
        </datalist>
      </>
    );
  }
  if (models.length === 0) {
    return (
      <select
        disabled
        className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textMuted outline-none disabled:opacity-50"
      >
        <option>No models pulled — see hint below</option>
      </select>
    );
  }
  // Make sure the currently-selected value is renderable: if the user
  // had a tag selected and then uninstalled it, pull it into the list
  // as a stale entry rather than silently swapping selection.
  const selectableNames = models.map((m) => m.name);
  const includesValue = !value || selectableNames.includes(value);
  return (
    <select
      value={value}
      onChange={(event) => onChange(event.target.value)}
      disabled={disabled}
      className="w-full rounded-lg border border-border bg-surfaceMuted px-3 py-2 font-mono text-xs text-textPrimary outline-none transition focus:border-textMuted disabled:opacity-50"
    >
      {!includesValue ? (
        <option value={value}>{value} (not installed)</option>
      ) : null}
      {!value ? <option value="">Pick a tag…</option> : null}
      {models.map((m) => (
        <option key={m.name} value={m.name}>
          {m.name}
          {m.parameter_size ? ` · ${m.parameter_size}` : ""}
        </option>
      ))}
    </select>
  );
}
// ---------------------------------------------------------------------
// Misc small subcomponents
// ---------------------------------------------------------------------
/** Amber warning strip shown when the Ollama model lister failed,
 * with an optional inline retry action. */
function OllamaTroubleshooter({
  message,
  onRetry,
}: {
  message: string;
  onRetry?: (() => void) | undefined;
}): JSX.Element {
  // Pre-compute the optional action so the markup below stays flat.
  const retryAction =
    onRetry === undefined ? null : (
      <button
        type="button"
        onClick={onRetry}
        className="text-[10px] text-textMuted underline hover:text-textPrimary"
      >
        retry
      </button>
    );
  return (
    <div className="rounded-md border border-accentAmber/40 bg-accentAmber/5 px-3 py-2 text-[11px] text-accentAmber">
      <div className="flex items-start justify-between gap-3">
        <span>{message}</span>
        {retryAction}
      </div>
    </div>
  );
}
/** Labelled form row: an eyebrow caption above an arbitrary control. */
function Field(props: {
  label: React.ReactNode;
  children: React.ReactNode;
}): JSX.Element {
  const { label, children } = props;
  return (
    <label className="flex flex-col gap-1 text-xs text-textMuted">
      <span className="heading-eyebrow">{label}</span>
      {children}
    </label>
  );
}
/** Per-endpoint badge copy: pill label plus border/text tone classes.
 * Hoisted to module scope so the lookup table is built once instead of
 * being re-allocated on every render of the badge. */
const ENDPOINT_BADGE_COPY: Record<EndpointId, { label: string; tone: string }> =
  {
    physix: {
      label: "PhysiX-3B",
      tone: "border-accentGreen/40 text-accentGreen",
    },
    ollama: { label: "local", tone: "border-textMuted/40 text-textMuted" },
    hf: { label: "HF router", tone: "border-accentBlue/40 text-accentBlue" },
    openai: { label: "OpenAI", tone: "border-primary/40 text-primary" },
    custom: { label: "custom", tone: "border-accentAmber/40 text-accentAmber" },
  };

/** Small pill in the panel header showing which endpoint is active. */
function EndpointBadge({ endpoint }: { endpoint: Endpoint }): JSX.Element {
  const entry = ENDPOINT_BADGE_COPY[endpoint.id];
  return (
    <span
      className={cn(
        "rounded-full border bg-surface px-2 py-0.5 text-[10px] uppercase tracking-wider",
        entry.tone,
      )}
    >
      {entry.label}
    </span>
  );
}
/** Map the panel's accent prop to its text-colour utility class. */
function accentClass(accent: "primary" | "blue" | "muted"): string {
  switch (accent) {
    case "primary":
      return "text-primary";
    case "blue":
      return "text-accentBlue";
    default:
      return "text-textMuted";
  }
}