// open-prompt/src/components/tools/tool-executor.tsx
// Synced to Hugging Face via GitHub Action (commit bcce530)
"use client"
import { useState, useEffect } from 'react'
import { useRouter } from 'next/navigation'
import { useUser } from '@stackframe/stack'
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Textarea } from '@/components/ui/textarea'
import { Label } from '@/components/ui/label'
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select'
import { Badge } from '@/components/ui/badge'
import { Loader2, Copy, CheckCircle2, Cloud, Server, Share2 } from 'lucide-react'
import { ToolDefinition, ToolInput } from '@/lib/tools'
import { AI_MODELS, DEFAULT_MODEL, isOllamaModel } from '@/types/prompt'
import { useOllama } from '@/contexts/ollama-context'
import { SaveRunPanel } from '@/components/saved-runs/save-run-panel'
import { ShareMenu } from '@/components/share/share-menu'
/** Props accepted by {@link ToolExecutor}. */
interface ToolExecutorProps {
  /** Static definition of the tool to run (slug, input schema, system prompt, icon, …). */
  tool: ToolDefinition
}
/**
 * One previously-saved execution of a tool, as returned by the
 * `/api/tools/[slug]/saved-runs` endpoint.
 */
interface SavedRun {
  /** Server-assigned identifier. */
  id: string
  /** Optional user-supplied label for the run. */
  name: string | null
  /** The form inputs that produced the run, keyed by input name. */
  inputs: Record<string, string> | null
  /** Generated output text. */
  output: string
  /** Model identifier the run was executed with. */
  model: string
  /** ISO timestamp string — assumed; TODO confirm format against the API. */
  createdAt: string
}
/**
 * Interactive runner for a single AI tool.
 *
 * Renders an input form generated from `tool.inputSchema`, a model picker
 * (cloud models from AI_MODELS plus locally-detected Ollama models), the
 * generated output with copy/share actions, and a panel for saving runs.
 *
 * Execution paths:
 * - Ollama models are called directly from the browser against the user's
 *   local Ollama server, so prompts never leave the machine.
 * - Cloud models go through the server route `/api/tools/[slug]/execute`.
 */
export function ToolExecutor({ tool }: ToolExecutorProps) {
  const router = useRouter() // NOTE(review): currently unused in this component — kept for hook-order stability; confirm before removing
  const user = useUser()
  const { settings: ollamaSettings } = useOllama()
  const [inputs, setInputs] = useState<Record<string, string>>({})
  const [output, setOutput] = useState<string>('')
  const [isLoading, setIsLoading] = useState(false)
  const [isCopied, setIsCopied] = useState(false)
  const [selectedModel, setSelectedModel] = useState<string>(DEFAULT_MODEL)
  // Saved runs state
  const [savedRuns, setSavedRuns] = useState<SavedRun[]>([])
  const [loadingRuns, setLoadingRuns] = useState(false)

  // Fetch saved runs for this tool/user. The `cancelled` flag prevents a
  // race where a stale response (from a previous tool or user) resolves
  // after the deps change and overwrites newer state.
  useEffect(() => {
    let cancelled = false
    const fetchRuns = async () => {
      if (!tool.slug || !user?.id) return
      setLoadingRuns(true)
      try {
        const res = await fetch(`/api/tools/${tool.slug}/saved-runs?userId=${user.id}`)
        if (res.ok) {
          const data = await res.json()
          if (!cancelled) setSavedRuns(data.runs || [])
        }
      } catch (err) {
        console.error("Failed to fetch runs:", err)
      } finally {
        if (!cancelled) setLoadingRuns(false)
      }
    }
    fetchRuns()
    return () => {
      cancelled = true
    }
  }, [tool.slug, user?.id])

  /** Stores one form field's value under its schema name. */
  const handleInputChange = (name: string, value: string) => {
    setInputs(prev => ({ ...prev, [name]: value }))
  }

  /**
   * Runs the tool with the current inputs and selected model.
   * Ollama models are called browser-side; cloud models via the server API.
   * Any failure is rendered into the output panel as an `Error: …` string.
   */
  const handleExecute = async () => {
    setIsLoading(true)
    setOutput('')
    try {
      if (isOllamaModel(selectedModel)) {
        // Ollama: call directly from browser instead of through server
        const ollamaUrl = (ollamaSettings.apiUrl || "http://localhost:11434").replace(/\/+$/, "")
        // Build the prompt from tool's system prompt + inputs by expanding
        // every {{key}} placeholder with its form value.
        let finalPrompt = tool.systemPrompt
        for (const [key, value] of Object.entries(inputs)) {
          finalPrompt = finalPrompt.replace(new RegExp(`{{${key}}}`, 'g'), value)
        }
        const res = await fetch(`${ollamaUrl}/api/generate`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            model: selectedModel,
            prompt: finalPrompt,
            stream: false,
          }),
        })
        if (!res.ok) {
          if (res.status === 0 || res.type === "opaque") {
            throw new Error(
              `Cannot reach Ollama. Make sure CORS is enabled:\n` +
              `OLLAMA_ORIGINS=${window.location.origin} ollama serve`
            )
          }
          throw new Error(`Ollama error: ${res.status}. Make sure Ollama is running.`)
        }
        const data = await res.json()
        // Guard against an unexpected payload shape: never set `undefined`.
        setOutput(data.response ?? '')
      } else {
        // Cloud models: call server API
        const res = await fetch(`/api/tools/${tool.slug}/execute`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            inputs,
            model: selectedModel,
          }),
        })
        const data = await res.json()
        if (res.ok) {
          setOutput(data.output)
        } else {
          setOutput(`Error: ${data.error || 'Failed to execute tool'}`)
        }
      }
    } catch (error) {
      console.error('Execute error:', error)
      setOutput(
        error instanceof Error
          ? `Error: ${error.message}`
          : 'Error: Failed to execute tool. Please try again.'
      )
    } finally {
      setIsLoading(false)
    }
  }

  /**
   * Copies the output to the clipboard and flashes a "Copied!" state for 2s.
   * Clipboard access can be denied by the browser; that rejection is logged
   * instead of surfacing as an unhandled promise rejection.
   */
  const handleCopy = async () => {
    if (output) {
      try {
        await navigator.clipboard.writeText(output)
        setIsCopied(true)
        setTimeout(() => setIsCopied(false), 2000)
      } catch (err) {
        console.error('Copy failed:', err)
      }
    }
  }

  /**
   * Persists the current run via the saved-runs API and prepends the new
   * record to the list. Throws on failure so SaveRunPanel can show an error.
   */
  const handleSaveRun = async (name: string) => {
    if (!output) return
    const res = await fetch(`/api/tools/${tool.slug}/saved-runs`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        userId: user?.id,
        name: name || null,
        inputs,
        output,
        model: selectedModel,
      }),
    })
    if (res.ok) {
      const newRun = await res.json()
      setSavedRuns(prev => [newRun, ...prev])
    } else {
      throw new Error("Failed to save")
    }
  }

  /** Human-readable display name for a model id (Ollama ids drop their tag). */
  const getModelName = (modelId: string) => {
    if (isOllamaModel(modelId)) {
      return modelId.split(':')[0] || modelId
    }
    return AI_MODELS[modelId as keyof typeof AI_MODELS]?.name || modelId
  }

  /** Renders the widget matching one schema entry's `type`. */
  const renderInput = (input: ToolInput) => {
    const value = inputs[input.name] || ''
    switch (input.type) {
      case 'textarea':
        return (
          <Textarea
            id={input.name}
            placeholder={input.placeholder}
            value={value}
            onChange={(e) => handleInputChange(input.name, e.target.value)}
            rows={4}
            className="resize-none"
          />
        )
      case 'select':
        return (
          <Select
            value={value}
            onValueChange={(val) => handleInputChange(input.name, val)}
          >
            <SelectTrigger id={input.name}>
              <SelectValue placeholder={input.placeholder || 'Select...'} />
            </SelectTrigger>
            <SelectContent>
              {input.options?.map((option) => (
                <SelectItem key={option} value={option}>
                  {option}
                </SelectItem>
              ))}
            </SelectContent>
          </Select>
        )
      case 'number':
        return (
          <Input
            id={input.name}
            type="number"
            placeholder={input.placeholder}
            value={value}
            onChange={(e) => handleInputChange(input.name, e.target.value)}
          />
        )
      default:
        return (
          <Input
            id={input.name}
            type="text"
            placeholder={input.placeholder}
            value={value}
            onChange={(e) => handleInputChange(input.name, e.target.value)}
          />
        )
    }
  }

  // All required fields must be non-blank before Generate is enabled.
  const isFormValid = tool.inputSchema.every(
    input => !input.required || (inputs[input.name] && inputs[input.name].trim() !== '')
  )
  // Check if selected model is Ollama
  const isOllama = isOllamaModel(selectedModel)

  return (
    <div className="grid lg:grid-cols-2 gap-8">
      {/* Input Form */}
      <Card className="glass">
        <CardHeader>
          <div className="flex items-center justify-between">
            <CardTitle>Inputs</CardTitle>
            {tool.isPremium && (
              <Badge className="bg-linear-to-r from-yellow-500 to-orange-500">
                PRO
              </Badge>
            )}
          </div>
          <CardDescription>
            Fill in the details below
          </CardDescription>
        </CardHeader>
        <CardContent className="space-y-4">
          {/* Model Selector */}
          <div className="space-y-2">
            <Label>AI Model</Label>
            <Select value={selectedModel} onValueChange={setSelectedModel}>
              <SelectTrigger>
                <SelectValue>
                  <div className="flex items-center gap-2">
                    {isOllama ? (
                      <Server className="h-4 w-4 text-green-500" />
                    ) : (
                      <Cloud className="h-4 w-4 text-blue-500" />
                    )}
                    <span>
                      {isOllama
                        ? selectedModel
                        : AI_MODELS[selectedModel as keyof typeof AI_MODELS]?.name || selectedModel
                      }
                    </span>
                  </div>
                </SelectValue>
              </SelectTrigger>
              <SelectContent>
                {/* Cloud Models */}
                <div className="px-2 py-1 text-xs font-medium text-muted-foreground flex items-center gap-1">
                  <Cloud className="h-3 w-3" /> Cloud Models
                </div>
                {Object.entries(AI_MODELS).map(([id, model]) => (
                  <SelectItem key={id} value={id}>
                    <div className="flex items-center gap-2">
                      <span>{model.name}</span>
                      <Badge variant="outline" className="text-xs">
                        {model.provider}
                      </Badge>
                    </div>
                  </SelectItem>
                ))}
                {/* Ollama Models */}
                {ollamaSettings.enabled && ollamaSettings.availableModels.length > 0 && (
                  <>
                    <div className="px-2 py-1 text-xs font-medium text-muted-foreground flex items-center gap-1 border-t mt-1 pt-2">
                      <Server className="h-3 w-3" /> Ollama (Local)
                    </div>
                    {ollamaSettings.availableModels.map((model) => (
                      <SelectItem key={model} value={model}>
                        <div className="flex items-center gap-2">
                          <span>{model}</span>
                          <Badge variant="outline" className="text-xs border-green-500/50">
                            Local
                          </Badge>
                        </div>
                      </SelectItem>
                    ))}
                  </>
                )}
                {/* Ollama not connected hint */}
                {!ollamaSettings.enabled && (
                  <div className="px-2 py-1 text-xs text-muted-foreground border-t mt-1 pt-2">
                    <Server className="h-3 w-3 inline mr-1" />
                    Connect Ollama in header for local models
                  </div>
                )}
              </SelectContent>
            </Select>
          </div>
          {tool.inputSchema.map((input) => (
            <div key={input.name} className="space-y-2">
              <Label htmlFor={input.name}>
                {input.label}
                {input.required && <span className="text-destructive ml-1">*</span>}
              </Label>
              {renderInput(input)}
            </div>
          ))}
          <Button
            onClick={handleExecute}
            disabled={!isFormValid || isLoading}
            className="w-full gap-2 btn-glow"
            variant="gradient"
            size="lg"
          >
            {isLoading ? (
              <>
                <Loader2 className="h-4 w-4 animate-spin" />
                Generating...
              </>
            ) : (
              <>
                {tool.icon} Generate
              </>
            )}
          </Button>
        </CardContent>
      </Card>
      {/* Output */}
      <Card className="glass">
        <CardHeader>
          <div className="flex items-center justify-between">
            <CardTitle>Output</CardTitle>
            <div className="flex gap-2">
              {output && (
                <Button
                  variant="ghost"
                  size="sm"
                  onClick={handleCopy}
                  className="gap-2"
                >
                  {isCopied ? (
                    <>
                      <CheckCircle2 className="h-4 w-4" />
                      Copied!
                    </>
                  ) : (
                    <>
                      <Copy className="h-4 w-4" />
                      Copy
                    </>
                  )}
                </Button>
              )}
              <ShareMenu
                title={tool.name}
                description={tool.description}
              />
            </div>
          </div>
          <CardDescription>
            AI-generated result will appear here
          </CardDescription>
        </CardHeader>
        <CardContent>
          {output ? (
            <div className="whitespace-pre-wrap bg-muted p-4 rounded-lg font-mono text-sm">
              {output}
            </div>
          ) : (
            <div className="text-center py-12 text-muted-foreground">
              <span className="text-4xl mb-2 block">{tool.icon}</span>
              <p>Fill in the inputs and click Generate</p>
            </div>
          )}
        </CardContent>
      </Card>
      {/* Save Run Panel */}
      <div className="lg:col-span-2">
        <SaveRunPanel
          hasOutput={!!output && !output.startsWith('Error:')}
          isRunning={isLoading}
          onSave={handleSaveRun}
          savedRuns={savedRuns}
          loadingRuns={loadingRuns}
          type="tool"
          getModelName={getModelName}
        />
      </div>
    </div>
  )
}