anky2002 committed on
Commit
db1fb03
·
verified ·
1 Parent(s): f873af1

feat: add useOllamaBridge hook - uses extension to proxy Ollama (no CORS config needed)

Browse files
Files changed (1) hide show
  1. src/hooks/use-ollama-bridge.ts +316 -0
src/hooks/use-ollama-bridge.ts ADDED
@@ -0,0 +1,316 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ "use client"
2
+
3
+ import { useState, useCallback, useRef, useEffect } from "react"
4
+
5
/**
 * Configuration for the `useOllamaBridge` hook.
 */
interface UseOllamaBridgeOptions {
  /** Base URL of the local Ollama server; defaults to "http://localhost:11434". */
  ollamaUrl?: string
  /** Invoked when a bridge, stream, or connection error occurs. */
  onError?: (error: Error) => void
  /** Invoked with the full accumulated text when a stream finishes. */
  onFinish?: (output: string) => void
}
10
+
11
/**
 * Value returned by the `useOllamaBridge` hook: connection state plus
 * imperative helpers for generating/chatting through the extension bridge.
 */
interface UseOllamaBridgeReturn {
  /** Whether the extension bridge is available */
  bridgeAvailable: boolean
  /** Whether Ollama is running (detected via extension); null until checked */
  ollamaRunning: boolean | null
  /** Available models from Ollama */
  models: string[]
  /** Accumulated completion text */
  completion: string
  /** Whether a stream is currently in progress */
  isLoading: boolean
  /** Check if extension + Ollama are available */
  checkConnection: () => Promise<boolean>
  /** Fetch available models */
  refreshModels: () => Promise<string[]>
  /** Stream a generate request through the extension bridge */
  generate: (prompt: string, model: string, options?: Record<string, unknown>) => Promise<string>
  /** Stream a chat request through the extension bridge */
  chat: (messages: Array<{ role: string; content: string }>, model: string, options?: Record<string, unknown>) => Promise<string>
  /** Abort the current stream */
  stop: () => void
  /** Reset completion to empty */
  reset: () => void
}
35
+
36
+ /**
37
+ * Hook to use Ollama through the OpenPrompt browser extension.
38
+ *
39
+ * The extension acts as a CORS-free bridge between the website and the user's
40
+ * local Ollama instance. No need for OLLAMA_ORIGINS configuration!
41
+ *
42
+ * Falls back to direct connection if extension is not available.
43
+ */
44
+ export function useOllamaBridge(options: UseOllamaBridgeOptions = {}): UseOllamaBridgeReturn {
45
+ const { ollamaUrl = "http://localhost:11434", onError, onFinish } = options
46
+
47
+ const [bridgeAvailable, setBridgeAvailable] = useState(false)
48
+ const [ollamaRunning, setOllamaRunning] = useState<boolean | null>(null)
49
+ const [models, setModels] = useState<string[]>([])
50
+ const [completion, setCompletion] = useState("")
51
+ const [isLoading, setIsLoading] = useState(false)
52
+ const requestIdRef = useRef<string | null>(null)
53
+ const accumulatedRef = useRef("")
54
+
55
+ // Generate unique request ID
56
+ const generateId = () => `req_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`
57
+
58
+ // Detect if the bridge is available
59
+ useEffect(() => {
60
+ const handleBridgeReady = (event: MessageEvent) => {
61
+ if (event.data?.type === 'OLLAMA_BRIDGE_READY') {
62
+ setBridgeAvailable(true)
63
+ }
64
+ }
65
+
66
+ // Check for attribute (already loaded)
67
+ if (document.documentElement.getAttribute('data-ollama-bridge') === 'true') {
68
+ setBridgeAvailable(true)
69
+ }
70
+
71
+ window.addEventListener('message', handleBridgeReady)
72
+ return () => window.removeEventListener('message', handleBridgeReady)
73
+ }, [])
74
+
75
+ // Listen for stream responses
76
+ useEffect(() => {
77
+ const handleMessage = (event: MessageEvent) => {
78
+ if (event.origin !== window.location.origin) return
79
+ const msg = event.data
80
+ if (!msg || !msg.id || msg.id !== requestIdRef.current) return
81
+
82
+ if (msg.type === 'OLLAMA_STREAM_CHUNK' && msg.chunk) {
83
+ accumulatedRef.current += msg.chunk
84
+ setCompletion(accumulatedRef.current)
85
+ } else if (msg.type === 'OLLAMA_STREAM_END') {
86
+ setIsLoading(false)
87
+ onFinish?.(accumulatedRef.current)
88
+ } else if (msg.type === 'OLLAMA_ERROR') {
89
+ setIsLoading(false)
90
+ onError?.(new Error(msg.error || 'Ollama bridge error'))
91
+ }
92
+ }
93
+
94
+ window.addEventListener('message', handleMessage)
95
+ return () => window.removeEventListener('message', handleMessage)
96
+ }, [onError, onFinish])
97
+
98
+ const checkConnection = useCallback(async (): Promise<boolean> => {
99
+ if (!bridgeAvailable) {
100
+ // Try direct connection as fallback
101
+ try {
102
+ const response = await fetch(`${ollamaUrl}/api/tags`, { signal: AbortSignal.timeout(3000) })
103
+ const isRunning = response.ok
104
+ setOllamaRunning(isRunning)
105
+ return isRunning
106
+ } catch {
107
+ setOllamaRunning(false)
108
+ return false
109
+ }
110
+ }
111
+
112
+ return new Promise((resolve) => {
113
+ const id = generateId()
114
+ const timeout = setTimeout(() => {
115
+ window.removeEventListener('message', handler)
116
+ setOllamaRunning(false)
117
+ resolve(false)
118
+ }, 5000)
119
+
120
+ const handler = (event: MessageEvent) => {
121
+ if (event.data?.id === id) {
122
+ clearTimeout(timeout)
123
+ window.removeEventListener('message', handler)
124
+ const running = event.data.data?.running ?? false
125
+ setOllamaRunning(running)
126
+ resolve(running)
127
+ }
128
+ }
129
+
130
+ window.addEventListener('message', handler)
131
+ window.postMessage({ type: 'OLLAMA_PING', id, ollamaUrl }, '*')
132
+ })
133
+ }, [bridgeAvailable, ollamaUrl])
134
+
135
+ const refreshModels = useCallback(async (): Promise<string[]> => {
136
+ if (!bridgeAvailable) {
137
+ // Direct connection fallback
138
+ try {
139
+ const response = await fetch(`${ollamaUrl}/api/tags`)
140
+ const data = await response.json()
141
+ const modelNames = data.models?.map((m: { name: string }) => m.name) || []
142
+ setModels(modelNames)
143
+ return modelNames
144
+ } catch {
145
+ return []
146
+ }
147
+ }
148
+
149
+ return new Promise((resolve) => {
150
+ const id = generateId()
151
+ const timeout = setTimeout(() => {
152
+ window.removeEventListener('message', handler)
153
+ resolve([])
154
+ }, 10000)
155
+
156
+ const handler = (event: MessageEvent) => {
157
+ if (event.data?.id === id) {
158
+ clearTimeout(timeout)
159
+ window.removeEventListener('message', handler)
160
+
161
+ if (event.data.type === 'OLLAMA_ERROR') {
162
+ onError?.(new Error(event.data.error))
163
+ resolve([])
164
+ } else {
165
+ const modelNames = event.data.data?.models?.map((m: { name: string }) => m.name) || []
166
+ setModels(modelNames)
167
+ resolve(modelNames)
168
+ }
169
+ }
170
+ }
171
+
172
+ window.addEventListener('message', handler)
173
+ window.postMessage({ type: 'OLLAMA_LIST_MODELS', id, ollamaUrl }, '*')
174
+ })
175
+ }, [bridgeAvailable, ollamaUrl, onError])
176
+
177
+ const generate = useCallback(async (
178
+ prompt: string,
179
+ model: string,
180
+ genOptions?: Record<string, unknown>
181
+ ): Promise<string> => {
182
+ const id = generateId()
183
+ requestIdRef.current = id
184
+ accumulatedRef.current = ""
185
+ setCompletion("")
186
+ setIsLoading(true)
187
+
188
+ if (!bridgeAvailable) {
189
+ // Direct fallback (requires CORS)
190
+ try {
191
+ const response = await fetch(`${ollamaUrl}/api/generate`, {
192
+ method: 'POST',
193
+ headers: { 'Content-Type': 'application/json' },
194
+ body: JSON.stringify({ model, prompt, stream: true, ...(genOptions && { options: genOptions }) }),
195
+ })
196
+
197
+ const reader = response.body?.getReader()
198
+ if (!reader) throw new Error('No stream')
199
+
200
+ const decoder = new TextDecoder()
201
+ while (true) {
202
+ const { done, value } = await reader.read()
203
+ if (done) break
204
+ const lines = decoder.decode(value, { stream: true }).split('\n').filter(l => l.trim())
205
+ for (const line of lines) {
206
+ try {
207
+ const json = JSON.parse(line)
208
+ if (json.response) {
209
+ accumulatedRef.current += json.response
210
+ setCompletion(accumulatedRef.current)
211
+ }
212
+ } catch { /* skip */ }
213
+ }
214
+ }
215
+
216
+ setIsLoading(false)
217
+ onFinish?.(accumulatedRef.current)
218
+ return accumulatedRef.current
219
+ } catch (err) {
220
+ setIsLoading(false)
221
+ const error = err instanceof Error ? err : new Error('Failed to connect to Ollama')
222
+ onError?.(error)
223
+ throw error
224
+ }
225
+ }
226
+
227
+ // Use extension bridge
228
+ window.postMessage({
229
+ type: 'OLLAMA_GENERATE',
230
+ id,
231
+ ollamaUrl,
232
+ model,
233
+ prompt,
234
+ options: genOptions,
235
+ }, '*')
236
+
237
+ // Return a promise that resolves when streaming is done
238
+ return new Promise((resolve, reject) => {
239
+ const handler = (event: MessageEvent) => {
240
+ if (event.data?.id !== id) return
241
+
242
+ if (event.data.type === 'OLLAMA_STREAM_END') {
243
+ window.removeEventListener('message', handler)
244
+ resolve(accumulatedRef.current)
245
+ } else if (event.data.type === 'OLLAMA_ERROR') {
246
+ window.removeEventListener('message', handler)
247
+ reject(new Error(event.data.error))
248
+ }
249
+ }
250
+ window.addEventListener('message', handler)
251
+ })
252
+ }, [bridgeAvailable, ollamaUrl, onError, onFinish])
253
+
254
+ const chat = useCallback(async (
255
+ messages: Array<{ role: string; content: string }>,
256
+ model: string,
257
+ chatOptions?: Record<string, unknown>
258
+ ): Promise<string> => {
259
+ const id = generateId()
260
+ requestIdRef.current = id
261
+ accumulatedRef.current = ""
262
+ setCompletion("")
263
+ setIsLoading(true)
264
+
265
+ window.postMessage({
266
+ type: 'OLLAMA_CHAT',
267
+ id,
268
+ ollamaUrl,
269
+ model,
270
+ messages,
271
+ options: chatOptions,
272
+ }, '*')
273
+
274
+ return new Promise((resolve, reject) => {
275
+ const handler = (event: MessageEvent) => {
276
+ if (event.data?.id !== id) return
277
+
278
+ if (event.data.type === 'OLLAMA_STREAM_END') {
279
+ window.removeEventListener('message', handler)
280
+ resolve(accumulatedRef.current)
281
+ } else if (event.data.type === 'OLLAMA_ERROR') {
282
+ window.removeEventListener('message', handler)
283
+ reject(new Error(event.data.error))
284
+ }
285
+ }
286
+ window.addEventListener('message', handler)
287
+ })
288
+ }, [ollamaUrl])
289
+
290
+ const stop = useCallback(() => {
291
+ if (requestIdRef.current) {
292
+ window.postMessage({ type: 'OLLAMA_ABORT', id: requestIdRef.current }, '*')
293
+ requestIdRef.current = null
294
+ setIsLoading(false)
295
+ }
296
+ }, [])
297
+
298
+ const reset = useCallback(() => {
299
+ setCompletion("")
300
+ accumulatedRef.current = ""
301
+ }, [])
302
+
303
+ return {
304
+ bridgeAvailable,
305
+ ollamaRunning,
306
+ models,
307
+ completion,
308
+ isLoading,
309
+ checkConnection,
310
+ refreshModels,
311
+ generate,
312
+ chat,
313
+ stop,
314
+ reset,
315
+ }
316
+ }