File size: 34,500 Bytes
5f3e9f5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
/**
 * Tracked-generation context + run queue.
 *
 * The SSE lifecycle (AbortController, event handlers, accumulated result)
 * lives in a React context mounted at the App level. That way navigating
 * between pages — e.g. from Text→Video's "Start Process" to the Processes
 * tab β€” doesn't unmount the component that owns the stream and doesn't
 * orphan the in-flight request.
 *
 * A FIFO **queue** sits on top of the single-run executor. Wizards call
 * `generate(...)`; if nothing is running it kicks off immediately, otherwise
 * the job is enqueued and the provider auto-dequeues when the current run
 * reaches a terminal state. This gives users "submit more work while the
 * first run is executing" without any extra UI on the wizards.
 */
import {
  createContext,
  useCallback,
  useContext,
  useEffect,
  useMemo,
  useRef,
  useState,
} from 'react'
import type { ReactNode } from 'react'
import { useGenerate } from './useGenerate'
import type { GenerationState } from './useGenerate'
import { useRuns } from '../store/runs'
import type { Run, RunTool } from '../store/runs'
import { useToast } from '../store/toast'
import type { BackendRunDetail, GenerateSettings, PendingClientQueueItem } from '../api/types'
import { api } from '../api/client'
import type { ReplacementTargets } from '../lib/processEditHandoff'
import { useSettings } from '../store/settings'

/** Run metadata captured before the runs-store row exists; `id`, `status` and `startedAt` are assigned by the store. */
type PendingMeta = Omit<Run, 'id' | 'status' | 'startedAt'>

/** Discriminator for the three payload shapes a queue item can carry. */
export type QueueItemKind = 'text' | 'html' | 'image'

/** A generation job in the client-side FIFO queue. */
export interface QueueItem {
  id: string
  tool: RunTool
  kind: QueueItemKind
  // Short excerpt of the input, used for list display.
  inputPreview: string
  inputText?: string
  // Epoch millis at enqueue time.
  queuedAt: number
  settings?: GenerateSettings
  // payload variants — only one of these is populated per item, selected by `kind`
  text?: string
  html?: string
  formData?: FormData
  files?: File[]
  // When set, these prior outputs are cleaned up after the replacement run succeeds.
  replaceTargets?: ReplacementTargets
}

export interface EnqueueResult {
  /** Stable queue id — clients can show this in UI or pass to `cancelQueued`. */
  queueId: string
  /** True when this item started executing immediately (queue was empty). */
  startedImmediately: boolean
  /**
   * When set, the new submission was a client-side duplicate of an already
   * queued or currently-running item; `queueId` points at that existing
   * item (so callers can still navigate to it) and no new work was
   * enqueued. Wizards show a toast and jump to Processes instead of
   * spawning a second identical run.
   */
  duplicateOf?: 'active' | 'queued'
}

/** Value exposed by the tracked-generation context. */
interface TrackedGenerationContextValue {
  state: GenerationState
  /** Pending items only — the currently-executing item is never in this list. */
  queue: QueueItem[]
  /**
   * True when the queue stopped auto-dispatching after a backend
   * rejection. `resumeQueue()` (or any new `enqueueX` call) clears it.
   */
  paused: boolean
  /** Reason the queue is currently paused, when applicable. */
  pausedReason: 'in_flight' | 'duplicate' | 'unknown' | null
  queueModeNotice: string | null
  dismissQueueModeNotice: () => void
  /** Cancel the active run; `mode` names the pipeline stage after which to stop. */
  cancel: (options?: {
    mode?: 'now' | 'after_html' | 'after_screenshots' | 'after_pptx' | 'after_video'
    delete_outputs?: boolean
  }) => void
  /** Remove a still-pending item from the queue by its queue id. */
  cancelQueued: (queueId: string) => void
  pauseQueue: () => void
  /** Manually resume auto-dispatch after a 409-induced pause. */
  resumeQueue: () => void
  reorderQueued: (queueId: string, targetQueueId: string) => void
  /** Edit a still-pending item's payload/settings in place. */
  updateQueued: (queueId: string, patch: Partial<Pick<QueueItem, 'text' | 'html' | 'settings'>>) => void
  reset: () => void
  enqueueText: (
    tool: RunTool,
    text: string,
    settings: GenerateSettings,
    options?: { replaceTargets?: ReplacementTargets },
  ) => EnqueueResult
  enqueueHtml: (
    tool: RunTool,
    html: string,
    settings: GenerateSettings,
    options?: { replaceTargets?: ReplacementTargets },
  ) => EnqueueResult
  enqueueImage: (
    tool: RunTool,
    formData: FormData,
    meta: { files: File[]; settings?: GenerateSettings },
  ) => EnqueueResult
}

const Ctx = createContext<TrackedGenerationContextValue | null>(null)

// Monotonic per-session serial; paired with a time component below so ids stay
// unique across fast successive calls within one browser session.
let queueCounter = 0

/** Produce a session-unique id for a queue item (not cryptographically random). */
function nextQueueId(): string {
  const serial = ++queueCounter
  const stamp = Date.now().toString(36)
  return `queue-${stamp}-${serial}`
}

/**
 * Deterministic fingerprint of the parts of a queue item that make it "the
 * same job": kind, tool, trimmed payload (or image file signatures) and the
 * key-sorted settings. Used to short-circuit client-side duplicates BEFORE
 * the POST reaches the backend — the backend already blocks simultaneous
 * duplicates via its input_fingerprint check, but once the first run
 * finishes it happily accepts a second identical submission, so a user who
 * re-opens the wizard and clicks Start again (or any transient form of
 * double-dispatch) would otherwise chain a second copy the moment the first
 * completes.
 *
 * djb2 over a normalized payload string. Only needs to be stable within a
 * single browser session, so no crypto required.
 */
function fingerprintItem(item: QueueItem): string {
  const settingsSig = item.settings
    ? JSON.stringify(sortedEntries(item.settings as unknown as Record<string, unknown>))
    : ''
  let normalized: string
  switch (item.kind) {
    case 'text':
      normalized = `text|${item.tool}|${(item.text ?? '').trim()}|${settingsSig}`
      break
    case 'html':
      normalized = `html|${item.tool}|${(item.html ?? '').trim()}|${settingsSig}`
      break
    default: {
      // Image payloads: identify files by name/size/mtime rather than content.
      const fileSig = (item.files ?? [])
        .map((file) => `${file.name}:${file.size}:${file.lastModified}`)
        .join(',')
      normalized = `image|${item.tool}|${fileSig}|${settingsSig}`
    }
  }
  // djb2: hash = hash * 33 + code unit, truncated to 32-bit int via `| 0`.
  // Index-based loop on purpose — iterates UTF-16 code units like the
  // original shift-add form, not code points.
  let hash = 5381
  for (let index = 0; index < normalized.length; index += 1) {
    hash = (hash * 33 + normalized.charCodeAt(index)) | 0
  }
  return hash.toString(36)
}

/** Key-sorted entries of `obj`, for a canonical JSON serialization. */
function sortedEntries(obj: Record<string, unknown>): Array<[string, unknown]> {
  const entries = Object.entries(obj)
  entries.sort(([left], [right]) => (left < right ? -1 : left > right ? 1 : 0))
  return entries
}

/**
 * Project a queue item into its persistable shape, or null when it cannot be
 * persisted. Only non-empty text/html payloads for the two video tools
 * survive — image jobs (FormData/File payloads) are not serializable.
 */
function toPersistedQueueItem(item: QueueItem): PendingClientQueueItem | null {
  if (item.kind === 'image') return null
  if (item.tool !== 'text-to-video' && item.tool !== 'html-to-video') return null
  const isText = item.kind === 'text'
  const payload = isText ? item.text : item.html
  if (!payload || payload.trim().length === 0) return null
  return {
    id: item.id,
    tool: item.tool,
    kind: item.kind,
    inputPreview: item.inputPreview,
    inputText: item.inputText,
    queuedAt: item.queuedAt,
    settings: item.settings,
    text: isText ? payload : undefined,
    html: isText ? undefined : payload,
  }
}

/**
 * Rehydrate a persisted queue item into a live QueueItem, or null when the
 * stored payload is missing/blank. Missing id/preview/timestamp fields are
 * backfilled with sensible defaults.
 */
function fromPersistedQueueItem(item: PendingClientQueueItem): QueueItem | null {
  const isText = item.kind === 'text'
  const payload = isText ? item.text : item.html
  if (!payload || payload.trim().length === 0) return null
  return {
    id: item.id || nextQueueId(),
    tool: item.tool,
    kind: item.kind,
    inputPreview: item.inputPreview || payload.slice(0, 200),
    inputText: item.inputText ?? payload,
    queuedAt: item.queuedAt || Date.now(),
    settings: item.settings,
    text: isText ? payload : undefined,
    html: isText ? undefined : payload,
  }
}

/**
 * Extract a positive ETA (seconds) from a backend run record, preferring
 * settings.estimated_total_seconds, then the metrics equivalents. Returns
 * undefined for missing, non-numeric, non-finite or non-positive values.
 */
function etaFromBackendRun(run: BackendRunDetail['run']): number | undefined {
  const candidate =
    run.settings?.estimated_total_seconds ??
    run.metrics?.estimated_total_seconds ??
    run.metrics?.eta_seconds
  const seconds = typeof candidate === 'number' ? candidate : Number(candidate)
  if (!Number.isFinite(seconds)) return undefined
  return seconds > 0 ? seconds : undefined
}

/**
 * Best-effort deletion of a replaced run's output files (html, screenshots,
 * presentation, video). Individual delete failures are swallowed — cleanup
 * must never block the replacement run's success path. Resolves once every
 * issued deletion has settled.
 */
async function cleanupReplacementTargets(targets: ReplacementTargets): Promise<void> {
  const seen = new Set<string>()
  const deletions: Array<Promise<unknown>> = []
  const add = (type: Parameters<typeof api.deleteFile>[0], filename?: string) => {
    if (!filename) return
    // Dedupe on type AND filename. The key previously interpolated only the
    // type (a literal "$(unknown)" suffix), which collapsed every file of a
    // given type onto one key — so only the FIRST screenshot was ever
    // deleted and the rest leaked.
    const key = `${type}:${filename}`
    if (seen.has(key)) return
    seen.add(key)
    deletions.push(api.deleteFile(type, filename).catch(() => undefined))
  }
  add('html', targets.htmlFilename)
  for (const file of targets.screenshotFiles ?? []) add('screenshot', file)
  add('presentation', targets.presentationFile)
  add('video', targets.videoFile)
  await Promise.all(deletions)
}

export function TrackedGenerationProvider({ children }: { children: ReactNode }) {
  const gen = useGenerate()
  const runs = useRuns()
  const toast = useToast()
  const { settings: appSettings } = useSettings()
  const runIdRef = useRef<string | null>(null)
  const pendingRef = useRef<PendingMeta | null>(null)
  // Queue holds ONLY pending items (never the currently-executing one).
  // The active item is tracked separately via `activeQueueIdRef` /
  // `activeFingerprintRef`. Previously queue[0] was "the running one" and
  // the completion effect filtered it out by id β€” that created two racing
  // sources of truth (the ref claim in pushOrStart and the filter in the
  // terminal-state effect) and, together with React 19's concurrent
  // rendering, could let the same id get dispatched twice when the first
  // run finished. Keeping the active item out of the queue entirely
  // eliminates that class of races.
  const [queue, setQueue] = useState<QueueItem[]>([])
  const persistedQueueLoadedRef = useRef(false)
  const activeQueueIdRef = useRef<string | null>(null)
  // Fingerprint of the currently-executing item, set by `dispatch` and
  // cleared on terminal-state. Used by pushOrStart to client-side-dedupe
  // "user resubmits the same content" before we ever POST.
  const activeFingerprintRef = useRef<string | null>(null)
  const activeReplaceTargetsRef = useRef<ReplacementTargets | null>(null)
  // Guards against accidentally dispatching the same queue id twice
  // (StrictMode re-invoking effects, duplicate auto-dequeue firings, etc).
  const dispatchedIdsRef = useRef<Set<string>>(new Set())
  // Set to true in `dispatch()` once we've actually fired the underlying
  // generate call, cleared once the run terminates. Without this flag the
  // auto-dequeue effect can race with `pushOrStart` and pop the just-claimed
  // item before the worker even started.
  const activeStartedRef = useRef(false)
  // When the backend rejects the active run with 409 the next item in the
  // queue would 409 too β€” pause auto-dispatch and surface a banner so the
  // user can decide. Cleared on `resumeQueue()` or the next manual enqueue.
  const [paused, setPaused] = useState(false)
  const [pausedReason, setPausedReason] = useState<
    'in_flight' | 'duplicate' | 'unknown' | null
  >(null)
  const [queueModeNotice, setQueueModeNotice] = useState<string | null>(null)
  const previousConcurrentRef = useRef(appSettings.concurrentPipelineRuns)
  const trackedRunInProgress = runs.runs.some((run) => run.status === 'running')

  useEffect(() => {
    const previous = previousConcurrentRef.current
    if (previous !== appSettings.concurrentPipelineRuns) {
      setQueueModeNotice(
        appSettings.concurrentPipelineRuns
          ? 'Concurrency turned on. Pending Text -> Video jobs can start in parallel while screenshot and PowerPoint stages stay gated.'
          : 'Concurrency turned off. Pending jobs will stay in serial order and start one at a time.',
      )
      previousConcurrentRef.current = appSettings.concurrentPipelineRuns
    }
  }, [appSettings.concurrentPipelineRuns])

  useEffect(() => {
    let cancelled = false
    void api.getPendingClientQueue()
      .then((response) => {
        if (cancelled) return
        const restored = response.items
          .map(fromPersistedQueueItem)
          .filter((item): item is QueueItem => !!item)
        if (restored.length > 0) {
          setQueue((prev) => {
            const existing = new Set(prev.map((item) => item.id))
            return [...prev, ...restored.filter((item) => !existing.has(item.id))]
          })
        }
      })
      .catch(() => undefined)
      .finally(() => {
        if (!cancelled) persistedQueueLoadedRef.current = true
      })
    return () => {
      cancelled = true
    }
  }, [])

  useEffect(() => {
    if (!persistedQueueLoadedRef.current) return
    const persisted = queue
      .map(toPersistedQueueItem)
      .filter((item): item is PendingClientQueueItem => !!item)
    void api.savePendingClientQueue(persisted).catch(() => undefined)
  }, [queue])

  // Connect generation events β†’ runs store. Lifts the "run row" into
  // existence as soon as the SSE stream reports `running`, finishes it on
  // terminal events. We also create the row on a *terminal* state when
  // `pendingRef` is still set β€” this catches the rare case where the POST
  // 409s before the SSE ever flips to running, so the run still appears
  // in Processes instead of vanishing silently.
  useEffect(() => {
    const s = gen.state

    if (s.status === 'running' && pendingRef.current && !runIdRef.current) {
      runIdRef.current = runs.start(pendingRef.current)
      pendingRef.current = null
      return
    }

    const terminal =
      s.status === 'success' || s.status === 'error' || s.status === 'cancelled'
    if (terminal && pendingRef.current && !runIdRef.current) {
      runIdRef.current = runs.start(pendingRef.current)
      pendingRef.current = null
    }

    if (!runIdRef.current) return

    if (s.status === 'running') {
      runs.update(runIdRef.current, {
        stage: s.stage,
        message: s.message,
        progress: s.progress,
        etaSeconds: s.etaSeconds,
        operationId: s.operationId,
      })
    }

    if (s.status === 'success') {
      const replacement = activeReplaceTargetsRef.current
      runs.finish(runIdRef.current, {
        status: 'success',
        htmlFilename: s.result?.html_filename,
        screenshotFiles: s.result?.screenshot_files,
        screenshotFolder: s.result?.screenshot_folder,
        presentationFile: s.result?.presentation_file,
        videoFile: s.result?.video_file,
        operationId: s.result?.operation_id ?? s.operationId,
        etaSeconds: s.etaSeconds,
      })
      if (replacement) {
        void cleanupReplacementTargets(replacement).then(() => {
          if (replacement.runId) runs.remove(replacement.runId)
        })
      }
      runIdRef.current = null
    } else if (s.status === 'error') {
      runs.finish(runIdRef.current, { status: 'error', error: s.error })
      runIdRef.current = null
    } else if (s.status === 'cancelled') {
      runs.finish(runIdRef.current, { status: 'cancelled' })
      runIdRef.current = null
    }
  }, [gen.state, runs])

  useEffect(() => {
    if (gen.state.status !== 'running') return
    let cancelled = false
    let wakeLock: { release: () => Promise<void> } | null = null

    const requestWakeLock = async () => {
      try {
        const nav = navigator as Navigator & {
          wakeLock?: { request: (type: 'screen') => Promise<{ release: () => Promise<void> }> }
        }
        if (!nav.wakeLock) return
        wakeLock = await nav.wakeLock.request('screen')
        if (cancelled) {
          await wakeLock.release().catch(() => undefined)
          wakeLock = null
        }
      } catch {
        /* Best effort: browser/OS can deny wake lock. */
      }
    }

    void requestWakeLock()
    return () => {
      cancelled = true
      void wakeLock?.release().catch(() => undefined)
    }
  }, [gen.state.status])

  // Dispatcher β€” starts a queue item's generate call. Safe to call with
  // any item kind; chooses the right hook under the hood. Idempotent: if
  // called twice with the same item id we silently ignore the second call,
  // which defends against double-dispatch from StrictMode / re-render
  // races without changing steady-state behaviour.
  const dispatch = useCallback(
    (item: QueueItem) => {
      if (dispatchedIdsRef.current.has(item.id)) return
      dispatchedIdsRef.current.add(item.id)
      activeQueueIdRef.current = item.id
      activeFingerprintRef.current = fingerprintItem(item)
      activeReplaceTargetsRef.current = item.replaceTargets ?? null
      activeStartedRef.current = true
      if (item.kind === 'text' && item.text !== undefined && item.settings) {
        pendingRef.current = {
          tool: item.tool,
          inputPreview: item.inputPreview,
          inputText: item.inputText ?? item.text,
          settings: item.settings,
        }
        void gen.generate(item.text, item.settings)
      } else if (item.kind === 'html' && item.html !== undefined && item.settings) {
        pendingRef.current = {
          tool: item.tool,
          inputPreview: item.inputPreview,
          inputText: item.inputText ?? item.html,
          settings: item.settings,
        }
        void gen.generateFromHtml(item.html, item.settings)
      } else if (item.kind === 'image' && item.formData) {
        pendingRef.current = {
          tool: item.tool,
          inputPreview: item.inputPreview,
          inputText: item.inputText ?? item.inputPreview,
          inputFiles: item.files?.map((f) => f.name),
          settings: item.settings,
        }
        void gen.generateFromImage(item.formData)
      }
    },
    [gen],
  )

  const dispatchBackgroundText = useCallback(
    (item: QueueItem): EnqueueResult => {
      if (item.kind !== 'text' || !item.text || !item.settings) {
        return { queueId: item.id, startedImmediately: false }
      }
      if (dispatchedIdsRef.current.has(item.id)) {
        return { queueId: item.id, startedImmediately: false, duplicateOf: 'active' }
      }
      dispatchedIdsRef.current.add(item.id)
      const localRunId = runs.start({
        tool: item.tool,
        inputPreview: item.inputPreview,
        inputText: item.inputText ?? item.text,
        settings: item.settings,
      })
      const replacement = item.replaceTargets ?? null

      void (async () => {
        try {
          const started = await api.startTextToVideoRun(item.text!, item.settings!)
          runs.update(localRunId, {
            operationId: started.operation_id,
            etaSeconds: started.estimated_total_seconds,
          })
          let done = false
          while (!done) {
            await new Promise((resolve) => window.setTimeout(resolve, 1500))
            const detail = await api.getRun(started.run_id)
            const run = detail.run
            const status = String(run.status ?? '')
            if (status === 'queued' || status === 'running') {
          runs.update(localRunId, {
                stage: run.stage,
                message: run.message,
                progress: run.progress,
                etaSeconds: etaFromBackendRun(run),
                operationId: run.operation_id ?? started.operation_id,
              })
            }
            if (status === 'completed') {
              const outputs = run.outputs ?? {}
              runs.finish(localRunId, {
                status: 'success',
                htmlFilename: outputs.html_filename ?? outputs.html_file,
                screenshotFiles: outputs.screenshot_files,
                screenshotFolder: outputs.screenshot_folder,
                presentationFile: outputs.presentation_file ?? outputs.presentation_path,
                videoFile: outputs.video_file ?? outputs.video_path,
                operationId: run.operation_id ?? started.operation_id,
              })
              if (replacement) {
                await cleanupReplacementTargets(replacement)
                if (replacement.runId) runs.remove(replacement.runId)
              }
              done = true
            } else if (status === 'failed') {
              runs.finish(localRunId, {
                status: 'error',
                error: run.message ?? 'Process failed',
                operationId: run.operation_id ?? started.operation_id,
              })
              done = true
            } else if (status === 'cancelled') {
              runs.finish(localRunId, {
                status: 'cancelled',
                operationId: run.operation_id ?? started.operation_id,
              })
              done = true
            }
          }
        } catch (err) {
          runs.finish(localRunId, {
            status: 'error',
            error: err instanceof Error ? err.message : String(err),
          })
        }
      })()

      return { queueId: item.id, startedImmediately: true }
    },
    [runs],
  )

  const canRunInBackground = useCallback(
    (item: QueueItem): boolean =>
      appSettings.concurrentPipelineRuns &&
      item.kind === 'text' &&
      item.tool === 'text-to-video',
    [appSettings.concurrentPipelineRuns],
  )

  // Auto-dequeue: when the current run terminates, pop the next queue
  // item and kick it off. The short delay gives React a tick to render the
  // "success"/"error" state before we flip back to "running" for the next
  // item β€” otherwise the UI never flashes the completion state for a run
  // whose successor is queued behind it.
  //
  // Guards:
  //   1. `activeStartedRef` β€” we only act on terminal states that follow a
  //      run we actually dispatched. Without this, a fresh `pushOrStart`
  //      committed in the same tick as a previous run's terminal state
  //      can be clobbered here.
  //   2. Pause-on-rejection β€” when the run errored with a backend 409 we
  //      don't auto-fire the next item; doing so would cascade-fail every
  //      queued item with the same reason.
  //
  // Because `queue` now only contains pending items (the running one is
  // tracked separately via `activeQueueIdRef`), we no longer need to
  // filter the completed id out β€” `queue[0]` IS the next pending item.
  useEffect(() => {
    const s = gen.state
    if (s.status !== 'success' && s.status !== 'error' && s.status !== 'cancelled') {
      return
    }
    if (!activeStartedRef.current) return

    const rejected = s.status === 'error' && s.rejectedReason
    const rejectedReason = s.rejectedReason ?? null

    const timer = window.setTimeout(() => {
      activeStartedRef.current = false
      activeQueueIdRef.current = null
      activeFingerprintRef.current = null
      activeReplaceTargetsRef.current = null
      if (paused) return
      if (rejected) {
        setPaused(true)
        setPausedReason(rejectedReason ?? 'unknown')
        return
      }
      setQueue((prev) => {
        if (prev.length === 0) return prev
        const [next, ...rest] = prev
        activeQueueIdRef.current = next.id
        activeFingerprintRef.current = fingerprintItem(next)
        activeReplaceTargetsRef.current = next.replaceTargets ?? null
        window.setTimeout(() => dispatch(next), 0)
        return rest
      })
    }, 250)
    return () => window.clearTimeout(timer)
  }, [dispatch, gen.state, paused])

  // If a running backend job was restored from the process log, this hook can
  // be idle while the backend is still busy. Keep new submissions pending until
  // that tracked run finishes, then drain the queue.
  useEffect(() => {
    const idleNow =
      gen.state.status === 'idle' ||
      gen.state.status === 'success' ||
      gen.state.status === 'error' ||
      gen.state.status === 'cancelled'
    if (!idleNow || paused || activeStartedRef.current || activeQueueIdRef.current) return
    if (!appSettings.concurrentPipelineRuns && trackedRunInProgress) return

    setQueue((prev) => {
      if (prev.length === 0) return prev
      const [next, ...rest] = prev
      activeQueueIdRef.current = next.id
      activeFingerprintRef.current = fingerprintItem(next)
      activeReplaceTargetsRef.current = next.replaceTargets ?? null
      window.setTimeout(() => dispatch(next), 0)
      return rest
    })
  }, [
    appSettings.concurrentPipelineRuns,
    dispatch,
    gen.state.status,
    paused,
    queue.length,
    trackedRunInProgress,
  ])

  // When concurrency is enabled while a serial run is already active, promote
  // the existing pending text-to-video queue in FIFO order. New submissions
  // append behind these items and wait for this effect, so the last clicked
  // item cannot jump ahead of older queued work.
  useEffect(() => {
    if (!appSettings.concurrentPipelineRuns || paused || queue.length === 0) return
    setQueue((prev) => {
      const runnable: QueueItem[] = []
      let index = 0
      while (index < prev.length && canRunInBackground(prev[index])) {
        runnable.push(prev[index])
        index += 1
      }
      if (runnable.length === 0) return prev
      const rest = prev.slice(index)
      runnable.forEach((item, offset) => {
        window.setTimeout(() => dispatchBackgroundText(item), offset * 25)
      })
      return rest
    })
  }, [
    appSettings.concurrentPipelineRuns,
    canRunInBackground,
    dispatchBackgroundText,
    paused,
    queue.length,
  ])

  // Central intake for every submission. Either starts `item` immediately
  // (claiming the active-slot refs synchronously), hands it to the
  // background dispatcher, or appends it to the pending-only `queue`.
  // Returns the queue id, whether the run started right away, and — for
  // duplicates — which existing entry it duplicated.
  const pushOrStart = useCallback(
    (item: QueueItem): EnqueueResult => {
      // Any new manual submission resumes a paused queue — we assume the
      // user has waited out / handled whatever caused the previous 409.
      if (paused) {
        setPaused(false)
        setPausedReason(null)
      }

      // Client-side duplicate guard. Blocks the common "user resubmits the
      // same content while it's already running/queued" path so we don't
      // silently chain a second identical run the moment the first
      // completes. Match against both the active fingerprint and every
      // pending queue entry.
      const fp = fingerprintItem(item)
      if (activeFingerprintRef.current === fp && activeQueueIdRef.current) {
        toast.push({
          variant: 'info',
          title: 'Already running',
          message: 'This exact content is already being processed — opening its progress view.',
        })
        // Point the caller at the in-flight run instead of enqueueing a copy.
        return {
          queueId: activeQueueIdRef.current,
          startedImmediately: false,
          duplicateOf: 'active',
        }
      }
      const existing = queue.find((q) => fingerprintItem(q) === fp)
      if (existing) {
        toast.push({
          variant: 'info',
          title: 'Already queued',
          message: 'This exact content is already queued — opening its entry in Processes.',
        })
        return {
          queueId: existing.id,
          startedImmediately: false,
          duplicateOf: 'queued',
        }
      }

      // The tracked generator is free when it's in any terminal/idle state.
      const idleNow =
        gen.state.status === 'idle' ||
        gen.state.status === 'success' ||
        gen.state.status === 'error' ||
        gen.state.status === 'cancelled'
      // When concurrent runs are disabled, a run tracked anywhere (even one
      // we didn't start here) occupies the single backend slot.
      const backendSlotBusy = !appSettings.concurrentPipelineRuns && trackedRunInProgress
      if (canRunInBackground(item) && !idleNow) {
        // Foreground generator is busy but this item can run in the
        // background — dispatch directly only if nothing is already queued,
        // so queued items keep their FIFO ordering.
        if (queue.length === 0) {
          return dispatchBackgroundText(item)
        }
        setQueue((prev) => [...prev, item])
        return { queueId: item.id, startedImmediately: false }
      }
      if (idleNow && !activeQueueIdRef.current && !backendSlotBusy) {
        // Claim the active slot *synchronously* so a rapid second submission
        // in the same tick doesn't also see `activeQueueIdRef` as empty and
        // race to dispatch. The item is NOT appended to `queue` — queue is
        // for pending-only items now; the running one lives in the refs.
        activeQueueIdRef.current = item.id
        activeFingerprintRef.current = fp
        activeReplaceTargetsRef.current = item.replaceTargets ?? null
        window.setTimeout(() => dispatch(item), 0)
        return { queueId: item.id, startedImmediately: true }
      }
      // Busy (or backend slot occupied): park the item at the tail.
      setQueue((prev) => [...prev, item])
      return { queueId: item.id, startedImmediately: false }
    },
    [
      appSettings.concurrentPipelineRuns,
      canRunInBackground,
      dispatch,
      dispatchBackgroundText,
      gen.state.status,
      paused,
      queue,
      toast,
      trackedRunInProgress,
    ],
  )

  const enqueueText = useCallback(
    (
      tool: RunTool,
      text: string,
      settings: GenerateSettings,
      options?: { replaceTargets?: ReplacementTargets },
    ): EnqueueResult =>
      pushOrStart({
        id: nextQueueId(),
        tool,
        kind: 'text',
        inputPreview: text.slice(0, 200),
        inputText: text,
        queuedAt: Date.now(),
        text,
        settings,
        replaceTargets: options?.replaceTargets,
      }),
    [pushOrStart],
  )

  const enqueueHtml = useCallback(
    (
      tool: RunTool,
      html: string,
      settings: GenerateSettings,
      options?: { replaceTargets?: ReplacementTargets },
    ): EnqueueResult =>
      pushOrStart({
        id: nextQueueId(),
        tool,
        kind: 'html',
        inputPreview: html.slice(0, 200),
        inputText: html,
        queuedAt: Date.now(),
        html,
        settings,
        replaceTargets: options?.replaceTargets,
      }),
    [pushOrStart],
  )

  // Build an image-kind QueueItem (FormData payload) and route it through
  // the central push-or-start path.
  const enqueueImage = useCallback(
    (
      tool: RunTool,
      formData: FormData,
      meta: { files: File[]; settings?: GenerateSettings },
    ): EnqueueResult => {
      // Summarise the upload as a filename list for the queue UI; fall back
      // to a generic label when no File objects were supplied.
      const names = meta.files.map((f) => f.name)
      return pushOrStart({
        id: nextQueueId(),
        tool,
        kind: 'image',
        inputPreview: names.length ? names.join(', ') : '(image/pdf)',
        inputText: names.length ? names.join('\n') : '(image/pdf)',
        queuedAt: Date.now(),
        formData,
        files: meta.files,
        settings: meta.settings,
      })
    },
    [pushOrStart],
  )

  // Remove one pending entry by id. The in-flight run never lives in
  // `queue` (only in the active-slot refs), so filtering by id cannot
  // disturb the dispatcher.
  const cancelQueued = useCallback((queueId: string) => {
    setQueue((prev) => {
      const remaining = prev.filter((entry) => entry.id !== queueId)
      return remaining
    })
  }, [])

  // Manual pause: raise the flag and clear any auto-pause reason so the UI
  // doesn't show a stale explanation.
  const pauseQueue = useCallback(() => {
    setPausedReason(null)
    setPaused(true)
  }, [])

  const resumeQueue = useCallback(() => {
    setPaused(false)
    setPausedReason(null)
    // Pull the next queued item (if any) and fire it. We re-use the
    // dispatch path so all the pendingRef / runIdRef bookkeeping is
    // identical to a fresh submission.
    setQueue((prev) => {
      if (activeQueueIdRef.current) return prev
      if (gen.state.status === 'running') return prev
      if (!appSettings.concurrentPipelineRuns && trackedRunInProgress) return prev
      if (prev.length === 0) return prev
      const [next, ...rest] = prev
      activeQueueIdRef.current = next.id
      activeFingerprintRef.current = fingerprintItem(next)
      activeReplaceTargetsRef.current = next.replaceTargets ?? null
      window.setTimeout(() => dispatch(next), 0)
      return rest
    })
  }, [appSettings.concurrentPipelineRuns, dispatch, gen.state.status, trackedRunInProgress])

  // Clear the one-shot notice explaining why a submission was queued
  // rather than run immediately.
  const dismissQueueModeNotice = useCallback(() => {
    setQueueModeNotice(null)
  }, [])

  // Move the entry `queueId` to the slot currently occupied by
  // `targetQueueId` (drag-and-drop reordering). No-ops when the two ids
  // are equal or either id is absent from the pending queue.
  const reorderQueued = useCallback((queueId: string, targetQueueId: string) => {
    if (queueId === targetQueueId) return
    setQueue((prev) => {
      const fromIndex = prev.findIndex((entry) => entry.id === queueId)
      const toIndex = prev.findIndex((entry) => entry.id === targetQueueId)
      if (fromIndex === -1 || toIndex === -1) return prev
      const reordered = [...prev]
      const [moved] = reordered.splice(fromIndex, 1)
      reordered.splice(toIndex, 0, moved)
      return reordered
    })
  }, [])

  // Patch a pending entry in place and keep the derived display fields
  // (inputText / inputPreview) in sync with whichever payload field the
  // item's kind reads (html for 'html' items, text otherwise).
  const updateQueued = useCallback(
    (queueId: string, patch: Partial<Pick<QueueItem, 'text' | 'html' | 'settings'>>) => {
      setQueue((prev) =>
        prev.map((entry) => {
          if (entry.id !== queueId) return entry
          const nextText = patch.text ?? entry.text
          const nextHtml = patch.html ?? entry.html
          // Source of truth for the derived fields, by item kind.
          const source = entry.kind === 'html' ? nextHtml : nextText
          return {
            ...entry,
            ...patch,
            inputText: source ?? entry.inputText,
            inputPreview: (source ?? entry.inputPreview).slice(0, 200),
          }
        }),
      )
    },
    [],
  )

  // Single memoised context value: consumers only see a new reference when
  // one of the underlying pieces actually changes.
  const value = useMemo<TrackedGenerationContextValue>(() => {
    const ctxValue: TrackedGenerationContextValue = {
      state: gen.state,
      queue,
      paused,
      pausedReason,
      queueModeNotice,
      dismissQueueModeNotice,
      cancel: gen.cancel,
      cancelQueued,
      pauseQueue,
      resumeQueue,
      reorderQueued,
      updateQueued,
      reset: gen.reset,
      enqueueText,
      enqueueHtml,
      enqueueImage,
    }
    return ctxValue
  }, [
    gen.state,
    gen.cancel,
    gen.reset,
    queue,
    paused,
    pausedReason,
    queueModeNotice,
    dismissQueueModeNotice,
    cancelQueued,
    pauseQueue,
    resumeQueue,
    reorderQueued,
    updateQueued,
    enqueueText,
    enqueueHtml,
    enqueueImage,
  ])

  return <Ctx.Provider value={value}>{children}</Ctx.Provider>
}

// eslint-disable-next-line react-refresh/only-export-components
// eslint-disable-next-line react-refresh/only-export-components
export function useTrackedGenerate(tool: RunTool) {
  const ctx = useContext(Ctx)
  if (!ctx) {
    throw new Error('useTrackedGenerate must be used inside <TrackedGenerationProvider>')
  }
  return useMemo(() => {
    // Back-compat wrappers so existing wizards keep their old call shape:
    // each binds `tool` and forwards to the corresponding enqueue* API.
    // They return EnqueueResult; legacy callers written against the old
    // void-returning API simply ignore it.
    const generate = (
      text: string,
      settings: GenerateSettings,
      options?: { replaceTargets?: ReplacementTargets },
    ) => ctx.enqueueText(tool, text, settings, options)
    const generateFromHtml = (
      html: string,
      settings: GenerateSettings,
      options?: { replaceTargets?: ReplacementTargets },
    ) => ctx.enqueueHtml(tool, html, settings, options)
    const generateFromImage = (
      fd: FormData,
      meta?: { files?: File[]; settings?: GenerateSettings },
    ) => ctx.enqueueImage(tool, fd, { files: meta?.files ?? [], settings: meta?.settings })

    return {
      state: ctx.state,
      queue: ctx.queue,
      cancel: ctx.cancel,
      cancelQueued: ctx.cancelQueued,
      pauseQueue: ctx.pauseQueue,
      resumeQueue: ctx.resumeQueue,
      reorderQueued: ctx.reorderQueued,
      updateQueued: ctx.updateQueued,
      reset: ctx.reset,
      generate,
      generateFromHtml,
      generateFromImage,
    }
  }, [ctx, tool])
}

// eslint-disable-next-line react-refresh/only-export-components
export function useGenerationQueue() {
  const ctx = useContext(Ctx)
  if (!ctx) {
    throw new Error('useGenerationQueue must be used inside <TrackedGenerationProvider>')
  }
  return {
    queue: ctx.queue,
    cancelQueued: ctx.cancelQueued,
    cancel: ctx.cancel,
    pauseQueue: ctx.pauseQueue,
    state: ctx.state,
    paused: ctx.paused,
      pausedReason: ctx.pausedReason,
      queueModeNotice: ctx.queueModeNotice,
      dismissQueueModeNotice: ctx.dismissQueueModeNotice,
      resumeQueue: ctx.resumeQueue,
    reorderQueued: ctx.reorderQueued,
    updateQueued: ctx.updateQueued,
    enqueueText: ctx.enqueueText,
    enqueueHtml: ctx.enqueueHtml,
  }
}