LucasLooTan Claude Opus 4.7 (1M context) committed on
Commit
9ad7573
·
1 Parent(s): 62443b6

fix: webcam stream actually fires + JS auto-arms record toggle

Browse files

Root cause: gradio 4.44.1's gr.Image(streaming=True) only dispatches
.stream() events while the user is actively "recording" (Webcam.svelte
gates take_picture on `recording=true`). We had hidden the record
toggle via CSS, so the stream never started and _stash_frame never ran.

Fixes:
- Inject JS that polls for the (hidden) record button and clicks it
once per mount, flipping recording=true so frames flow.
- .stream() now wires to a hidden gr.Number sink — outputs=[] silently
disables the handler in 4.44.1.
- landmark_classifier: retry on transient HF download failure instead
of caching the failure forever, log full exception text + token state,
and fall back to anonymous fetch when the token call 404s.
- app.py: configure root logging so signbridge.* messages reach stdout.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

app.py CHANGED
@@ -7,11 +7,17 @@ Gradio interface it builds. Keep this file thin β€” real UI lives in
7
 
8
  from __future__ import annotations
9
 
 
10
  import os
11
 
12
  from dotenv import load_dotenv
13
 
14
- from signbridge.space import build_demo
 
 
 
 
 
15
 
16
 
17
  def main() -> None:
 
7
 
8
  from __future__ import annotations
9
 
10
+ import logging
11
  import os
12
 
13
  from dotenv import load_dotenv
14
 
15
+ logging.basicConfig(
16
+ level=logging.INFO,
17
+ format="%(asctime)s %(levelname)s %(name)s: %(message)s",
18
+ )
19
+
20
+ from signbridge.space import build_demo # noqa: E402
21
 
22
 
23
  def main() -> None:
signbridge/recognizer/landmark_classifier.py CHANGED
@@ -44,12 +44,35 @@ def _resolve_weight(local_override: str | None, filename: str) -> Path | None:
44
  except ImportError:
45
  logger.warning("huggingface_hub missing; cannot fetch %s.", filename)
46
  return None
47
- try:
48
- local = hf_hub_download(repo_id=_HF_REPO, filename=filename, repo_type="model")
49
- return Path(local)
50
- except Exception as exc: # noqa: BLE001 β€” HF Hub can fail for many reasons
51
- logger.warning("hf_hub_download(%s) failed: %s", filename, type(exc).__name__)
52
- return None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
 
54
  _lock = threading.Lock()
55
  _state: dict[str, object] = {"loaded": False, "landmarker": None, "mlp": None, "classes": None}
@@ -66,7 +89,11 @@ def _normalize_landmarks(coords3: np.ndarray) -> np.ndarray:
66
 
67
 
68
  def _ensure_loaded() -> bool:
69
- """Lazy-load MediaPipe + MLP. Returns True if both ready."""
 
 
 
 
70
  if _state["loaded"]:
71
  return _state["landmarker"] is not None and _state["mlp"] is not None
72
  with _lock:
@@ -75,13 +102,11 @@ def _ensure_loaded() -> bool:
75
 
76
  mlp_path = _resolve_weight(_MLP_LOCAL_OVERRIDE, _MLP_FILENAME)
77
  if mlp_path is None:
78
- logger.info("MLP weights unavailable; landmark classifier disabled.")
79
- _state["loaded"] = True
80
  return False
81
  hand_path = _resolve_weight(_HAND_LOCAL_OVERRIDE, _HAND_FILENAME)
82
  if hand_path is None:
83
- logger.info("hand_landmarker.task unavailable; landmark classifier disabled.")
84
- _state["loaded"] = True
85
  return False
86
 
87
  try:
@@ -91,7 +116,7 @@ def _ensure_loaded() -> bool:
91
  import torch.nn as nn # type: ignore[import-not-found]
92
  except ImportError as exc:
93
  logger.warning("landmark classifier deps missing (%s); disabled.", exc)
94
- _state["loaded"] = True
95
  return False
96
 
97
  opts = vision.HandLandmarkerOptions(
 
44
  except ImportError:
45
  logger.warning("huggingface_hub missing; cannot fetch %s.", filename)
46
  return None
47
+ token = os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_HUB_TOKEN") or None
48
+ logger.info(
49
+ "hf_hub_download(%s) attempt: repo=%s token_len=%d",
50
+ filename, _HF_REPO, len(token) if token else 0,
51
+ )
52
+ # First attempt: with explicit token (if set). If that fails with
53
+ # auth-flavoured RepositoryNotFoundError, retry anonymously β€” public
54
+ # repos work without auth, and a stale/invalid token can poison even
55
+ # public reads.
56
+ last_exc: Exception | None = None
57
+ for attempt_token in (token, None):
58
+ try:
59
+ local = hf_hub_download(
60
+ repo_id=_HF_REPO,
61
+ filename=filename,
62
+ repo_type="model",
63
+ token=attempt_token,
64
+ )
65
+ logger.info("hf_hub_download(%s) ok via %s", filename, "token" if attempt_token else "anonymous")
66
+ return Path(local)
67
+ except Exception as exc: # noqa: BLE001 β€” many failure modes
68
+ last_exc = exc
69
+ logger.warning(
70
+ "hf_hub_download(%s) failed (token=%s): %s β€” %s",
71
+ filename, "yes" if attempt_token else "no", type(exc).__name__, str(exc)[:300],
72
+ )
73
+ if attempt_token is None:
74
+ break # already tried anonymously
75
+ return None
76
 
77
  _lock = threading.Lock()
78
  _state: dict[str, object] = {"loaded": False, "landmarker": None, "mlp": None, "classes": None}
 
89
 
90
 
91
  def _ensure_loaded() -> bool:
92
+ """Lazy-load MediaPipe + MLP. Returns True if both ready.
93
+
94
+ Transient failures (HF Hub blip, momentary network) are NOT cached
95
+ so the next call retries. Only deps-missing (ImportError) is fatal
96
+ and cached, since it can't fix itself at runtime."""
97
  if _state["loaded"]:
98
  return _state["landmarker"] is not None and _state["mlp"] is not None
99
  with _lock:
 
102
 
103
  mlp_path = _resolve_weight(_MLP_LOCAL_OVERRIDE, _MLP_FILENAME)
104
  if mlp_path is None:
105
+ logger.warning("MLP weights download failed; will retry on next call.")
 
106
  return False
107
  hand_path = _resolve_weight(_HAND_LOCAL_OVERRIDE, _HAND_FILENAME)
108
  if hand_path is None:
109
+ logger.warning("hand_landmarker.task download failed; will retry on next call.")
 
110
  return False
111
 
112
  try:
 
116
  import torch.nn as nn # type: ignore[import-not-found]
117
  except ImportError as exc:
118
  logger.warning("landmark classifier deps missing (%s); disabled.", exc)
119
+ _state["loaded"] = True # cache: deps won't appear at runtime
120
  return False
121
 
122
  opts = vision.HandLandmarkerOptions(
signbridge/space.py CHANGED
@@ -82,11 +82,8 @@ class _SessionState:
82
  last_audio_path: str | None = None
83
 
84
 
85
- # Single-user demo: one global latest-frame variable instead of a
86
- # session-keyed dict. The previous session-keyed approach failed because
87
- # adding `gr.Request` to a stream-handler signature appears to silently
88
- # kill the handler in gradio 4.44.1 (TypeError swallowed by the queue
89
- # worker). No request injection here = no failure surface.
90
  _latest_frame: np.ndarray | None = None
91
  _frame_lock = threading.Lock()
92
  _stash_count = 0
@@ -96,6 +93,26 @@ def _new_session() -> _SessionState:
96
  return _SessionState()
97
 
98
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
99
  def _format_history(signs: list[str]) -> str:
100
  if not signs:
101
  return "_(no signs captured yet β€” try signing the letter A and pressing Capture)_"
@@ -146,44 +163,26 @@ def _shared_extractor() -> LandmarkExtractor:
146
  return _extractor_singleton
147
 
148
 
149
- def _stash_frame(frame: np.ndarray | None) -> None:
150
- """Webcam .change() callback β€” writes every live frame to the global
151
- `_latest_frame`. Bare signature (no gr.Request, no extra params) so
152
- gradio's signature inspection can't fail."""
153
- global _latest_frame, _stash_count
154
- if frame is None:
155
- return
156
- with _frame_lock:
157
- _latest_frame = frame
158
- _stash_count += 1
159
- # Log every ~30 frames (~1/sec at 30 fps webcam) so HF run logs
160
- # confirm the handler is actually firing.
161
- if _stash_count == 1 or _stash_count % 30 == 0:
162
- logger.info(
163
- "_stash_frame fired %d times; last shape=%s dtype=%s",
164
- _stash_count, frame.shape, frame.dtype,
165
- )
166
-
167
-
168
  def _capture_sign(state: _SessionState) -> tuple[str, str, _SessionState]:
169
- """Take-image button handler. Reads the latest live frame from the
170
- global cache, runs recognition, appends to history."""
171
  with _frame_lock:
172
  frame = _latest_frame
173
-
174
- logger.info(
175
- "_capture_sign: frame_present=%s, stash_count=%d",
176
- frame is not None, _stash_count,
177
  )
178
 
179
  if frame is None:
180
  return (
181
- "_no frame yet β€” make sure the camera preview is live and try again_",
182
  _format_history(state.sign_history),
183
  state,
184
  )
185
 
186
  token, confidence = _recognize(frame)
 
 
187
  if not token or confidence < 0.5:
188
  return (
189
  "_couldn't recognise that one β€” try centering the gesture and a plain background_",
@@ -265,12 +264,52 @@ _WEBCAM_BUTTON_LABEL_CSS = """
265
  font-size: 13px;
266
  color: #1e1b4b;
267
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
268
  """
269
 
270
 
271
  def build_demo() -> gr.Blocks:
272
  with gr.Blocks(
273
- title="SignBridge", theme=gr.themes.Soft(), css=_WEBCAM_BUTTON_LABEL_CSS
 
 
 
274
  ) as demo:
275
  gr.Markdown(
276
  "# 🀟 SignBridge β€” real-time ASL β†’ English speech\n"
@@ -294,25 +333,27 @@ def build_demo() -> gr.Blocks:
294
  gr.HTML(
295
  '<div class="signbridge-webcam-help">'
296
  '<b>How it works:</b> '
297
- '<b>1.</b> click the webcam once to grant access Β· '
298
  '<b>2.</b> sign a letter (A–Z) Β· '
299
  '<b>3.</b> click <b>πŸ“Έ Take image</b> β€” recognition is automatic Β· '
300
  '<b>4.</b> repeat for the next letter, then press <b>πŸ”Š Speak</b>.'
301
  "</div>"
302
  )
 
 
 
 
 
 
 
 
303
  webcam = gr.Image(
304
  sources=["webcam"],
305
- # streaming=True keeps the live preview running
306
- # after the one-time permission grant, so the
307
- # user never sees the access-prompt screen
308
- # again. Frames are stashed in session state via
309
- # the .stream() handler, and the Take-image
310
- # button reads from there.
311
  streaming=True,
312
  label="Sign here",
313
  height=420,
314
  type="numpy",
315
- elem_classes=["signbridge-webcam"],
316
  )
317
  with gr.Row():
318
  capture_btn = gr.Button(
@@ -341,13 +382,13 @@ def build_demo() -> gr.Blocks:
341
  "Spell out a word letter-by-letter, then press Speak."
342
  )
343
 
344
- # Use .change() (not .stream()) β€” it fires on every value
345
- # change which, for a streaming webcam, is every frame.
346
- # outputs=[] is required (None doesn't wire the event).
347
- webcam.change(
348
  fn=_stash_frame,
349
  inputs=[webcam],
350
- outputs=[],
351
  show_progress="hidden",
352
  )
353
  capture_btn.click(
 
82
  last_audio_path: str | None = None
83
 
84
 
85
+ # Single-user demo: one global latest-frame variable populated by the
86
+ # .stream() handler. The Take-image button reads from here.
 
 
 
87
  _latest_frame: np.ndarray | None = None
88
  _frame_lock = threading.Lock()
89
  _stash_count = 0
 
93
  return _SessionState()
94
 
95
 
96
+ def _stash_frame(frame: np.ndarray | None) -> int:
97
+ """Webcam .stream() callback. Fires every ~500ms (gradio's internal
98
+ setInterval in Webcam.svelte) once `recording=true`. Writes the
99
+ latest live frame to the global cache. Returns _stash_count so we
100
+ can wire a real (hidden) output β€” empty outputs=[] silently
101
+ disables the handler in gradio 4.44.1."""
102
+ global _latest_frame, _stash_count
103
+ if frame is None:
104
+ return _stash_count
105
+ with _frame_lock:
106
+ _latest_frame = frame
107
+ _stash_count += 1
108
+ if _stash_count == 1 or _stash_count % 30 == 0:
109
+ print(
110
+ f"[stash] fired #{_stash_count} shape={frame.shape}",
111
+ flush=True,
112
+ )
113
+ return _stash_count
114
+
115
+
116
  def _format_history(signs: list[str]) -> str:
117
  if not signs:
118
  return "_(no signs captured yet β€” try signing the letter A and pressing Capture)_"
 
163
  return _extractor_singleton
164
 
165
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
166
  def _capture_sign(state: _SessionState) -> tuple[str, str, _SessionState]:
167
+ """Take-image button handler. Reads the latest streamed frame from
168
+ the global cache, runs recognition, appends to history."""
169
  with _frame_lock:
170
  frame = _latest_frame
171
+ print(
172
+ f"[capture] stash_count={_stash_count} frame_present={frame is not None}",
173
+ flush=True,
 
174
  )
175
 
176
  if frame is None:
177
  return (
178
+ "_no frame yet β€” wait a moment for the camera to start streaming, then try again_",
179
  _format_history(state.sign_history),
180
  state,
181
  )
182
 
183
  token, confidence = _recognize(frame)
184
+ print(f"[capture] recognised token={token!r} conf={confidence:.2f}", flush=True)
185
+
186
  if not token or confidence < 0.5:
187
  return (
188
  "_couldn't recognise that one β€” try centering the gesture and a plain background_",
 
264
  font-size: 13px;
265
  color: #1e1b4b;
266
  }
267
+ /* Snapshot tab uses streaming + a custom Take-image button. We hide
268
+ gradio's built-in controls so the user only sees the live preview
269
+ and our button. A small JS snippet auto-clicks the (hidden) record
270
+ toggle once after permission is granted, which makes Webcam.svelte
271
+ start dispatching the .stream() event every 500ms. The
272
+ "Click to Access Webcam" placeholder is a separate DOM node and
273
+ stays visible β€” browsers require a user gesture for getUserMedia(). */
274
+ .signbridge-webcam-snapshot .source-selection,
275
+ .signbridge-webcam-snapshot .controls,
276
+ .signbridge-webcam-snapshot .button-wrap {
277
+ display: none !important;
278
+ }
279
+ """
280
+
281
+
282
+ # JS injected at app load. Runs in the browser. Polls for gradio's
283
+ # hidden record button inside our snapshot webcam and clicks it once
284
+ # per mount, which flips Webcam.svelte's `recording=true` and starts
285
+ # the .stream() frame loop. Without this, .stream() never fires β€”
286
+ # gradio gates frame dispatch on the record toggle.
287
+ _AUTO_ARM_STREAM_JS = """
288
+ () => {
289
+ const SELECTOR = '.signbridge-webcam-snapshot .button-wrap > button';
290
+ const tick = () => {
291
+ document.querySelectorAll(SELECTOR).forEach((btn) => {
292
+ if (btn.dataset.signbridgeArmed) return;
293
+ // Only arm a freshly-mounted (not-yet-recording) button.
294
+ const titleDiv = btn.querySelector('div[title]');
295
+ if (titleDiv && titleDiv.title === 'start recording') {
296
+ btn.click();
297
+ btn.dataset.signbridgeArmed = '1';
298
+ console.log('[signbridge] auto-armed webcam stream');
299
+ }
300
+ });
301
+ };
302
+ setInterval(tick, 500);
303
+ }
304
  """
305
 
306
 
307
  def build_demo() -> gr.Blocks:
308
  with gr.Blocks(
309
+ title="SignBridge",
310
+ theme=gr.themes.Soft(),
311
+ css=_WEBCAM_BUTTON_LABEL_CSS,
312
+ js=_AUTO_ARM_STREAM_JS,
313
  ) as demo:
314
  gr.Markdown(
315
  "# 🀟 SignBridge β€” real-time ASL β†’ English speech\n"
 
333
  gr.HTML(
334
  '<div class="signbridge-webcam-help">'
335
  '<b>How it works:</b> '
336
+ '<b>1.</b> click the preview once to grant camera access Β· '
337
  '<b>2.</b> sign a letter (A–Z) Β· '
338
  '<b>3.</b> click <b>πŸ“Έ Take image</b> β€” recognition is automatic Β· '
339
  '<b>4.</b> repeat for the next letter, then press <b>πŸ”Š Speak</b>.'
340
  "</div>"
341
  )
342
+ # streaming=True keeps the live preview running
343
+ # continuously. _AUTO_ARM_STREAM_JS clicks the
344
+ # hidden record button after permission grant
345
+ # so Webcam.svelte starts dispatching frames
346
+ # via the .stream() event (gated on
347
+ # `recording=true`). We hide the record/stop
348
+ # controls via CSS so the user only sees a
349
+ # clean preview + our Take-image button.
350
  webcam = gr.Image(
351
  sources=["webcam"],
 
 
 
 
 
 
352
  streaming=True,
353
  label="Sign here",
354
  height=420,
355
  type="numpy",
356
+ elem_classes=["signbridge-webcam", "signbridge-webcam-snapshot"],
357
  )
358
  with gr.Row():
359
  capture_btn = gr.Button(
 
382
  "Spell out a word letter-by-letter, then press Speak."
383
  )
384
 
385
+ # Hidden Number sink for the .stream() handler β€” empty
386
+ # outputs=[] silently disables it in gradio 4.44.1.
387
+ _stash_sink = gr.Number(value=0, visible=False)
388
+ webcam.stream(
389
  fn=_stash_frame,
390
  inputs=[webcam],
391
+ outputs=[_stash_sink],
392
  show_progress="hidden",
393
  )
394
  capture_btn.click(