shank committed on
Commit
2005cd2
·
1 Parent(s): d0d5f60

fix: remove wandb - click conflict with gradio causes resolution-too-deep

Browse files
Files changed (2) hide show
  1. requirements.txt +5 -3
  2. training/train_grpo.py +8 -1
requirements.txt CHANGED
@@ -1,9 +1,11 @@
1
  # ── Training dependencies ──────────────────────────────────────────────────────
2
  # Fully pinned to a pre-validated compatible set.
3
- # DO NOT add loose deps here — they cause pip backtracking (resolution-too-deep).
4
- # gradio is intentionally omitted: HF Spaces injects gradio[oauth,mcp]==6.13.0 itself.
 
 
 
5
 
6
- wandb==0.18.7
7
  datasets==3.0.2
8
  transformers==4.46.3
9
  accelerate==1.0.1
 
1
  # ── Training dependencies ──────────────────────────────────────────────────────
2
  # Fully pinned to a pre-validated compatible set.
3
+ # NOTES:
4
+ # - gradio is injected by HF Spaces automatically — do NOT add it here
5
+ # - wandb is excluded: it conflicts with gradio over click versioning
6
+ # (wandb>=0.18 requires click!=8.0.0,>=7.1 but gradio requires click>=8.1)
7
+ # wandb is initialized at runtime by the training script if available
8
 
 
9
  datasets==3.0.2
10
  transformers==4.46.3
11
  accelerate==1.0.1
training/train_grpo.py CHANGED
@@ -49,8 +49,15 @@ if os.environ.get("FORCE_BOOTSTRAP_DEPS") == "1":
49
 
50
  # ── GPU/training imports (skipped in --test-local mode) ───────────────────────
51
  if not args.test_local:
 
 
 
 
 
 
 
 
52
  import torch
53
- import wandb
54
  from datasets import Dataset
55
  from transformers import (
56
  AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TrainerCallback
 
49
 
50
  # ── GPU/training imports (skipped in --test-local mode) ───────────────────────
51
  if not args.test_local:
52
+ # wandb is not in requirements.txt (conflicts with gradio over click versioning)
53
+ # Install it at runtime before importing
54
+ try:
55
+ import wandb
56
+ except ImportError:
57
+ os.system(f"{sys.executable} -m pip install -q 'wandb>=0.18.0'")
58
+ import wandb
59
+
60
  import torch
 
61
  from datasets import Dataset
62
  from transformers import (
63
  AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TrainerCallback