shank committed on
Commit Β·
2005cd2
1
Parent(s): d0d5f60
fix: remove wandb - click conflict with gradio causes resolution-too-deep
Browse files- requirements.txt +5 -3
- training/train_grpo.py +8 -1
requirements.txt
CHANGED
|
@@ -1,9 +1,11 @@
|
|
| 1 |
# ── Training dependencies ──────────────────────────────────────────────────────
|
| 2 |
# Fully pinned to a pre-validated compatible set.
|
| 3 |
-
#
|
| 4 |
-
# gradio is
|
|
|
|
|
|
|
|
|
|
| 5 |
|
| 6 |
-
wandb==0.18.7
|
| 7 |
datasets==3.0.2
|
| 8 |
transformers==4.46.3
|
| 9 |
accelerate==1.0.1
|
|
|
|
| 1 |
# ── Training dependencies ──────────────────────────────────────────────────────
|
| 2 |
# Fully pinned to a pre-validated compatible set.
|
| 3 |
+
# NOTES:
|
| 4 |
+
# - gradio is injected by HF Spaces automatically — do NOT add it here
|
| 5 |
+
# - wandb is excluded: it conflicts with gradio over click versioning
|
| 6 |
+
# (wandb>=0.18 requires click!=8.0.0,>=7.1 but gradio requires click>=8.1)
|
| 7 |
+
# wandb is initialized at runtime by the training script if available
|
| 8 |
|
|
|
|
| 9 |
datasets==3.0.2
|
| 10 |
transformers==4.46.3
|
| 11 |
accelerate==1.0.1
|
training/train_grpo.py
CHANGED
|
@@ -49,8 +49,15 @@ if os.environ.get("FORCE_BOOTSTRAP_DEPS") == "1":
|
|
| 49 |
|
| 50 |
# ── GPU/training imports (skipped in --test-local mode) ───────────────────────
|
| 51 |
if not args.test_local:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 52 |
import torch
|
| 53 |
-
import wandb
|
| 54 |
from datasets import Dataset
|
| 55 |
from transformers import (
|
| 56 |
AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TrainerCallback
|
|
|
|
| 49 |
|
| 50 |
# ── GPU/training imports (skipped in --test-local mode) ───────────────────────
|
| 51 |
if not args.test_local:
|
| 52 |
+
# wandb is not in requirements.txt (conflicts with gradio over click versioning)
|
| 53 |
+
# Install it at runtime before importing
|
| 54 |
+
try:
|
| 55 |
+
import wandb
|
| 56 |
+
except ImportError:
|
| 57 |
+
os.system(f"{sys.executable} -m pip install -q 'wandb>=0.18.0'")
|
| 58 |
+
import wandb
|
| 59 |
+
|
| 60 |
import torch
|
|
|
|
| 61 |
from datasets import Dataset
|
| 62 |
from transformers import (
|
| 63 |
AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TrainerCallback
|