#!/usr/bin/env bash
#
# Launch Zenith fine-tuning with a conservative ~2-hour LoRA configuration.
# All hyperparameters are passed to train.py via exported environment
# variables; train.py is expected to read them (names below).
#
# Usage: ./finetune_zenith.sh   (run from the repo root; needs train.py,
#                                data/zenith_combined.jsonl)
#
# -e: abort on error; -u: error on unset vars; pipefail: a failure in
# `python3 train.py` is not masked by `tee`'s exit status.
set -euo pipefail

echo "🚀 Starting Zenith fine-tuning (safe 2-hour config)..."

# --- Model / data paths ---
export BASE_MODEL="DeepSeek-Coder-V2-Lite-Instruct"
export OUTPUT_DIR="outputs/zenith-lora"
export DATA_PATH="data/zenith_combined.jsonl"

# --- Training schedule ---
export STEPS=300
export BATCH=2
export GRAD_ACC=2             # effective batch size = BATCH * GRAD_ACC = 4
export LR=5e-5
export WARMUP_RATIO=0.1
export WEIGHT_DECAY=0.01
export MAX_GRAD_NORM=1.0

# --- LoRA adapter config ---
export LORA_R=8
export LORA_ALPHA=16
export LORA_DROPOUT=0.1

# --- Logging / checkpointing ---
export LOG_STEPS=10
export SAVE_STEPS=50
export SAVE_LIMIT=2
export EVAL_STEPS=50
export EARLY_STOP_PATIENCE=3

# --- Misc ---
export SEED=42
export MAX_SEQ_LEN=2048
export USE_4BIT=1             # presumably enables 4-bit quantization in train.py — confirm
export REPORT_TO="none"       # disable external experiment tracking (wandb/tensorboard)

# tee fails if the log directory is missing; create output dirs up front.
mkdir -p -- "$OUTPUT_DIR" outputs

# Capture stderr too (training frameworks typically log progress there).
# With pipefail set above, a train.py failure aborts the script here, so
# the success message below only prints on a clean run.
python3 train.py 2>&1 | tee outputs/train_log.txt

echo "✅ Training finished! Logs saved to outputs/train_log.txt"