#!/usr/bin/env bash
# Sequential continual-pretraining driver: trains one checkpoint per monthly
# arXiv snapshot, each month initialized from the previous month's checkpoint.
set -eu

# Monthly snapshots (YYMM), trained in chronological order.
MONTHS=(2407 2408 2409 2410 2411 2412 2501 2502 2503 2504 2505 2506)

# Per-month token budgets: MAX_TOKENS[i] pairs with MONTHS[i], so both arrays
# must have the same length. Budgets for other models / epoch counts are kept
# below, commented out — comment the active array and uncomment one to switch.

# tinyllama 1b, 3 epochs
#MAX_TOKENS=(
#  $((211812352*3))
#  $((177209344*3))
#  $((240123904*3))
#  $((258998272*3))
#  $((250609664*3))
#  $((304087040*3))
#  $((266338304*3))
#  $((330301440*3))
#  $((394264576*3))
#  $((375390208*3))
#  $((470810624*3))
#  $((508559360*3))
#)

# tinyllama 1b, 1 epoch (active)
MAX_TOKENS=(
  211812352
  177209344
  240123904
  258998272
  250609664
  304087040
  266338304
  330301440
  394264576
  375390208
  470810624
  508559360
)
# Qwen2-7B, 1 epoch
# MAX_TOKENS=(
#   200278016
#   167772160
#   226492416
#   244318208
#   236978176
#   286261248
#   251658240
#   311427072
#   371195904
#   353370112
#   442499072
#   478150656
#)
# Qwen2-7B, 3 epochs
# MAX_TOKENS=(
#   $((200278016*3))
#   $((167772160*3))
#   $((226492416*3))
#   $((244318208*3))
#   $((236978176*3))
#   $((286261248*3))
#   $((251658240*3))
#   $((311427072*3))
#   $((371195904*3))
#   $((353370112*3))
#   $((442499072*3))
#   $((478150656*3))
#)
# openllama 3b, 1 epoch
#MAX_TOKENS=(
#  208666624
#  175112192
#  236978176
#  255852544
#  247463936
#  298844160
#  262144000
#  325058560
#  387973120
#  369098752
#  462422016
#  500170752
#)
# ---- Paths / environment for the active run (tinyllama_lr_plus) ----
# Alternatives for other experiments are kept as full assignments so that
# switching runs is a comment/uncomment away.
OUT_ROOT="./out/pretrain/tinyllama_lr_plus"
#OUT_ROOT="./out/pretrain/qwen2_3_epoch"
#OUT_ROOT="./out/pretrain/tinyllama_3_epoch"
#OUT_ROOT="./out/pretrain/qwen2"
#OUT_ROOT="./out/pretrain/openllama"

# Per-month tee'd stdout/stderr logs; derived from OUT_ROOT so the two paths
# can never drift apart when switching experiments.
LOG_ROOT="${OUT_ROOT}/teelogs"

# Root of the tokenized data; per-month shards live in ${DATA_ROOT}/<month>/.
DATA_ROOT="litgpt/data/arxiv_tinyllama_tokenized"
#DATA_ROOT="litgpt/data/arxiv_qwen2_tokenized"
#DATA_ROOT="litgpt/data/arxiv_openllama_tokenized"

# Base checkpoint used to initialize the very first month.
CKPT_0="./checkpoints/TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T/"
#CKPT_0="./checkpoints/Qwen/Qwen2-7B/"
#CKPT_0="./checkpoints/openlm-research/open_llama_3b/"

CONFIG_FILE="config_hub/pretrain/tinyllama_cl.yaml"
#CONFIG_FILE="config_hub/pretrain/qwen2_7b_cl.yaml"
#CONFIG_FILE="config_hub/pretrain/openllama_cl.yaml"

export CUDA_VISIBLE_DEVICES="4,5,6,7"
mkdir -p "$LOG_ROOT"

# Index into MONTHS to start from; bump this to resume a partially-run chain
# (month START_IDX will then initialize from month START_IDX-1's checkpoint).
START_IDX=0
# Train each month in order. Month i initializes from month i-1's "final"
# checkpoint; month START_IDX==0 starts from the base checkpoint CKPT_0.
for ((i = START_IDX; i < ${#MONTHS[@]}; i++)); do
  MONTH=${MONTHS[i]}
  TOKENS=${MAX_TOKENS[i]}

  if ((i == 0)); then
    INIT_CKPT="$CKPT_0"
  else
    PREV_MONTH=${MONTHS[i-1]}
    INIT_CKPT="${OUT_ROOT}/${PREV_MONTH}/final"
  fi

  OUT_DIR="${OUT_ROOT}/${MONTH}"
  LOG_FILE="${LOG_ROOT}/${MONTH}.txt"
  DATA_DIR="${DATA_ROOT}/${MONTH}/"
  mkdir -p "$OUT_DIR"

  echo "===== Start Training $MONTH ====="
  echo "init_ckpt: $INIT_CKPT"
  echo "out_dir: $OUT_DIR"
  echo "max_tokens: $TOKENS"

  # All arguments quoted so paths/values survive spaces and globbing.
  litgpt pretrain \
    --config "$CONFIG_FILE" \
    --train.max_tokens "$TOKENS" \
    --out_dir "$OUT_DIR" \
    --initial_checkpoint_dir "$INIT_CKPT" \
    --data_dir "$DATA_DIR" \
    2>&1 | tee "$LOG_FILE"
  # `tee` masks litgpt's exit code (the pipeline reports tee's status), so a
  # crashed month would silently chain a missing checkpoint into the next one.
  # Read litgpt's real status from PIPESTATUS and abort the chain on failure.
  status=${PIPESTATUS[0]}
  if [ "$status" -ne 0 ]; then
    echo "===== $MONTH FAILED (exit $status), see $LOG_FILE =====" >&2
    exit "$status"
  fi

  echo "===== $MONTH Ended ====="
  sleep 60
done