lfm2-scenarios/stage2/metadata.json
{
  "stage": 2,
  "timestamp": "2026-02-06T19:13:54.849683",
  "config": {
    "data_dir": "/home/alexw/data_scenarios/train",
    "output_dir": "/home/alexw/checkpoints/lfm2-scenarios",
    "epochs_per_stage": 1,
    "batch_size": 4,
    "grad_accum": 8,
    "lr": 0.0002,
    "max_seq_length": 8192,
    "curriculum_stages": 5,
    "max_examples_per_stage": 50000,
    "max_context_frames": 200,
    "complexity_metric": "difficulty",
    "physics_loss_weight": 0.01,
    "resume": null,
    "wandb_project": "physics-llm",
    "wandb_offline": true,
    "lora_r": 32,
    "lora_alpha": 64,
    "model": "LiquidAI/LFM2-350M",
    "timestamp": "2026-02-05T15:22:28.882113"
  },
  "metrics": {
    "train_runtime": 25665.868,
    "train_samples_per_second": 1.948,
    "train_steps_per_second": 0.061,
    "total_flos": 6.962195086080061e+17,
    "train_loss": 0.6443475265954446,
    "epoch": 1.0,
    "step": 1563
  }
}
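
For reference, a minimal Python sketch of how this metadata might be consumed downstream; the file location is an assumption, and all arithmetic uses only values recorded above. It shows that batch_size x grad_accum gives an effective batch of 32 per optimizer step, which matches the logged samples-per-second to steps-per-second ratio, and that step x effective batch roughly covers max_examples_per_stage for the single epoch.

    # Sketch only; not part of the checkpoint. The path below is hypothetical.
    import json

    with open("stage2/metadata.json") as f:
        meta = json.load(f)

    cfg, metrics = meta["config"], meta["metrics"]

    # Effective batch per optimizer step: per-device batch x gradient accumulation.
    effective_batch = cfg["batch_size"] * cfg["grad_accum"]
    print(effective_batch)  # 4 * 8 = 32

    # Cross-check against logged throughput: samples/s divided by steps/s
    # should approximate the effective batch size.
    print(metrics["train_samples_per_second"] / metrics["train_steps_per_second"])  # ~31.9

    # Steps x effective batch should roughly equal max_examples_per_stage
    # for one epoch of this curriculum stage.
    print(metrics["step"] * effective_batch)  # 50016 ~= 50000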