{
"stage": 0,
"timestamp": "2026-02-06T05:09:31.670235",
"config": {
"data_dir": "/home/alexw/data_scenarios/train",
"output_dir": "/home/alexw/checkpoints/lfm2-scenarios",
"epochs_per_stage": 1,
"batch_size": 4,
"grad_accum": 8,
"lr": 0.0002,
"max_seq_length": 8192,
"curriculum_stages": 5,
"max_examples_per_stage": 50000,
"max_context_frames": 200,
"complexity_metric": "difficulty",
"physics_loss_weight": 0.01,
"resume": null,
"wandb_project": "physics-llm",
"wandb_offline": true,
"lora_r": 32,
"lora_alpha": 64,
"model": "LiquidAI/LFM2-350M",
"timestamp": "2026-02-05T15:22:28.882113"
},
"metrics": {
"train_runtime": 49432.3609,
"train_samples_per_second": 1.011,
"train_steps_per_second": 0.032,
"total_flos": 5.73779584645996e+17,
"train_loss": 0.5622381947395974,
"epoch": 1.0,
"step": 1563
}
}