{
  "stage": 3,
  "timestamp": "2026-02-07T02:11:38.871641",
  "config": {
    "data_dir": "/home/alexw/data_scenarios/train",
    "output_dir": "/home/alexw/checkpoints/lfm2-scenarios",
    "epochs_per_stage": 1,
    "batch_size": 4,
    "grad_accum": 8,
    "lr": 0.0002,
    "max_seq_length": 8192,
    "curriculum_stages": 5,
    "max_examples_per_stage": 50000,
    "max_context_frames": 200,
    "complexity_metric": "difficulty",
    "physics_loss_weight": 0.01,
    "resume": null,
    "wandb_project": "physics-llm",
    "wandb_offline": true,
    "lora_r": 32,
    "lora_alpha": 64,
    "model": "LiquidAI/LFM2-350M",
    "timestamp": "2026-02-05T15:22:28.882113"
  },
  "metrics": {
    "train_runtime": 24868.7124,
    "train_samples_per_second": 2.011,
    "train_steps_per_second": 0.063,
    "total_flos": 6.976691242146017e+17,
    "train_loss": 0.6747274479649422,
    "epoch": 1.0,
    "step": 1563
  }
}