Upload training_logs/phase5b_subseq_h8__seed2.json with huggingface_hub
training_logs/phase5b_subseq_h8__seed2.json
ADDED
@@ -0,0 +1,95 @@
{
  "loss": [
    122365.8203125,
    50755.3203125,
    48106.0390625,
    46928.90625,
    46147.08984375,
    46012.6640625,
    45494.9765625,
    45529.53125,
    44784.171875,
    45222.734375,
    44835.0703125,
    44880.765625,
    45198.2109375,
    44935.16015625,
    44609.23046875,
    44905.33203125
  ],
  "l0": [
    500.0,
    495.7255859375,
    496.443115234375,
    496.525390625,
    495.62646484375,
    496.406005859375,
    495.75634765625,
    496.06640625,
    494.609375,
    495.182373046875,
    495.240234375,
    495.90625,
    495.67578125,
    495.1162109375,
    494.698486328125,
    495.119384765625
  ],
  "steps_logged": [
    0,
    200,
    400,
    600,
    800,
    1000,
    1200,
    1400,
    1600,
    1800,
    2000,
    2200,
    2400,
    2600,
    2800,
    3000
  ],
  "final_step": 3000,
  "converged": true,
  "plateau_last": 0.005922902461389111,
  "elapsed_s": 5840.123419761658,
  "T_max": 10,
  "t_sample": 5,
  "shifts": [
    1,
    2,
    5
  ],
  "matryoshka_h_size": 3686,
  "row": 13,
  "arch_id": "phase5b_subseq_h8",
  "arch": "phase5b_subseq_h8",
  "group": 2,
  "src_class": "SubseqH8",
  "src_module": "src.architectures.phase5b_subseq_sampling_txcdr",
  "T": null,
  "n_layers": null,
  "k_win": 500,
  "k_pos": 100,
  "alpha": null,
  "gamma": null,
  "n_scales": null,
  "seed": 2,
  "d_in": 2304,
  "d_sae": 18432,
  "subject_model": "google/gemma-2-2b",
  "anchor_layer": 12,
  "mlc_layers": [
    10,
    11,
    12,
    13,
    14
  ],
  "phase": "phase7_unification",
  "run_id": "phase5b_subseq_h8__seed2"
}
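For reference, the commit message above is the default one that huggingface_hub generates for upload_file. Below is a minimal sketch of how a log file like this can be pushed and read back; the repo_id is a placeholder, since the target repository is not named in this diff, and authentication (e.g. via huggingface-cli login) is assumed.

import json
from huggingface_hub import HfApi

api = HfApi()
# Uploading the log; upload_file's default commit message is
# "Upload {path_in_repo} with huggingface_hub", matching the title above.
api.upload_file(
    path_or_fileobj="training_logs/phase5b_subseq_h8__seed2.json",
    path_in_repo="training_logs/phase5b_subseq_h8__seed2.json",
    repo_id="your-username/your-repo",  # placeholder, not shown in this diff
)

# Reading the log back: each entry in "steps_logged" pairs with the
# corresponding entries in "loss" and "l0".
with open("training_logs/phase5b_subseq_h8__seed2.json") as f:
    log = json.load(f)
for step, loss in zip(log["steps_logged"], log["loss"]):
    print(f"step {step}: loss {loss:.2f}")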