Upload training_logs/it_phase5b_subseq_h8__seed42.json with huggingface_hub
training_logs/it_phase5b_subseq_h8__seed42.json
ADDED
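For context, a minimal sketch of the kind of call that produces a commit like this one, assuming the file already exists locally. The repo_id and repo_type below are placeholders, since the target repository is not shown on this page; huggingface_hub's HfApi.upload_file is the documented entry point for single-file uploads.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="training_logs/it_phase5b_subseq_h8__seed42.json",
    path_in_repo="training_logs/it_phase5b_subseq_h8__seed42.json",
    repo_id="your-username/your-repo",  # placeholder: actual repo id not shown here
    repo_type="model",                  # assumption; could also be "dataset"
    commit_message="Upload training_logs/it_phase5b_subseq_h8__seed42.json with huggingface_hub",
)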
@@ -0,0 +1,95 @@
+{
+  "loss": [
+    176646.40625,
+    83437.6640625,
+    78705.734375,
+    76847.0390625,
+    76004.984375,
+    75623.4609375,
+    74648.8046875,
+    74630.4609375,
+    74368.6875,
+    74384.96875,
+    73978.6484375,
+    73781.1484375,
+    73685.5234375,
+    73644.7578125,
+    73773.625,
+    73894.203125
+  ],
+  "l0": [
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0,
+    500.0
+  ],
+  "steps_logged": [
+    0,
+    200,
+    400,
+    600,
+    800,
+    1000,
+    1200,
+    1400,
+    1600,
+    1800,
+    2000,
+    2200,
+    2400,
+    2600,
+    2800,
+    3000
+  ],
+  "final_step": 3000,
+  "converged": true,
+  "plateau_last": 0.008688741850918075,
+  "elapsed_s": 13961.203199386597,
+  "T_max": 10,
+  "t_sample": 5,
+  "shifts": [
+    1,
+    2,
+    5
+  ],
+  "matryoshka_h_size": 3686,
+  "row": 13,
+  "arch_id": "phase5b_subseq_h8",
+  "arch": "phase5b_subseq_h8",
+  "group": 2,
+  "src_class": "SubseqH8",
+  "src_module": "src.architectures.phase5b_subseq_sampling_txcdr",
+  "T": null,
+  "n_layers": null,
+  "k_win": 500,
+  "k_pos": 100,
+  "alpha": null,
+  "gamma": null,
+  "n_scales": null,
+  "seed": 42,
+  "d_in": 2304,
+  "d_sae": 18432,
+  "subject_model": "google/gemma-2-2b-it",
+  "anchor_layer": 13,
+  "mlc_layers": [
+    11,
+    12,
+    13,
+    14,
+    15
+  ],
+  "phase": "phase7_unification",
+  "run_id": "it_phase5b_subseq_h8__seed42"
+}
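Once downloaded, the log is plain JSON and can be inspected directly. A short sketch, assuming the file has been fetched to the same relative path shown above:

import json

# Load the training log and print the logged loss trajectory.
with open("training_logs/it_phase5b_subseq_h8__seed42.json") as f:
    log = json.load(f)

for step, loss in zip(log["steps_logged"], log["loss"]):
    print(f"step {step:>5}: loss {loss:,.1f}")

print("converged:", log["converged"], "| final step:", log["final_step"])
print("mean L0:", sum(log["l0"]) / len(log["l0"]))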