han1823123123 committed on
Commit 0dc1f49 · verified · 1 Parent(s): d1b08f0

Upload training_logs/phase57_partB_h8_bare_multidistance_t8__seed2.json with huggingface_hub

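The commit message notes the file was pushed with `huggingface_hub`. A minimal sketch of such an upload is shown below; the target `repo_id` and token handling are placeholders, since the destination repository is not named on this page.

```python
from huggingface_hub import HfApi

api = HfApi()  # assumes a token is already stored, e.g. via `huggingface-cli login`

# repo_id is a placeholder; the destination repository is not shown in this commit view.
api.upload_file(
    path_or_fileobj="training_logs/phase57_partB_h8_bare_multidistance_t8__seed2.json",
    path_in_repo="training_logs/phase57_partB_h8_bare_multidistance_t8__seed2.json",
    repo_id="han1823123123/<repo-name>",
    commit_message=(
        "Upload training_logs/phase57_partB_h8_bare_multidistance_t8__seed2.json "
        "with huggingface_hub"
    ),
)
```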
training_logs/phase57_partB_h8_bare_multidistance_t8__seed2.json ADDED
@@ -0,0 +1,101 @@
+ {
+   "loss": [
+     131105.46875,
+     49938.7734375,
+     45657.55859375,
+     43916.515625,
+     42158.81640625,
+     41354.92578125,
+     40911.6015625,
+     40583.2578125,
+     40218.3125,
+     39827.85546875,
+     39444.05859375,
+     39287.98828125,
+     38879.515625,
+     39048.8359375,
+     39085.46484375,
+     38834.828125,
+     38768.65625,
+     38675.30078125
+   ],
+   "l0": [
+     500.0,
+     494.115234375,
+     494.99267578125,
+     493.963134765625,
+     493.06884765625,
+     493.5966796875,
+     493.33984375,
+     494.2724609375,
+     493.384521484375,
+     493.865234375,
+     490.434814453125,
+     492.0595703125,
+     490.78271484375,
+     492.669189453125,
+     493.01220703125,
+     493.131591796875,
+     492.715087890625,
+     493.031005859375
+   ],
+   "steps_logged": [
+     0,
+     200,
+     400,
+     600,
+     800,
+     1000,
+     1200,
+     1400,
+     1600,
+     1800,
+     2000,
+     2200,
+     2400,
+     2600,
+     2800,
+     3000,
+     3200,
+     3400
+   ],
+   "final_step": 3400,
+   "converged": true,
+   "plateau_last": 0.01641546993155928,
+   "elapsed_s": 5500.773526668549,
+   "shifts": [
+     1,
+     2,
+     4
+   ],
+   "matryoshka_h_size": 3686,
+   "alpha": null,
+   "row": 35,
+   "arch_id": "phase57_partB_h8_bare_multidistance_t8",
+   "arch": "phase57_partB_h8_bare_multidistance_t8",
+   "group": 4,
+   "src_class": "TXCBareMultiDistanceContrastiveAntidead",
+   "src_module": "src.architectures.txc_bare_multidistance_contrastive_antidead",
+   "T": 8,
+   "T_max": null,
+   "t_sample": null,
+   "n_layers": null,
+   "k_win": 500,
+   "k_pos": 62,
+   "gamma": null,
+   "n_scales": null,
+   "seed": 2,
+   "d_in": 2304,
+   "d_sae": 18432,
+   "subject_model": "google/gemma-2-2b",
+   "anchor_layer": 12,
+   "mlc_layers": [
+     10,
+     11,
+     12,
+     13,
+     14
+   ],
+   "phase": "phase7_unification",
+   "run_id": "phase57_partB_h8_bare_multidistance_t8__seed2"
+ }
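The uploaded log itself is plain JSON: `loss`, `l0`, and `steps_logged` are parallel arrays with one entry every 200 training steps, and the remaining keys record the run configuration (architecture id, seed, subject model, layers, and so on). A minimal sketch for inspecting it, assuming the file has been downloaded locally to the same relative path:

```python
import json

import matplotlib.pyplot as plt

path = "training_logs/phase57_partB_h8_bare_multidistance_t8__seed2.json"
with open(path) as f:
    log = json.load(f)

# "loss", "l0", and "steps_logged" are parallel arrays, one entry per 200 steps.
fig, (ax_loss, ax_l0) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(log["steps_logged"], log["loss"])
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")
ax_l0.plot(log["steps_logged"], log["l0"])
ax_l0.set_xlabel("step")
ax_l0.set_ylabel("l0")
fig.suptitle(log["run_id"])
fig.tight_layout()
plt.show()
```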