han1823123123 committed on
Commit e9cd08d · verified · 1 Parent(s): 329acdb

Upload training_logs/phase57_partB_h8_bare_multidistance_t6__seed42.json with huggingface_hub

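The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of such an upload, assuming an HfApi-based flow; the repo_id and repo_type below are placeholders, not taken from this commit:

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="training_logs/phase57_partB_h8_bare_multidistance_t6__seed42.json",
    path_in_repo="training_logs/phase57_partB_h8_bare_multidistance_t6__seed42.json",
    repo_id="han1823123123/example-repo",  # hypothetical; the actual repo is not shown here
    repo_type="model",  # assumption; training logs could equally live in a dataset repo
    commit_message=(
        "Upload training_logs/phase57_partB_h8_bare_multidistance_t6__seed42.json "
        "with huggingface_hub"
    ),
)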
training_logs/phase57_partB_h8_bare_multidistance_t6__seed42.json ADDED
@@ -0,0 +1,103 @@
+ {
+   "loss": [
+     98607.6171875,
+     35463.8125,
+     32273.236328125,
+     30310.177734375,
+     29334.013671875,
+     28720.517578125,
+     28474.7109375,
+     28120.681640625,
+     27622.5,
+     27440.517578125,
+     27096.298828125,
+     27021.529296875,
+     26942.298828125,
+     26724.263671875,
+     26878.90234375,
+     26633.091796875,
+     26506.583984375,
+     26274.544921875,
+     26507.201171875
+   ],
+   "l0": [
+     500.0,
+     496.08984375,
+     494.271728515625,
+     494.17724609375,
+     493.876953125,
+     493.8984375,
+     495.0078125,
+     494.23828125,
+     493.20703125,
+     493.994140625,
+     492.217041015625,
+     494.3681640625,
+     492.01123046875,
+     491.59765625,
+     493.94384765625,
+     493.038818359375,
+     491.141845703125,
+     492.671875,
+     491.564697265625
+   ],
+   "steps_logged": [
+     0,
+     200,
+     400,
+     600,
+     800,
+     1000,
+     1200,
+     1400,
+     1600,
+     1800,
+     2000,
+     2200,
+     2400,
+     2600,
+     2800,
+     3000,
+     3200,
+     3400,
+     3600
+   ],
+   "final_step": 3600,
+   "converged": true,
+   "plateau_last": 0.017930010207387017,
+   "elapsed_s": 3221.432736635208,
+   "shifts": [
+     1,
+     3
+   ],
+   "matryoshka_h_size": 3686,
+   "alpha": null,
+   "row": 33,
+   "arch_id": "phase57_partB_h8_bare_multidistance_t6",
+   "arch": "phase57_partB_h8_bare_multidistance_t6",
+   "group": 4,
+   "src_class": "TXCBareMultiDistanceContrastiveAntidead",
+   "src_module": "src.architectures.txc_bare_multidistance_contrastive_antidead",
+   "T": 6,
+   "T_max": null,
+   "t_sample": null,
+   "n_layers": null,
+   "k_win": 500,
+   "k_pos": 83,
+   "gamma": null,
+   "n_scales": null,
+   "seed": 42,
+   "d_in": 2304,
+   "d_sae": 18432,
+   "subject_model": "google/gemma-2-2b",
+   "anchor_layer": 12,
+   "mlc_layers": [
+     10,
+     11,
+     12,
+     13,
+     14
+   ],
+   "phase": "phase7_unification",
+   "run_id": "phase57_partB_h8_bare_multidistance_t6__seed42"
+ }
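For reference, a minimal sketch of consuming this log once the file is available locally (e.g. after fetching it with huggingface_hub.hf_hub_download); the keys used below are exactly those in the JSON above:

import json

with open("training_logs/phase57_partB_h8_bare_multidistance_t6__seed42.json") as f:
    log = json.load(f)

# "loss" and "l0" are sampled every 200 steps and align 1:1 with "steps_logged".
assert len(log["loss"]) == len(log["l0"]) == len(log["steps_logged"])

for step, loss, l0 in zip(log["steps_logged"], log["loss"], log["l0"]):
    print(f"step {step:>4}: loss={loss:10.1f}  l0={l0:6.1f}")

print("converged:", log["converged"],
      "| final step:", log["final_step"],
      "| wall time (s):", round(log["elapsed_s"]))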