han1823123123 committed
Commit 3abe35a · verified · 1 Parent(s): 0ca45a4

Upload training_logs/phase57_partB_h8_bare_multidistance_t8__seed42.json with huggingface_hub
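
For reference, an upload like this is typically produced with huggingface_hub's upload_file API. A minimal sketch follows, assuming an authenticated environment (HF_TOKEN or `huggingface-cli login`); the repo_id below is a placeholder, since the destination repository is not named on this page.

# Sketch: upload the log with huggingface_hub (repo_id is a placeholder).
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from the local login / HF_TOKEN
api.upload_file(
    path_or_fileobj="training_logs/phase57_partB_h8_bare_multidistance_t8__seed42.json",
    path_in_repo="training_logs/phase57_partB_h8_bare_multidistance_t8__seed42.json",
    repo_id="<namespace>/<repo>",  # placeholder
)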

training_logs/phase57_partB_h8_bare_multidistance_t8__seed42.json ADDED
@@ -0,0 +1,98 @@
+ {
+ "loss": [
+ 125762.0859375,
+ 49783.36328125,
+ 45440.4765625,
+ 43496.02734375,
+ 42341.21875,
+ 41413.65625,
+ 40795.0546875,
+ 40230.0234375,
+ 40052.5546875,
+ 39713.82421875,
+ 39552.44140625,
+ 39013.96484375,
+ 39125.41015625,
+ 39065.23046875,
+ 39086.8671875,
+ 38683.83203125,
+ 38890.515625
+ ],
+ "l0": [
+ 500.0,
+ 495.0986328125,
+ 493.41357421875,
+ 493.93408203125,
+ 493.39501953125,
+ 493.65234375,
+ 493.029296875,
+ 491.93701171875,
+ 492.22021484375,
+ 492.56201171875,
+ 492.852294921875,
+ 491.02734375,
+ 490.918212890625,
+ 491.971435546875,
+ 493.36474609375,
+ 491.755126953125,
+ 492.518798828125
+ ],
+ "steps_logged": [
+ 0,
+ 200,
+ 400,
+ 600,
+ 800,
+ 1000,
+ 1200,
+ 1400,
+ 1600,
+ 1800,
+ 2000,
+ 2200,
+ 2400,
+ 2600,
+ 2800,
+ 3000,
+ 3200
+ ],
+ "final_step": 3200,
+ "converged": true,
+ "plateau_last": 0.01868906443901308,
+ "elapsed_s": 4832.761939764023,
+ "shifts": [
+ 1,
+ 2,
+ 4
+ ],
+ "matryoshka_h_size": 3686,
+ "alpha": null,
+ "row": 35,
+ "arch_id": "phase57_partB_h8_bare_multidistance_t8",
+ "arch": "phase57_partB_h8_bare_multidistance_t8",
+ "group": 4,
+ "src_class": "TXCBareMultiDistanceContrastiveAntidead",
+ "src_module": "src.architectures.txc_bare_multidistance_contrastive_antidead",
+ "T": 8,
+ "T_max": null,
+ "t_sample": null,
+ "n_layers": null,
+ "k_win": 500,
+ "k_pos": 62,
+ "gamma": null,
+ "n_scales": null,
+ "seed": 42,
+ "d_in": 2304,
+ "d_sae": 18432,
+ "subject_model": "google/gemma-2-2b",
+ "anchor_layer": 12,
+ "mlc_layers": [
+ 10,
+ 11,
+ 12,
+ 13,
+ 14
+ ],
+ "phase": "phase7_unification",
+ "run_id": "phase57_partB_h8_bare_multidistance_t8__seed42"
+ }
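
To inspect this run locally, the log can be loaded as ordinary JSON and the curves plotted against the logged steps. A minimal sketch, assuming the file has already been fetched to the path below (for example with huggingface_hub's hf_hub_download); field names are exactly those in the JSON above.

# Sketch: load the training log and plot "loss" and "l0" over "steps_logged".
import json

import matplotlib.pyplot as plt

with open("training_logs/phase57_partB_h8_bare_multidistance_t8__seed42.json") as f:
    log = json.load(f)

steps = log["steps_logged"]
fig, (ax_loss, ax_l0) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, log["loss"])
ax_loss.set(xlabel="step", ylabel="loss", title=log["run_id"])
ax_l0.plot(steps, log["l0"])
ax_l0.set(xlabel="step", ylabel="l0")
fig.tight_layout()
plt.show()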