han1823123123 committed on
Commit a8a1686 · verified · 1 Parent(s): 17aba74

Upload training_logs/phase57_partB_h8_bare_multidistance_t9__seed1.json with huggingface_hub
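The commit message indicates the log file was pushed programmatically with huggingface_hub. A minimal sketch of such an upload, assuming the standard HfApi.upload_file call (the repo_id below is a placeholder, since the destination repository is not named in this commit header):

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    # Local log file produced by the training run.
    path_or_fileobj="training_logs/phase57_partB_h8_bare_multidistance_t9__seed1.json",
    # Store it under the same relative path in the repo.
    path_in_repo="training_logs/phase57_partB_h8_bare_multidistance_t9__seed1.json",
    # Placeholder: replace with the actual target repository.
    repo_id="<user-or-org>/<repo>",
    commit_message="Upload training_logs/phase57_partB_h8_bare_multidistance_t9__seed1.json with huggingface_hub",
)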

training_logs/phase57_partB_h8_bare_multidistance_t9__seed1.json ADDED
@@ -0,0 +1,98 @@
+ {
+   "loss": [
+     127962.5546875,
+     51273.875,
+     47019.99609375,
+     45007.99609375,
+     43787.14453125,
+     42891.7578125,
+     42378.12890625,
+     41683.0,
+     41553.15234375,
+     40835.6328125,
+     40923.5625,
+     40681.88671875,
+     40999.2734375,
+     40610.6953125,
+     40525.515625,
+     40043.76953125,
+     40232.875
+   ],
+   "l0": [
+     500.0,
+     495.394287109375,
+     494.80126953125,
+     494.4013671875,
+     492.4873046875,
+     493.312744140625,
+     494.326171875,
+     492.109375,
+     494.06298828125,
+     491.5498046875,
+     492.344970703125,
+     491.951904296875,
+     494.55859375,
+     492.784912109375,
+     493.404052734375,
+     492.083740234375,
+     492.326171875
+   ],
+   "steps_logged": [
+     0,
+     200,
+     400,
+     600,
+     800,
+     1000,
+     1200,
+     1400,
+     1600,
+     1800,
+     2000,
+     2200,
+     2400,
+     2600,
+     2800,
+     3000,
+     3200
+   ],
+   "final_step": 3200,
+   "converged": true,
+   "plateau_last": 0.015874899712026075,
+   "elapsed_s": 5466.467396020889,
+   "shifts": [
+     1,
+     2,
+     4
+   ],
+   "matryoshka_h_size": 3686,
+   "alpha": null,
+   "row": 36,
+   "arch_id": "phase57_partB_h8_bare_multidistance_t9",
+   "arch": "phase57_partB_h8_bare_multidistance_t9",
+   "group": 4,
+   "src_class": "TXCBareMultiDistanceContrastiveAntidead",
+   "src_module": "src.architectures.txc_bare_multidistance_contrastive_antidead",
+   "T": 9,
+   "T_max": null,
+   "t_sample": null,
+   "n_layers": null,
+   "k_win": 500,
+   "k_pos": 56,
+   "gamma": null,
+   "n_scales": null,
+   "seed": 1,
+   "d_in": 2304,
+   "d_sae": 18432,
+   "subject_model": "google/gemma-2-2b",
+   "anchor_layer": 12,
+   "mlc_layers": [
+     10,
+     11,
+     12,
+     13,
+     14
+   ],
+   "phase": "phase7_unification",
+   "run_id": "phase57_partB_h8_bare_multidistance_t9__seed1"
+ }
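For reference, a minimal sketch of reading the uploaded log back and pairing its parallel arrays (loss and l0 are recorded at the steps listed in steps_logged); the path is the one added in this commit, assumed to be available locally:

import json

# Load the training log added in this commit.
with open("training_logs/phase57_partB_h8_bare_multidistance_t9__seed1.json") as f:
    log = json.load(f)

# Walk the parallel arrays: one loss and one l0 value per logged step.
for step, loss, l0 in zip(log["steps_logged"], log["loss"], log["l0"]):
    print(f"step {step:>4}: loss={loss:.1f}  l0={l0:.1f}")

print("converged:", log["converged"], "at final_step", log["final_step"])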