han1823123123 committed
Commit 47587f7 (verified) · 1 parent: c3262d6

Upload training_logs/txcdr_t32__seed42.json with huggingface_hub
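The commit message says the file was uploaded with huggingface_hub. A minimal sketch of such an upload using the library's public `HfApi.upload_file` call; the `repo_id` below is a placeholder, since the destination repository is not shown on this page:

```python
# Hedged sketch of an upload like the one named in the commit message.
# Assumes a write token has already been stored via `huggingface-cli login`.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="training_logs/txcdr_t32__seed42.json",  # local file
    path_in_repo="training_logs/txcdr_t32__seed42.json",     # path inside the repo
    repo_id="user/repo",  # placeholder: the actual repo id is not shown here
)
```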

training_logs/txcdr_t32__seed42.json ADDED
@@ -0,0 +1,99 @@
+ {
+   "loss": [
+     140012.03125,
+     9209.7822265625,
+     8516.005859375,
+     8033.82177734375,
+     7904.2890625,
+     7703.19140625,
+     7547.212890625,
+     7433.3984375,
+     7332.8681640625,
+     7271.283203125,
+     7191.47412109375,
+     7137.9775390625,
+     7093.7158203125,
+     7055.6298828125,
+     7058.69091796875,
+     7020.099609375,
+     6977.46435546875,
+     6987.169921875,
+     6996.357421875
+   ],
+   "l0": [
+     500.0,
+     496.380126953125,
+     495.966796875,
+     496.725830078125,
+     496.65625,
+     497.186279296875,
+     497.12646484375,
+     497.02294921875,
+     497.03466796875,
+     497.32080078125,
+     496.580810546875,
+     496.334716796875,
+     496.052490234375,
+     495.736572265625,
+     496.844482421875,
+     496.494384765625,
+     495.609619140625,
+     497.097412109375,
+     496.830078125
+   ],
+   "steps_logged": [
+     0,
+     200,
+     400,
+     600,
+     800,
+     1000,
+     1200,
+     1400,
+     1600,
+     1800,
+     2000,
+     2200,
+     2400,
+     2600,
+     2800,
+     3000,
+     3200,
+     3400,
+     3600
+   ],
+   "final_step": 3600,
+   "converged": true,
+   "plateau_last": 0.01986844025496277,
+   "elapsed_s": 4145.3756313323975,
+   "row": 29,
+   "arch_id": "txcdr_t32",
+   "arch": "txcdr_t32",
+   "group": 3,
+   "src_class": "TemporalCrosscoder",
+   "src_module": "src.architectures.crosscoder",
+   "T": 32,
+   "T_max": null,
+   "t_sample": null,
+   "n_layers": null,
+   "k_win": 500,
+   "k_pos": 16,
+   "shifts": null,
+   "alpha": null,
+   "gamma": null,
+   "n_scales": null,
+   "seed": 42,
+   "d_in": 2304,
+   "d_sae": 18432,
+   "subject_model": "google/gemma-2-2b",
+   "anchor_layer": 12,
+   "mlc_layers": [
+     10,
+     11,
+     12,
+     13,
+     14
+   ],
+   "phase": "phase7_unification",
+   "run_id": "txcdr_t32__seed42"
+ }
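The log is plain JSON, so it can be inspected directly once downloaded. A minimal sketch, assuming the file sits at the local path below, that plots the logged loss against `steps_logged`; the step-0 loss (~1.4e5) is orders of magnitude above the rest of the curve, so a log scale is used:

```python
# Hedged sketch for inspecting the training log above.
# Assumes the JSON file has been downloaded to this local path.
import json

import matplotlib.pyplot as plt

with open("training_logs/txcdr_t32__seed42.json") as f:
    log = json.load(f)

plt.plot(log["steps_logged"], log["loss"], marker="o")
plt.yscale("log")  # the initial loss dwarfs the converged values
plt.xlabel("step")
plt.ylabel("loss")
plt.title(log["run_id"])
plt.show()
```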