han1823123123 committed
Commit 1f5a07a · verified · 1 Parent(s): 7795058

Upload training_logs/it_tfa_big__seed42.json with huggingface_hub
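
The commit message says the file was pushed with huggingface_hub. The exact call used is not shown in the commit; as a minimal sketch, an upload like this is typically done with HfApi.upload_file. The repo_id below is a placeholder, not taken from this page.

# Illustrative sketch only; the actual upload command is not part of this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="training_logs/it_tfa_big__seed42.json",  # local file to upload
    path_in_repo="training_logs/it_tfa_big__seed42.json",     # destination path in the repo
    repo_id="<user>/<repo>",                                  # placeholder, not from this page
    commit_message="Upload training_logs/it_tfa_big__seed42.json with huggingface_hub",
)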

Files changed (1)
  1. training_logs/it_tfa_big__seed42.json +125 -0
training_logs/it_tfa_big__seed42.json ADDED
@@ -0,0 +1,125 @@
+ {
+ "loss": [
+ 2721.08935546875,
+ 1053.83203125,
+ 1007.916259765625,
+ 996.9671020507812,
+ 979.9942626953125,
+ 965.4766845703125,
+ 951.537353515625,
+ 953.679931640625,
+ 932.845703125,
+ 943.7442016601562,
+ 931.8909912109375,
+ 914.9315185546875,
+ 902.3789672851562,
+ 907.9058227539062,
+ 891.9739990234375,
+ 887.3131103515625,
+ 876.37939453125,
+ 865.1234130859375,
+ 873.8612060546875,
+ 883.999267578125,
+ 874.157470703125,
+ 844.5355224609375,
+ 839.478515625,
+ 862.7964477539062,
+ 856.760498046875,
+ 863.526123046875,
+ 835.718017578125
+ ],
+ "l0": [
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0,
+ 500.0
+ ],
+ "steps_logged": [
+ 0,
+ 200,
+ 400,
+ 600,
+ 800,
+ 1000,
+ 1200,
+ 1400,
+ 1600,
+ 1800,
+ 2000,
+ 2200,
+ 2400,
+ 2600,
+ 2800,
+ 3000,
+ 3200,
+ 3400,
+ 3600,
+ 3800,
+ 4000,
+ 4200,
+ 4400,
+ 4600,
+ 4800,
+ 5000,
+ 5200
+ ],
+ "final_step": 5200,
+ "converged": true,
+ "plateau_last": 0.01920854087932094,
+ "elapsed_s": 7397.639723777771,
+ "scaling_factor": 0.22266921934949058,
+ "skipped_steps": 0,
+ "row": 7,
+ "arch_id": "tfa_big",
+ "arch": "tfa_big",
+ "group": 1,
+ "src_class": "TemporalSAE",
+ "src_module": "src.architectures._tfa_module",
+ "T": null,
+ "T_max": null,
+ "t_sample": null,
+ "n_layers": null,
+ "k_win": 500,
+ "k_pos": null,
+ "shifts": null,
+ "alpha": null,
+ "gamma": null,
+ "n_scales": null,
+ "seed": 42,
+ "d_in": 2304,
+ "d_sae": 18432,
+ "subject_model": "google/gemma-2-2b-it",
+ "anchor_layer": 13,
+ "mlc_layers": [
+ 11,
+ 12,
+ 13,
+ 14,
+ 15
+ ],
+ "phase": "phase7_unification",
+ "run_id": "it_tfa_big__seed42"
+ }
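
For reference, "steps_logged", "loss", and "l0" in this log are parallel arrays with one entry per logging step. A minimal illustrative reader (not part of the uploaded repo) could consume the file like this:

# Illustrative reader for the uploaded log; assumes the file has been downloaded locally.
import json

with open("training_logs/it_tfa_big__seed42.json") as f:
    log = json.load(f)

# One entry per logging step across the three parallel arrays.
for step, loss, l0 in zip(log["steps_logged"], log["loss"], log["l0"]):
    print(f"step {step:>5}: loss={loss:.2f}, l0={l0:.1f}")

print("converged:", log["converged"], "| final step:", log["final_step"])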