michaelchenkj committed
Commit 3c3d1c2 · verified · 1 Parent(s): 81e9a93

Upload folder using huggingface_hub

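The commit message indicates the folder was pushed with `huggingface_hub`'s folder-upload API. A minimal sketch of that kind of call, assuming a hypothetical `repo_id` (the target repository is not named in this diff):

```python
# Sketch only: repo_id below is a placeholder, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_folder(
    folder_path="results",                        # local folder to mirror
    path_in_repo="results",                       # destination path inside the repo
    repo_id="michaelchenkj/concept-model-evals",  # hypothetical
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```

`upload_folder` batches every file in the tree into a single commit, which matches the many `ADDED` files recorded below.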
results/l2r90-wd0033-ssl-noNorm/checkpoint-1080.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-1080",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-1080",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.22489270386266094
+       ],
+       "mean": 0.22489270386266094,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24947145877378435
+       ],
+       "mean": 0.24947145877378435,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6223241590214067
+       ],
+       "mean": 0.6223241590214067,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19574119574119575
+       ],
+       "mean": 0.19574119574119575,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2504481179047998
+       ],
+       "mean": 0.2504481179047998,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20430107526881722
+       ],
+       "mean": 0.20430107526881722,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.20904522613065327
+       ],
+       "mean": 0.20904522613065327,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5081610446137106
+       ],
+       "mean": 0.5081610446137106,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5232833464877664
+       ],
+       "mean": 0.5232833464877664,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.346878938891345
+ }
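Every result file in this commit shares one schema: `model_path`, `run`, and `checkpoint` identify the evaluated model; `tasks` holds per-task `accuracies` (a single measurement each here, so `ci` is 0.0) with their `mean`; and a top-level `mean_accuracy` summarizes the run. A small sketch, assuming the `results/<run>/<checkpoint>.json` layout from this commit, that re-derives `mean_accuracy` as the unweighted average of the per-task means (for the file above, the eleven task means average to 0.3469, matching the stored value):

```python
# Sketch: load each result file and re-derive the stored summary statistic.
import json
from pathlib import Path
from statistics import mean

for path in sorted(Path("results").glob("*/checkpoint-*.json")):
    result = json.loads(path.read_text())
    # mean_accuracy is the unweighted average of the per-task means
    recomputed = mean(task["mean"] for task in result["tasks"])
    assert abs(recomputed - result["mean_accuracy"]) < 1e-9
    print(f"{result['run']}/{result['checkpoint']}: {recomputed:.4f}")
```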
results/l2r90-wd0033-ssl-noNorm/checkpoint-1440.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-1440",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-1440",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24989429175475686
+       ],
+       "mean": 0.24989429175475686,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6186544342507645
+       ],
+       "mean": 0.6186544342507645,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19574119574119575
+       ],
+       "mean": 0.19574119574119575,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2504481179047998
+       ],
+       "mean": 0.2504481179047998,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20430107526881722
+       ],
+       "mean": 0.20430107526881722,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.2100502512562814
+       ],
+       "mean": 0.2100502512562814,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5021762785636561
+       ],
+       "mean": 0.5021762785636561,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5082872928176796
+       ],
+       "mean": 0.5082872928176796,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3448458191381204
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-1800.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-1800",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-1800",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24989429175475686
+       ],
+       "mean": 0.24989429175475686,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5935779816513761
+       ],
+       "mean": 0.5935779816513761,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19656019656019655
+       ],
+       "mean": 0.19656019656019655,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.25114519020115517
+       ],
+       "mean": 0.25114519020115517,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20276497695852536
+       ],
+       "mean": 0.20276497695852536,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21072026800670016
+       ],
+       "mean": 0.21072026800670016,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4956474428726877
+       ],
+       "mean": 0.4956474428726877,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5201262825572218
+       ],
+       "mean": 0.5201262825572218,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.34310797304763574
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-2160.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-2160",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-2160",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25031712473572937
+       ],
+       "mean": 0.25031712473572937,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5785932721712538
+       ],
+       "mean": 0.5785932721712538,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19737919737919737
+       ],
+       "mean": 0.19737919737919737,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2545309699263095
+       ],
+       "mean": 0.2545309699263095,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20583717357910905
+       ],
+       "mean": 0.20583717357910905,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.2137353433835846
+       ],
+       "mean": 0.2137353433835846,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.514145810663765
+       ],
+       "mean": 0.514145810663765,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5209155485398579
+       ],
+       "mean": 0.5209155485398579,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3444732284854709
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-2520.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-2520",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-2520",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.22660944206008585
+       ],
+       "mean": 0.22660944206008585,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25031712473572937
+       ],
+       "mean": 0.25031712473572937,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5333333333333333
+       ],
+       "mean": 0.5333333333333333,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19737919737919737
+       ],
+       "mean": 0.19737919737919737,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.25204142601075485
+       ],
+       "mean": 0.25204142601075485,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20276497695852536
+       ],
+       "mean": 0.20276497695852536,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21139028475711893
+       ],
+       "mean": 0.21139028475711893,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.278
+       ],
+       "mean": 0.278,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5081610446137106
+       ],
+       "mean": 0.5081610446137106,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5098658247829518
+       ],
+       "mean": 0.5098658247829518,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.33835115042103703
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-2880.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-2880",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-2880",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2240343347639485
+       ],
+       "mean": 0.2240343347639485,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25116279069767444
+       ],
+       "mean": 0.25116279069767444,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5767584097859327
+       ],
+       "mean": 0.5767584097859327,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.1981981981981982
+       ],
+       "mean": 0.1981981981981982,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2522405895239992
+       ],
+       "mean": 0.2522405895239992,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.2073732718894009
+       ],
+       "mean": 0.2073732718894009,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.2137353433835846
+       ],
+       "mean": 0.2137353433835846,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.28
+       ],
+       "mean": 0.28,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5059847660500544
+       ],
+       "mean": 0.5059847660500544,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5059194948697711
+       ],
+       "mean": 0.5059194948697711,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3424915635602331
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-3240.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-3240",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-3240",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2223175965665236
+       ],
+       "mean": 0.2223175965665236,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25200845665961946
+       ],
+       "mean": 0.25200845665961946,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5614678899082569
+       ],
+       "mean": 0.5614678899082569,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.2022932022932023
+       ],
+       "mean": 0.2022932022932023,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.252141007767377
+       ],
+       "mean": 0.252141007767377,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.21658986175115208
+       ],
+       "mean": 0.21658986175115208,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21239530988274707
+       ],
+       "mean": 0.21239530988274707,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.28
+       ],
+       "mean": 0.28,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5059847660500544
+       ],
+       "mean": 0.5059847660500544,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.553
+       ],
+       "mean": 0.553,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5193370165745856
+       ],
+       "mean": 0.5193370165745856,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3434122824957744
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-360.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-360",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-360",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.23862660944206007
+       ],
+       "mean": 0.23862660944206007,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25116279069767444
+       ],
+       "mean": 0.25116279069767444,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6217125382262997
+       ],
+       "mean": 0.6217125382262997,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.2194922194922195
+       ],
+       "mean": 0.2194922194922195,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2523401712806214
+       ],
+       "mean": 0.2523401712806214,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.23348694316436253
+       ],
+       "mean": 0.23348694316436253,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21139028475711893
+       ],
+       "mean": 0.21139028475711893,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.278
+       ],
+       "mean": 0.278,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5108813928182807
+       ],
+       "mean": 0.5108813928182807,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5019731649565904
+       ],
+       "mean": 0.5019731649565904,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.35191510134865706
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-3600.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-3600",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-3600",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.22317596566523606
+       ],
+       "mean": 0.22317596566523606,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.2507399577167019
+       ],
+       "mean": 0.2507399577167019,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5620795107033639
+       ],
+       "mean": 0.5620795107033639,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.20147420147420148
+       ],
+       "mean": 0.20147420147420148,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2531368253335989
+       ],
+       "mean": 0.2531368253335989,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.21505376344086022
+       ],
+       "mean": 0.21505376344086022,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21474036850921274
+       ],
+       "mean": 0.21474036850921274,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.284
+       ],
+       "mean": 0.284,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5076169749727966
+       ],
+       "mean": 0.5076169749727966,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.554
+       ],
+       "mean": 0.554,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5138121546961326
+       ],
+       "mean": 0.5138121546961326,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.34362088386473677
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-72.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-72",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-72",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.24377682403433476
+       ],
+       "mean": 0.24377682403433476,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25919661733615224
+       ],
+       "mean": 0.25919661733615224,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6067278287461774
+       ],
+       "mean": 0.6067278287461774,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.20802620802620803
+       ],
+       "mean": 0.20802620802620803,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2508464449312886
+       ],
+       "mean": 0.2508464449312886,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.22887864823348694
+       ],
+       "mean": 0.22887864823348694,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.2154103852596315
+       ],
+       "mean": 0.2154103852596315,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.272
+       ],
+       "mean": 0.272,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4956474428726877
+       ],
+       "mean": 0.4956474428726877,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5146014206787688
+       ],
+       "mean": 0.5146014206787688,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3497374381926123
+ }
results/l2r90-wd0033-ssl-noNorm/checkpoint-720.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-720",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-720",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24989429175475686
+       ],
+       "mean": 0.24989429175475686,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6217125382262997
+       ],
+       "mean": 0.6217125382262997,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19901719901719903
+       ],
+       "mean": 0.19901719901719903,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.25194184425413263
+       ],
+       "mean": 0.25194184425413263,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20430107526881722
+       ],
+       "mean": 0.20430107526881722,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21641541038525963
+       ],
+       "mean": 0.21641541038525963,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5119695321001088
+       ],
+       "mean": 0.5119695321001088,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5074980268350434
+       ],
+       "mean": 0.5074980268350434,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.34695463552754463
+ }
results/l2r90-wd0033-ssl/checkpoint-1080.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-1080",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-1080",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24989429175475686
+       ],
+       "mean": 0.24989429175475686,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6223241590214067
+       ],
+       "mean": 0.6223241590214067,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19574119574119575
+       ],
+       "mean": 0.19574119574119575,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2504481179047998
+       ],
+       "mean": 0.2504481179047998,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20430107526881722
+       ],
+       "mean": 0.20430107526881722,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21809045226130652
+       ],
+       "mean": 0.21809045226130652,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4967355821545158
+       ],
+       "mean": 0.4967355821545158,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4956590370955012
+       ],
+       "mean": 0.4956590370955012,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3442677258330612,
+   "normalize_probe_inputs": true
+ }
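Files under `results/l2r90-wd0033-ssl/` carry one field the `-noNorm` files above lack, `"normalize_probe_inputs": true` (hence the 97-line hunks instead of 96). A sketch, using only the directory names from this commit, that pairs the two variants checkpoint by checkpoint and prints the gap in mean accuracy:

```python
# Sketch: compare normalized vs. no-normalization results per checkpoint.
import json
from pathlib import Path

norm_dir = Path("results/l2r90-wd0033-ssl")
nonorm_dir = Path("results/l2r90-wd0033-ssl-noNorm")

# Sort numerically by training step rather than lexicographically,
# so checkpoint-360 comes before checkpoint-1080.
for norm_file in sorted(norm_dir.glob("checkpoint-*.json"),
                        key=lambda p: int(p.stem.split("-")[1])):
    nonorm_file = nonorm_dir / norm_file.name
    if not nonorm_file.exists():
        continue  # checkpoint evaluated in only one configuration
    norm = json.loads(norm_file.read_text())
    nonorm = json.loads(nonorm_file.read_text())
    delta = norm["mean_accuracy"] - nonorm["mean_accuracy"]
    print(f"{norm_file.stem}: norm={norm['mean_accuracy']:.4f}  "
          f"noNorm={nonorm['mean_accuracy']:.4f}  delta={delta:+.4f}")
```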
results/l2r90-wd0033-ssl/checkpoint-1440.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-1440",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-1440",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24989429175475686
+       ],
+       "mean": 0.24989429175475686,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6186544342507645
+       ],
+       "mean": 0.6186544342507645,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19574119574119575
+       ],
+       "mean": 0.19574119574119575,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2504481179047998
+       ],
+       "mean": 0.2504481179047998,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20276497695852536
+       ],
+       "mean": 0.20276497695852536,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21072026800670016
+       ],
+       "mean": 0.21072026800670016,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4967355821545158
+       ],
+       "mean": 0.4967355821545158,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4956590370955012
+       ],
+       "mean": 0.4956590370955012,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3431244524389212,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-1800.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-1800",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-1800",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24989429175475686
+       ],
+       "mean": 0.24989429175475686,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5892966360856269
+       ],
+       "mean": 0.5892966360856269,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19574119574119575
+       ],
+       "mean": 0.19574119574119575,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2504481179047998
+       ],
+       "mean": 0.2504481179047998,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20276497695852536
+       ],
+       "mean": 0.20276497695852536,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.20938023450586266
+       ],
+       "mean": 0.20938023450586266,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4956474428726877
+       ],
+       "mean": 0.4956474428726877,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4956590370955012
+       ],
+       "mean": 0.4956590370955012,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3402348187163936,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-2160.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-2160",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-2160",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25031712473572937
+       ],
+       "mean": 0.25031712473572937,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5749235474006116
+       ],
+       "mean": 0.5749235474006116,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19492219492219492
+       ],
+       "mean": 0.19492219492219492,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2509460266879108
+       ],
+       "mean": 0.2509460266879108,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20430107526881722
+       ],
+       "mean": 0.20430107526881722,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21306532663316582
+       ],
+       "mean": 0.21306532663316582,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4961915125136017
+       ],
+       "mean": 0.4961915125136017,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4956590370955012
+       ],
+       "mean": 0.4956590370955012,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3394615380199006,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-2520.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-2520",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-2520",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.22660944206008585
+       ],
+       "mean": 0.22660944206008585,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.2507399577167019
+       ],
+       "mean": 0.2507399577167019,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5345565749235474
+       ],
+       "mean": 0.5345565749235474,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19574119574119575
+       ],
+       "mean": 0.19574119574119575,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2515435172276439
+       ],
+       "mean": 0.2515435172276439,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.20430107526881722
+       ],
+       "mean": 0.20430107526881722,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21608040201005024
+       ],
+       "mean": 0.21608040201005024,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4967355821545158
+       ],
+       "mean": 0.4967355821545158,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4956590370955012
+       ],
+       "mean": 0.4956590370955012,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3363606167452781,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-2880.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-2880",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-2880",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2240343347639485
+       ],
+       "mean": 0.2240343347639485,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25116279069767444
+       ],
+       "mean": 0.25116279069767444,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.572782874617737
+       ],
+       "mean": 0.572782874617737,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19574119574119575
+       ],
+       "mean": 0.19574119574119575,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2515435172276439
+       ],
+       "mean": 0.2515435172276439,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.2119815668202765
+       ],
+       "mean": 0.2119815668202765,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21440536013400335
+       ],
+       "mean": 0.21440536013400335,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.28
+       ],
+       "mean": 0.28,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.49836779107725787
+       ],
+       "mean": 0.49836779107725787,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4956590370955012
+       ],
+       "mean": 0.4956590370955012,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3406980425613853,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-3240.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-3240",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-3240",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2240343347639485
+       ],
+       "mean": 0.2240343347639485,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25243128964059197
+       ],
+       "mean": 0.25243128964059197,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5593272171253822
+       ],
+       "mean": 0.5593272171253822,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19737919737919737
+       ],
+       "mean": 0.19737919737919737,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.25114519020115517
+       ],
+       "mean": 0.25114519020115517,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.21351766513056836
+       ],
+       "mean": 0.21351766513056836,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21306532663316582
+       ],
+       "mean": 0.21306532663316582,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.282
+       ],
+       "mean": 0.282,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.49782372143634385
+       ],
+       "mean": 0.49782372143634385,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.553
+       ],
+       "mean": 0.553,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4964483030781373
+       ],
+       "mean": 0.4964483030781373,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.340015658671681,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-360.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-360",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-360",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2669527896995708
+       ],
+       "mean": 0.2669527896995708,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.2536997885835095
+       ],
+       "mean": 0.2536997885835095,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6217125382262997
+       ],
+       "mean": 0.6217125382262997,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.20966420966420968
+       ],
+       "mean": 0.20966420966420968,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2515435172276439
+       ],
+       "mean": 0.2515435172276439,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.23963133640552994
+       ],
+       "mean": 0.23963133640552994,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.20268006700167504
+       ],
+       "mean": 0.20268006700167504,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.5174102285092492
+       ],
+       "mean": 0.5174102285092492,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4996053670086819
+       ],
+       "mean": 0.4996053670086819,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3537181674842155,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-3600.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-3600",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-3600",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.2257510729613734
+       ],
+       "mean": 0.2257510729613734,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.253276955602537
+       ],
+       "mean": 0.253276955602537,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.5571865443425077
+       ],
+       "mean": 0.5571865443425077,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19983619983619982
+       ],
+       "mean": 0.19983619983619982,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2513443537143995
+       ],
+       "mean": 0.2513443537143995,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.21658986175115208
+       ],
+       "mean": 0.21658986175115208,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.21407035175879396
+       ],
+       "mean": 0.21407035175879396,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.278
+       ],
+       "mean": 0.278,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.49510337323177367
+       ],
+       "mean": 0.49510337323177367,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.554
+       ],
+       "mean": 0.554,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4964483030781373
+       ],
+       "mean": 0.4964483030781373,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3401460923888068,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-72.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-72",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-72",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.25407725321888414
+       ],
+       "mean": 0.25407725321888414,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.25835095137420716
+       ],
+       "mean": 0.25835095137420716,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6024464831804281
+       ],
+       "mean": 0.6024464831804281,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.20884520884520885
+       ],
+       "mean": 0.20884520884520885,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2506472814180442
+       ],
+       "mean": 0.2506472814180442,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.22887864823348694
+       ],
+       "mean": 0.22887864823348694,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.20301507537688443
+       ],
+       "mean": 0.20301507537688443,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.29
+       ],
+       "mean": 0.29,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.4923830250272035
+       ],
+       "mean": 0.4923830250272035,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.5201262825572218
+       ],
+       "mean": 0.5201262825572218,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3509791099301427,
+   "normalize_probe_inputs": true
+ }
results/l2r90-wd0033-ssl/checkpoint-720.json ADDED
@@ -0,0 +1,97 @@
+ {
+   "model_path": "/data/michael/concept_model/runs/l2r90-wd0033-ssl/checkpoint-720",
+   "run": "l2r90-wd0033-ssl",
+   "checkpoint": "checkpoint-720",
+   "tasks": [
+     {
+       "task": "arc-challenge",
+       "accuracies": [
+         0.22660944206008585
+       ],
+       "mean": 0.22660944206008585,
+       "ci": 0.0
+     },
+     {
+       "task": "arc-easy",
+       "accuracies": [
+         0.24989429175475686
+       ],
+       "mean": 0.24989429175475686,
+       "ci": 0.0
+     },
+     {
+       "task": "boolq",
+       "accuracies": [
+         0.6217125382262997
+       ],
+       "mean": 0.6217125382262997,
+       "ci": 0.0
+     },
+     {
+       "task": "commonsenseqa",
+       "accuracies": [
+         0.19983619983619982
+       ],
+       "mean": 0.19983619983619982,
+       "ci": 0.0
+     },
+     {
+       "task": "hellaswag",
+       "accuracies": [
+         0.2562238597888867
+       ],
+       "mean": 0.2562238597888867,
+       "ci": 0.0
+     },
+     {
+       "task": "logiqa",
+       "accuracies": [
+         0.23195084485407066
+       ],
+       "mean": 0.23195084485407066,
+       "ci": 0.0
+     },
+     {
+       "task": "mathqa",
+       "accuracies": [
+         0.2100502512562814
+       ],
+       "mean": 0.2100502512562814,
+       "ci": 0.0
+     },
+     {
+       "task": "openbookqa",
+       "accuracies": [
+         0.276
+       ],
+       "mean": 0.276,
+       "ci": 0.0
+     },
+     {
+       "task": "piqa",
+       "accuracies": [
+         0.514689880304679
+       ],
+       "mean": 0.514689880304679,
+       "ci": 0.0
+     },
+     {
+       "task": "pubmedqa",
+       "accuracies": [
+         0.552
+       ],
+       "mean": 0.552,
+       "ci": 0.0
+     },
+     {
+       "task": "winogrande",
+       "accuracies": [
+         0.4956590370955012
+       ],
+       "mean": 0.4956590370955012,
+       "ci": 0.0
+     }
+   ],
+   "mean_accuracy": 0.3486023950160692,
+   "normalize_probe_inputs": true
+ }