Farouk committed on
Commit · a8baaff
1 Parent(s): 4223d79
Training in progress, step 10000
Browse files
- adapter_model.bin +1 -1
- {checkpoint-8000 → checkpoint-10000}/README.md +0 -0
- {checkpoint-8000 → checkpoint-10000}/adapter_config.json +0 -0
- {checkpoint-8000 → checkpoint-10000}/adapter_model.bin +1 -1
- {checkpoint-8000 → checkpoint-10000}/added_tokens.json +0 -0
- {checkpoint-8000 → checkpoint-10000}/optimizer.pt +1 -1
- {checkpoint-8000 → checkpoint-10000}/rng_state.pth +1 -1
- {checkpoint-8000 → checkpoint-10000}/scheduler.pt +1 -1
- {checkpoint-8000 → checkpoint-10000}/special_tokens_map.json +0 -0
- {checkpoint-8000 → checkpoint-10000}/tokenizer.model +0 -0
- {checkpoint-8000 → checkpoint-10000}/tokenizer_config.json +0 -0
- {checkpoint-8000 → checkpoint-10000}/trainer_state.json +1913 -3
- {checkpoint-8000 → checkpoint-10000}/training_args.bin +0 -0
- checkpoint-7800/adapter_model/adapter_model/README.md +12 -0
- checkpoint-7800/adapter_model/adapter_model/adapter_model.bin +1 -1
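
For reference, this exact revision can be fetched programmatically with huggingface_hub. A minimal sketch only: the repository id below is a placeholder, since this page does not show which repo the commit belongs to.

from huggingface_hub import hf_hub_download, list_repo_files

REPO_ID = "user/expert-4-adapter"  # placeholder: the page does not name the repo
REVISION = "a8baaff"               # the commit shown above

# List every file as of this commit, then pin one download to it.
for path in list_repo_files(REPO_ID, revision=REVISION):
    print(path)

local_path = hf_hub_download(
    repo_id=REPO_ID,
    filename="checkpoint-10000/trainer_state.json",
    revision=REVISION,
)
print(local_path)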
adapter_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a685267c478e7fa2c3ee9fb8c8629250bcf7b5e589a79cd525b6817227518e80
 size 319977229
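
The .bin and .pt entries in this commit are Git LFS pointer files: the repository itself stores only the version/oid/size triplet, and the binary blob lives in LFS storage. A minimal sketch (the file path is illustrative; only the oid comes from the diff above) for checking a downloaded blob against the new pointer's hash:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so multi-hundred-MB checkpoints need not fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

EXPECTED_OID = "a685267c478e7fa2c3ee9fb8c8629250bcf7b5e589a79cd525b6817227518e80"
print(sha256_of("adapter_model.bin") == EXPECTED_OID)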
{checkpoint-8000 → checkpoint-10000}/README.md
RENAMED
File without changes

{checkpoint-8000 → checkpoint-10000}/adapter_config.json
RENAMED
File without changes
{checkpoint-8000 → checkpoint-10000}/adapter_model.bin
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a685267c478e7fa2c3ee9fb8c8629250bcf7b5e589a79cd525b6817227518e80
 size 319977229
{checkpoint-8000 → checkpoint-10000}/added_tokens.json
RENAMED
File without changes
{checkpoint-8000 → checkpoint-10000}/optimizer.pt
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ebe444fff6e8c36fb59cda5c99e57bb84496d81972a60b79c55cbfb640ce5fab
 size 1279539973
{checkpoint-8000 → checkpoint-10000}/rng_state.pth
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:79666d0fb09322d37d026e0fdf05fb12f075906d3b358f4846e58a7fff26d1cd
 size 14511
{checkpoint-8000 → checkpoint-10000}/scheduler.pt
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a4d1f3cad6e54148c289328646571ee0d43f9671c93cc919adafe5bd27a33d05
 size 627
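
optimizer.pt, scheduler.pt, and rng_state.pth are the Trainer's resume artifacts (optimizer moments, LR-scheduler state, RNG snapshot), which is why they change at every checkpoint. An illustrative sketch for peeking at them; the directory name is assumed from the rename above, not taken from any script in the repo:

import torch

CKPT_DIR = "checkpoint-10000"  # assumed local copy of the renamed checkpoint

optimizer_state = torch.load(f"{CKPT_DIR}/optimizer.pt", map_location="cpu")
scheduler_state = torch.load(f"{CKPT_DIR}/scheduler.pt", map_location="cpu")

print(optimizer_state.keys())  # typically 'state' and 'param_groups'
print(scheduler_state)         # a small dict, hence the 627-byte scheduler.pt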
{checkpoint-8000 → checkpoint-10000}/special_tokens_map.json
RENAMED
File without changes

{checkpoint-8000 → checkpoint-10000}/tokenizer.model
RENAMED
File without changes

{checkpoint-8000 → checkpoint-10000}/tokenizer_config.json
RENAMED
File without changes
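
Because the checkpoint ships adapter_config.json, adapter_model.bin, and the tokenizer files together, it can be loaded as a PEFT adapter on top of its base model. A sketch only: the base model id is a placeholder (the page does not say which model the adapter was trained on), and the local path mirrors the checkpoint path recorded in trainer_state.json:

from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE_MODEL = "huggyllama/llama-7b"                 # placeholder base model
ADAPTER_DIR = "experts/expert-4/checkpoint-10000"  # assumed, per trainer_state.json

base = AutoModelForCausalLM.from_pretrained(BASE_MODEL)
model = PeftModel.from_pretrained(base, ADAPTER_DIR)  # applies the adapter weights
tokenizer = AutoTokenizer.from_pretrained(ADAPTER_DIR)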
{checkpoint-8000 → checkpoint-10000}/trainer_state.json
RENAMED
@@ -1,8 +1,8 @@
 {
 "best_metric": 0.7206757664680481,
 "best_model_checkpoint": "experts/expert-4/checkpoint-7800",
-"epoch":
-"global_step":
+"epoch": 1.1178804985747024,
+"global_step": 10000,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -7646,11 +7646,1921 @@
 "mmlu_eval_accuracy_world_religions": 0.8947368421052632,
 "mmlu_loss": 1.2022641924825102,
 "step": 8000
-}
-],
-"max_steps": 10000,
-"num_train_epochs": 2,
-"total_flos":
-"trial_name": null,
-"trial_params": null
-}
| 7649 |
+
},
|
| 7650 |
+
{
|
| 7651 |
+
"epoch": 0.9,
|
| 7652 |
+
"learning_rate": 0.0002,
|
| 7653 |
+
"loss": 0.7554,
|
| 7654 |
+
"step": 8010
|
| 7655 |
+
},
|
| 7656 |
+
{
|
| 7657 |
+
"epoch": 0.9,
|
| 7658 |
+
"learning_rate": 0.0002,
|
| 7659 |
+
"loss": 0.6063,
|
| 7660 |
+
"step": 8020
|
| 7661 |
+
},
|
| 7662 |
+
{
|
| 7663 |
+
"epoch": 0.9,
|
| 7664 |
+
"learning_rate": 0.0002,
|
| 7665 |
+
"loss": 0.6717,
|
| 7666 |
+
"step": 8030
|
| 7667 |
+
},
|
| 7668 |
+
{
|
| 7669 |
+
"epoch": 0.9,
|
| 7670 |
+
"learning_rate": 0.0002,
|
| 7671 |
+
"loss": 0.681,
|
| 7672 |
+
"step": 8040
|
| 7673 |
+
},
|
| 7674 |
+
{
|
| 7675 |
+
"epoch": 0.9,
|
| 7676 |
+
"learning_rate": 0.0002,
|
| 7677 |
+
"loss": 0.7477,
|
| 7678 |
+
"step": 8050
|
| 7679 |
+
},
|
| 7680 |
+
{
|
| 7681 |
+
"epoch": 0.9,
|
| 7682 |
+
"learning_rate": 0.0002,
|
| 7683 |
+
"loss": 0.6894,
|
| 7684 |
+
"step": 8060
|
| 7685 |
+
},
|
| 7686 |
+
{
|
| 7687 |
+
"epoch": 0.9,
|
| 7688 |
+
"learning_rate": 0.0002,
|
| 7689 |
+
"loss": 0.6284,
|
| 7690 |
+
"step": 8070
|
| 7691 |
+
},
|
| 7692 |
+
{
|
| 7693 |
+
"epoch": 0.9,
|
| 7694 |
+
"learning_rate": 0.0002,
|
| 7695 |
+
"loss": 0.8124,
|
| 7696 |
+
"step": 8080
|
| 7697 |
+
},
|
| 7698 |
+
{
|
| 7699 |
+
"epoch": 0.9,
|
| 7700 |
+
"learning_rate": 0.0002,
|
| 7701 |
+
"loss": 0.7501,
|
| 7702 |
+
"step": 8090
|
| 7703 |
+
},
|
| 7704 |
+
{
|
| 7705 |
+
"epoch": 0.91,
|
| 7706 |
+
"learning_rate": 0.0002,
|
| 7707 |
+
"loss": 0.8261,
|
| 7708 |
+
"step": 8100
|
| 7709 |
+
},
|
| 7710 |
+
{
|
| 7711 |
+
"epoch": 0.91,
|
| 7712 |
+
"learning_rate": 0.0002,
|
| 7713 |
+
"loss": 0.8236,
|
| 7714 |
+
"step": 8110
|
| 7715 |
+
},
|
| 7716 |
+
{
|
| 7717 |
+
"epoch": 0.91,
|
| 7718 |
+
"learning_rate": 0.0002,
|
| 7719 |
+
"loss": 0.6545,
|
| 7720 |
+
"step": 8120
|
| 7721 |
+
},
|
| 7722 |
+
{
|
| 7723 |
+
"epoch": 0.91,
|
| 7724 |
+
"learning_rate": 0.0002,
|
| 7725 |
+
"loss": 0.6958,
|
| 7726 |
+
"step": 8130
|
| 7727 |
+
},
|
| 7728 |
+
{
|
| 7729 |
+
"epoch": 0.91,
|
| 7730 |
+
"learning_rate": 0.0002,
|
| 7731 |
+
"loss": 0.7055,
|
| 7732 |
+
"step": 8140
|
| 7733 |
+
},
|
| 7734 |
+
{
|
| 7735 |
+
"epoch": 0.91,
|
| 7736 |
+
"learning_rate": 0.0002,
|
| 7737 |
+
"loss": 0.6359,
|
| 7738 |
+
"step": 8150
|
| 7739 |
+
},
|
| 7740 |
+
{
|
| 7741 |
+
"epoch": 0.91,
|
| 7742 |
+
"learning_rate": 0.0002,
|
| 7743 |
+
"loss": 0.6509,
|
| 7744 |
+
"step": 8160
|
| 7745 |
+
},
|
| 7746 |
+
{
|
| 7747 |
+
"epoch": 0.91,
|
| 7748 |
+
"learning_rate": 0.0002,
|
| 7749 |
+
"loss": 0.8168,
|
| 7750 |
+
"step": 8170
|
| 7751 |
+
},
|
| 7752 |
+
{
|
| 7753 |
+
"epoch": 0.91,
|
| 7754 |
+
"learning_rate": 0.0002,
|
| 7755 |
+
"loss": 0.6494,
|
| 7756 |
+
"step": 8180
|
| 7757 |
+
},
|
| 7758 |
+
{
|
| 7759 |
+
"epoch": 0.92,
|
| 7760 |
+
"learning_rate": 0.0002,
|
| 7761 |
+
"loss": 0.7379,
|
| 7762 |
+
"step": 8190
|
| 7763 |
+
},
|
| 7764 |
+
{
|
| 7765 |
+
"epoch": 0.92,
|
| 7766 |
+
"learning_rate": 0.0002,
|
| 7767 |
+
"loss": 0.7671,
|
| 7768 |
+
"step": 8200
|
| 7769 |
+
},
|
| 7770 |
+
{
|
| 7771 |
+
"epoch": 0.92,
|
| 7772 |
+
"eval_loss": 0.7237617373466492,
|
| 7773 |
+
"eval_runtime": 133.8533,
|
| 7774 |
+
"eval_samples_per_second": 7.471,
|
| 7775 |
+
"eval_steps_per_second": 3.735,
|
| 7776 |
+
"step": 8200
|
| 7777 |
+
},
|
| 7778 |
+
{
|
| 7779 |
+
"epoch": 0.92,
|
| 7780 |
+
"mmlu_eval_accuracy": 0.5125238323825849,
|
| 7781 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
|
| 7782 |
+
"mmlu_eval_accuracy_anatomy": 0.7142857142857143,
|
| 7783 |
+
"mmlu_eval_accuracy_astronomy": 0.5,
|
| 7784 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 7785 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5862068965517241,
|
| 7786 |
+
"mmlu_eval_accuracy_college_biology": 0.4375,
|
| 7787 |
+
"mmlu_eval_accuracy_college_chemistry": 0.125,
|
| 7788 |
+
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
|
| 7789 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 7790 |
+
"mmlu_eval_accuracy_college_medicine": 0.5454545454545454,
|
| 7791 |
+
"mmlu_eval_accuracy_college_physics": 0.36363636363636365,
|
| 7792 |
+
"mmlu_eval_accuracy_computer_security": 0.5454545454545454,
|
| 7793 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.5769230769230769,
|
| 7794 |
+
"mmlu_eval_accuracy_econometrics": 0.25,
|
| 7795 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.375,
|
| 7796 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683,
|
| 7797 |
+
"mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
|
| 7798 |
+
"mmlu_eval_accuracy_global_facts": 0.6,
|
| 7799 |
+
"mmlu_eval_accuracy_high_school_biology": 0.46875,
|
| 7800 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
|
| 7801 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
|
| 7802 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
|
| 7803 |
+
"mmlu_eval_accuracy_high_school_geography": 0.7272727272727273,
|
| 7804 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.5714285714285714,
|
| 7805 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
|
| 7806 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.41379310344827586,
|
| 7807 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.6153846153846154,
|
| 7808 |
+
"mmlu_eval_accuracy_high_school_physics": 0.47058823529411764,
|
| 7809 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8,
|
| 7810 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
|
| 7811 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
|
| 7812 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6153846153846154,
|
| 7813 |
+
"mmlu_eval_accuracy_human_aging": 0.6086956521739131,
|
| 7814 |
+
"mmlu_eval_accuracy_human_sexuality": 0.5,
|
| 7815 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 7816 |
+
"mmlu_eval_accuracy_jurisprudence": 0.2727272727272727,
|
| 7817 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
|
| 7818 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
| 7819 |
+
"mmlu_eval_accuracy_management": 0.7272727272727273,
|
| 7820 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 7821 |
+
"mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
|
| 7822 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
|
| 7823 |
+
"mmlu_eval_accuracy_moral_disputes": 0.42105263157894735,
|
| 7824 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.29,
|
| 7825 |
+
"mmlu_eval_accuracy_nutrition": 0.6666666666666666,
|
| 7826 |
+
"mmlu_eval_accuracy_philosophy": 0.4411764705882353,
|
| 7827 |
+
"mmlu_eval_accuracy_prehistory": 0.5714285714285714,
|
| 7828 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
|
| 7829 |
+
"mmlu_eval_accuracy_professional_law": 0.36470588235294116,
|
| 7830 |
+
"mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
|
| 7831 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5217391304347826,
|
| 7832 |
+
"mmlu_eval_accuracy_public_relations": 0.4166666666666667,
|
| 7833 |
+
"mmlu_eval_accuracy_security_studies": 0.48148148148148145,
|
| 7834 |
+
"mmlu_eval_accuracy_sociology": 0.8636363636363636,
|
| 7835 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 7836 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 7837 |
+
"mmlu_eval_accuracy_world_religions": 0.7894736842105263,
|
| 7838 |
+
"mmlu_loss": 0.9977992222919165,
|
| 7839 |
+
"step": 8200
|
| 7840 |
+
},
|
| 7841 |
+
{
|
| 7842 |
+
"epoch": 0.92,
|
| 7843 |
+
"learning_rate": 0.0002,
|
| 7844 |
+
"loss": 0.6726,
|
| 7845 |
+
"step": 8210
|
| 7846 |
+
},
|
| 7847 |
+
{
|
| 7848 |
+
"epoch": 0.92,
|
| 7849 |
+
"learning_rate": 0.0002,
|
| 7850 |
+
"loss": 0.7613,
|
| 7851 |
+
"step": 8220
|
| 7852 |
+
},
|
| 7853 |
+
{
|
| 7854 |
+
"epoch": 0.92,
|
| 7855 |
+
"learning_rate": 0.0002,
|
| 7856 |
+
"loss": 0.7592,
|
| 7857 |
+
"step": 8230
|
| 7858 |
+
},
|
| 7859 |
+
{
|
| 7860 |
+
"epoch": 0.92,
|
| 7861 |
+
"learning_rate": 0.0002,
|
| 7862 |
+
"loss": 0.7735,
|
| 7863 |
+
"step": 8240
|
| 7864 |
+
},
|
| 7865 |
+
{
|
| 7866 |
+
"epoch": 0.92,
|
| 7867 |
+
"learning_rate": 0.0002,
|
| 7868 |
+
"loss": 0.7698,
|
| 7869 |
+
"step": 8250
|
| 7870 |
+
},
|
| 7871 |
+
{
|
| 7872 |
+
"epoch": 0.92,
|
| 7873 |
+
"learning_rate": 0.0002,
|
| 7874 |
+
"loss": 0.6664,
|
| 7875 |
+
"step": 8260
|
| 7876 |
+
},
|
| 7877 |
+
{
|
| 7878 |
+
"epoch": 0.92,
|
| 7879 |
+
"learning_rate": 0.0002,
|
| 7880 |
+
"loss": 0.6488,
|
| 7881 |
+
"step": 8270
|
| 7882 |
+
},
|
| 7883 |
+
{
|
| 7884 |
+
"epoch": 0.93,
|
| 7885 |
+
"learning_rate": 0.0002,
|
| 7886 |
+
"loss": 0.7968,
|
| 7887 |
+
"step": 8280
|
| 7888 |
+
},
|
| 7889 |
+
{
|
| 7890 |
+
"epoch": 0.93,
|
| 7891 |
+
"learning_rate": 0.0002,
|
| 7892 |
+
"loss": 0.7509,
|
| 7893 |
+
"step": 8290
|
| 7894 |
+
},
|
| 7895 |
+
{
|
| 7896 |
+
"epoch": 0.93,
|
| 7897 |
+
"learning_rate": 0.0002,
|
| 7898 |
+
"loss": 0.713,
|
| 7899 |
+
"step": 8300
|
| 7900 |
+
},
|
| 7901 |
+
{
|
| 7902 |
+
"epoch": 0.93,
|
| 7903 |
+
"learning_rate": 0.0002,
|
| 7904 |
+
"loss": 0.7517,
|
| 7905 |
+
"step": 8310
|
| 7906 |
+
},
|
| 7907 |
+
{
|
| 7908 |
+
"epoch": 0.93,
|
| 7909 |
+
"learning_rate": 0.0002,
|
| 7910 |
+
"loss": 0.6979,
|
| 7911 |
+
"step": 8320
|
| 7912 |
+
},
|
| 7913 |
+
{
|
| 7914 |
+
"epoch": 0.93,
|
| 7915 |
+
"learning_rate": 0.0002,
|
| 7916 |
+
"loss": 0.6627,
|
| 7917 |
+
"step": 8330
|
| 7918 |
+
},
|
| 7919 |
+
{
|
| 7920 |
+
"epoch": 0.93,
|
| 7921 |
+
"learning_rate": 0.0002,
|
| 7922 |
+
"loss": 0.6101,
|
| 7923 |
+
"step": 8340
|
| 7924 |
+
},
|
| 7925 |
+
{
|
| 7926 |
+
"epoch": 0.93,
|
| 7927 |
+
"learning_rate": 0.0002,
|
| 7928 |
+
"loss": 0.7809,
|
| 7929 |
+
"step": 8350
|
| 7930 |
+
},
|
| 7931 |
+
{
|
| 7932 |
+
"epoch": 0.93,
|
| 7933 |
+
"learning_rate": 0.0002,
|
| 7934 |
+
"loss": 0.696,
|
| 7935 |
+
"step": 8360
|
| 7936 |
+
},
|
| 7937 |
+
{
|
| 7938 |
+
"epoch": 0.94,
|
| 7939 |
+
"learning_rate": 0.0002,
|
| 7940 |
+
"loss": 0.7072,
|
| 7941 |
+
"step": 8370
|
| 7942 |
+
},
|
| 7943 |
+
{
|
| 7944 |
+
"epoch": 0.94,
|
| 7945 |
+
"learning_rate": 0.0002,
|
| 7946 |
+
"loss": 0.6693,
|
| 7947 |
+
"step": 8380
|
| 7948 |
+
},
|
| 7949 |
+
{
|
| 7950 |
+
"epoch": 0.94,
|
| 7951 |
+
"learning_rate": 0.0002,
|
| 7952 |
+
"loss": 0.6619,
|
| 7953 |
+
"step": 8390
|
| 7954 |
+
},
|
| 7955 |
+
{
|
| 7956 |
+
"epoch": 0.94,
|
| 7957 |
+
"learning_rate": 0.0002,
|
| 7958 |
+
"loss": 0.7257,
|
| 7959 |
+
"step": 8400
|
| 7960 |
+
},
|
| 7961 |
+
{
|
| 7962 |
+
"epoch": 0.94,
|
| 7963 |
+
"eval_loss": 0.7216758131980896,
|
| 7964 |
+
"eval_runtime": 133.7373,
|
| 7965 |
+
"eval_samples_per_second": 7.477,
|
| 7966 |
+
"eval_steps_per_second": 3.739,
|
| 7967 |
+
"step": 8400
|
| 7968 |
+
},
|
| 7969 |
+
{
|
| 7970 |
+
"epoch": 0.94,
|
| 7971 |
+
"mmlu_eval_accuracy": 0.526984584707121,
|
| 7972 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.45454545454545453,
|
| 7973 |
+
"mmlu_eval_accuracy_anatomy": 0.5714285714285714,
|
| 7974 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 7975 |
+
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
|
| 7976 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
|
| 7977 |
+
"mmlu_eval_accuracy_college_biology": 0.5,
|
| 7978 |
+
"mmlu_eval_accuracy_college_chemistry": 0.125,
|
| 7979 |
+
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
|
| 7980 |
+
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
|
| 7981 |
+
"mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
|
| 7982 |
+
"mmlu_eval_accuracy_college_physics": 0.45454545454545453,
|
| 7983 |
+
"mmlu_eval_accuracy_computer_security": 0.5454545454545454,
|
| 7984 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
|
| 7985 |
+
"mmlu_eval_accuracy_econometrics": 0.25,
|
| 7986 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.1875,
|
| 7987 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 7988 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
| 7989 |
+
"mmlu_eval_accuracy_global_facts": 0.6,
|
| 7990 |
+
"mmlu_eval_accuracy_high_school_biology": 0.46875,
|
| 7991 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
|
| 7992 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
|
| 7993 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222,
|
| 7994 |
+
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
|
| 7995 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
|
| 7996 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.46511627906976744,
|
| 7997 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
|
| 7998 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.5384615384615384,
|
| 7999 |
+
"mmlu_eval_accuracy_high_school_physics": 0.4117647058823529,
|
| 8000 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8,
|
| 8001 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.5217391304347826,
|
| 8002 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.7727272727272727,
|
| 8003 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
|
| 8004 |
+
"mmlu_eval_accuracy_human_aging": 0.6521739130434783,
|
| 8005 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
| 8006 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
| 8007 |
+
"mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
|
| 8008 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
|
| 8009 |
+
"mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
|
| 8010 |
+
"mmlu_eval_accuracy_management": 0.6363636363636364,
|
| 8011 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 8012 |
+
"mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
|
| 8013 |
+
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907,
|
| 8014 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
|
| 8015 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.22,
|
| 8016 |
+
"mmlu_eval_accuracy_nutrition": 0.6060606060606061,
|
| 8017 |
+
"mmlu_eval_accuracy_philosophy": 0.5,
|
| 8018 |
+
"mmlu_eval_accuracy_prehistory": 0.6285714285714286,
|
| 8019 |
+
"mmlu_eval_accuracy_professional_accounting": 0.41935483870967744,
|
| 8020 |
+
"mmlu_eval_accuracy_professional_law": 0.4176470588235294,
|
| 8021 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5161290322580645,
|
| 8022 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5507246376811594,
|
| 8023 |
+
"mmlu_eval_accuracy_public_relations": 0.5,
|
| 8024 |
+
"mmlu_eval_accuracy_security_studies": 0.5555555555555556,
|
| 8025 |
+
"mmlu_eval_accuracy_sociology": 0.7727272727272727,
|
| 8026 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
|
| 8027 |
+
"mmlu_eval_accuracy_virology": 0.4444444444444444,
|
| 8028 |
+
"mmlu_eval_accuracy_world_religions": 0.7894736842105263,
|
| 8029 |
+
"mmlu_loss": 1.0874772744530485,
|
| 8030 |
+
"step": 8400
|
| 8031 |
+
},
|
| 8032 |
+
{
|
| 8033 |
+
"epoch": 0.94,
|
| 8034 |
+
"learning_rate": 0.0002,
|
| 8035 |
+
"loss": 0.741,
|
| 8036 |
+
"step": 8410
|
| 8037 |
+
},
|
| 8038 |
+
{
|
| 8039 |
+
"epoch": 0.94,
|
| 8040 |
+
"learning_rate": 0.0002,
|
| 8041 |
+
"loss": 0.7208,
|
| 8042 |
+
"step": 8420
|
| 8043 |
+
},
|
| 8044 |
+
{
|
| 8045 |
+
"epoch": 0.94,
|
| 8046 |
+
"learning_rate": 0.0002,
|
| 8047 |
+
"loss": 0.7545,
|
| 8048 |
+
"step": 8430
|
| 8049 |
+
},
|
| 8050 |
+
{
|
| 8051 |
+
"epoch": 0.94,
|
| 8052 |
+
"learning_rate": 0.0002,
|
| 8053 |
+
"loss": 0.6342,
|
| 8054 |
+
"step": 8440
|
| 8055 |
+
},
|
| 8056 |
+
{
|
| 8057 |
+
"epoch": 0.94,
|
| 8058 |
+
"learning_rate": 0.0002,
|
| 8059 |
+
"loss": 0.6757,
|
| 8060 |
+
"step": 8450
|
| 8061 |
+
},
|
| 8062 |
+
{
|
| 8063 |
+
"epoch": 0.95,
|
| 8064 |
+
"learning_rate": 0.0002,
|
| 8065 |
+
"loss": 0.6329,
|
| 8066 |
+
"step": 8460
|
| 8067 |
+
},
|
| 8068 |
+
{
|
| 8069 |
+
"epoch": 0.95,
|
| 8070 |
+
"learning_rate": 0.0002,
|
| 8071 |
+
"loss": 0.6294,
|
| 8072 |
+
"step": 8470
|
| 8073 |
+
},
|
| 8074 |
+
{
|
| 8075 |
+
"epoch": 0.95,
|
| 8076 |
+
"learning_rate": 0.0002,
|
| 8077 |
+
"loss": 0.7249,
|
| 8078 |
+
"step": 8480
|
| 8079 |
+
},
|
| 8080 |
+
{
|
| 8081 |
+
"epoch": 0.95,
|
| 8082 |
+
"learning_rate": 0.0002,
|
| 8083 |
+
"loss": 0.748,
|
| 8084 |
+
"step": 8490
|
| 8085 |
+
},
|
| 8086 |
+
{
|
| 8087 |
+
"epoch": 0.95,
|
| 8088 |
+
"learning_rate": 0.0002,
|
| 8089 |
+
"loss": 0.7092,
|
| 8090 |
+
"step": 8500
|
| 8091 |
+
},
|
| 8092 |
+
{
|
| 8093 |
+
"epoch": 0.95,
|
| 8094 |
+
"learning_rate": 0.0002,
|
| 8095 |
+
"loss": 0.6631,
|
| 8096 |
+
"step": 8510
|
| 8097 |
+
},
|
| 8098 |
+
{
|
| 8099 |
+
"epoch": 0.95,
|
| 8100 |
+
"learning_rate": 0.0002,
|
| 8101 |
+
"loss": 0.7569,
|
| 8102 |
+
"step": 8520
|
| 8103 |
+
},
|
| 8104 |
+
{
|
| 8105 |
+
"epoch": 0.95,
|
| 8106 |
+
"learning_rate": 0.0002,
|
| 8107 |
+
"loss": 0.7655,
|
| 8108 |
+
"step": 8530
|
| 8109 |
+
},
|
| 8110 |
+
{
|
| 8111 |
+
"epoch": 0.95,
|
| 8112 |
+
"learning_rate": 0.0002,
|
| 8113 |
+
"loss": 0.6968,
|
| 8114 |
+
"step": 8540
|
| 8115 |
+
},
|
| 8116 |
+
{
|
| 8117 |
+
"epoch": 0.96,
|
| 8118 |
+
"learning_rate": 0.0002,
|
| 8119 |
+
"loss": 0.6705,
|
| 8120 |
+
"step": 8550
|
| 8121 |
+
},
|
| 8122 |
+
{
|
| 8123 |
+
"epoch": 0.96,
|
| 8124 |
+
"learning_rate": 0.0002,
|
| 8125 |
+
"loss": 0.6807,
|
| 8126 |
+
"step": 8560
|
| 8127 |
+
},
|
| 8128 |
+
{
|
| 8129 |
+
"epoch": 0.96,
|
| 8130 |
+
"learning_rate": 0.0002,
|
| 8131 |
+
"loss": 0.7453,
|
| 8132 |
+
"step": 8570
|
| 8133 |
+
},
|
| 8134 |
+
{
|
| 8135 |
+
"epoch": 0.96,
|
| 8136 |
+
"learning_rate": 0.0002,
|
| 8137 |
+
"loss": 0.6911,
|
| 8138 |
+
"step": 8580
|
| 8139 |
+
},
|
| 8140 |
+
{
|
| 8141 |
+
"epoch": 0.96,
|
| 8142 |
+
"learning_rate": 0.0002,
|
| 8143 |
+
"loss": 0.6233,
|
| 8144 |
+
"step": 8590
|
| 8145 |
+
},
|
| 8146 |
+
{
|
| 8147 |
+
"epoch": 0.96,
|
| 8148 |
+
"learning_rate": 0.0002,
|
| 8149 |
+
"loss": 0.7262,
|
| 8150 |
+
"step": 8600
|
| 8151 |
+
},
|
| 8152 |
+
{
|
| 8153 |
+
"epoch": 0.96,
|
| 8154 |
+
"eval_loss": 0.7232338786125183,
|
| 8155 |
+
"eval_runtime": 133.8059,
|
| 8156 |
+
"eval_samples_per_second": 7.474,
|
| 8157 |
+
"eval_steps_per_second": 3.737,
|
| 8158 |
+
"step": 8600
|
| 8159 |
+
},
|
| 8160 |
+
{
|
| 8161 |
+
"epoch": 0.96,
|
| 8162 |
+
"mmlu_eval_accuracy": 0.5137482992996992,
|
| 8163 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.45454545454545453,
|
| 8164 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 8165 |
+
"mmlu_eval_accuracy_astronomy": 0.5625,
|
| 8166 |
+
"mmlu_eval_accuracy_business_ethics": 0.6363636363636364,
|
| 8167 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
|
| 8168 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 8169 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 8170 |
+
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
|
| 8171 |
+
"mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
|
| 8172 |
+
"mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
|
| 8173 |
+
"mmlu_eval_accuracy_college_physics": 0.45454545454545453,
|
| 8174 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
| 8175 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.6153846153846154,
|
| 8176 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 8177 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.3125,
|
| 8178 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 8179 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
| 8180 |
+
"mmlu_eval_accuracy_global_facts": 0.5,
|
| 8181 |
+
"mmlu_eval_accuracy_high_school_biology": 0.4375,
|
| 8182 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
|
| 8183 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
|
| 8184 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
|
| 8185 |
+
"mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
|
| 8186 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 8187 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.4186046511627907,
|
| 8188 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
|
| 8189 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
|
| 8190 |
+
"mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
|
| 8191 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.7833333333333333,
|
| 8192 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
|
| 8193 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
|
| 8194 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
|
| 8195 |
+
"mmlu_eval_accuracy_human_aging": 0.6521739130434783,
|
| 8196 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
| 8197 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
| 8198 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 8199 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 8200 |
+
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
|
| 8201 |
+
"mmlu_eval_accuracy_management": 0.5454545454545454,
|
| 8202 |
+
"mmlu_eval_accuracy_marketing": 0.84,
|
| 8203 |
+
"mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
|
| 8204 |
+
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
|
| 8205 |
+
"mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
|
| 8206 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.22,
|
| 8207 |
+
"mmlu_eval_accuracy_nutrition": 0.6666666666666666,
|
| 8208 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
| 8209 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
| 8210 |
+
"mmlu_eval_accuracy_professional_accounting": 0.4838709677419355,
|
| 8211 |
+
"mmlu_eval_accuracy_professional_law": 0.3411764705882353,
|
| 8212 |
+
"mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
|
| 8213 |
+
"mmlu_eval_accuracy_professional_psychology": 0.5507246376811594,
|
| 8214 |
+
"mmlu_eval_accuracy_public_relations": 0.4166666666666667,
|
| 8215 |
+
"mmlu_eval_accuracy_security_studies": 0.48148148148148145,
|
| 8216 |
+
"mmlu_eval_accuracy_sociology": 0.7727272727272727,
|
| 8217 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
|
| 8218 |
+
"mmlu_eval_accuracy_virology": 0.4444444444444444,
|
| 8219 |
+
"mmlu_eval_accuracy_world_religions": 0.8421052631578947,
|
| 8220 |
+
"mmlu_loss": 1.1483409754912473,
|
| 8221 |
+
"step": 8600
|
| 8222 |
+
},
|
| 8223 |
+
{
|
| 8224 |
+
"epoch": 0.96,
|
| 8225 |
+
"learning_rate": 0.0002,
|
| 8226 |
+
"loss": 0.6649,
|
| 8227 |
+
"step": 8610
|
| 8228 |
+
},
|
| 8229 |
+
{
|
| 8230 |
+
"epoch": 0.96,
|
| 8231 |
+
"learning_rate": 0.0002,
|
| 8232 |
+
"loss": 0.7852,
|
| 8233 |
+
"step": 8620
|
| 8234 |
+
},
|
| 8235 |
+
{
|
| 8236 |
+
"epoch": 0.96,
|
| 8237 |
+
"learning_rate": 0.0002,
|
| 8238 |
+
"loss": 0.6614,
|
| 8239 |
+
"step": 8630
|
| 8240 |
+
},
|
| 8241 |
+
{
|
| 8242 |
+
"epoch": 0.97,
|
| 8243 |
+
"learning_rate": 0.0002,
|
| 8244 |
+
"loss": 0.7296,
|
| 8245 |
+
"step": 8640
|
| 8246 |
+
},
|
| 8247 |
+
{
|
| 8248 |
+
"epoch": 0.97,
|
| 8249 |
+
"learning_rate": 0.0002,
|
| 8250 |
+
"loss": 0.7971,
|
| 8251 |
+
"step": 8650
|
| 8252 |
+
},
|
| 8253 |
+
{
|
| 8254 |
+
"epoch": 0.97,
|
| 8255 |
+
"learning_rate": 0.0002,
|
| 8256 |
+
"loss": 0.7291,
|
| 8257 |
+
"step": 8660
|
| 8258 |
+
},
|
| 8259 |
+
{
|
| 8260 |
+
"epoch": 0.97,
|
| 8261 |
+
"learning_rate": 0.0002,
|
| 8262 |
+
"loss": 0.6949,
|
| 8263 |
+
"step": 8670
|
| 8264 |
+
},
|
| 8265 |
+
{
|
| 8266 |
+
"epoch": 0.97,
|
| 8267 |
+
"learning_rate": 0.0002,
|
| 8268 |
+
"loss": 0.7349,
|
| 8269 |
+
"step": 8680
|
| 8270 |
+
},
|
| 8271 |
+
{
|
| 8272 |
+
"epoch": 0.97,
|
| 8273 |
+
"learning_rate": 0.0002,
|
| 8274 |
+
"loss": 0.6495,
|
| 8275 |
+
"step": 8690
|
| 8276 |
+
},
|
| 8277 |
+
{
|
| 8278 |
+
"epoch": 0.97,
|
| 8279 |
+
"learning_rate": 0.0002,
|
| 8280 |
+
"loss": 0.7464,
|
| 8281 |
+
"step": 8700
|
| 8282 |
+
},
|
| 8283 |
+
{
|
| 8284 |
+
"epoch": 0.97,
|
| 8285 |
+
"learning_rate": 0.0002,
|
| 8286 |
+
"loss": 0.5793,
|
| 8287 |
+
"step": 8710
|
| 8288 |
+
},
|
| 8289 |
+
{
|
| 8290 |
+
"epoch": 0.97,
|
| 8291 |
+
"learning_rate": 0.0002,
|
| 8292 |
+
"loss": 0.7244,
|
| 8293 |
+
"step": 8720
|
| 8294 |
+
},
|
| 8295 |
+
{
|
| 8296 |
+
"epoch": 0.98,
|
| 8297 |
+
"learning_rate": 0.0002,
|
| 8298 |
+
"loss": 0.8785,
|
| 8299 |
+
"step": 8730
|
| 8300 |
+
},
|
| 8301 |
+
{
|
| 8302 |
+
"epoch": 0.98,
|
| 8303 |
+
"learning_rate": 0.0002,
|
| 8304 |
+
"loss": 0.7174,
|
| 8305 |
+
"step": 8740
|
| 8306 |
+
},
|
| 8307 |
+
{
|
| 8308 |
+
"epoch": 0.98,
|
| 8309 |
+
"learning_rate": 0.0002,
|
| 8310 |
+
"loss": 0.8183,
|
| 8311 |
+
"step": 8750
|
| 8312 |
+
},
|
| 8313 |
+
{
|
| 8314 |
+
"epoch": 0.98,
|
| 8315 |
+
"learning_rate": 0.0002,
|
| 8316 |
+
"loss": 0.7137,
|
| 8317 |
+
"step": 8760
|
| 8318 |
+
},
|
| 8319 |
+
{
|
| 8320 |
+
"epoch": 0.98,
|
| 8321 |
+
"learning_rate": 0.0002,
|
| 8322 |
+
"loss": 0.7427,
|
| 8323 |
+
"step": 8770
|
| 8324 |
+
},
|
| 8325 |
+
{
|
| 8326 |
+
"epoch": 0.98,
|
| 8327 |
+
"learning_rate": 0.0002,
|
| 8328 |
+
"loss": 0.6908,
|
| 8329 |
+
"step": 8780
|
| 8330 |
+
},
|
| 8331 |
+
{
|
| 8332 |
+
"epoch": 0.98,
|
| 8333 |
+
"learning_rate": 0.0002,
|
| 8334 |
+
"loss": 0.7299,
|
| 8335 |
+
"step": 8790
|
| 8336 |
+
},
|
| 8337 |
+
{
|
| 8338 |
+
"epoch": 0.98,
|
| 8339 |
+
"learning_rate": 0.0002,
|
| 8340 |
+
"loss": 0.7078,
|
| 8341 |
+
"step": 8800
|
| 8342 |
+
},
|
| 8343 |
+
{
|
| 8344 |
+
"epoch": 0.98,
|
| 8345 |
+
"eval_loss": 0.7242326736450195,
|
| 8346 |
+
"eval_runtime": 133.88,
|
| 8347 |
+
"eval_samples_per_second": 7.469,
|
| 8348 |
+
"eval_steps_per_second": 3.735,
|
| 8349 |
+
"step": 8800
|
| 8350 |
+
},
|
| 8351 |
+
{
|
| 8352 |
+
"epoch": 0.98,
|
| 8353 |
+
"mmlu_eval_accuracy": 0.47832951297756615,
|
| 8354 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
|
| 8355 |
+
"mmlu_eval_accuracy_anatomy": 0.6428571428571429,
|
| 8356 |
+
"mmlu_eval_accuracy_astronomy": 0.4375,
|
| 8357 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 8358 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
|
| 8359 |
+
"mmlu_eval_accuracy_college_biology": 0.5625,
|
| 8360 |
+
"mmlu_eval_accuracy_college_chemistry": 0.0,
|
| 8361 |
+
"mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
|
| 8362 |
+
"mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
|
| 8363 |
+
"mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
|
| 8364 |
+
"mmlu_eval_accuracy_college_physics": 0.2727272727272727,
|
| 8365 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
| 8366 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.5,
|
| 8367 |
+
"mmlu_eval_accuracy_econometrics": 0.08333333333333333,
|
| 8368 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.1875,
|
| 8369 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 8370 |
+
"mmlu_eval_accuracy_formal_logic": 0.07142857142857142,
|
| 8371 |
+
"mmlu_eval_accuracy_global_facts": 0.4,
|
| 8372 |
+
"mmlu_eval_accuracy_high_school_biology": 0.5,
|
| 8373 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
|
| 8374 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
|
| 8375 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.4444444444444444,
|
| 8376 |
+
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
|
| 8377 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
|
| 8378 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.4186046511627907,
|
| 8379 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862,
|
| 8380 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.5,
|
| 8381 |
+
"mmlu_eval_accuracy_high_school_physics": 0.5294117647058824,
|
| 8382 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.8166666666666667,
|
| 8383 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
|
| 8384 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
|
| 8385 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6538461538461539,
|
| 8386 |
+
"mmlu_eval_accuracy_human_aging": 0.6521739130434783,
|
| 8387 |
+
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
|
| 8388 |
+
"mmlu_eval_accuracy_international_law": 0.9230769230769231,
|
| 8389 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 8390 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
|
| 8391 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
| 8392 |
+
"mmlu_eval_accuracy_management": 0.5454545454545454,
|
| 8393 |
+
"mmlu_eval_accuracy_marketing": 0.8,
|
| 8394 |
+
"mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
|
| 8395 |
+
"mmlu_eval_accuracy_miscellaneous": 0.686046511627907,
|
| 8396 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5789473684210527,
|
| 8397 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.22,
|
| 8398 |
+
"mmlu_eval_accuracy_nutrition": 0.5454545454545454,
|
| 8399 |
+
"mmlu_eval_accuracy_philosophy": 0.4117647058823529,
|
| 8400 |
+
"mmlu_eval_accuracy_prehistory": 0.6285714285714286,
|
| 8401 |
+
"mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
|
| 8402 |
+
"mmlu_eval_accuracy_professional_law": 0.3588235294117647,
|
| 8403 |
+
"mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
|
| 8404 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
|
| 8405 |
+
"mmlu_eval_accuracy_public_relations": 0.5833333333333334,
|
| 8406 |
+
"mmlu_eval_accuracy_security_studies": 0.48148148148148145,
|
| 8407 |
+
"mmlu_eval_accuracy_sociology": 0.7727272727272727,
|
| 8408 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
|
| 8409 |
+
"mmlu_eval_accuracy_virology": 0.4444444444444444,
|
| 8410 |
+
"mmlu_eval_accuracy_world_religions": 0.7368421052631579,
|
| 8411 |
+
"mmlu_loss": 1.19566303125872,
|
| 8412 |
+
"step": 8800
|
| 8413 |
+
},
|
| 8414 |
+
{
|
| 8415 |
+
"epoch": 0.98,
|
| 8416 |
+
"learning_rate": 0.0002,
|
| 8417 |
+
"loss": 0.6977,
|
| 8418 |
+
"step": 8810
|
| 8419 |
+
},
|
| 8420 |
+
{
|
| 8421 |
+
"epoch": 0.99,
|
| 8422 |
+
"learning_rate": 0.0002,
|
| 8423 |
+
"loss": 0.7675,
|
| 8424 |
+
"step": 8820
|
| 8425 |
+
},
|
| 8426 |
+
{
|
| 8427 |
+
"epoch": 0.99,
|
| 8428 |
+
"learning_rate": 0.0002,
|
| 8429 |
+
"loss": 0.7544,
|
| 8430 |
+
"step": 8830
|
| 8431 |
+
},
|
| 8432 |
+
{
|
| 8433 |
+
"epoch": 0.99,
|
| 8434 |
+
"learning_rate": 0.0002,
|
| 8435 |
+
"loss": 0.6744,
|
| 8436 |
+
"step": 8840
|
| 8437 |
+
},
|
| 8438 |
+
{
|
| 8439 |
+
"epoch": 0.99,
|
| 8440 |
+
"learning_rate": 0.0002,
|
| 8441 |
+
"loss": 0.6586,
|
| 8442 |
+
"step": 8850
|
| 8443 |
+
},
|
| 8444 |
+
{
|
| 8445 |
+
"epoch": 0.99,
|
| 8446 |
+
"learning_rate": 0.0002,
|
| 8447 |
+
"loss": 0.7119,
|
| 8448 |
+
"step": 8860
|
| 8449 |
+
},
|
| 8450 |
+
{
|
| 8451 |
+
"epoch": 0.99,
|
| 8452 |
+
"learning_rate": 0.0002,
|
| 8453 |
+
"loss": 0.6337,
|
| 8454 |
+
"step": 8870
|
| 8455 |
+
},
|
| 8456 |
+
{
|
| 8457 |
+
"epoch": 0.99,
|
| 8458 |
+
"learning_rate": 0.0002,
|
| 8459 |
+
"loss": 0.6231,
|
| 8460 |
+
"step": 8880
|
| 8461 |
+
},
|
| 8462 |
+
{
|
| 8463 |
+
"epoch": 0.99,
|
| 8464 |
+
"learning_rate": 0.0002,
|
| 8465 |
+
"loss": 0.7832,
|
| 8466 |
+
"step": 8890
|
| 8467 |
+
},
|
| 8468 |
+
{
|
| 8469 |
+
"epoch": 0.99,
|
| 8470 |
+
"learning_rate": 0.0002,
|
| 8471 |
+
"loss": 0.6977,
|
| 8472 |
+
"step": 8900
|
| 8473 |
+
},
|
| 8474 |
+
{
|
| 8475 |
+
"epoch": 1.0,
|
| 8476 |
+
"learning_rate": 0.0002,
|
| 8477 |
+
"loss": 0.7891,
|
| 8478 |
+
"step": 8910
|
| 8479 |
+
},
|
| 8480 |
+
{
|
| 8481 |
+
"epoch": 1.0,
|
| 8482 |
+
"learning_rate": 0.0002,
|
| 8483 |
+
"loss": 0.741,
|
| 8484 |
+
"step": 8920
|
| 8485 |
+
},
|
| 8486 |
+
{
|
| 8487 |
+
"epoch": 1.0,
|
| 8488 |
+
"learning_rate": 0.0002,
|
| 8489 |
+
"loss": 0.7052,
|
| 8490 |
+
"step": 8930
|
| 8491 |
+
},
|
| 8492 |
+
{
|
| 8493 |
+
"epoch": 1.0,
|
| 8494 |
+
"learning_rate": 0.0002,
|
| 8495 |
+
"loss": 0.775,
|
| 8496 |
+
"step": 8940
|
| 8497 |
+
},
|
| 8498 |
+
{
|
| 8499 |
+
"epoch": 1.0,
|
| 8500 |
+
"learning_rate": 0.0002,
|
| 8501 |
+
"loss": 0.6596,
|
| 8502 |
+
"step": 8950
|
| 8503 |
+
},
|
| 8504 |
+
{
|
| 8505 |
+
"epoch": 1.0,
|
| 8506 |
+
"learning_rate": 0.0002,
|
| 8507 |
+
"loss": 0.7114,
|
| 8508 |
+
"step": 8960
|
| 8509 |
+
},
|
| 8510 |
+
{
|
| 8511 |
+
"epoch": 1.0,
|
| 8512 |
+
"learning_rate": 0.0002,
|
| 8513 |
+
"loss": 0.6218,
|
| 8514 |
+
"step": 8970
|
| 8515 |
+
},
|
| 8516 |
+
{
|
| 8517 |
+
"epoch": 1.0,
|
| 8518 |
+
"learning_rate": 0.0002,
|
| 8519 |
+
"loss": 0.6744,
|
| 8520 |
+
"step": 8980
|
| 8521 |
+
},
|
| 8522 |
+
{
|
| 8523 |
+
"epoch": 1.0,
|
| 8524 |
+
"learning_rate": 0.0002,
|
| 8525 |
+
"loss": 0.5488,
|
| 8526 |
+
"step": 8990
|
| 8527 |
+
},
|
| 8528 |
+
{
|
| 8529 |
+
"epoch": 1.01,
|
| 8530 |
+
"learning_rate": 0.0002,
|
| 8531 |
+
"loss": 0.6267,
|
| 8532 |
+
"step": 9000
|
| 8533 |
+
},
|
| 8534 |
+
{
|
| 8535 |
+
"epoch": 1.01,
|
| 8536 |
+
"eval_loss": 0.7288996577262878,
|
| 8537 |
+
"eval_runtime": 134.0038,
|
| 8538 |
+
"eval_samples_per_second": 7.462,
|
| 8539 |
+
"eval_steps_per_second": 3.731,
|
| 8540 |
+
"step": 9000
|
| 8541 |
+
},
|
| 8542 |
+
{
|
| 8543 |
+
"epoch": 1.01,
|
| 8544 |
+
"mmlu_eval_accuracy": 0.5211415195932566,
|
| 8545 |
+
"mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
|
| 8546 |
+
"mmlu_eval_accuracy_anatomy": 0.5714285714285714,
|
| 8547 |
+
"mmlu_eval_accuracy_astronomy": 0.5,
|
| 8548 |
+
"mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
|
| 8549 |
+
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
|
| 8550 |
+
"mmlu_eval_accuracy_college_biology": 0.375,
|
| 8551 |
+
"mmlu_eval_accuracy_college_chemistry": 0.25,
|
| 8552 |
+
"mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
|
| 8553 |
+
"mmlu_eval_accuracy_college_mathematics": 0.45454545454545453,
|
| 8554 |
+
"mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
|
| 8555 |
+
"mmlu_eval_accuracy_college_physics": 0.2727272727272727,
|
| 8556 |
+
"mmlu_eval_accuracy_computer_security": 0.45454545454545453,
|
| 8557 |
+
"mmlu_eval_accuracy_conceptual_physics": 0.6538461538461539,
|
| 8558 |
+
"mmlu_eval_accuracy_econometrics": 0.16666666666666666,
|
| 8559 |
+
"mmlu_eval_accuracy_electrical_engineering": 0.3125,
|
| 8560 |
+
"mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
|
| 8561 |
+
"mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
|
| 8562 |
+
"mmlu_eval_accuracy_global_facts": 0.4,
|
| 8563 |
+
"mmlu_eval_accuracy_high_school_biology": 0.4375,
|
| 8564 |
+
"mmlu_eval_accuracy_high_school_chemistry": 0.5,
|
| 8565 |
+
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
|
| 8566 |
+
"mmlu_eval_accuracy_high_school_european_history": 0.7222222222222222,
|
| 8567 |
+
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
|
| 8568 |
+
"mmlu_eval_accuracy_high_school_government_and_politics": 0.8571428571428571,
|
| 8569 |
+
"mmlu_eval_accuracy_high_school_macroeconomics": 0.4186046511627907,
|
| 8570 |
+
"mmlu_eval_accuracy_high_school_mathematics": 0.3793103448275862,
|
| 8571 |
+
"mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
|
| 8572 |
+
"mmlu_eval_accuracy_high_school_physics": 0.5294117647058824,
|
| 8573 |
+
"mmlu_eval_accuracy_high_school_psychology": 0.7833333333333333,
|
| 8574 |
+
"mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
|
| 8575 |
+
"mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
|
| 8576 |
+
"mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
|
| 8577 |
+
"mmlu_eval_accuracy_human_aging": 0.6956521739130435,
|
| 8578 |
+
"mmlu_eval_accuracy_human_sexuality": 0.5833333333333334,
|
| 8579 |
+
"mmlu_eval_accuracy_international_law": 0.8461538461538461,
|
| 8580 |
+
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
|
| 8581 |
+
"mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
|
| 8582 |
+
"mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
|
| 8583 |
+
"mmlu_eval_accuracy_management": 0.7272727272727273,
|
| 8584 |
+
"mmlu_eval_accuracy_marketing": 0.92,
|
| 8585 |
+
"mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
|
| 8586 |
+
"mmlu_eval_accuracy_miscellaneous": 0.7093023255813954,
|
| 8587 |
+
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
|
| 8588 |
+
"mmlu_eval_accuracy_moral_scenarios": 0.24,
|
| 8589 |
+
"mmlu_eval_accuracy_nutrition": 0.6060606060606061,
|
| 8590 |
+
"mmlu_eval_accuracy_philosophy": 0.47058823529411764,
|
| 8591 |
+
"mmlu_eval_accuracy_prehistory": 0.5142857142857142,
|
| 8592 |
+
"mmlu_eval_accuracy_professional_accounting": 0.45161290322580644,
|
| 8593 |
+
"mmlu_eval_accuracy_professional_law": 0.4,
|
| 8594 |
+
"mmlu_eval_accuracy_professional_medicine": 0.5161290322580645,
|
| 8595 |
+
"mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
|
| 8596 |
+
"mmlu_eval_accuracy_public_relations": 0.4166666666666667,
|
| 8597 |
+
"mmlu_eval_accuracy_security_studies": 0.48148148148148145,
|
| 8598 |
+
"mmlu_eval_accuracy_sociology": 0.7272727272727273,
|
| 8599 |
+
"mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
|
| 8600 |
+
"mmlu_eval_accuracy_virology": 0.5,
|
| 8601 |
+
"mmlu_eval_accuracy_world_religions": 0.8421052631578947,
|
| 8602 |
+
"mmlu_loss": 1.0023171398253727,
|
| 8603 |
+
"step": 9000
|
| 8604 |
+
},
|
| 8605 |
+
{
|
| 8606 |
+
"epoch": 1.01,
|
| 8607 |
+
"learning_rate": 0.0002,
|
| 8608 |
+
"loss": 0.7466,
|
| 8609 |
+
"step": 9010
|
| 8610 |
+
},
|
| 8611 |
+
{
|
| 8612 |
+
"epoch": 1.01,
|
| 8613 |
+
"learning_rate": 0.0002,
|
| 8614 |
+
"loss": 0.5837,
|
| 8615 |
+
"step": 9020
|
| 8616 |
+
},
|
| 8617 |
+
{
|
| 8618 |
+
"epoch": 1.01,
|
| 8619 |
+
"learning_rate": 0.0002,
|
| 8620 |
+
"loss": 0.5695,
|
| 8621 |
+
"step": 9030
|
| 8622 |
+
},
|
| 8623 |
+
{
|
| 8624 |
+
"epoch": 1.01,
|
| 8625 |
+
"learning_rate": 0.0002,
|
| 8626 |
+
"loss": 0.5639,
|
| 8627 |
+
"step": 9040
|
| 8628 |
+
},
|
| 8629 |
+
{
|
| 8630 |
+
"epoch": 1.01,
|
| 8631 |
+
"learning_rate": 0.0002,
|
| 8632 |
+
"loss": 0.7013,
|
| 8633 |
+
"step": 9050
|
| 8634 |
+
},
|
| 8635 |
+
{
|
| 8636 |
+
"epoch": 1.01,
|
| 8637 |
+
"learning_rate": 0.0002,
|
| 8638 |
+
"loss": 0.529,
|
| 8639 |
+
"step": 9060
|
| 8640 |
+
},
|
| 8641 |
+
{
|
| 8642 |
+
"epoch": 1.01,
|
| 8643 |
+
"learning_rate": 0.0002,
|
| 8644 |
+
"loss": 0.5332,
|
| 8645 |
+
"step": 9070
|
| 8646 |
+
},
|
| 8647 |
+
{
|
| 8648 |
+
"epoch": 1.02,
|
| 8649 |
+
"learning_rate": 0.0002,
|
| 8650 |
+
"loss": 0.6165,
|
| 8651 |
+
"step": 9080
|
| 8652 |
+
},
|
| 8653 |
+
{
|
| 8654 |
+
"epoch": 1.02,
|
| 8655 |
+
"learning_rate": 0.0002,
|
| 8656 |
+
"loss": 0.6265,
|
| 8657 |
+
"step": 9090
|
| 8658 |
+
},
|
| 8659 |
+
{
|
| 8660 |
+
"epoch": 1.02,
|
| 8661 |
+
"learning_rate": 0.0002,
|
| 8662 |
+
"loss": 0.4978,
|
| 8663 |
+
"step": 9100
|
| 8664 |
+
},
|
| 8665 |
+
{
|
| 8666 |
+
"epoch": 1.02,
|
| 8667 |
+
"learning_rate": 0.0002,
|
| 8668 |
+
"loss": 0.6575,
|
| 8669 |
+
"step": 9110
|
| 8670 |
+
},
|
| 8671 |
+
{
|
| 8672 |
+
"epoch": 1.02,
|
| 8673 |
+
"learning_rate": 0.0002,
|
| 8674 |
+
"loss": 0.5499,
|
| 8675 |
+
"step": 9120
|
| 8676 |
+
},
|
| 8677 |
+
{
|
| 8678 |
+
"epoch": 1.02,
|
| 8679 |
+
"learning_rate": 0.0002,
|
| 8680 |
+
"loss": 0.6321,
|
| 8681 |
+
"step": 9130
|
| 8682 |
+
},
|
| 8683 |
+
{
|
| 8684 |
+
"epoch": 1.02,
|
| 8685 |
+
"learning_rate": 0.0002,
|
| 8686 |
+
"loss": 0.543,
|
| 8687 |
+
"step": 9140
|
| 8688 |
+
},
|
| 8689 |
+
{
|
| 8690 |
+
"epoch": 1.02,
|
| 8691 |
+
"learning_rate": 0.0002,
|
| 8692 |
+
"loss": 0.6712,
|
| 8693 |
+
"step": 9150
|
| 8694 |
+
},
|
| 8695 |
+
{
|
| 8696 |
+
"epoch": 1.02,
|
| 8697 |
+
"learning_rate": 0.0002,
|
| 8698 |
+
"loss": 0.6459,
|
| 8699 |
+
"step": 9160
|
| 8700 |
+
},
|
| 8701 |
+
{
|
| 8702 |
+
"epoch": 1.03,
|
| 8703 |
+
"learning_rate": 0.0002,
|
| 8704 |
+
"loss": 0.5362,
|
| 8705 |
+
"step": 9170
|
| 8706 |
+
},
|
| 8707 |
+
{
|
| 8708 |
+
"epoch": 1.03,
|
| 8709 |
+
"learning_rate": 0.0002,
|
| 8710 |
+
"loss": 0.5895,
|
| 8711 |
+
"step": 9180
|
| 8712 |
+
},
|
| 8713 |
+
{
|
| 8714 |
+
"epoch": 1.03,
|
| 8715 |
+
"learning_rate": 0.0002,
|
| 8716 |
+
"loss": 0.6564,
|
| 8717 |
+
"step": 9190
|
| 8718 |
+
},
|
| 8719 |
+
{
|
| 8720 |
+
"epoch": 1.03,
|
| 8721 |
+
"learning_rate": 0.0002,
|
| 8722 |
+
"loss": 0.6449,
|
| 8723 |
+
"step": 9200
|
| 8724 |
+
},
|
| 8725 |
+
{
|
| 8726 |
+
"epoch": 1.03,
|
| 8727 |
+
"eval_loss": 0.7265574932098389,
|
| 8728 |
+
"eval_runtime": 133.854,
|
| 8729 |
+
"eval_samples_per_second": 7.471,
|
| 8730 |
+
"eval_steps_per_second": 3.735,
|
| 8731 |
+
"step": 9200
|
| 8732 |
+
},
|
| 8733 |
+  { "epoch": 1.03,
+    "mmlu_eval_accuracy": 0.4888921124078144,
+    "mmlu_eval_accuracy_abstract_algebra": 0.45454545454545453,
+    "mmlu_eval_accuracy_anatomy": 0.5,
+    "mmlu_eval_accuracy_astronomy": 0.4375,
+    "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+    "mmlu_eval_accuracy_clinical_knowledge": 0.6206896551724138,
+    "mmlu_eval_accuracy_college_biology": 0.5,
+    "mmlu_eval_accuracy_college_chemistry": 0.125,
+    "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
+    "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
+    "mmlu_eval_accuracy_college_medicine": 0.6363636363636364,
+    "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+    "mmlu_eval_accuracy_computer_security": 0.5454545454545454,
+    "mmlu_eval_accuracy_conceptual_physics": 0.46153846153846156,
+    "mmlu_eval_accuracy_econometrics": 0.3333333333333333,
+    "mmlu_eval_accuracy_electrical_engineering": 0.25,
+    "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
+    "mmlu_eval_accuracy_formal_logic": 0.35714285714285715,
+    "mmlu_eval_accuracy_global_facts": 0.5,
+    "mmlu_eval_accuracy_high_school_biology": 0.4375,
+    "mmlu_eval_accuracy_high_school_chemistry": 0.22727272727272727,
+    "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+    "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
+    "mmlu_eval_accuracy_high_school_geography": 0.7272727272727273,
+    "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
+    "mmlu_eval_accuracy_high_school_macroeconomics": 0.4418604651162791,
+    "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
+    "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+    "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
+    "mmlu_eval_accuracy_high_school_psychology": 0.85,
+    "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
+    "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+    "mmlu_eval_accuracy_high_school_world_history": 0.6538461538461539,
+    "mmlu_eval_accuracy_human_aging": 0.5652173913043478,
+    "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+    "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+    "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+    "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+    "mmlu_eval_accuracy_machine_learning": 0.0,
+    "mmlu_eval_accuracy_management": 0.7272727272727273,
+    "mmlu_eval_accuracy_marketing": 0.8,
+    "mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
+    "mmlu_eval_accuracy_miscellaneous": 0.6976744186046512,
+    "mmlu_eval_accuracy_moral_disputes": 0.5789473684210527,
+    "mmlu_eval_accuracy_moral_scenarios": 0.25,
+    "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
+    "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
+    "mmlu_eval_accuracy_prehistory": 0.5714285714285714,
+    "mmlu_eval_accuracy_professional_accounting": 0.22580645161290322,
+    "mmlu_eval_accuracy_professional_law": 0.3941176470588235,
+    "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
+    "mmlu_eval_accuracy_professional_psychology": 0.42028985507246375,
+    "mmlu_eval_accuracy_public_relations": 0.5,
+    "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
+    "mmlu_eval_accuracy_sociology": 0.8181818181818182,
+    "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+    "mmlu_eval_accuracy_virology": 0.3888888888888889,
+    "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
+    "mmlu_loss": 1.263020734718512,
+    "step": 9200 },
+  { "epoch": 1.03, "learning_rate": 0.0002, "loss": 0.5477, "step": 9210 },
+  { "epoch": 1.03, "learning_rate": 0.0002, "loss": 0.6071, "step": 9220 },
+  { "epoch": 1.03, "learning_rate": 0.0002, "loss": 0.5685, "step": 9230 },
+  { "epoch": 1.03, "learning_rate": 0.0002, "loss": 0.6803, "step": 9240 },
+  { "epoch": 1.03, "learning_rate": 0.0002, "loss": 0.6741, "step": 9250 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.6441, "step": 9260 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.5959, "step": 9270 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.6637, "step": 9280 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.6131, "step": 9290 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.6892, "step": 9300 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.4894, "step": 9310 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.5649, "step": 9320 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.5696, "step": 9330 },
+  { "epoch": 1.04, "learning_rate": 0.0002, "loss": 0.5128, "step": 9340 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.5584, "step": 9350 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.5831, "step": 9360 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.6261, "step": 9370 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.6188, "step": 9380 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.6305, "step": 9390 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.5305, "step": 9400 },
+  { "epoch": 1.05, "eval_loss": 0.7279682755470276, "eval_runtime": 133.9561, "eval_samples_per_second": 7.465, "eval_steps_per_second": 3.733, "step": 9400 },
+  { "epoch": 1.05,
+    "mmlu_eval_accuracy": 0.5057494715061752,
+    "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
+    "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+    "mmlu_eval_accuracy_astronomy": 0.5625,
+    "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+    "mmlu_eval_accuracy_clinical_knowledge": 0.5862068965517241,
+    "mmlu_eval_accuracy_college_biology": 0.4375,
+    "mmlu_eval_accuracy_college_chemistry": 0.125,
+    "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+    "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
+    "mmlu_eval_accuracy_college_medicine": 0.5454545454545454,
+    "mmlu_eval_accuracy_college_physics": 0.2727272727272727,
+    "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
+    "mmlu_eval_accuracy_conceptual_physics": 0.5384615384615384,
+    "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+    "mmlu_eval_accuracy_electrical_engineering": 0.25,
+    "mmlu_eval_accuracy_elementary_mathematics": 0.43902439024390244,
+    "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
+    "mmlu_eval_accuracy_global_facts": 0.5,
+    "mmlu_eval_accuracy_high_school_biology": 0.5,
+    "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
+    "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+    "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
+    "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
+    "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
+    "mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
+    "mmlu_eval_accuracy_high_school_mathematics": 0.3448275862068966,
+    "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+    "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
+    "mmlu_eval_accuracy_high_school_psychology": 0.8833333333333333,
+    "mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
+    "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+    "mmlu_eval_accuracy_high_school_world_history": 0.6923076923076923,
+    "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
+    "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+    "mmlu_eval_accuracy_international_law": 0.8461538461538461,
+    "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
+    "mmlu_eval_accuracy_logical_fallacies": 0.6666666666666666,
+    "mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
+    "mmlu_eval_accuracy_management": 0.7272727272727273,
+    "mmlu_eval_accuracy_marketing": 0.84,
+    "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+    "mmlu_eval_accuracy_miscellaneous": 0.686046511627907,
+    "mmlu_eval_accuracy_moral_disputes": 0.5,
+    "mmlu_eval_accuracy_moral_scenarios": 0.2,
+    "mmlu_eval_accuracy_nutrition": 0.696969696969697,
+    "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
+    "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
+    "mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
+    "mmlu_eval_accuracy_professional_law": 0.3941176470588235,
+    "mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
+    "mmlu_eval_accuracy_professional_psychology": 0.5362318840579711,
+    "mmlu_eval_accuracy_public_relations": 0.5,
+    "mmlu_eval_accuracy_security_studies": 0.5555555555555556,
+    "mmlu_eval_accuracy_sociology": 0.8181818181818182,
+    "mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
+    "mmlu_eval_accuracy_virology": 0.5,
+    "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
+    "mmlu_loss": 1.0169057106069113,
+    "step": 9400 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.6793, "step": 9410 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.5748, "step": 9420 },
+  { "epoch": 1.05, "learning_rate": 0.0002, "loss": 0.5601, "step": 9430 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.6079, "step": 9440 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.6073, "step": 9450 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.5899, "step": 9460 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.5806, "step": 9470 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.6301, "step": 9480 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.5847, "step": 9490 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.545, "step": 9500 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.6091, "step": 9510 },
+  { "epoch": 1.06, "learning_rate": 0.0002, "loss": 0.5732, "step": 9520 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.693, "step": 9530 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.5959, "step": 9540 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.5312, "step": 9550 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.7201, "step": 9560 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.6885, "step": 9570 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.688, "step": 9580 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.5078, "step": 9590 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.629, "step": 9600 },
+  { "epoch": 1.07, "eval_loss": 0.7299875617027283, "eval_runtime": 133.9022, "eval_samples_per_second": 7.468, "eval_steps_per_second": 3.734, "step": 9600 },
+  { "epoch": 1.07,
+    "mmlu_eval_accuracy": 0.5098416308295152,
+    "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+    "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
+    "mmlu_eval_accuracy_astronomy": 0.4375,
+    "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
+    "mmlu_eval_accuracy_clinical_knowledge": 0.6551724137931034,
+    "mmlu_eval_accuracy_college_biology": 0.5,
+    "mmlu_eval_accuracy_college_chemistry": 0.125,
+    "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
+    "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
+    "mmlu_eval_accuracy_college_medicine": 0.5454545454545454,
+    "mmlu_eval_accuracy_college_physics": 0.2727272727272727,
+    "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+    "mmlu_eval_accuracy_conceptual_physics": 0.5,
+    "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
+    "mmlu_eval_accuracy_electrical_engineering": 0.3125,
+    "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
+    "mmlu_eval_accuracy_formal_logic": 0.5,
+    "mmlu_eval_accuracy_global_facts": 0.6,
+    "mmlu_eval_accuracy_high_school_biology": 0.46875,
+    "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
+    "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+    "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
+    "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
+    "mmlu_eval_accuracy_high_school_government_and_politics": 0.5714285714285714,
+    "mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
+    "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
+    "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
+    "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
+    "mmlu_eval_accuracy_high_school_psychology": 0.8666666666666667,
+    "mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
+    "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+    "mmlu_eval_accuracy_high_school_world_history": 0.7692307692307693,
+    "mmlu_eval_accuracy_human_aging": 0.6086956521739131,
+    "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+    "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+    "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+    "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+    "mmlu_eval_accuracy_machine_learning": 0.09090909090909091,
+    "mmlu_eval_accuracy_management": 0.6363636363636364,
+    "mmlu_eval_accuracy_marketing": 0.8,
+    "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+    "mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
+    "mmlu_eval_accuracy_moral_disputes": 0.5,
+    "mmlu_eval_accuracy_moral_scenarios": 0.22,
+    "mmlu_eval_accuracy_nutrition": 0.7272727272727273,
+    "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
+    "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
+    "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
+    "mmlu_eval_accuracy_professional_law": 0.4117647058823529,
+    "mmlu_eval_accuracy_professional_medicine": 0.5806451612903226,
+    "mmlu_eval_accuracy_professional_psychology": 0.4782608695652174,
+    "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
+    "mmlu_eval_accuracy_security_studies": 0.5925925925925926,
+    "mmlu_eval_accuracy_sociology": 0.8181818181818182,
+    "mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
+    "mmlu_eval_accuracy_virology": 0.5,
+    "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
+    "mmlu_loss": 0.9779613521095978,
+    "step": 9600 },
+  { "epoch": 1.07, "learning_rate": 0.0002, "loss": 0.5819, "step": 9610 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.5785, "step": 9620 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.5661, "step": 9630 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.5057, "step": 9640 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.6379, "step": 9650 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.6115, "step": 9660 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.6409, "step": 9670 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.4764, "step": 9680 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.6586, "step": 9690 },
+  { "epoch": 1.08, "learning_rate": 0.0002, "loss": 0.6133, "step": 9700 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.6197, "step": 9710 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.5676, "step": 9720 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.655, "step": 9730 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.639, "step": 9740 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.5572, "step": 9750 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.5549, "step": 9760 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.5799, "step": 9770 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.5626, "step": 9780 },
+  { "epoch": 1.09, "learning_rate": 0.0002, "loss": 0.5728, "step": 9790 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.664, "step": 9800 },
+  { "epoch": 1.1, "eval_loss": 0.7271477580070496, "eval_runtime": 133.8918, "eval_samples_per_second": 7.469, "eval_steps_per_second": 3.734, "step": 9800 },
+  { "epoch": 1.1,
+    "mmlu_eval_accuracy": 0.5019912844612583,
+    "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
+    "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
+    "mmlu_eval_accuracy_astronomy": 0.5625,
+    "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+    "mmlu_eval_accuracy_clinical_knowledge": 0.5862068965517241,
+    "mmlu_eval_accuracy_college_biology": 0.5,
+    "mmlu_eval_accuracy_college_chemistry": 0.0,
+    "mmlu_eval_accuracy_college_computer_science": 0.18181818181818182,
+    "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
+    "mmlu_eval_accuracy_college_medicine": 0.5454545454545454,
+    "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+    "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
+    "mmlu_eval_accuracy_conceptual_physics": 0.5769230769230769,
+    "mmlu_eval_accuracy_econometrics": 0.25,
+    "mmlu_eval_accuracy_electrical_engineering": 0.1875,
+    "mmlu_eval_accuracy_elementary_mathematics": 0.3902439024390244,
+    "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
+    "mmlu_eval_accuracy_global_facts": 0.6,
+    "mmlu_eval_accuracy_high_school_biology": 0.4375,
+    "mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
+    "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
+    "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
+    "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
+    "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+    "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
+    "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
+    "mmlu_eval_accuracy_high_school_microeconomics": 0.5384615384615384,
+    "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
+    "mmlu_eval_accuracy_high_school_psychology": 0.7833333333333333,
+    "mmlu_eval_accuracy_high_school_statistics": 0.4782608695652174,
+    "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
+    "mmlu_eval_accuracy_high_school_world_history": 0.7307692307692307,
+    "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
+    "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+    "mmlu_eval_accuracy_international_law": 0.9230769230769231,
+    "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+    "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+    "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
+    "mmlu_eval_accuracy_management": 0.6363636363636364,
+    "mmlu_eval_accuracy_marketing": 0.84,
+    "mmlu_eval_accuracy_medical_genetics": 0.9090909090909091,
+    "mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
+    "mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
+    "mmlu_eval_accuracy_moral_scenarios": 0.24,
+    "mmlu_eval_accuracy_nutrition": 0.6363636363636364,
+    "mmlu_eval_accuracy_philosophy": 0.5,
+    "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
+    "mmlu_eval_accuracy_professional_accounting": 0.3870967741935484,
+    "mmlu_eval_accuracy_professional_law": 0.3764705882352941,
+    "mmlu_eval_accuracy_professional_medicine": 0.5161290322580645,
+    "mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
+    "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
+    "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
+    "mmlu_eval_accuracy_sociology": 0.7727272727272727,
+    "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
+    "mmlu_eval_accuracy_virology": 0.5555555555555556,
+    "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
+    "mmlu_loss": 0.9786206829524539,
+    "step": 9800 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.6406, "step": 9810 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.586, "step": 9820 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.6407, "step": 9830 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.6652, "step": 9840 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.6415, "step": 9850 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.5807, "step": 9860 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.6375, "step": 9870 },
+  { "epoch": 1.1, "learning_rate": 0.0002, "loss": 0.6571, "step": 9880 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.6483, "step": 9890 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.6224, "step": 9900 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.5958, "step": 9910 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.6922, "step": 9920 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.5911, "step": 9930 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.5771, "step": 9940 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.6384, "step": 9950 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.6199, "step": 9960 },
+  { "epoch": 1.11, "learning_rate": 0.0002, "loss": 0.5564, "step": 9970 },
+  { "epoch": 1.12, "learning_rate": 0.0002, "loss": 0.6539, "step": 9980 },
+  { "epoch": 1.12, "learning_rate": 0.0002, "loss": 0.7082, "step": 9990 },
+  { "epoch": 1.12, "learning_rate": 0.0002, "loss": 0.6518, "step": 10000 },
+  { "epoch": 1.12, "eval_loss": 0.7289842367172241, "eval_runtime": 133.9649, "eval_samples_per_second": 7.465, "eval_steps_per_second": 3.732, "step": 10000 },
+  { "epoch": 1.12,
+    "mmlu_eval_accuracy": 0.49959324228226615,
+    "mmlu_eval_accuracy_abstract_algebra": 0.45454545454545453,
+    "mmlu_eval_accuracy_anatomy": 0.5,
+    "mmlu_eval_accuracy_astronomy": 0.4375,
+    "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
+    "mmlu_eval_accuracy_clinical_knowledge": 0.5517241379310345,
+    "mmlu_eval_accuracy_college_biology": 0.4375,
+    "mmlu_eval_accuracy_college_chemistry": 0.125,
+    "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
+    "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
+    "mmlu_eval_accuracy_college_medicine": 0.5454545454545454,
+    "mmlu_eval_accuracy_college_physics": 0.36363636363636365,
+    "mmlu_eval_accuracy_computer_security": 0.5454545454545454,
+    "mmlu_eval_accuracy_conceptual_physics": 0.5769230769230769,
+    "mmlu_eval_accuracy_econometrics": 0.25,
+    "mmlu_eval_accuracy_electrical_engineering": 0.1875,
+    "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
+    "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
+    "mmlu_eval_accuracy_global_facts": 0.5,
+    "mmlu_eval_accuracy_high_school_biology": 0.5,
+    "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
+    "mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
+    "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
+    "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
+    "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
+    "mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
+    "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
+    "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
+    "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
+    "mmlu_eval_accuracy_high_school_psychology": 0.8,
+    "mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
+    "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
+    "mmlu_eval_accuracy_high_school_world_history": 0.6153846153846154,
+    "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
+    "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
+    "mmlu_eval_accuracy_international_law": 1.0,
+    "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
+    "mmlu_eval_accuracy_logical_fallacies": 0.6111111111111112,
+    "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
+    "mmlu_eval_accuracy_management": 0.5454545454545454,
+    "mmlu_eval_accuracy_marketing": 0.84,
+    "mmlu_eval_accuracy_medical_genetics": 0.8181818181818182,
+    "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
+    "mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
+    "mmlu_eval_accuracy_moral_scenarios": 0.22,
+    "mmlu_eval_accuracy_nutrition": 0.6363636363636364,
+    "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
+    "mmlu_eval_accuracy_prehistory": 0.5714285714285714,
+    "mmlu_eval_accuracy_professional_accounting": 0.3548387096774194,
+    "mmlu_eval_accuracy_professional_law": 0.4176470588235294,
+    "mmlu_eval_accuracy_professional_medicine": 0.5161290322580645,
+    "mmlu_eval_accuracy_professional_psychology": 0.5072463768115942,
+    "mmlu_eval_accuracy_public_relations": 0.5,
+    "mmlu_eval_accuracy_security_studies": 0.4444444444444444,
+    "mmlu_eval_accuracy_sociology": 0.7272727272727273,
+    "mmlu_eval_accuracy_us_foreign_policy": 0.8181818181818182,
+    "mmlu_eval_accuracy_virology": 0.5,
+    "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
+    "mmlu_loss": 0.9495227412945608,
+    "step": 10000
   }
 ],
 "max_steps": 10000,
 "num_train_epochs": 2,
+"total_flos": 9.924566380907397e+17,
 "trial_name": null,
 "trial_params": null
 }
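
The `trainer_state.json` above follows the standard Hugging Face `Trainer` layout: `log_history` interleaves training records (`loss` every 10 steps), held-out evaluations (`eval_loss` every 200 steps), and MMLU snapshots. A minimal sketch of how such a file can be summarized offline; the path is illustrative, and the guess that `mmlu_eval_accuracy` is the unweighted mean of the per-subject scores is checked rather than asserted:

```python
import json
from statistics import mean

# Illustrative local path; point this at the committed trainer_state.json.
with open("checkpoint-10000/trainer_state.json") as f:
    state = json.load(f)

# Split log_history by record type, keyed on which loss field is present.
train = [r for r in state["log_history"] if "loss" in r]
evals = [r for r in state["log_history"] if "eval_loss" in r]
mmlu = [r for r in state["log_history"] if "mmlu_eval_accuracy" in r]

best = min(evals, key=lambda r: r["eval_loss"])
print(f"{len(train)} train logs, {len(evals)} evals, {len(mmlu)} MMLU snapshots")
print(f"lowest eval_loss in this file: {best['eval_loss']:.4f} at step {best['step']}")

# Assumption check: is the headline MMLU number the plain mean of the
# per-subject accuracies? (The file itself does not document this.)
last = mmlu[-1]
subjects = [v for k, v in last.items() if k.startswith("mmlu_eval_accuracy_")]
print(last["mmlu_eval_accuracy"], "vs mean over subjects:", mean(subjects))
```

Run against the records above, this would also surface that the five evaluations between steps 9200 and 10000 all sit in the 0.7266–0.7300 band, i.e. held-out loss is no longer improving even though training loss keeps drifting in the 0.48–0.72 range.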
{checkpoint-8000 → checkpoint-10000}/training_args.bin
RENAMED
File without changes

checkpoint-7800/adapter_model/adapter_model/README.md
CHANGED
@@ -103,6 +103,17 @@ The following `bitsandbytes` quantization config was used during training:
 - bnb_4bit_use_double_quant: True
 - bnb_4bit_compute_dtype: bfloat16
 
+The following `bitsandbytes` quantization config was used during training:
+- load_in_8bit: False
+- load_in_4bit: True
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: nf4
+- bnb_4bit_use_double_quant: True
+- bnb_4bit_compute_dtype: bfloat16
+
 The following `bitsandbytes` quantization config was used during training:
 - load_in_8bit: False
 - load_in_4bit: True
@@ -124,5 +135,6 @@ The following `bitsandbytes` quantization config was used during training:
 - PEFT 0.4.0
 - PEFT 0.4.0
 - PEFT 0.4.0
+- PEFT 0.4.0
 
 - PEFT 0.4.0
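
The repeated `bitsandbytes` block in this README describes a standard 4-bit NF4 QLoRA setup. A sketch of the equivalent `transformers`/`peft` calls for loading the adapter in this checkpoint; the base model id is a placeholder, since this commit does not name it:

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig
from peft import PeftModel

# Mirrors the README fields: 4-bit NF4, double quantization, bfloat16 compute.
# The llm_int8_* fields listed above match BitsAndBytesConfig defaults.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

base = AutoModelForCausalLM.from_pretrained(
    "BASE_MODEL_ID",  # hypothetical placeholder: the base model is not stated here
    quantization_config=bnb_config,
    device_map="auto",
)
model = PeftModel.from_pretrained(base, "checkpoint-7800/adapter_model/adapter_model")
```

`PeftModel.from_pretrained` is the PEFT 0.4.0 way to attach the committed LoRA weights on top of the quantized base; the adapter path matches the directory changed in this commit.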
checkpoint-7800/adapter_model/adapter_model/adapter_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:44ef52bb6df5a05adba1fbcead4ecc0c5f512b65e26dbb0631bc24f2d2906a2f
 size 319977229
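
The three lines above are a Git LFS pointer, not the weights themselves: `oid sha256` is the SHA-256 digest of the full 319977229-byte file. A small sketch for verifying a downloaded copy against the pointer (path illustrative):

```python
import hashlib

def file_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream in 1 MiB chunks so a ~320 MB adapter never has to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            digest.update(block)
    return digest.hexdigest()

expected = "44ef52bb6df5a05adba1fbcead4ecc0c5f512b65e26dbb0631bc24f2d2906a2f"
path = "checkpoint-7800/adapter_model/adapter_model/adapter_model.bin"
assert file_sha256(path) == expected, "file is corrupt or still an LFS pointer stub"
print("adapter_model.bin matches the committed LFS oid")
```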