dongguanting committed
Commit 7caaa15 · verified · 1 Parent(s): fe58ca0

Upload folder using huggingface_hub
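
The commit message indicates the files were pushed with the huggingface_hub client. A minimal sketch of how such an upload is typically done is below; the local folder path and repository id are placeholders, not taken from this commit.

# Sketch of an upload_folder call like the one behind this commit.
# The folder path and repo id are assumptions, not from the commit itself.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login`
api.upload_folder(
    folder_path="./output/checkpoint-final",   # assumed local training output
    repo_id="dongguanting/example-model",      # placeholder repository id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
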
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
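
added_tokens.json maps the extra control tokens to ids 151643-151664 at the top of the vocabulary. A small sketch of checking these ids against the tokenizer is below; the repo id is a placeholder.

# Sketch: verify the added token ids with the tokenizer (repo id is a placeholder).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("dongguanting/example-model")
for t in ("<|im_start|>", "<|im_end|>", "<tool_call>", "</tool_call>"):
    print(t, tok.convert_tokens_to_ids(t))   # expected: 151644, 151645, 151657, 151658
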
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 2.9973474801061006,
+ "total_flos": 1.0915292825780224e+17,
+ "train_loss": 0.34044326600333263,
+ "train_runtime": 16671.2674,
+ "train_samples_per_second": 2.713,
+ "train_steps_per_second": 0.17
+ }
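
The training summary is internally consistent: throughput times runtime gives the total number of processed samples, and dividing by the epoch count gives the approximate per-epoch dataset size. A quick check (approximate, since the reported rates are rounded):

# Rough consistency check of the stats in all_results.json.
runtime_s  = 16671.2674
samples_ps = 2.713
steps_ps   = 0.17
epochs     = 2.9973474801061006

samples_total = samples_ps * runtime_s   # ~45,229 samples processed in total
steps_total   = steps_ps * runtime_s     # ~2,834 steps (trainer_log reports 2,826)
print(round(samples_total), round(steps_total), round(samples_total / epochs))  # ~15,090 samples per epoch
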
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": 131072,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.51.3",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
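
config.json describes a Qwen2-7B-style decoder: 28 layers, hidden size 3584, grouped-query attention with 28 query and 4 key-value heads, a 152,064-token vocabulary, and bfloat16 weights. A minimal loading sketch follows; the repo id is a placeholder and device placement is an assumption.

# Sketch: load the checkpoint described by config.json (repo id is a placeholder).
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo = "dongguanting/example-model"
cfg = AutoConfig.from_pretrained(repo)
print(cfg.model_type, cfg.num_hidden_layers, cfg.hidden_size, cfg.num_key_value_heads)

model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16" above
    device_map="auto",            # assumption: accelerate-style automatic placement
)
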
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.51.3"
+ }
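
generation_config.json stores the default sampling settings (temperature 0.7, top_p 0.8, top_k 20, repetition penalty 1.05, two EOS ids). generate() picks these up automatically; the explicit call below only makes the defaults visible. Repo id and prompt are placeholders.

# Sketch: generation with the defaults from generation_config.json.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "dongguanting/example-model"   # placeholder
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

inputs = tok("Hello", return_tensors="pt")
out = model.generate(
    **inputs,
    do_sample=True,
    temperature=0.7,
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.05,
    max_new_tokens=64,
)
print(tok.decode(out[0], skip_special_tokens=True))
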
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed47a2a1bdd745859e240ca86f36ada514fff5e2902de178e8e2a8a437ef9e85
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:952b5689d0a622c734d55a3fbee12ac66b73bbc04ff7cf93d5e0772940685d96
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b28cb6a34499ca0da1666388fc0f112b1f3fa5709a95149d8e8d07330e584142
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aecf5dac29337a29a07085a83ebffc4ca81f8f57357400b35ae84672dfe60e83
+ size 1089994880
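
The four entries above are Git LFS pointers; the weights themselves live in LFS storage. The shard sizes sum to about 15.23 GB, matching the total_size in the index below up to the small per-file safetensors headers, and at 2 bytes per bfloat16 parameter this corresponds to roughly 7.6B parameters. A quick check:

# Quick check of the shard sizes listed in the LFS pointers above.
shard_bytes = [4877660776, 4932751008, 4330865200, 1089994880]
total = sum(shard_bytes)
print(total)            # 15231271864 bytes; the index reports total_size 15231233024
print(total / 2 / 1e9)  # ~7.6e9 parameters at 2 bytes each (bfloat16)
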
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 15231233024
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00004-of-00004.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
13
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
14
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
15
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
17
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
18
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
19
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
20
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
21
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
22
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
23
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
24
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
25
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
26
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
27
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
28
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
29
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
30
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
31
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
32
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
33
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
34
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
35
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
36
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
37
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
38
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
39
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
40
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
41
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
42
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
43
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
44
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
45
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
46
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
47
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
48
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
49
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
50
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
51
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
52
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
53
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
54
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
55
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
56
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
57
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
58
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
59
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
60
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
61
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
62
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
63
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
64
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
65
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
66
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
67
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
68
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
69
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
70
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
71
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
72
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
73
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
74
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
75
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
76
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
77
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
78
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
79
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
80
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
81
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
82
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
83
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
84
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
85
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
86
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
87
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
89
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
90
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
91
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
92
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
93
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
94
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
95
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
96
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
97
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
98
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
99
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
101
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
102
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
103
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
104
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
105
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
106
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
107
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
108
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
109
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
110
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
111
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
112
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
113
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
114
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
115
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
116
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
117
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
118
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
119
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
120
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
121
+ "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
122
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
123
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
124
+ "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
125
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
126
+ "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
127
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
128
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
129
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
130
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
131
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
132
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
133
+ "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
134
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
135
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
136
+ "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
137
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
138
+ "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
139
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
140
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
141
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
142
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
143
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
144
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
145
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
146
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
147
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
148
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
149
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
150
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
151
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
152
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
153
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
154
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
155
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
156
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
157
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
158
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
159
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
160
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
161
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
162
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
163
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
164
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
165
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
166
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
167
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
168
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
169
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
170
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
171
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
172
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
173
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
174
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
175
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
176
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
177
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
178
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
179
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
180
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
181
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
182
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
183
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
184
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
185
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
186
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
187
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
188
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
189
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
190
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
192
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
193
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
194
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
195
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
196
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
197
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
198
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
199
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
200
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
201
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
202
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
203
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
204
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
205
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
206
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
207
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
209
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
210
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
211
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
212
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
213
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
214
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
215
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
217
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
218
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
219
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
220
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
221
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
222
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
223
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
224
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
225
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
226
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
227
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
228
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
229
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
230
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
231
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
233
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
234
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
235
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
236
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
237
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
238
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
239
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
240
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
241
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
242
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
243
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
244
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
245
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
246
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
247
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
248
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
249
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
250
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
251
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
252
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
253
+ "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
254
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
255
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
256
+ "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
257
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
258
+ "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
259
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
260
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
261
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
262
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
263
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
264
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
265
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
266
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
267
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
269
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
270
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
271
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
272
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
273
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
274
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
275
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
276
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
277
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
278
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
279
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
280
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
281
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
282
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
283
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
284
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
285
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
286
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
287
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
288
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
289
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
290
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
291
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
292
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
293
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
294
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
295
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
296
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
297
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
298
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
299
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
300
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
301
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
302
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
303
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
304
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
305
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
306
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
307
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
308
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
309
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
310
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
311
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
312
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
313
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
314
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
315
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
316
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
317
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
318
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
319
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
320
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
321
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
322
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
323
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
324
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
325
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
326
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
327
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
328
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
329
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
330
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
331
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
332
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
333
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
334
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
335
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
336
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
337
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
338
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
339
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
340
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
341
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
342
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
343
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
344
+ "model.norm.weight": "model-00003-of-00004.safetensors"
345
+ }
346
+ }
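
The weight_map above tells a loader which shard holds each tensor (some layers, e.g. layer 18, straddle two shards). A minimal sketch of resolving one tensor through the index with the safetensors library follows; the local directory is an assumption, i.e. the shards are already downloaded.

# Sketch: resolve a tensor via model.safetensors.index.json
# (assumes the shards were downloaded into ckpt_dir).
import json, os
from safetensors import safe_open

ckpt_dir = "./example-model"   # placeholder local path
with open(os.path.join(ckpt_dir, "model.safetensors.index.json")) as f:
    index = json.load(f)

name = "model.layers.18.mlp.down_proj.weight"
shard = index["weight_map"][name]   # -> "model-00003-of-00004.safetensors"
with safe_open(os.path.join(ckpt_dir, shard), framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))
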
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,209 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ }
181
+ },
182
+ "additional_special_tokens": [
183
+ "<|im_start|>",
184
+ "<|im_end|>",
185
+ "<|object_ref_start|>",
186
+ "<|object_ref_end|>",
187
+ "<|box_start|>",
188
+ "<|box_end|>",
189
+ "<|quad_start|>",
190
+ "<|quad_end|>",
191
+ "<|vision_start|>",
192
+ "<|vision_end|>",
193
+ "<|vision_pad|>",
194
+ "<|image_pad|>",
195
+ "<|video_pad|>"
196
+ ],
197
+ "bos_token": null,
198
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
199
+ "clean_up_tokenization_spaces": false,
200
+ "eos_token": "<|im_end|>",
201
+ "errors": "replace",
202
+ "extra_special_tokens": {},
203
+ "model_max_length": 10000,
204
+ "pad_token": "<|endoftext|>",
205
+ "padding_side": "right",
206
+ "split_special_tokens": false,
207
+ "tokenizer_class": "Qwen2Tokenizer",
208
+ "unk_token": null
209
+ }
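
tokenizer_config.json carries the ChatML-style chat_template (including the tool-calling branches) plus padding and EOS settings; note that model_max_length is set to 10000 here, below the 32768-token context in config.json. A sketch of rendering a conversation with the template follows; repo id and messages are placeholders.

# Sketch: render a prompt with the chat_template from tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("dongguanting/example-model")   # placeholder repo id
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi!"},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)   # <|im_start|>system ... <|im_start|>assistant
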
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 2.9973474801061006,
+ "total_flos": 1.0915292825780224e+17,
+ "train_loss": 0.34044326600333263,
+ "train_runtime": 16671.2674,
+ "train_samples_per_second": 2.713,
+ "train_steps_per_second": 0.17
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,283 @@
1
+ {"current_steps": 10, "total_steps": 2826, "loss": 0.741, "lr": 1.5901060070671379e-07, "epoch": 0.010610079575596816, "percentage": 0.35, "elapsed_time": "0:01:24", "remaining_time": "6:38:39"}
2
+ {"current_steps": 20, "total_steps": 2826, "loss": 0.5551, "lr": 3.356890459363958e-07, "epoch": 0.021220159151193633, "percentage": 0.71, "elapsed_time": "0:02:35", "remaining_time": "6:04:09"}
3
+ {"current_steps": 30, "total_steps": 2826, "loss": 0.6185, "lr": 5.123674911660778e-07, "epoch": 0.03183023872679045, "percentage": 1.06, "elapsed_time": "0:03:35", "remaining_time": "5:34:34"}
4
+ {"current_steps": 40, "total_steps": 2826, "loss": 0.6358, "lr": 6.890459363957598e-07, "epoch": 0.042440318302387266, "percentage": 1.42, "elapsed_time": "0:04:37", "remaining_time": "5:21:39"}
5
+ {"current_steps": 50, "total_steps": 2826, "loss": 0.5922, "lr": 8.657243816254418e-07, "epoch": 0.05305039787798409, "percentage": 1.77, "elapsed_time": "0:05:42", "remaining_time": "5:16:32"}
6
+ {"current_steps": 60, "total_steps": 2826, "loss": 0.6282, "lr": 1.0424028268551239e-06, "epoch": 0.0636604774535809, "percentage": 2.12, "elapsed_time": "0:06:37", "remaining_time": "5:05:32"}
7
+ {"current_steps": 70, "total_steps": 2826, "loss": 0.5836, "lr": 1.2190812720848057e-06, "epoch": 0.07427055702917772, "percentage": 2.48, "elapsed_time": "0:07:33", "remaining_time": "4:57:51"}
8
+ {"current_steps": 80, "total_steps": 2826, "loss": 0.5836, "lr": 1.3957597173144876e-06, "epoch": 0.08488063660477453, "percentage": 2.83, "elapsed_time": "0:08:31", "remaining_time": "4:52:48"}
9
+ {"current_steps": 90, "total_steps": 2826, "loss": 0.4983, "lr": 1.5724381625441699e-06, "epoch": 0.09549071618037135, "percentage": 3.18, "elapsed_time": "0:09:31", "remaining_time": "4:49:33"}
10
+ {"current_steps": 100, "total_steps": 2826, "loss": 0.6057, "lr": 1.7491166077738517e-06, "epoch": 0.10610079575596817, "percentage": 3.54, "elapsed_time": "0:10:27", "remaining_time": "4:44:57"}
11
+ {"current_steps": 110, "total_steps": 2826, "loss": 0.5135, "lr": 1.925795053003534e-06, "epoch": 0.11671087533156499, "percentage": 3.89, "elapsed_time": "0:11:26", "remaining_time": "4:42:35"}
12
+ {"current_steps": 120, "total_steps": 2826, "loss": 0.4844, "lr": 2.1024734982332157e-06, "epoch": 0.1273209549071618, "percentage": 4.25, "elapsed_time": "0:12:26", "remaining_time": "4:40:33"}
13
+ {"current_steps": 130, "total_steps": 2826, "loss": 0.5386, "lr": 2.279151943462898e-06, "epoch": 0.13793103448275862, "percentage": 4.6, "elapsed_time": "0:13:25", "remaining_time": "4:38:25"}
14
+ {"current_steps": 140, "total_steps": 2826, "loss": 0.4774, "lr": 2.45583038869258e-06, "epoch": 0.14854111405835543, "percentage": 4.95, "elapsed_time": "0:14:23", "remaining_time": "4:36:04"}
15
+ {"current_steps": 150, "total_steps": 2826, "loss": 0.5035, "lr": 2.6325088339222617e-06, "epoch": 0.15915119363395225, "percentage": 5.31, "elapsed_time": "0:15:26", "remaining_time": "4:35:28"}
16
+ {"current_steps": 160, "total_steps": 2826, "loss": 0.4897, "lr": 2.8091872791519436e-06, "epoch": 0.16976127320954906, "percentage": 5.66, "elapsed_time": "0:16:22", "remaining_time": "4:32:54"}
17
+ {"current_steps": 170, "total_steps": 2826, "loss": 0.5196, "lr": 2.985865724381626e-06, "epoch": 0.18037135278514588, "percentage": 6.02, "elapsed_time": "0:17:19", "remaining_time": "4:30:33"}
18
+ {"current_steps": 180, "total_steps": 2826, "loss": 0.4791, "lr": 3.162544169611308e-06, "epoch": 0.1909814323607427, "percentage": 6.37, "elapsed_time": "0:18:18", "remaining_time": "4:29:01"}
19
+ {"current_steps": 190, "total_steps": 2826, "loss": 0.5024, "lr": 3.3392226148409896e-06, "epoch": 0.20159151193633953, "percentage": 6.72, "elapsed_time": "0:19:13", "remaining_time": "4:26:38"}
20
+ {"current_steps": 200, "total_steps": 2826, "loss": 0.5781, "lr": 3.5159010600706715e-06, "epoch": 0.21220159151193635, "percentage": 7.08, "elapsed_time": "0:20:10", "remaining_time": "4:25:00"}
21
+ {"current_steps": 210, "total_steps": 2826, "loss": 0.4186, "lr": 3.6925795053003538e-06, "epoch": 0.22281167108753316, "percentage": 7.43, "elapsed_time": "0:21:09", "remaining_time": "4:23:34"}
22
+ {"current_steps": 220, "total_steps": 2826, "loss": 0.4819, "lr": 3.869257950530036e-06, "epoch": 0.23342175066312998, "percentage": 7.78, "elapsed_time": "0:22:04", "remaining_time": "4:21:34"}
23
+ {"current_steps": 230, "total_steps": 2826, "loss": 0.547, "lr": 4.045936395759718e-06, "epoch": 0.2440318302387268, "percentage": 8.14, "elapsed_time": "0:23:02", "remaining_time": "4:20:09"}
24
+ {"current_steps": 240, "total_steps": 2826, "loss": 0.5603, "lr": 4.222614840989399e-06, "epoch": 0.2546419098143236, "percentage": 8.49, "elapsed_time": "0:24:02", "remaining_time": "4:19:07"}
25
+ {"current_steps": 250, "total_steps": 2826, "loss": 0.4483, "lr": 4.399293286219082e-06, "epoch": 0.26525198938992045, "percentage": 8.85, "elapsed_time": "0:25:00", "remaining_time": "4:17:40"}
26
+ {"current_steps": 260, "total_steps": 2826, "loss": 0.5178, "lr": 4.575971731448763e-06, "epoch": 0.27586206896551724, "percentage": 9.2, "elapsed_time": "0:25:55", "remaining_time": "4:15:48"}
27
+ {"current_steps": 270, "total_steps": 2826, "loss": 0.5264, "lr": 4.752650176678445e-06, "epoch": 0.2864721485411141, "percentage": 9.55, "elapsed_time": "0:26:51", "remaining_time": "4:14:15"}
28
+ {"current_steps": 280, "total_steps": 2826, "loss": 0.5124, "lr": 4.929328621908128e-06, "epoch": 0.29708222811671087, "percentage": 9.91, "elapsed_time": "0:27:50", "remaining_time": "4:13:07"}
29
+ {"current_steps": 290, "total_steps": 2826, "loss": 0.4977, "lr": 4.99993132201408e-06, "epoch": 0.3076923076923077, "percentage": 10.26, "elapsed_time": "0:28:47", "remaining_time": "4:11:48"}
30
+ {"current_steps": 300, "total_steps": 2826, "loss": 0.5005, "lr": 4.9995116368759e-06, "epoch": 0.3183023872679045, "percentage": 10.62, "elapsed_time": "0:29:45", "remaining_time": "4:10:29"}
31
+ {"current_steps": 310, "total_steps": 2826, "loss": 0.4857, "lr": 4.998710485009401e-06, "epoch": 0.32891246684350134, "percentage": 10.97, "elapsed_time": "0:30:47", "remaining_time": "4:09:58"}
32
+ {"current_steps": 320, "total_steps": 2826, "loss": 0.4637, "lr": 4.99752798868358e-06, "epoch": 0.3395225464190981, "percentage": 11.32, "elapsed_time": "0:31:44", "remaining_time": "4:08:36"}
33
+ {"current_steps": 330, "total_steps": 2826, "loss": 0.4775, "lr": 4.99596432836689e-06, "epoch": 0.35013262599469497, "percentage": 11.68, "elapsed_time": "0:32:43", "remaining_time": "4:07:30"}
34
+ {"current_steps": 340, "total_steps": 2826, "loss": 0.5779, "lr": 4.994019742699705e-06, "epoch": 0.36074270557029176, "percentage": 12.03, "elapsed_time": "0:33:39", "remaining_time": "4:06:08"}
35
+ {"current_steps": 350, "total_steps": 2826, "loss": 0.5057, "lr": 4.991694528457891e-06, "epoch": 0.3713527851458886, "percentage": 12.38, "elapsed_time": "0:34:37", "remaining_time": "4:05:00"}
36
+ {"current_steps": 360, "total_steps": 2826, "loss": 0.5313, "lr": 4.988989040507518e-06, "epoch": 0.3819628647214854, "percentage": 12.74, "elapsed_time": "0:35:37", "remaining_time": "4:03:59"}
37
+ {"current_steps": 370, "total_steps": 2826, "loss": 0.4441, "lr": 4.985903691750697e-06, "epoch": 0.3925729442970822, "percentage": 13.09, "elapsed_time": "0:36:35", "remaining_time": "4:02:52"}
38
+ {"current_steps": 380, "total_steps": 2826, "loss": 0.4778, "lr": 4.982438953062572e-06, "epoch": 0.40318302387267907, "percentage": 13.45, "elapsed_time": "0:37:33", "remaining_time": "4:01:46"}
39
+ {"current_steps": 390, "total_steps": 2826, "loss": 0.4848, "lr": 4.978595353219449e-06, "epoch": 0.41379310344827586, "percentage": 13.8, "elapsed_time": "0:38:35", "remaining_time": "4:01:01"}
40
+ {"current_steps": 400, "total_steps": 2826, "loss": 0.4891, "lr": 4.974373478818098e-06, "epoch": 0.4244031830238727, "percentage": 14.15, "elapsed_time": "0:39:33", "remaining_time": "3:59:57"}
41
+ {"current_steps": 410, "total_steps": 2826, "loss": 0.4954, "lr": 4.969773974186235e-06, "epoch": 0.4350132625994695, "percentage": 14.51, "elapsed_time": "0:40:32", "remaining_time": "3:58:56"}
42
+ {"current_steps": 420, "total_steps": 2826, "loss": 0.5353, "lr": 4.964797541284175e-06, "epoch": 0.44562334217506633, "percentage": 14.86, "elapsed_time": "0:41:28", "remaining_time": "3:57:35"}
43
+ {"current_steps": 430, "total_steps": 2826, "loss": 0.5726, "lr": 4.959444939597712e-06, "epoch": 0.4562334217506631, "percentage": 15.22, "elapsed_time": "0:42:25", "remaining_time": "3:56:23"}
44
+ {"current_steps": 440, "total_steps": 2826, "loss": 0.5642, "lr": 4.953716986022204e-06, "epoch": 0.46684350132625996, "percentage": 15.57, "elapsed_time": "0:43:24", "remaining_time": "3:55:22"}
45
+ {"current_steps": 450, "total_steps": 2826, "loss": 0.4429, "lr": 4.947614554737904e-06, "epoch": 0.47745358090185674, "percentage": 15.92, "elapsed_time": "0:44:23", "remaining_time": "3:54:23"}
46
+ {"current_steps": 460, "total_steps": 2826, "loss": 0.4683, "lr": 4.941138577076538e-06, "epoch": 0.4880636604774536, "percentage": 16.28, "elapsed_time": "0:45:22", "remaining_time": "3:53:21"}
47
+ {"current_steps": 470, "total_steps": 2826, "loss": 0.4385, "lr": 4.934290041379182e-06, "epoch": 0.4986737400530504, "percentage": 16.63, "elapsed_time": "0:46:24", "remaining_time": "3:52:37"}
48
+ {"current_steps": 480, "total_steps": 2826, "loss": 0.4935, "lr": 4.92706999284541e-06, "epoch": 0.5092838196286472, "percentage": 16.99, "elapsed_time": "0:47:22", "remaining_time": "3:51:32"}
49
+ {"current_steps": 490, "total_steps": 2826, "loss": 0.4548, "lr": 4.9194795333737925e-06, "epoch": 0.519893899204244, "percentage": 17.34, "elapsed_time": "0:48:21", "remaining_time": "3:50:32"}
50
+ {"current_steps": 500, "total_steps": 2826, "loss": 0.5486, "lr": 4.911519821393718e-06, "epoch": 0.5305039787798409, "percentage": 17.69, "elapsed_time": "0:49:21", "remaining_time": "3:49:36"}
51
+ {"current_steps": 510, "total_steps": 2826, "loss": 0.5121, "lr": 4.9031920716886035e-06, "epoch": 0.5411140583554377, "percentage": 18.05, "elapsed_time": "0:50:17", "remaining_time": "3:48:23"}
52
+ {"current_steps": 520, "total_steps": 2826, "loss": 0.4495, "lr": 4.894497555210499e-06, "epoch": 0.5517241379310345, "percentage": 18.4, "elapsed_time": "0:51:16", "remaining_time": "3:47:24"}
53
+ {"current_steps": 530, "total_steps": 2826, "loss": 0.5028, "lr": 4.8854375988861134e-06, "epoch": 0.5623342175066313, "percentage": 18.75, "elapsed_time": "0:52:13", "remaining_time": "3:46:13"}
54
+ {"current_steps": 540, "total_steps": 2826, "loss": 0.5193, "lr": 4.87601358541431e-06, "epoch": 0.5729442970822282, "percentage": 19.11, "elapsed_time": "0:53:09", "remaining_time": "3:45:01"}
55
+ {"current_steps": 550, "total_steps": 2826, "loss": 0.545, "lr": 4.8662269530550825e-06, "epoch": 0.583554376657825, "percentage": 19.46, "elapsed_time": "0:54:07", "remaining_time": "3:43:57"}
56
+ {"current_steps": 560, "total_steps": 2826, "loss": 0.5219, "lr": 4.856079195410046e-06, "epoch": 0.5941644562334217, "percentage": 19.82, "elapsed_time": "0:55:03", "remaining_time": "3:42:48"}
57
+ {"current_steps": 570, "total_steps": 2826, "loss": 0.4725, "lr": 4.845571861194501e-06, "epoch": 0.6047745358090185, "percentage": 20.17, "elapsed_time": "0:56:04", "remaining_time": "3:41:57"}
58
+ {"current_steps": 580, "total_steps": 2826, "loss": 0.4232, "lr": 4.834706554001065e-06, "epoch": 0.6153846153846154, "percentage": 20.52, "elapsed_time": "0:57:04", "remaining_time": "3:41:01"}
59
+ {"current_steps": 590, "total_steps": 2826, "loss": 0.4834, "lr": 4.823484932054937e-06, "epoch": 0.6259946949602122, "percentage": 20.88, "elapsed_time": "0:58:02", "remaining_time": "3:39:58"}
60
+ {"current_steps": 600, "total_steps": 2826, "loss": 0.5302, "lr": 4.811908707960832e-06, "epoch": 0.636604774535809, "percentage": 21.23, "elapsed_time": "0:58:58", "remaining_time": "3:38:46"}
61
+ {"current_steps": 610, "total_steps": 2826, "loss": 0.494, "lr": 4.799979648441602e-06, "epoch": 0.6472148541114059, "percentage": 21.59, "elapsed_time": "0:59:55", "remaining_time": "3:37:40"}
62
+ {"current_steps": 620, "total_steps": 2826, "loss": 0.487, "lr": 4.787699574068611e-06, "epoch": 0.6578249336870027, "percentage": 21.94, "elapsed_time": "1:00:52", "remaining_time": "3:36:36"}
63
+ {"current_steps": 630, "total_steps": 2826, "loss": 0.4911, "lr": 4.775070358983881e-06, "epoch": 0.6684350132625995, "percentage": 22.29, "elapsed_time": "1:01:52", "remaining_time": "3:35:42"}
64
+ {"current_steps": 640, "total_steps": 2826, "loss": 0.4744, "lr": 4.7620939306140696e-06, "epoch": 0.6790450928381963, "percentage": 22.65, "elapsed_time": "1:02:49", "remaining_time": "3:34:36"}
65
+ {"current_steps": 650, "total_steps": 2826, "loss": 0.4789, "lr": 4.748772269376312e-06, "epoch": 0.6896551724137931, "percentage": 23.0, "elapsed_time": "1:03:47", "remaining_time": "3:33:34"}
66
+ {"current_steps": 660, "total_steps": 2826, "loss": 0.488, "lr": 4.735107408375977e-06, "epoch": 0.7002652519893899, "percentage": 23.35, "elapsed_time": "1:04:46", "remaining_time": "3:32:34"}
67
+ {"current_steps": 670, "total_steps": 2826, "loss": 0.4462, "lr": 4.721101433096381e-06, "epoch": 0.7108753315649867, "percentage": 23.71, "elapsed_time": "1:05:40", "remaining_time": "3:31:21"}
68
+ {"current_steps": 680, "total_steps": 2826, "loss": 0.5087, "lr": 4.706756481080511e-06, "epoch": 0.7214854111405835, "percentage": 24.06, "elapsed_time": "1:06:41", "remaining_time": "3:30:27"}
69
+ {"current_steps": 690, "total_steps": 2826, "loss": 0.5304, "lr": 4.692074741604795e-06, "epoch": 0.7320954907161804, "percentage": 24.42, "elapsed_time": "1:07:36", "remaining_time": "3:29:18"}
70
+ {"current_steps": 700, "total_steps": 2826, "loss": 0.5177, "lr": 4.677058455344989e-06, "epoch": 0.7427055702917772, "percentage": 24.77, "elapsed_time": "1:08:33", "remaining_time": "3:28:13"}
71
+ {"current_steps": 710, "total_steps": 2826, "loss": 0.4841, "lr": 4.661709914034209e-06, "epoch": 0.753315649867374, "percentage": 25.12, "elapsed_time": "1:09:37", "remaining_time": "3:27:29"}
72
+ {"current_steps": 720, "total_steps": 2826, "loss": 0.4544, "lr": 4.646031460113175e-06, "epoch": 0.7639257294429708, "percentage": 25.48, "elapsed_time": "1:10:38", "remaining_time": "3:26:36"}
73
+ {"current_steps": 730, "total_steps": 2826, "loss": 0.4715, "lr": 4.630025486372715e-06, "epoch": 0.7745358090185677, "percentage": 25.83, "elapsed_time": "1:11:32", "remaining_time": "3:25:25"}
74
+ {"current_steps": 740, "total_steps": 2826, "loss": 0.4824, "lr": 4.613694435588589e-06, "epoch": 0.7851458885941645, "percentage": 26.19, "elapsed_time": "1:12:28", "remaining_time": "3:24:17"}
75
+ {"current_steps": 750, "total_steps": 2826, "loss": 0.4852, "lr": 4.597040800148679e-06, "epoch": 0.7957559681697612, "percentage": 26.54, "elapsed_time": "1:13:25", "remaining_time": "3:23:14"}
76
+ {"current_steps": 760, "total_steps": 2826, "loss": 0.4134, "lr": 4.580067121672607e-06, "epoch": 0.8063660477453581, "percentage": 26.89, "elapsed_time": "1:14:21", "remaining_time": "3:22:09"}
77
+ {"current_steps": 770, "total_steps": 2826, "loss": 0.4493, "lr": 4.562775990623847e-06, "epoch": 0.8169761273209549, "percentage": 27.25, "elapsed_time": "1:15:20", "remaining_time": "3:21:11"}
78
+ {"current_steps": 780, "total_steps": 2826, "loss": 0.5255, "lr": 4.5451700459143735e-06, "epoch": 0.8275862068965517, "percentage": 27.6, "elapsed_time": "1:16:18", "remaining_time": "3:20:10"}
79
+ {"current_steps": 790, "total_steps": 2826, "loss": 0.4503, "lr": 4.527251974501923e-06, "epoch": 0.8381962864721485, "percentage": 27.95, "elapsed_time": "1:17:17", "remaining_time": "3:19:12"}
80
+ {"current_steps": 800, "total_steps": 2826, "loss": 0.4636, "lr": 4.509024510979917e-06, "epoch": 0.8488063660477454, "percentage": 28.31, "elapsed_time": "1:18:17", "remaining_time": "3:18:17"}
81
+ {"current_steps": 810, "total_steps": 2826, "loss": 0.4685, "lr": 4.4904904371601176e-06, "epoch": 0.8594164456233422, "percentage": 28.66, "elapsed_time": "1:19:18", "remaining_time": "3:17:23"}
82
+ {"current_steps": 820, "total_steps": 2826, "loss": 0.5248, "lr": 4.4716525816480816e-06, "epoch": 0.870026525198939, "percentage": 29.02, "elapsed_time": "1:20:20", "remaining_time": "3:16:33"}
83
+ {"current_steps": 830, "total_steps": 2826, "loss": 0.4747, "lr": 4.4525138194114644e-06, "epoch": 0.8806366047745358, "percentage": 29.37, "elapsed_time": "1:21:20", "remaining_time": "3:15:37"}
84
+ {"current_steps": 840, "total_steps": 2826, "loss": 0.4198, "lr": 4.4330770713412555e-06, "epoch": 0.8912466843501327, "percentage": 29.72, "elapsed_time": "1:22:27", "remaining_time": "3:14:56"}
85
+ {"current_steps": 850, "total_steps": 2826, "loss": 0.4545, "lr": 4.413345303805996e-06, "epoch": 0.9018567639257294, "percentage": 30.08, "elapsed_time": "1:23:27", "remaining_time": "3:14:02"}
86
+ {"current_steps": 860, "total_steps": 2826, "loss": 0.5003, "lr": 4.393321528199072e-06, "epoch": 0.9124668435013262, "percentage": 30.43, "elapsed_time": "1:24:24", "remaining_time": "3:12:56"}
87
+ {"current_steps": 870, "total_steps": 2826, "loss": 0.472, "lr": 4.373008800479118e-06, "epoch": 0.9230769230769231, "percentage": 30.79, "elapsed_time": "1:25:20", "remaining_time": "3:11:51"}
88
+ {"current_steps": 880, "total_steps": 2826, "loss": 0.4661, "lr": 4.352410220703629e-06, "epoch": 0.9336870026525199, "percentage": 31.14, "elapsed_time": "1:26:17", "remaining_time": "3:10:50"}
89
+ {"current_steps": 890, "total_steps": 2826, "loss": 0.4614, "lr": 4.331528932555844e-06, "epoch": 0.9442970822281167, "percentage": 31.49, "elapsed_time": "1:27:17", "remaining_time": "3:09:53"}
90
+ {"current_steps": 900, "total_steps": 2826, "loss": 0.4623, "lr": 4.3103681228649626e-06, "epoch": 0.9549071618037135, "percentage": 31.85, "elapsed_time": "1:28:15", "remaining_time": "3:08:52"}
91
+ {"current_steps": 910, "total_steps": 2826, "loss": 0.4902, "lr": 4.288931021119788e-06, "epoch": 0.9655172413793104, "percentage": 32.2, "elapsed_time": "1:29:13", "remaining_time": "3:07:52"}
92
+ {"current_steps": 920, "total_steps": 2826, "loss": 0.5047, "lr": 4.267220898975848e-06, "epoch": 0.9761273209549072, "percentage": 32.55, "elapsed_time": "1:30:10", "remaining_time": "3:06:48"}
93
+ {"current_steps": 930, "total_steps": 2826, "loss": 0.5358, "lr": 4.245241069756092e-06, "epoch": 0.986737400530504, "percentage": 32.91, "elapsed_time": "1:31:10", "remaining_time": "3:05:53"}
94
+ {"current_steps": 940, "total_steps": 2826, "loss": 0.4928, "lr": 4.222994887945219e-06, "epoch": 0.9973474801061007, "percentage": 33.26, "elapsed_time": "1:32:11", "remaining_time": "3:04:58"}
95
+ {"current_steps": 950, "total_steps": 2826, "loss": 0.3963, "lr": 4.20048574867773e-06, "epoch": 1.0074270557029177, "percentage": 33.62, "elapsed_time": "1:33:45", "remaining_time": "3:05:08"}
96
+ {"current_steps": 960, "total_steps": 2826, "loss": 0.3125, "lr": 4.1777170872197725e-06, "epoch": 1.0180371352785147, "percentage": 33.97, "elapsed_time": "1:34:43", "remaining_time": "3:04:07"}
97
+ {"current_steps": 970, "total_steps": 2826, "loss": 0.3457, "lr": 4.1546923784448646e-06, "epoch": 1.0286472148541115, "percentage": 34.32, "elapsed_time": "1:35:40", "remaining_time": "3:03:04"}
98
+ {"current_steps": 980, "total_steps": 2826, "loss": 0.3029, "lr": 4.1314151363035705e-06, "epoch": 1.0392572944297083, "percentage": 34.68, "elapsed_time": "1:36:38", "remaining_time": "3:02:02"}
99
+ {"current_steps": 990, "total_steps": 2826, "loss": 0.3289, "lr": 4.1078889132872145e-06, "epoch": 1.049867374005305, "percentage": 35.03, "elapsed_time": "1:37:36", "remaining_time": "3:01:01"}
100
+ {"current_steps": 1000, "total_steps": 2826, "loss": 0.3234, "lr": 4.084117299885712e-06, "epoch": 1.0604774535809018, "percentage": 35.39, "elapsed_time": "1:38:36", "remaining_time": "3:00:04"}
101
+ {"current_steps": 1010, "total_steps": 2826, "loss": 0.3139, "lr": 4.060103924039599e-06, "epoch": 1.0710875331564986, "percentage": 35.74, "elapsed_time": "1:39:35", "remaining_time": "2:59:04"}
102
+ {"current_steps": 1020, "total_steps": 2826, "loss": 0.3144, "lr": 4.035852450586352e-06, "epoch": 1.0816976127320954, "percentage": 36.09, "elapsed_time": "1:40:32", "remaining_time": "2:58:00"}
103
+ {"current_steps": 1030, "total_steps": 2826, "loss": 0.323, "lr": 4.011366580701073e-06, "epoch": 1.0923076923076924, "percentage": 36.45, "elapsed_time": "1:41:29", "remaining_time": "2:56:58"}
104
+ {"current_steps": 1040, "total_steps": 2826, "loss": 0.3694, "lr": 3.9866500513316274e-06, "epoch": 1.1029177718832892, "percentage": 36.8, "elapsed_time": "1:42:28", "remaining_time": "2:55:59"}
105
+ {"current_steps": 1050, "total_steps": 2826, "loss": 0.3351, "lr": 3.961706634628323e-06, "epoch": 1.113527851458886, "percentage": 37.15, "elapsed_time": "1:43:26", "remaining_time": "2:54:57"}
106
+ {"current_steps": 1060, "total_steps": 2826, "loss": 0.3459, "lr": 3.936540137368222e-06, "epoch": 1.1241379310344828, "percentage": 37.51, "elapsed_time": "1:44:26", "remaining_time": "2:54:00"}
107
+ {"current_steps": 1070, "total_steps": 2826, "loss": 0.3186, "lr": 3.911154400374159e-06, "epoch": 1.1347480106100796, "percentage": 37.86, "elapsed_time": "1:45:24", "remaining_time": "2:52:58"}
108
+ {"current_steps": 1080, "total_steps": 2826, "loss": 0.3333, "lr": 3.885553297928573e-06, "epoch": 1.1453580901856764, "percentage": 38.22, "elapsed_time": "1:46:21", "remaining_time": "2:51:56"}
109
+ {"current_steps": 1090, "total_steps": 2826, "loss": 0.3137, "lr": 3.859740737182222e-06, "epoch": 1.1559681697612731, "percentage": 38.57, "elapsed_time": "1:47:19", "remaining_time": "2:50:55"}
110
+ {"current_steps": 1100, "total_steps": 2826, "loss": 0.3426, "lr": 3.833720657557894e-06, "epoch": 1.16657824933687, "percentage": 38.92, "elapsed_time": "1:48:18", "remaining_time": "2:49:56"}
111
+ {"current_steps": 1110, "total_steps": 2826, "loss": 0.3709, "lr": 3.807497030149181e-06, "epoch": 1.1771883289124667, "percentage": 39.28, "elapsed_time": "1:49:13", "remaining_time": "2:48:51"}
112
+ {"current_steps": 1120, "total_steps": 2826, "loss": 0.329, "lr": 3.7810738571144257e-06, "epoch": 1.1877984084880637, "percentage": 39.63, "elapsed_time": "1:50:12", "remaining_time": "2:47:52"}
113
+ {"current_steps": 1130, "total_steps": 2826, "loss": 0.305, "lr": 3.7544551710659296e-06, "epoch": 1.1984084880636605, "percentage": 39.99, "elapsed_time": "1:51:11", "remaining_time": "2:46:53"}
114
+ {"current_steps": 1140, "total_steps": 2826, "loss": 0.3449, "lr": 3.7276450344545024e-06, "epoch": 1.2090185676392573, "percentage": 40.34, "elapsed_time": "1:52:08", "remaining_time": "2:45:50"}
115
+ {"current_steps": 1150, "total_steps": 2826, "loss": 0.3403, "lr": 3.7006475389494723e-06, "epoch": 1.219628647214854, "percentage": 40.69, "elapsed_time": "1:53:07", "remaining_time": "2:44:51"}
116
+ {"current_steps": 1160, "total_steps": 2826, "loss": 0.3342, "lr": 3.6734668048142273e-06, "epoch": 1.2302387267904509, "percentage": 41.05, "elapsed_time": "1:54:06", "remaining_time": "2:43:52"}
117
+ {"current_steps": 1170, "total_steps": 2826, "loss": 0.3589, "lr": 3.646106980277394e-06, "epoch": 1.2408488063660477, "percentage": 41.4, "elapsed_time": "1:55:04", "remaining_time": "2:42:53"}
118
+ {"current_steps": 1180, "total_steps": 2826, "loss": 0.3447, "lr": 3.618572240899748e-06, "epoch": 1.2514588859416444, "percentage": 41.76, "elapsed_time": "1:56:02", "remaining_time": "2:41:52"}
119
+ {"current_steps": 1190, "total_steps": 2826, "loss": 0.3787, "lr": 3.5908667889369603e-06, "epoch": 1.2620689655172415, "percentage": 42.11, "elapsed_time": "1:56:58", "remaining_time": "2:40:48"}
120
+ {"current_steps": 1200, "total_steps": 2826, "loss": 0.3376, "lr": 3.5629948526982563e-06, "epoch": 1.2726790450928382, "percentage": 42.46, "elapsed_time": "1:57:58", "remaining_time": "2:39:50"}
121
+ {"current_steps": 1210, "total_steps": 2826, "loss": 0.3461, "lr": 3.534960685901111e-06, "epoch": 1.283289124668435, "percentage": 42.82, "elapsed_time": "1:58:55", "remaining_time": "2:38:49"}
122
+ {"current_steps": 1220, "total_steps": 2826, "loss": 0.3396, "lr": 3.506768567022062e-06, "epoch": 1.2938992042440318, "percentage": 43.17, "elapsed_time": "1:59:49", "remaining_time": "2:37:44"}
123
+ {"current_steps": 1230, "total_steps": 2826, "loss": 0.3364, "lr": 3.478422798643737e-06, "epoch": 1.3045092838196286, "percentage": 43.52, "elapsed_time": "2:00:49", "remaining_time": "2:36:47"}
124
+ {"current_steps": 1240, "total_steps": 2826, "loss": 0.3126, "lr": 3.4499277067982177e-06, "epoch": 1.3151193633952254, "percentage": 43.88, "elapsed_time": "2:01:46", "remaining_time": "2:35:45"}
125
+ {"current_steps": 1250, "total_steps": 2826, "loss": 0.3092, "lr": 3.421287640306809e-06, "epoch": 1.3257294429708222, "percentage": 44.23, "elapsed_time": "2:02:44", "remaining_time": "2:34:45"}
126
+ {"current_steps": 1260, "total_steps": 2826, "loss": 0.3374, "lr": 3.3925069701163406e-06, "epoch": 1.3363395225464192, "percentage": 44.59, "elapsed_time": "2:03:48", "remaining_time": "2:33:51"}
127
+ {"current_steps": 1270, "total_steps": 2826, "loss": 0.3436, "lr": 3.363590088632085e-06, "epoch": 1.346949602122016, "percentage": 44.94, "elapsed_time": "2:04:49", "remaining_time": "2:32:55"}
128
+ {"current_steps": 1280, "total_steps": 2826, "loss": 0.3283, "lr": 3.334541409047408e-06, "epoch": 1.3575596816976128, "percentage": 45.29, "elapsed_time": "2:05:49", "remaining_time": "2:31:58"}
129
+ {"current_steps": 1290, "total_steps": 2826, "loss": 0.358, "lr": 3.3053653646702422e-06, "epoch": 1.3681697612732096, "percentage": 45.65, "elapsed_time": "2:06:47", "remaining_time": "2:30:58"}
130
+ {"current_steps": 1300, "total_steps": 2826, "loss": 0.3084, "lr": 3.276066408246487e-06, "epoch": 1.3787798408488063, "percentage": 46.0, "elapsed_time": "2:07:45", "remaining_time": "2:29:57"}
131
+ {"current_steps": 1310, "total_steps": 2826, "loss": 0.3508, "lr": 3.2466490112804484e-06, "epoch": 1.3893899204244031, "percentage": 46.36, "elapsed_time": "2:08:45", "remaining_time": "2:29:00"}
132
+ {"current_steps": 1320, "total_steps": 2826, "loss": 0.3215, "lr": 3.217117663352417e-06, "epoch": 1.4, "percentage": 46.71, "elapsed_time": "2:09:41", "remaining_time": "2:27:57"}
133
+ {"current_steps": 1330, "total_steps": 2826, "loss": 0.3193, "lr": 3.187476871433478e-06, "epoch": 1.410610079575597, "percentage": 47.06, "elapsed_time": "2:10:40", "remaining_time": "2:26:58"}
134
+ {"current_steps": 1340, "total_steps": 2826, "loss": 0.3019, "lr": 3.1577311591976766e-06, "epoch": 1.4212201591511937, "percentage": 47.42, "elapsed_time": "2:11:37", "remaining_time": "2:25:57"}
135
+ {"current_steps": 1350, "total_steps": 2826, "loss": 0.3099, "lr": 3.1278850663316307e-06, "epoch": 1.4318302387267905, "percentage": 47.77, "elapsed_time": "2:12:36", "remaining_time": "2:24:59"}
136
+ {"current_steps": 1360, "total_steps": 2826, "loss": 0.3085, "lr": 3.0979431478416987e-06, "epoch": 1.4424403183023873, "percentage": 48.12, "elapsed_time": "2:13:35", "remaining_time": "2:24:00"}
137
+ {"current_steps": 1370, "total_steps": 2826, "loss": 0.3211, "lr": 3.067909973358811e-06, "epoch": 1.453050397877984, "percentage": 48.48, "elapsed_time": "2:14:32", "remaining_time": "2:22:58"}
138
+ {"current_steps": 1380, "total_steps": 2826, "loss": 0.3329, "lr": 3.0377901264410673e-06, "epoch": 1.4636604774535809, "percentage": 48.83, "elapsed_time": "2:15:37", "remaining_time": "2:22:06"}
139
+ {"current_steps": 1390, "total_steps": 2826, "loss": 0.3376, "lr": 3.0075882038742133e-06, "epoch": 1.4742705570291776, "percentage": 49.19, "elapsed_time": "2:16:35", "remaining_time": "2:21:06"}
140
+ {"current_steps": 1400, "total_steps": 2826, "loss": 0.2896, "lr": 2.9773088149700923e-06, "epoch": 1.4848806366047747, "percentage": 49.54, "elapsed_time": "2:17:35", "remaining_time": "2:20:08"}
141
+ {"current_steps": 1410, "total_steps": 2826, "loss": 0.299, "lr": 2.9469565808631888e-06, "epoch": 1.4954907161803712, "percentage": 49.89, "elapsed_time": "2:18:32", "remaining_time": "2:19:07"}
142
+ {"current_steps": 1420, "total_steps": 2826, "loss": 0.3484, "lr": 2.9165361338053683e-06, "epoch": 1.5061007957559682, "percentage": 50.25, "elapsed_time": "2:19:29", "remaining_time": "2:18:06"}
143
+ {"current_steps": 1430, "total_steps": 2826, "loss": 0.3316, "lr": 2.886052116458918e-06, "epoch": 1.516710875331565, "percentage": 50.6, "elapsed_time": "2:20:28", "remaining_time": "2:17:08"}
144
+ {"current_steps": 1440, "total_steps": 2826, "loss": 0.328, "lr": 2.8555091811880004e-06, "epoch": 1.5273209549071618, "percentage": 50.96, "elapsed_time": "2:21:25", "remaining_time": "2:16:07"}
145
+ {"current_steps": 1450, "total_steps": 2826, "loss": 0.3215, "lr": 2.8249119893486252e-06, "epoch": 1.5379310344827586, "percentage": 51.31, "elapsed_time": "2:22:28", "remaining_time": "2:15:12"}
146
+ {"current_steps": 1460, "total_steps": 2826, "loss": 0.3118, "lr": 2.7942652105772516e-06, "epoch": 1.5485411140583554, "percentage": 51.66, "elapsed_time": "2:23:28", "remaining_time": "2:14:14"}
147
+ {"current_steps": 1470, "total_steps": 2826, "loss": 0.2973, "lr": 2.7635735220781214e-06, "epoch": 1.5591511936339524, "percentage": 52.02, "elapsed_time": "2:24:31", "remaining_time": "2:13:19"}
148
+ {"current_steps": 1480, "total_steps": 2826, "loss": 0.3423, "lr": 2.7328416079094412e-06, "epoch": 1.569761273209549, "percentage": 52.37, "elapsed_time": "2:25:31", "remaining_time": "2:12:20"}
149
+ {"current_steps": 1490, "total_steps": 2826, "loss": 0.3211, "lr": 2.7020741582685217e-06, "epoch": 1.580371352785146, "percentage": 52.72, "elapsed_time": "2:26:27", "remaining_time": "2:11:19"}
150
+ {"current_steps": 1500, "total_steps": 2826, "loss": 0.2733, "lr": 2.6712758687759706e-06, "epoch": 1.5909814323607427, "percentage": 53.08, "elapsed_time": "2:27:26", "remaining_time": "2:10:20"}
151
+ {"current_steps": 1510, "total_steps": 2826, "loss": 0.338, "lr": 2.6404514397590657e-06, "epoch": 1.6015915119363395, "percentage": 53.43, "elapsed_time": "2:28:24", "remaining_time": "2:09:20"}
152
+ {"current_steps": 1520, "total_steps": 2826, "loss": 0.3124, "lr": 2.6096055755344113e-06, "epoch": 1.6122015915119363, "percentage": 53.79, "elapsed_time": "2:29:24", "remaining_time": "2:08:22"}
153
+ {"current_steps": 1530, "total_steps": 2826, "loss": 0.3538, "lr": 2.578742983689973e-06, "epoch": 1.622811671087533, "percentage": 54.14, "elapsed_time": "2:30:18", "remaining_time": "2:07:19"}
154
+ {"current_steps": 1540, "total_steps": 2826, "loss": 0.3353, "lr": 2.547868374366631e-06, "epoch": 1.6334217506631301, "percentage": 54.49, "elapsed_time": "2:31:20", "remaining_time": "2:06:22"}
155
+ {"current_steps": 1550, "total_steps": 2826, "loss": 0.302, "lr": 2.5169864595393295e-06, "epoch": 1.6440318302387267, "percentage": 54.85, "elapsed_time": "2:32:18", "remaining_time": "2:05:22"}
156
+ {"current_steps": 1560, "total_steps": 2826, "loss": 0.3124, "lr": 2.4861019522979537e-06, "epoch": 1.6546419098143237, "percentage": 55.2, "elapsed_time": "2:33:16", "remaining_time": "2:04:22"}
157
+ {"current_steps": 1570, "total_steps": 2826, "loss": 0.3497, "lr": 2.455219566128034e-06, "epoch": 1.6652519893899205, "percentage": 55.56, "elapsed_time": "2:34:13", "remaining_time": "2:03:22"}
158
+ {"current_steps": 1580, "total_steps": 2826, "loss": 0.3233, "lr": 2.4243440141913905e-06, "epoch": 1.6758620689655173, "percentage": 55.91, "elapsed_time": "2:35:12", "remaining_time": "2:02:23"}
159
+ {"current_steps": 1590, "total_steps": 2826, "loss": 0.3067, "lr": 2.393480008606825e-06, "epoch": 1.686472148541114, "percentage": 56.26, "elapsed_time": "2:36:10", "remaining_time": "2:01:24"}
160
+ {"current_steps": 1600, "total_steps": 2826, "loss": 0.2893, "lr": 2.3626322597309774e-06, "epoch": 1.6970822281167108, "percentage": 56.62, "elapsed_time": "2:37:09", "remaining_time": "2:00:25"}
161
+ {"current_steps": 1610, "total_steps": 2826, "loss": 0.2825, "lr": 2.331805475439445e-06, "epoch": 1.7076923076923078, "percentage": 56.97, "elapsed_time": "2:38:07", "remaining_time": "1:59:26"}
162
+ {"current_steps": 1620, "total_steps": 2826, "loss": 0.3379, "lr": 2.3010043604082824e-06, "epoch": 1.7183023872679044, "percentage": 57.32, "elapsed_time": "2:39:01", "remaining_time": "1:58:22"}
163
+ {"current_steps": 1630, "total_steps": 2826, "loss": 0.301, "lr": 2.2702336153959925e-06, "epoch": 1.7289124668435014, "percentage": 57.68, "elapsed_time": "2:40:01", "remaining_time": "1:57:25"}
164
+ {"current_steps": 1640, "total_steps": 2826, "loss": 0.404, "lr": 2.2394979365261134e-06, "epoch": 1.739522546419098, "percentage": 58.03, "elapsed_time": "2:40:58", "remaining_time": "1:56:24"}
165
+ {"current_steps": 1650, "total_steps": 2826, "loss": 0.3242, "lr": 2.208802014570507e-06, "epoch": 1.750132625994695, "percentage": 58.39, "elapsed_time": "2:41:56", "remaining_time": "1:55:25"}
166
+ {"current_steps": 1660, "total_steps": 2826, "loss": 0.3152, "lr": 2.1781505342334775e-06, "epoch": 1.7607427055702918, "percentage": 58.74, "elapsed_time": "2:42:55", "remaining_time": "1:54:26"}
167
+ {"current_steps": 1670, "total_steps": 2826, "loss": 0.3302, "lr": 2.147548173436805e-06, "epoch": 1.7713527851458886, "percentage": 59.09, "elapsed_time": "2:43:56", "remaining_time": "1:53:29"}
168
+ {"current_steps": 1680, "total_steps": 2826, "loss": 0.293, "lr": 2.116999602605814e-06, "epoch": 1.7819628647214856, "percentage": 59.45, "elapsed_time": "2:44:55", "remaining_time": "1:52:29"}
169
+ {"current_steps": 1690, "total_steps": 2826, "loss": 0.2683, "lr": 2.086509483956594e-06, "epoch": 1.7925729442970821, "percentage": 59.8, "elapsed_time": "2:45:54", "remaining_time": "1:51:31"}
170
+ {"current_steps": 1700, "total_steps": 2826, "loss": 0.313, "lr": 2.056082470784469e-06, "epoch": 1.8031830238726791, "percentage": 60.16, "elapsed_time": "2:46:50", "remaining_time": "1:50:30"}
171
+ {"current_steps": 1710, "total_steps": 2826, "loss": 0.262, "lr": 2.0257232067538213e-06, "epoch": 1.8137931034482757, "percentage": 60.51, "elapsed_time": "2:47:47", "remaining_time": "1:49:30"}
172
+ {"current_steps": 1720, "total_steps": 2826, "loss": 0.3457, "lr": 1.9954363251894007e-06, "epoch": 1.8244031830238727, "percentage": 60.86, "elapsed_time": "2:48:43", "remaining_time": "1:48:29"}
173
+ {"current_steps": 1730, "total_steps": 2826, "loss": 0.2739, "lr": 1.9652264483691933e-06, "epoch": 1.8350132625994695, "percentage": 61.22, "elapsed_time": "2:49:43", "remaining_time": "1:47:31"}
174
+ {"current_steps": 1740, "total_steps": 2826, "loss": 0.3109, "lr": 1.9350981868189944e-06, "epoch": 1.8456233421750663, "percentage": 61.57, "elapsed_time": "2:50:40", "remaining_time": "1:46:31"}
175
+ {"current_steps": 1750, "total_steps": 2826, "loss": 0.3269, "lr": 1.9050561386087618e-06, "epoch": 1.856233421750663, "percentage": 61.92, "elapsed_time": "2:51:36", "remaining_time": "1:45:31"}
176
+ {"current_steps": 1760, "total_steps": 2826, "loss": 0.3617, "lr": 1.8751048886508711e-06, "epoch": 1.8668435013262599, "percentage": 62.28, "elapsed_time": "2:52:34", "remaining_time": "1:44:31"}
177
+ {"current_steps": 1770, "total_steps": 2826, "loss": 0.3228, "lr": 1.8452490080003888e-06, "epoch": 1.8774535809018569, "percentage": 62.63, "elapsed_time": "2:53:33", "remaining_time": "1:43:32"}
178
+ {"current_steps": 1780, "total_steps": 2826, "loss": 0.2857, "lr": 1.8154930531574521e-06, "epoch": 1.8880636604774534, "percentage": 62.99, "elapsed_time": "2:54:32", "remaining_time": "1:42:33"}
179
+ {"current_steps": 1790, "total_steps": 2826, "loss": 0.3622, "lr": 1.785841565371868e-06, "epoch": 1.8986737400530505, "percentage": 63.34, "elapsed_time": "2:55:29", "remaining_time": "1:41:34"}
180
+ {"current_steps": 1800, "total_steps": 2826, "loss": 0.3031, "lr": 1.7562990699500482e-06, "epoch": 1.9092838196286472, "percentage": 63.69, "elapsed_time": "2:56:27", "remaining_time": "1:40:34"}
181
+ {"current_steps": 1810, "total_steps": 2826, "loss": 0.3019, "lr": 1.7268700755643708e-06, "epoch": 1.919893899204244, "percentage": 64.05, "elapsed_time": "2:57:25", "remaining_time": "1:39:35"}
182
+ {"current_steps": 1820, "total_steps": 2826, "loss": 0.3047, "lr": 1.6975590735650812e-06, "epoch": 1.9305039787798408, "percentage": 64.4, "elapsed_time": "2:58:25", "remaining_time": "1:38:37"}
183
+ {"current_steps": 1830, "total_steps": 2826, "loss": 0.3048, "lr": 1.668370537294841e-06, "epoch": 1.9411140583554376, "percentage": 64.76, "elapsed_time": "2:59:22", "remaining_time": "1:37:37"}
184
+ {"current_steps": 1840, "total_steps": 2826, "loss": 0.3205, "lr": 1.6393089214060204e-06, "epoch": 1.9517241379310346, "percentage": 65.11, "elapsed_time": "3:00:20", "remaining_time": "1:36:38"}
185
+ {"current_steps": 1850, "total_steps": 2826, "loss": 0.321, "lr": 1.6103786611808414e-06, "epoch": 1.9623342175066312, "percentage": 65.46, "elapsed_time": "3:01:23", "remaining_time": "1:35:41"}
186
+ {"current_steps": 1860, "total_steps": 2826, "loss": 0.2954, "lr": 1.5815841718544884e-06, "epoch": 1.9729442970822282, "percentage": 65.82, "elapsed_time": "3:02:19", "remaining_time": "1:34:41"}
187
+ {"current_steps": 1870, "total_steps": 2826, "loss": 0.2945, "lr": 1.5529298479412636e-06, "epoch": 1.983554376657825, "percentage": 66.17, "elapsed_time": "3:03:16", "remaining_time": "1:33:41"}
188
+ {"current_steps": 1880, "total_steps": 2826, "loss": 0.3291, "lr": 1.524420062563912e-06, "epoch": 1.9941644562334218, "percentage": 66.53, "elapsed_time": "3:04:13", "remaining_time": "1:32:41"}
189
+ {"current_steps": 1890, "total_steps": 2826, "loss": 0.234, "lr": 1.4960591667862163e-06, "epoch": 2.004244031830239, "percentage": 66.88, "elapsed_time": "3:05:53", "remaining_time": "1:32:03"}
190
+ {"current_steps": 1900, "total_steps": 2826, "loss": 0.1943, "lr": 1.4678514889489464e-06, "epoch": 2.0148541114058354, "percentage": 67.23, "elapsed_time": "3:06:54", "remaining_time": "1:31:05"}
191
+ {"current_steps": 1910, "total_steps": 2826, "loss": 0.1911, "lr": 1.4398013340092864e-06, "epoch": 2.0254641909814324, "percentage": 67.59, "elapsed_time": "3:07:56", "remaining_time": "1:30:08"}
192
+ {"current_steps": 1920, "total_steps": 2826, "loss": 0.1895, "lr": 1.4119129828838275e-06, "epoch": 2.0360742705570294, "percentage": 67.94, "elapsed_time": "3:08:56", "remaining_time": "1:29:09"}
193
+ {"current_steps": 1930, "total_steps": 2826, "loss": 0.2177, "lr": 1.384190691795226e-06, "epoch": 2.046684350132626, "percentage": 68.29, "elapsed_time": "3:09:56", "remaining_time": "1:28:10"}
194
+ {"current_steps": 1940, "total_steps": 2826, "loss": 0.2252, "lr": 1.3566386916226373e-06, "epoch": 2.057294429708223, "percentage": 68.65, "elapsed_time": "3:10:53", "remaining_time": "1:27:10"}
195
+ {"current_steps": 1950, "total_steps": 2826, "loss": 0.1982, "lr": 1.3292611872560134e-06, "epoch": 2.0679045092838195, "percentage": 69.0, "elapsed_time": "3:11:57", "remaining_time": "1:26:13"}
196
+ {"current_steps": 1960, "total_steps": 2826, "loss": 0.1696, "lr": 1.302062356954365e-06, "epoch": 2.0785145888594165, "percentage": 69.36, "elapsed_time": "3:12:58", "remaining_time": "1:25:15"}
197
+ {"current_steps": 1970, "total_steps": 2826, "loss": 0.1936, "lr": 1.2750463517080922e-06, "epoch": 2.089124668435013, "percentage": 69.71, "elapsed_time": "3:14:00", "remaining_time": "1:24:17"}
198
+ {"current_steps": 1980, "total_steps": 2826, "loss": 0.1604, "lr": 1.2482172946054753e-06, "epoch": 2.09973474801061, "percentage": 70.06, "elapsed_time": "3:14:56", "remaining_time": "1:23:17"}
199
+ {"current_steps": 1990, "total_steps": 2826, "loss": 0.2069, "lr": 1.2215792802034187e-06, "epoch": 2.110344827586207, "percentage": 70.42, "elapsed_time": "3:15:54", "remaining_time": "1:22:18"}
200
+ {"current_steps": 2000, "total_steps": 2826, "loss": 0.1964, "lr": 1.1951363739025618e-06, "epoch": 2.1209549071618037, "percentage": 70.77, "elapsed_time": "3:16:54", "remaining_time": "1:21:19"}
201
+ {"current_steps": 2010, "total_steps": 2826, "loss": 0.1871, "lr": 1.168892611326827e-06, "epoch": 2.1315649867374007, "percentage": 71.13, "elapsed_time": "3:17:53", "remaining_time": "1:20:20"}
202
+ {"current_steps": 2020, "total_steps": 2826, "loss": 0.2595, "lr": 1.1428519977075136e-06, "epoch": 2.1421750663129973, "percentage": 71.48, "elapsed_time": "3:18:51", "remaining_time": "1:19:20"}
203
+ {"current_steps": 2030, "total_steps": 2826, "loss": 0.185, "lr": 1.1170185072720434e-06, "epoch": 2.1527851458885943, "percentage": 71.83, "elapsed_time": "3:19:49", "remaining_time": "1:18:21"}
204
+ {"current_steps": 2040, "total_steps": 2826, "loss": 0.228, "lr": 1.091396082637419e-06, "epoch": 2.163395225464191, "percentage": 72.19, "elapsed_time": "3:20:47", "remaining_time": "1:17:21"}
205
+ {"current_steps": 2050, "total_steps": 2826, "loss": 0.2098, "lr": 1.065988634208516e-06, "epoch": 2.174005305039788, "percentage": 72.54, "elapsed_time": "3:21:48", "remaining_time": "1:16:23"}
206
+ {"current_steps": 2060, "total_steps": 2826, "loss": 0.1982, "lr": 1.0408000395812961e-06, "epoch": 2.184615384615385, "percentage": 72.89, "elapsed_time": "3:22:48", "remaining_time": "1:15:24"}
207
+ {"current_steps": 2070, "total_steps": 2826, "loss": 0.1844, "lr": 1.0158341429510194e-06, "epoch": 2.1952254641909814, "percentage": 73.25, "elapsed_time": "3:23:45", "remaining_time": "1:14:24"}
208
+ {"current_steps": 2080, "total_steps": 2826, "loss": 0.1654, "lr": 9.910947545255523e-07, "epoch": 2.2058355437665784, "percentage": 73.6, "elapsed_time": "3:24:45", "remaining_time": "1:13:26"}
209
+ {"current_steps": 2090, "total_steps": 2826, "loss": 0.2037, "lr": 9.665856499438744e-07, "epoch": 2.216445623342175, "percentage": 73.96, "elapsed_time": "3:25:43", "remaining_time": "1:12:26"}
210
+ {"current_steps": 2100, "total_steps": 2826, "loss": 0.2087, "lr": 9.423105696998491e-07, "epoch": 2.227055702917772, "percentage": 74.31, "elapsed_time": "3:26:40", "remaining_time": "1:11:26"}
211
+ {"current_steps": 2110, "total_steps": 2826, "loss": 0.2105, "lr": 9.182732185713633e-07, "epoch": 2.2376657824933686, "percentage": 74.66, "elapsed_time": "3:27:38", "remaining_time": "1:10:27"}
212
+ {"current_steps": 2120, "total_steps": 2826, "loss": 0.2186, "lr": 8.94477265054918e-07, "epoch": 2.2482758620689656, "percentage": 75.02, "elapsed_time": "3:28:36", "remaining_time": "1:09:28"}
213
+ {"current_steps": 2130, "total_steps": 2826, "loss": 0.1879, "lr": 8.709263408057522e-07, "epoch": 2.2588859416445626, "percentage": 75.37, "elapsed_time": "3:29:34", "remaining_time": "1:08:28"}
214
+ {"current_steps": 2140, "total_steps": 2826, "loss": 0.2177, "lr": 8.476240400835972e-07, "epoch": 2.269496021220159, "percentage": 75.73, "elapsed_time": "3:30:30", "remaining_time": "1:07:28"}
215
+ {"current_steps": 2150, "total_steps": 2826, "loss": 0.165, "lr": 8.245739192041311e-07, "epoch": 2.280106100795756, "percentage": 76.08, "elapsed_time": "3:31:30", "remaining_time": "1:06:30"}
216
+ {"current_steps": 2160, "total_steps": 2826, "loss": 0.2018, "lr": 8.017794959962225e-07, "epoch": 2.2907161803713527, "percentage": 76.43, "elapsed_time": "3:32:27", "remaining_time": "1:05:30"}
217
+ {"current_steps": 2170, "total_steps": 2826, "loss": 0.1955, "lr": 7.792442492650587e-07, "epoch": 2.3013262599469497, "percentage": 76.79, "elapsed_time": "3:33:28", "remaining_time": "1:04:32"}
218
+ {"current_steps": 2180, "total_steps": 2826, "loss": 0.1976, "lr": 7.569716182612177e-07, "epoch": 2.3119363395225463, "percentage": 77.14, "elapsed_time": "3:34:25", "remaining_time": "1:03:32"}
219
+ {"current_steps": 2190, "total_steps": 2826, "loss": 0.1685, "lr": 7.349650021557839e-07, "epoch": 2.3225464190981433, "percentage": 77.49, "elapsed_time": "3:35:25", "remaining_time": "1:02:33"}
220
+ {"current_steps": 2200, "total_steps": 2826, "loss": 0.1519, "lr": 7.132277595215773e-07, "epoch": 2.33315649867374, "percentage": 77.85, "elapsed_time": "3:36:19", "remaining_time": "1:01:33"}
221
+ {"current_steps": 2210, "total_steps": 2826, "loss": 0.1573, "lr": 6.917632078205805e-07, "epoch": 2.343766578249337, "percentage": 78.2, "elapsed_time": "3:37:18", "remaining_time": "1:00:34"}
222
+ {"current_steps": 2220, "total_steps": 2826, "loss": 0.184, "lr": 6.705746228976387e-07, "epoch": 2.3543766578249334, "percentage": 78.56, "elapsed_time": "3:38:14", "remaining_time": "0:59:34"}
223
+ {"current_steps": 2230, "total_steps": 2826, "loss": 0.1968, "lr": 6.496652384805125e-07, "epoch": 2.3649867374005304, "percentage": 78.91, "elapsed_time": "3:39:11", "remaining_time": "0:58:34"}
224
+ {"current_steps": 2240, "total_steps": 2826, "loss": 0.1846, "lr": 6.290382456863584e-07, "epoch": 2.3755968169761275, "percentage": 79.26, "elapsed_time": "3:40:12", "remaining_time": "0:57:36"}
225
+ {"current_steps": 2250, "total_steps": 2826, "loss": 0.1858, "lr": 6.086967925347075e-07, "epoch": 2.386206896551724, "percentage": 79.62, "elapsed_time": "3:41:13", "remaining_time": "0:56:38"}
226
+ {"current_steps": 2260, "total_steps": 2826, "loss": 0.1837, "lr": 5.88643983467033e-07, "epoch": 2.396816976127321, "percentage": 79.97, "elapsed_time": "3:42:12", "remaining_time": "0:55:39"}
227
+ {"current_steps": 2270, "total_steps": 2826, "loss": 0.1659, "lr": 5.688828788729547e-07, "epoch": 2.4074270557029176, "percentage": 80.33, "elapsed_time": "3:43:09", "remaining_time": "0:54:39"}
228
+ {"current_steps": 2280, "total_steps": 2826, "loss": 0.2095, "lr": 5.494164946231747e-07, "epoch": 2.4180371352785146, "percentage": 80.68, "elapsed_time": "3:44:04", "remaining_time": "0:53:39"}
229
+ {"current_steps": 2290, "total_steps": 2826, "loss": 0.1862, "lr": 5.302478016092075e-07, "epoch": 2.428647214854111, "percentage": 81.03, "elapsed_time": "3:45:03", "remaining_time": "0:52:40"}
230
+ {"current_steps": 2300, "total_steps": 2826, "loss": 0.2085, "lr": 5.113797252899728e-07, "epoch": 2.439257294429708, "percentage": 81.39, "elapsed_time": "3:46:01", "remaining_time": "0:51:41"}
231
+ {"current_steps": 2310, "total_steps": 2826, "loss": 0.1914, "lr": 4.928151452453184e-07, "epoch": 2.449867374005305, "percentage": 81.74, "elapsed_time": "3:46:58", "remaining_time": "0:50:42"}
232
+ {"current_steps": 2320, "total_steps": 2826, "loss": 0.1718, "lr": 4.745568947365542e-07, "epoch": 2.4604774535809018, "percentage": 82.09, "elapsed_time": "3:47:59", "remaining_time": "0:49:43"}
233
+ {"current_steps": 2330, "total_steps": 2826, "loss": 0.1669, "lr": 4.5660776027404654e-07, "epoch": 2.4710875331564988, "percentage": 82.45, "elapsed_time": "3:48:58", "remaining_time": "0:48:44"}
234
+ {"current_steps": 2340, "total_steps": 2826, "loss": 0.1731, "lr": 4.389704811919507e-07, "epoch": 2.4816976127320953, "percentage": 82.8, "elapsed_time": "3:50:00", "remaining_time": "0:47:46"}
235
+ {"current_steps": 2350, "total_steps": 2826, "loss": 0.1802, "lr": 4.216477492301455e-07, "epoch": 2.4923076923076923, "percentage": 83.16, "elapsed_time": "3:50:57", "remaining_time": "0:46:46"}
236
+ {"current_steps": 2360, "total_steps": 2826, "loss": 0.2232, "lr": 4.0464220812342526e-07, "epoch": 2.502917771883289, "percentage": 83.51, "elapsed_time": "3:51:53", "remaining_time": "0:45:47"}
237
+ {"current_steps": 2370, "total_steps": 2826, "loss": 0.1432, "lr": 3.87956453198027e-07, "epoch": 2.513527851458886, "percentage": 83.86, "elapsed_time": "3:52:52", "remaining_time": "0:44:48"}
238
+ {"current_steps": 2380, "total_steps": 2826, "loss": 0.1834, "lr": 3.715930309755389e-07, "epoch": 2.524137931034483, "percentage": 84.22, "elapsed_time": "3:53:51", "remaining_time": "0:43:49"}
239
+ {"current_steps": 2390, "total_steps": 2826, "loss": 0.2123, "lr": 3.5555443878425635e-07, "epoch": 2.5347480106100795, "percentage": 84.57, "elapsed_time": "3:54:47", "remaining_time": "0:42:49"}
240
+ {"current_steps": 2400, "total_steps": 2826, "loss": 0.2034, "lr": 3.398431243780531e-07, "epoch": 2.5453580901856765, "percentage": 84.93, "elapsed_time": "3:55:41", "remaining_time": "0:41:50"}
241
+ {"current_steps": 2410, "total_steps": 2826, "loss": 0.1778, "lr": 3.2446148556281117e-07, "epoch": 2.555968169761273, "percentage": 85.28, "elapsed_time": "3:56:40", "remaining_time": "0:40:51"}
242
+ {"current_steps": 2420, "total_steps": 2826, "loss": 0.1892, "lr": 3.0941186983047543e-07, "epoch": 2.56657824933687, "percentage": 85.63, "elapsed_time": "3:57:36", "remaining_time": "0:39:51"}
243
+ {"current_steps": 2430, "total_steps": 2826, "loss": 0.1935, "lr": 2.9469657400078925e-07, "epoch": 2.5771883289124666, "percentage": 85.99, "elapsed_time": "3:58:33", "remaining_time": "0:38:52"}
244
+ {"current_steps": 2440, "total_steps": 2826, "loss": 0.1858, "lr": 2.8031784387076186e-07, "epoch": 2.5877984084880636, "percentage": 86.34, "elapsed_time": "3:59:29", "remaining_time": "0:37:53"}
245
+ {"current_steps": 2450, "total_steps": 2826, "loss": 0.2118, "lr": 2.6627787387191934e-07, "epoch": 2.5984084880636606, "percentage": 86.69, "elapsed_time": "4:00:25", "remaining_time": "0:36:53"}
246
+ {"current_steps": 2460, "total_steps": 2826, "loss": 0.1929, "lr": 2.5257880673540376e-07, "epoch": 2.609018567639257, "percentage": 87.05, "elapsed_time": "4:01:21", "remaining_time": "0:35:54"}
247
+ {"current_steps": 2470, "total_steps": 2826, "loss": 0.1745, "lr": 2.392227331649527e-07, "epoch": 2.6196286472148542, "percentage": 87.4, "elapsed_time": "4:02:18", "remaining_time": "0:34:55"}
248
+ {"current_steps": 2480, "total_steps": 2826, "loss": 0.1823, "lr": 2.2621169151782417e-07, "epoch": 2.630238726790451, "percentage": 87.76, "elapsed_time": "4:03:15", "remaining_time": "0:33:56"}
249
+ {"current_steps": 2490, "total_steps": 2826, "loss": 0.2037, "lr": 2.1354766749371093e-07, "epoch": 2.640848806366048, "percentage": 88.11, "elapsed_time": "4:04:11", "remaining_time": "0:32:57"}
250
+ {"current_steps": 2500, "total_steps": 2826, "loss": 0.2196, "lr": 2.0123259383169031e-07, "epoch": 2.6514588859416444, "percentage": 88.46, "elapsed_time": "4:05:10", "remaining_time": "0:31:58"}
251
+ {"current_steps": 2510, "total_steps": 2826, "loss": 0.1848, "lr": 1.8926835001525257e-07, "epoch": 2.6620689655172414, "percentage": 88.82, "elapsed_time": "4:06:05", "remaining_time": "0:30:58"}
252
+ {"current_steps": 2520, "total_steps": 2826, "loss": 0.1823, "lr": 1.776567619854655e-07, "epoch": 2.6726790450928384, "percentage": 89.17, "elapsed_time": "4:07:00", "remaining_time": "0:29:59"}
253
+ {"current_steps": 2530, "total_steps": 2826, "loss": 0.2039, "lr": 1.6639960186230293e-07, "epoch": 2.683289124668435, "percentage": 89.53, "elapsed_time": "4:08:00", "remaining_time": "0:29:00"}
254
+ {"current_steps": 2540, "total_steps": 2826, "loss": 0.1796, "lr": 1.5549858767419018e-07, "epoch": 2.693899204244032, "percentage": 89.88, "elapsed_time": "4:08:57", "remaining_time": "0:28:01"}
255
+ {"current_steps": 2550, "total_steps": 2826, "loss": 0.1893, "lr": 1.449553830958053e-07, "epoch": 2.7045092838196285, "percentage": 90.23, "elapsed_time": "4:09:57", "remaining_time": "0:27:03"}
256
+ {"current_steps": 2560, "total_steps": 2826, "loss": 0.1947, "lr": 1.347715971941746e-07, "epoch": 2.7151193633952255, "percentage": 90.59, "elapsed_time": "4:10:56", "remaining_time": "0:26:04"}
257
+ {"current_steps": 2570, "total_steps": 2826, "loss": 0.1744, "lr": 1.2494878418310234e-07, "epoch": 2.725729442970822, "percentage": 90.94, "elapsed_time": "4:11:56", "remaining_time": "0:25:05"}
258
+ {"current_steps": 2580, "total_steps": 2826, "loss": 0.2351, "lr": 1.1548844318597208e-07, "epoch": 2.736339522546419, "percentage": 91.3, "elapsed_time": "4:12:55", "remaining_time": "0:24:06"}
259
+ {"current_steps": 2590, "total_steps": 2826, "loss": 0.2245, "lr": 1.0639201800695553e-07, "epoch": 2.746949602122016, "percentage": 91.65, "elapsed_time": "4:13:51", "remaining_time": "0:23:07"}
260
+ {"current_steps": 2600, "total_steps": 2826, "loss": 0.2014, "lr": 9.76608969106646e-08, "epoch": 2.7575596816976127, "percentage": 92.0, "elapsed_time": "4:14:49", "remaining_time": "0:22:09"}
261
+ {"current_steps": 2610, "total_steps": 2826, "loss": 0.1824, "lr": 8.929641241027937e-08, "epoch": 2.7681697612732097, "percentage": 92.36, "elapsed_time": "4:15:47", "remaining_time": "0:21:10"}
262
+ {"current_steps": 2620, "total_steps": 2826, "loss": 0.1706, "lr": 8.129984106418354e-08, "epoch": 2.7787798408488062, "percentage": 92.71, "elapsed_time": "4:16:45", "remaining_time": "0:20:11"}
263
+ {"current_steps": 2630, "total_steps": 2826, "loss": 0.2195, "lr": 7.3672403281142e-08, "epoch": 2.7893899204244033, "percentage": 93.06, "elapsed_time": "4:17:44", "remaining_time": "0:19:12"}
264
+ {"current_steps": 2640, "total_steps": 2826, "loss": 0.1748, "lr": 6.641526313404534e-08, "epoch": 2.8, "percentage": 93.42, "elapsed_time": "4:18:42", "remaining_time": "0:18:13"}
265
+ {"current_steps": 2650, "total_steps": 2826, "loss": 0.2061, "lr": 5.952952818225416e-08, "epoch": 2.810610079575597, "percentage": 93.77, "elapsed_time": "4:19:40", "remaining_time": "0:17:14"}
266
+ {"current_steps": 2660, "total_steps": 2826, "loss": 0.1742, "lr": 5.3016249302565436e-08, "epoch": 2.821220159151194, "percentage": 94.13, "elapsed_time": "4:20:36", "remaining_time": "0:16:15"}
267
+ {"current_steps": 2670, "total_steps": 2826, "loss": 0.2082, "lr": 4.6876420528833014e-08, "epoch": 2.8318302387267904, "percentage": 94.48, "elapsed_time": "4:21:35", "remaining_time": "0:15:17"}
268
+ {"current_steps": 2680, "total_steps": 2826, "loss": 0.1805, "lr": 4.111097890026089e-08, "epoch": 2.8424403183023874, "percentage": 94.83, "elapsed_time": "4:22:35", "remaining_time": "0:14:18"}
269
+ {"current_steps": 2690, "total_steps": 2826, "loss": 0.2058, "lr": 3.5720804318395976e-08, "epoch": 2.853050397877984, "percentage": 95.19, "elapsed_time": "4:23:34", "remaining_time": "0:13:19"}
270
+ {"current_steps": 2700, "total_steps": 2826, "loss": 0.2027, "lr": 3.0706719412839926e-08, "epoch": 2.863660477453581, "percentage": 95.54, "elapsed_time": "4:24:32", "remaining_time": "0:12:20"}
271
+ {"current_steps": 2710, "total_steps": 2826, "loss": 0.1941, "lr": 2.6069489415703197e-08, "epoch": 2.8742705570291776, "percentage": 95.9, "elapsed_time": "4:25:32", "remaining_time": "0:11:21"}
272
+ {"current_steps": 2720, "total_steps": 2826, "loss": 0.2029, "lr": 2.18098220448168e-08, "epoch": 2.8848806366047746, "percentage": 96.25, "elapsed_time": "4:26:30", "remaining_time": "0:10:23"}
273
+ {"current_steps": 2730, "total_steps": 2826, "loss": 0.2062, "lr": 1.7928367395725066e-08, "epoch": 2.8954907161803716, "percentage": 96.6, "elapsed_time": "4:27:27", "remaining_time": "0:09:24"}
274
+ {"current_steps": 2740, "total_steps": 2826, "loss": 0.1873, "lr": 1.442571784246699e-08, "epoch": 2.906100795755968, "percentage": 96.96, "elapsed_time": "4:28:27", "remaining_time": "0:08:25"}
275
+ {"current_steps": 2750, "total_steps": 2826, "loss": 0.1653, "lr": 1.1302407947173522e-08, "epoch": 2.916710875331565, "percentage": 97.31, "elapsed_time": "4:29:27", "remaining_time": "0:07:26"}
276
+ {"current_steps": 2760, "total_steps": 2826, "loss": 0.1743, "lr": 8.558914378481996e-09, "epoch": 2.9273209549071617, "percentage": 97.66, "elapsed_time": "4:30:28", "remaining_time": "0:06:28"}
277
+ {"current_steps": 2770, "total_steps": 2826, "loss": 0.1821, "lr": 6.195655838790726e-09, "epoch": 2.9379310344827587, "percentage": 98.02, "elapsed_time": "4:31:26", "remaining_time": "0:05:29"}
278
+ {"current_steps": 2780, "total_steps": 2826, "loss": 0.1954, "lr": 4.212993000356491e-09, "epoch": 2.9485411140583553, "percentage": 98.37, "elapsed_time": "4:32:26", "remaining_time": "0:04:30"}
279
+ {"current_steps": 2790, "total_steps": 2826, "loss": 0.1925, "lr": 2.611228450250802e-09, "epoch": 2.9591511936339523, "percentage": 98.73, "elapsed_time": "4:33:21", "remaining_time": "0:03:31"}
280
+ {"current_steps": 2800, "total_steps": 2826, "loss": 0.1805, "lr": 1.3906066441798927e-09, "epoch": 2.9697612732095493, "percentage": 99.08, "elapsed_time": "4:34:23", "remaining_time": "0:02:32"}
281
+ {"current_steps": 2810, "total_steps": 2826, "loss": 0.2084, "lr": 5.513138691767839e-10, "epoch": 2.980371352785146, "percentage": 99.43, "elapsed_time": "4:35:21", "remaining_time": "0:01:34"}
282
+ {"current_steps": 2820, "total_steps": 2826, "loss": 0.2115, "lr": 9.347821517069477e-11, "epoch": 2.9909814323607424, "percentage": 99.79, "elapsed_time": "4:36:24", "remaining_time": "0:00:35"}
283
+ {"current_steps": 2826, "total_steps": 2826, "epoch": 2.9973474801061006, "percentage": 100.0, "elapsed_time": "4:37:51", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2017 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 2.9973474801061006,
6
+ "eval_steps": 500,
7
+ "global_step": 2826,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.010610079575596816,
14
+ "grad_norm": 4.634474754333496,
15
+ "learning_rate": 1.5901060070671379e-07,
16
+ "loss": 0.741,
17
+ "step": 10
18
+ },
19
+ {
20
+ "epoch": 0.021220159151193633,
21
+ "grad_norm": 2.9002726078033447,
22
+ "learning_rate": 3.356890459363958e-07,
23
+ "loss": 0.5551,
24
+ "step": 20
25
+ },
26
+ {
27
+ "epoch": 0.03183023872679045,
28
+ "grad_norm": 4.242003917694092,
29
+ "learning_rate": 5.123674911660778e-07,
30
+ "loss": 0.6185,
31
+ "step": 30
32
+ },
33
+ {
34
+ "epoch": 0.042440318302387266,
35
+ "grad_norm": 3.8156638145446777,
36
+ "learning_rate": 6.890459363957598e-07,
37
+ "loss": 0.6358,
38
+ "step": 40
39
+ },
40
+ {
41
+ "epoch": 0.05305039787798409,
42
+ "grad_norm": 3.047624349594116,
43
+ "learning_rate": 8.657243816254418e-07,
44
+ "loss": 0.5922,
45
+ "step": 50
46
+ },
47
+ {
48
+ "epoch": 0.0636604774535809,
49
+ "grad_norm": 2.2943954467773438,
50
+ "learning_rate": 1.0424028268551239e-06,
51
+ "loss": 0.6282,
52
+ "step": 60
53
+ },
54
+ {
55
+ "epoch": 0.07427055702917772,
56
+ "grad_norm": 2.831937551498413,
57
+ "learning_rate": 1.2190812720848057e-06,
58
+ "loss": 0.5836,
59
+ "step": 70
60
+ },
61
+ {
62
+ "epoch": 0.08488063660477453,
63
+ "grad_norm": 3.941297769546509,
64
+ "learning_rate": 1.3957597173144876e-06,
65
+ "loss": 0.5836,
66
+ "step": 80
67
+ },
68
+ {
69
+ "epoch": 0.09549071618037135,
70
+ "grad_norm": 2.4598379135131836,
71
+ "learning_rate": 1.5724381625441699e-06,
72
+ "loss": 0.4983,
73
+ "step": 90
74
+ },
75
+ {
76
+ "epoch": 0.10610079575596817,
77
+ "grad_norm": 2.533829927444458,
78
+ "learning_rate": 1.7491166077738517e-06,
79
+ "loss": 0.6057,
80
+ "step": 100
81
+ },
82
+ {
83
+ "epoch": 0.11671087533156499,
84
+ "grad_norm": 2.412334442138672,
85
+ "learning_rate": 1.925795053003534e-06,
86
+ "loss": 0.5135,
87
+ "step": 110
88
+ },
89
+ {
90
+ "epoch": 0.1273209549071618,
91
+ "grad_norm": 2.7505877017974854,
92
+ "learning_rate": 2.1024734982332157e-06,
93
+ "loss": 0.4844,
94
+ "step": 120
95
+ },
96
+ {
97
+ "epoch": 0.13793103448275862,
98
+ "grad_norm": 2.701307535171509,
99
+ "learning_rate": 2.279151943462898e-06,
100
+ "loss": 0.5386,
101
+ "step": 130
102
+ },
103
+ {
104
+ "epoch": 0.14854111405835543,
105
+ "grad_norm": 2.8261961936950684,
106
+ "learning_rate": 2.45583038869258e-06,
107
+ "loss": 0.4774,
108
+ "step": 140
109
+ },
110
+ {
111
+ "epoch": 0.15915119363395225,
112
+ "grad_norm": 2.4490256309509277,
113
+ "learning_rate": 2.6325088339222617e-06,
114
+ "loss": 0.5035,
115
+ "step": 150
116
+ },
117
+ {
118
+ "epoch": 0.16976127320954906,
119
+ "grad_norm": 2.418158769607544,
120
+ "learning_rate": 2.8091872791519436e-06,
121
+ "loss": 0.4897,
122
+ "step": 160
123
+ },
124
+ {
125
+ "epoch": 0.18037135278514588,
126
+ "grad_norm": 3.5972161293029785,
127
+ "learning_rate": 2.985865724381626e-06,
128
+ "loss": 0.5196,
129
+ "step": 170
130
+ },
131
+ {
132
+ "epoch": 0.1909814323607427,
133
+ "grad_norm": 2.814927577972412,
134
+ "learning_rate": 3.162544169611308e-06,
135
+ "loss": 0.4791,
136
+ "step": 180
137
+ },
138
+ {
139
+ "epoch": 0.20159151193633953,
140
+ "grad_norm": 2.6151270866394043,
141
+ "learning_rate": 3.3392226148409896e-06,
142
+ "loss": 0.5024,
143
+ "step": 190
144
+ },
145
+ {
146
+ "epoch": 0.21220159151193635,
147
+ "grad_norm": 2.8331387042999268,
148
+ "learning_rate": 3.5159010600706715e-06,
149
+ "loss": 0.5781,
150
+ "step": 200
151
+ },
152
+ {
153
+ "epoch": 0.22281167108753316,
154
+ "grad_norm": 2.433027744293213,
155
+ "learning_rate": 3.6925795053003538e-06,
156
+ "loss": 0.4186,
157
+ "step": 210
158
+ },
159
+ {
160
+ "epoch": 0.23342175066312998,
161
+ "grad_norm": 2.671696186065674,
162
+ "learning_rate": 3.869257950530036e-06,
163
+ "loss": 0.4819,
164
+ "step": 220
165
+ },
166
+ {
167
+ "epoch": 0.2440318302387268,
168
+ "grad_norm": 2.5337982177734375,
169
+ "learning_rate": 4.045936395759718e-06,
170
+ "loss": 0.547,
171
+ "step": 230
172
+ },
173
+ {
174
+ "epoch": 0.2546419098143236,
175
+ "grad_norm": 2.2034990787506104,
176
+ "learning_rate": 4.222614840989399e-06,
177
+ "loss": 0.5603,
178
+ "step": 240
179
+ },
180
+ {
181
+ "epoch": 0.26525198938992045,
182
+ "grad_norm": 2.2893121242523193,
183
+ "learning_rate": 4.399293286219082e-06,
184
+ "loss": 0.4483,
185
+ "step": 250
186
+ },
187
+ {
188
+ "epoch": 0.27586206896551724,
189
+ "grad_norm": 1.8757219314575195,
190
+ "learning_rate": 4.575971731448763e-06,
191
+ "loss": 0.5178,
192
+ "step": 260
193
+ },
194
+ {
195
+ "epoch": 0.2864721485411141,
196
+ "grad_norm": 2.3748602867126465,
197
+ "learning_rate": 4.752650176678445e-06,
198
+ "loss": 0.5264,
199
+ "step": 270
200
+ },
201
+ {
202
+ "epoch": 0.29708222811671087,
203
+ "grad_norm": 3.0481033325195312,
204
+ "learning_rate": 4.929328621908128e-06,
205
+ "loss": 0.5124,
206
+ "step": 280
207
+ },
208
+ {
209
+ "epoch": 0.3076923076923077,
210
+ "grad_norm": 2.682847023010254,
211
+ "learning_rate": 4.99993132201408e-06,
212
+ "loss": 0.4977,
213
+ "step": 290
214
+ },
215
+ {
216
+ "epoch": 0.3183023872679045,
217
+ "grad_norm": 2.472842216491699,
218
+ "learning_rate": 4.9995116368759e-06,
219
+ "loss": 0.5005,
220
+ "step": 300
221
+ },
222
+ {
223
+ "epoch": 0.32891246684350134,
224
+ "grad_norm": 2.582815647125244,
225
+ "learning_rate": 4.998710485009401e-06,
226
+ "loss": 0.4857,
227
+ "step": 310
228
+ },
229
+ {
230
+ "epoch": 0.3395225464190981,
231
+ "grad_norm": 2.3572824001312256,
232
+ "learning_rate": 4.99752798868358e-06,
233
+ "loss": 0.4637,
234
+ "step": 320
235
+ },
236
+ {
237
+ "epoch": 0.35013262599469497,
238
+ "grad_norm": 2.3432295322418213,
239
+ "learning_rate": 4.99596432836689e-06,
240
+ "loss": 0.4775,
241
+ "step": 330
242
+ },
243
+ {
244
+ "epoch": 0.36074270557029176,
245
+ "grad_norm": 2.7486777305603027,
246
+ "learning_rate": 4.994019742699705e-06,
247
+ "loss": 0.5779,
248
+ "step": 340
249
+ },
250
+ {
251
+ "epoch": 0.3713527851458886,
252
+ "grad_norm": 2.3831562995910645,
253
+ "learning_rate": 4.991694528457891e-06,
254
+ "loss": 0.5057,
255
+ "step": 350
256
+ },
257
+ {
258
+ "epoch": 0.3819628647214854,
259
+ "grad_norm": 2.5414721965789795,
260
+ "learning_rate": 4.988989040507518e-06,
261
+ "loss": 0.5313,
262
+ "step": 360
263
+ },
264
+ {
265
+ "epoch": 0.3925729442970822,
266
+ "grad_norm": 2.4140472412109375,
267
+ "learning_rate": 4.985903691750697e-06,
268
+ "loss": 0.4441,
269
+ "step": 370
270
+ },
271
+ {
272
+ "epoch": 0.40318302387267907,
273
+ "grad_norm": 2.4907593727111816,
274
+ "learning_rate": 4.982438953062572e-06,
275
+ "loss": 0.4778,
276
+ "step": 380
277
+ },
278
+ {
279
+ "epoch": 0.41379310344827586,
280
+ "grad_norm": 2.579932928085327,
281
+ "learning_rate": 4.978595353219449e-06,
282
+ "loss": 0.4848,
283
+ "step": 390
284
+ },
285
+ {
286
+ "epoch": 0.4244031830238727,
287
+ "grad_norm": 2.5512266159057617,
288
+ "learning_rate": 4.974373478818098e-06,
289
+ "loss": 0.4891,
290
+ "step": 400
291
+ },
292
+ {
293
+ "epoch": 0.4350132625994695,
294
+ "grad_norm": 2.3293063640594482,
295
+ "learning_rate": 4.969773974186235e-06,
296
+ "loss": 0.4954,
297
+ "step": 410
298
+ },
299
+ {
300
+ "epoch": 0.44562334217506633,
301
+ "grad_norm": 2.6347479820251465,
302
+ "learning_rate": 4.964797541284175e-06,
303
+ "loss": 0.5353,
304
+ "step": 420
305
+ },
306
+ {
307
+ "epoch": 0.4562334217506631,
308
+ "grad_norm": 2.7719151973724365,
309
+ "learning_rate": 4.959444939597712e-06,
310
+ "loss": 0.5726,
311
+ "step": 430
312
+ },
313
+ {
314
+ "epoch": 0.46684350132625996,
315
+ "grad_norm": 2.1757211685180664,
316
+ "learning_rate": 4.953716986022204e-06,
317
+ "loss": 0.5642,
318
+ "step": 440
319
+ },
320
+ {
321
+ "epoch": 0.47745358090185674,
322
+ "grad_norm": 2.432244300842285,
323
+ "learning_rate": 4.947614554737904e-06,
324
+ "loss": 0.4429,
325
+ "step": 450
326
+ },
327
+ {
328
+ "epoch": 0.4880636604774536,
329
+ "grad_norm": 1.972844123840332,
330
+ "learning_rate": 4.941138577076538e-06,
331
+ "loss": 0.4683,
332
+ "step": 460
333
+ },
334
+ {
335
+ "epoch": 0.4986737400530504,
336
+ "grad_norm": 2.484992742538452,
337
+ "learning_rate": 4.934290041379182e-06,
338
+ "loss": 0.4385,
339
+ "step": 470
340
+ },
341
+ {
342
+ "epoch": 0.5092838196286472,
343
+ "grad_norm": 2.0424418449401855,
344
+ "learning_rate": 4.92706999284541e-06,
345
+ "loss": 0.4935,
346
+ "step": 480
347
+ },
348
+ {
349
+ "epoch": 0.519893899204244,
350
+ "grad_norm": 2.3754308223724365,
351
+ "learning_rate": 4.9194795333737925e-06,
352
+ "loss": 0.4548,
353
+ "step": 490
354
+ },
355
+ {
356
+ "epoch": 0.5305039787798409,
357
+ "grad_norm": 3.0801432132720947,
358
+ "learning_rate": 4.911519821393718e-06,
359
+ "loss": 0.5486,
360
+ "step": 500
361
+ },
362
+ {
363
+ "epoch": 0.5411140583554377,
364
+ "grad_norm": 2.2712507247924805,
365
+ "learning_rate": 4.9031920716886035e-06,
366
+ "loss": 0.5121,
367
+ "step": 510
368
+ },
369
+ {
370
+ "epoch": 0.5517241379310345,
371
+ "grad_norm": 2.0000548362731934,
372
+ "learning_rate": 4.894497555210499e-06,
373
+ "loss": 0.4495,
374
+ "step": 520
375
+ },
376
+ {
377
+ "epoch": 0.5623342175066313,
378
+ "grad_norm": 2.590303897857666,
379
+ "learning_rate": 4.8854375988861134e-06,
380
+ "loss": 0.5028,
381
+ "step": 530
382
+ },
383
+ {
384
+ "epoch": 0.5729442970822282,
385
+ "grad_norm": 2.377298355102539,
386
+ "learning_rate": 4.87601358541431e-06,
387
+ "loss": 0.5193,
388
+ "step": 540
389
+ },
390
+ {
391
+ "epoch": 0.583554376657825,
392
+ "grad_norm": 2.966008186340332,
393
+ "learning_rate": 4.8662269530550825e-06,
394
+ "loss": 0.545,
395
+ "step": 550
396
+ },
397
+ {
398
+ "epoch": 0.5941644562334217,
399
+ "grad_norm": 2.250293254852295,
400
+ "learning_rate": 4.856079195410046e-06,
401
+ "loss": 0.5219,
402
+ "step": 560
403
+ },
404
+ {
405
+ "epoch": 0.6047745358090185,
406
+ "grad_norm": 2.437361240386963,
407
+ "learning_rate": 4.845571861194501e-06,
408
+ "loss": 0.4725,
409
+ "step": 570
410
+ },
411
+ {
412
+ "epoch": 0.6153846153846154,
413
+ "grad_norm": 2.435994863510132,
414
+ "learning_rate": 4.834706554001065e-06,
415
+ "loss": 0.4232,
416
+ "step": 580
417
+ },
418
+ {
419
+ "epoch": 0.6259946949602122,
420
+ "grad_norm": 2.705902099609375,
421
+ "learning_rate": 4.823484932054937e-06,
422
+ "loss": 0.4834,
423
+ "step": 590
424
+ },
425
+ {
426
+ "epoch": 0.636604774535809,
427
+ "grad_norm": 2.1471517086029053,
428
+ "learning_rate": 4.811908707960832e-06,
429
+ "loss": 0.5302,
430
+ "step": 600
431
+ },
432
+ {
433
+ "epoch": 0.6472148541114059,
434
+ "grad_norm": 2.0760443210601807,
435
+ "learning_rate": 4.799979648441602e-06,
436
+ "loss": 0.494,
437
+ "step": 610
438
+ },
439
+ {
440
+ "epoch": 0.6578249336870027,
441
+ "grad_norm": 2.334944009780884,
442
+ "learning_rate": 4.787699574068611e-06,
443
+ "loss": 0.487,
444
+ "step": 620
445
+ },
446
+ {
447
+ "epoch": 0.6684350132625995,
448
+ "grad_norm": 2.3444855213165283,
449
+ "learning_rate": 4.775070358983881e-06,
450
+ "loss": 0.4911,
451
+ "step": 630
452
+ },
453
+ {
454
+ "epoch": 0.6790450928381963,
455
+ "grad_norm": 2.127737045288086,
456
+ "learning_rate": 4.7620939306140696e-06,
457
+ "loss": 0.4744,
458
+ "step": 640
459
+ },
460
+ {
461
+ "epoch": 0.6896551724137931,
462
+ "grad_norm": 2.2132568359375,
463
+ "learning_rate": 4.748772269376312e-06,
464
+ "loss": 0.4789,
465
+ "step": 650
466
+ },
467
+ {
468
+ "epoch": 0.7002652519893899,
469
+ "grad_norm": 1.9452372789382935,
470
+ "learning_rate": 4.735107408375977e-06,
471
+ "loss": 0.488,
472
+ "step": 660
473
+ },
474
+ {
475
+ "epoch": 0.7108753315649867,
476
+ "grad_norm": 2.7268893718719482,
477
+ "learning_rate": 4.721101433096381e-06,
478
+ "loss": 0.4462,
479
+ "step": 670
480
+ },
481
+ {
482
+ "epoch": 0.7214854111405835,
483
+ "grad_norm": 2.1095452308654785,
484
+ "learning_rate": 4.706756481080511e-06,
485
+ "loss": 0.5087,
486
+ "step": 680
487
+ },
488
+ {
489
+ "epoch": 0.7320954907161804,
490
+ "grad_norm": 2.278555154800415,
491
+ "learning_rate": 4.692074741604795e-06,
492
+ "loss": 0.5304,
493
+ "step": 690
494
+ },
495
+ {
496
+ "epoch": 0.7427055702917772,
497
+ "grad_norm": 2.455960512161255,
498
+ "learning_rate": 4.677058455344989e-06,
499
+ "loss": 0.5177,
500
+ "step": 700
501
+ },
502
+ {
503
+ "epoch": 0.753315649867374,
504
+ "grad_norm": 2.1136856079101562,
505
+ "learning_rate": 4.661709914034209e-06,
506
+ "loss": 0.4841,
507
+ "step": 710
508
+ },
509
+ {
510
+ "epoch": 0.7639257294429708,
511
+ "grad_norm": 2.296614646911621,
512
+ "learning_rate": 4.646031460113175e-06,
513
+ "loss": 0.4544,
514
+ "step": 720
515
+ },
516
+ {
517
+ "epoch": 0.7745358090185677,
518
+ "grad_norm": 1.8733782768249512,
519
+ "learning_rate": 4.630025486372715e-06,
520
+ "loss": 0.4715,
521
+ "step": 730
522
+ },
523
+ {
524
+ "epoch": 0.7851458885941645,
525
+ "grad_norm": 2.526837110519409,
526
+ "learning_rate": 4.613694435588589e-06,
527
+ "loss": 0.4824,
528
+ "step": 740
529
+ },
530
+ {
531
+ "epoch": 0.7957559681697612,
532
+ "grad_norm": 2.2026150226593018,
533
+ "learning_rate": 4.597040800148679e-06,
534
+ "loss": 0.4852,
535
+ "step": 750
536
+ },
537
+ {
538
+ "epoch": 0.8063660477453581,
539
+ "grad_norm": 2.214277744293213,
540
+ "learning_rate": 4.580067121672607e-06,
541
+ "loss": 0.4134,
542
+ "step": 760
543
+ },
544
+ {
545
+ "epoch": 0.8169761273209549,
546
+ "grad_norm": 2.623305559158325,
547
+ "learning_rate": 4.562775990623847e-06,
548
+ "loss": 0.4493,
549
+ "step": 770
550
+ },
551
+ {
552
+ "epoch": 0.8275862068965517,
553
+ "grad_norm": 2.9433794021606445,
554
+ "learning_rate": 4.5451700459143735e-06,
555
+ "loss": 0.5255,
556
+ "step": 780
557
+ },
558
+ {
559
+ "epoch": 0.8381962864721485,
560
+ "grad_norm": 2.143739938735962,
561
+ "learning_rate": 4.527251974501923e-06,
562
+ "loss": 0.4503,
563
+ "step": 790
564
+ },
565
+ {
566
+ "epoch": 0.8488063660477454,
567
+ "grad_norm": 2.1592986583709717,
568
+ "learning_rate": 4.509024510979917e-06,
569
+ "loss": 0.4636,
570
+ "step": 800
571
+ },
572
+ {
573
+ "epoch": 0.8594164456233422,
574
+ "grad_norm": 2.2622759342193604,
575
+ "learning_rate": 4.4904904371601176e-06,
576
+ "loss": 0.4685,
577
+ "step": 810
578
+ },
579
+ {
580
+ "epoch": 0.870026525198939,
581
+ "grad_norm": 2.3408522605895996,
582
+ "learning_rate": 4.4716525816480816e-06,
583
+ "loss": 0.5248,
584
+ "step": 820
585
+ },
586
+ {
587
+ "epoch": 0.8806366047745358,
588
+ "grad_norm": 2.5351459980010986,
589
+ "learning_rate": 4.4525138194114644e-06,
590
+ "loss": 0.4747,
591
+ "step": 830
592
+ },
593
+ {
594
+ "epoch": 0.8912466843501327,
595
+ "grad_norm": 2.4038591384887695,
596
+ "learning_rate": 4.4330770713412555e-06,
597
+ "loss": 0.4198,
598
+ "step": 840
599
+ },
600
+ {
601
+ "epoch": 0.9018567639257294,
602
+ "grad_norm": 2.2719292640686035,
603
+ "learning_rate": 4.413345303805996e-06,
604
+ "loss": 0.4545,
605
+ "step": 850
606
+ },
607
+ {
608
+ "epoch": 0.9124668435013262,
609
+ "grad_norm": 3.1209301948547363,
610
+ "learning_rate": 4.393321528199072e-06,
611
+ "loss": 0.5003,
612
+ "step": 860
613
+ },
614
+ {
615
+ "epoch": 0.9230769230769231,
616
+ "grad_norm": 2.414945125579834,
617
+ "learning_rate": 4.373008800479118e-06,
618
+ "loss": 0.472,
619
+ "step": 870
620
+ },
621
+ {
622
+ "epoch": 0.9336870026525199,
623
+ "grad_norm": 2.21144437789917,
624
+ "learning_rate": 4.352410220703629e-06,
625
+ "loss": 0.4661,
626
+ "step": 880
627
+ },
628
+ {
629
+ "epoch": 0.9442970822281167,
630
+ "grad_norm": 2.210827589035034,
631
+ "learning_rate": 4.331528932555844e-06,
632
+ "loss": 0.4614,
633
+ "step": 890
634
+ },
635
+ {
636
+ "epoch": 0.9549071618037135,
637
+ "grad_norm": 2.403038740158081,
638
+ "learning_rate": 4.3103681228649626e-06,
639
+ "loss": 0.4623,
640
+ "step": 900
641
+ },
642
+ {
643
+ "epoch": 0.9655172413793104,
644
+ "grad_norm": 2.588114023208618,
645
+ "learning_rate": 4.288931021119788e-06,
646
+ "loss": 0.4902,
647
+ "step": 910
648
+ },
649
+ {
650
+ "epoch": 0.9761273209549072,
651
+ "grad_norm": 2.288691997528076,
652
+ "learning_rate": 4.267220898975848e-06,
653
+ "loss": 0.5047,
654
+ "step": 920
655
+ },
656
+ {
657
+ "epoch": 0.986737400530504,
658
+ "grad_norm": 2.2487804889678955,
659
+ "learning_rate": 4.245241069756092e-06,
660
+ "loss": 0.5358,
661
+ "step": 930
662
+ },
663
+ {
664
+ "epoch": 0.9973474801061007,
665
+ "grad_norm": 2.5266008377075195,
666
+ "learning_rate": 4.222994887945219e-06,
667
+ "loss": 0.4928,
668
+ "step": 940
669
+ },
670
+ {
671
+ "epoch": 1.0074270557029177,
672
+ "grad_norm": 2.5962352752685547,
673
+ "learning_rate": 4.20048574867773e-06,
674
+ "loss": 0.3963,
675
+ "step": 950
676
+ },
677
+ {
678
+ "epoch": 1.0180371352785147,
679
+ "grad_norm": 2.707613229751587,
680
+ "learning_rate": 4.1777170872197725e-06,
681
+ "loss": 0.3125,
682
+ "step": 960
683
+ },
684
+ {
685
+ "epoch": 1.0286472148541115,
686
+ "grad_norm": 2.4237964153289795,
687
+ "learning_rate": 4.1546923784448646e-06,
688
+ "loss": 0.3457,
689
+ "step": 970
690
+ },
691
+ {
692
+ "epoch": 1.0392572944297083,
693
+ "grad_norm": 1.6531928777694702,
694
+ "learning_rate": 4.1314151363035705e-06,
695
+ "loss": 0.3029,
696
+ "step": 980
697
+ },
698
+ {
699
+ "epoch": 1.049867374005305,
700
+ "grad_norm": 2.1669981479644775,
701
+ "learning_rate": 4.1078889132872145e-06,
702
+ "loss": 0.3289,
703
+ "step": 990
704
+ },
705
+ {
706
+ "epoch": 1.0604774535809018,
707
+ "grad_norm": 2.445012092590332,
708
+ "learning_rate": 4.084117299885712e-06,
709
+ "loss": 0.3234,
710
+ "step": 1000
711
+ },
712
+ {
713
+ "epoch": 1.0710875331564986,
714
+ "grad_norm": 2.0615527629852295,
715
+ "learning_rate": 4.060103924039599e-06,
716
+ "loss": 0.3139,
717
+ "step": 1010
718
+ },
719
+ {
720
+ "epoch": 1.0816976127320954,
721
+ "grad_norm": 1.990400791168213,
722
+ "learning_rate": 4.035852450586352e-06,
723
+ "loss": 0.3144,
724
+ "step": 1020
725
+ },
726
+ {
727
+ "epoch": 1.0923076923076924,
728
+ "grad_norm": 2.5510122776031494,
729
+ "learning_rate": 4.011366580701073e-06,
730
+ "loss": 0.323,
731
+ "step": 1030
732
+ },
733
+ {
734
+ "epoch": 1.1029177718832892,
735
+ "grad_norm": 2.462083101272583,
736
+ "learning_rate": 3.9866500513316274e-06,
737
+ "loss": 0.3694,
738
+ "step": 1040
739
+ },
740
+ {
741
+ "epoch": 1.113527851458886,
742
+ "grad_norm": 2.4385085105895996,
743
+ "learning_rate": 3.961706634628323e-06,
744
+ "loss": 0.3351,
745
+ "step": 1050
746
+ },
747
+ {
748
+ "epoch": 1.1241379310344828,
749
+ "grad_norm": 1.7553578615188599,
750
+ "learning_rate": 3.936540137368222e-06,
751
+ "loss": 0.3459,
752
+ "step": 1060
753
+ },
754
+ {
755
+ "epoch": 1.1347480106100796,
756
+ "grad_norm": 2.513950824737549,
757
+ "learning_rate": 3.911154400374159e-06,
758
+ "loss": 0.3186,
759
+ "step": 1070
760
+ },
761
+ {
762
+ "epoch": 1.1453580901856764,
763
+ "grad_norm": 2.6273515224456787,
764
+ "learning_rate": 3.885553297928573e-06,
765
+ "loss": 0.3333,
766
+ "step": 1080
767
+ },
768
+ {
769
+ "epoch": 1.1559681697612731,
770
+ "grad_norm": 2.4155592918395996,
771
+ "learning_rate": 3.859740737182222e-06,
772
+ "loss": 0.3137,
773
+ "step": 1090
774
+ },
775
+ {
776
+ "epoch": 1.16657824933687,
777
+ "grad_norm": 2.719611644744873,
778
+ "learning_rate": 3.833720657557894e-06,
779
+ "loss": 0.3426,
780
+ "step": 1100
781
+ },
782
+ {
783
+ "epoch": 1.1771883289124667,
784
+ "grad_norm": 2.5729358196258545,
785
+ "learning_rate": 3.807497030149181e-06,
786
+ "loss": 0.3709,
787
+ "step": 1110
788
+ },
789
+ {
790
+ "epoch": 1.1877984084880637,
791
+ "grad_norm": 1.9626141786575317,
792
+ "learning_rate": 3.7810738571144257e-06,
793
+ "loss": 0.329,
794
+ "step": 1120
795
+ },
796
+ {
797
+ "epoch": 1.1984084880636605,
798
+ "grad_norm": 2.601951837539673,
799
+ "learning_rate": 3.7544551710659296e-06,
800
+ "loss": 0.305,
801
+ "step": 1130
802
+ },
803
+ {
804
+ "epoch": 1.2090185676392573,
805
+ "grad_norm": 2.4118540287017822,
806
+ "learning_rate": 3.7276450344545024e-06,
807
+ "loss": 0.3449,
808
+ "step": 1140
809
+ },
810
+ {
811
+ "epoch": 1.219628647214854,
812
+ "grad_norm": 2.5080604553222656,
813
+ "learning_rate": 3.7006475389494723e-06,
814
+ "loss": 0.3403,
815
+ "step": 1150
816
+ },
817
+ {
818
+ "epoch": 1.2302387267904509,
819
+ "grad_norm": 2.6882951259613037,
820
+ "learning_rate": 3.6734668048142273e-06,
821
+ "loss": 0.3342,
822
+ "step": 1160
823
+ },
824
+ {
825
+ "epoch": 1.2408488063660477,
826
+ "grad_norm": 2.3755247592926025,
827
+ "learning_rate": 3.646106980277394e-06,
828
+ "loss": 0.3589,
829
+ "step": 1170
830
+ },
831
+ {
832
+ "epoch": 1.2514588859416444,
833
+ "grad_norm": 2.4138166904449463,
834
+ "learning_rate": 3.618572240899748e-06,
835
+ "loss": 0.3447,
836
+ "step": 1180
837
+ },
838
+ {
839
+ "epoch": 1.2620689655172415,
840
+ "grad_norm": 2.6930105686187744,
841
+ "learning_rate": 3.5908667889369603e-06,
842
+ "loss": 0.3787,
843
+ "step": 1190
844
+ },
845
+ {
846
+ "epoch": 1.2726790450928382,
847
+ "grad_norm": 2.732795476913452,
848
+ "learning_rate": 3.5629948526982563e-06,
849
+ "loss": 0.3376,
850
+ "step": 1200
851
+ },
852
+ {
853
+ "epoch": 1.283289124668435,
854
+ "grad_norm": 1.8468087911605835,
855
+ "learning_rate": 3.534960685901111e-06,
856
+ "loss": 0.3461,
857
+ "step": 1210
858
+ },
859
+ {
860
+ "epoch": 1.2938992042440318,
861
+ "grad_norm": 2.3408284187316895,
862
+ "learning_rate": 3.506768567022062e-06,
863
+ "loss": 0.3396,
864
+ "step": 1220
865
+ },
866
+ {
867
+ "epoch": 1.3045092838196286,
868
+ "grad_norm": 2.7420434951782227,
869
+ "learning_rate": 3.478422798643737e-06,
870
+ "loss": 0.3364,
871
+ "step": 1230
872
+ },
873
+ {
874
+ "epoch": 1.3151193633952254,
875
+ "grad_norm": 2.634403705596924,
876
+ "learning_rate": 3.4499277067982177e-06,
877
+ "loss": 0.3126,
878
+ "step": 1240
879
+ },
880
+ {
881
+ "epoch": 1.3257294429708222,
882
+ "grad_norm": 2.4217336177825928,
883
+ "learning_rate": 3.421287640306809e-06,
884
+ "loss": 0.3092,
885
+ "step": 1250
886
+ },
887
+ {
888
+ "epoch": 1.3363395225464192,
889
+ "grad_norm": 1.7107937335968018,
890
+ "learning_rate": 3.3925069701163406e-06,
891
+ "loss": 0.3374,
892
+ "step": 1260
893
+ },
894
+ {
895
+ "epoch": 1.346949602122016,
896
+ "grad_norm": 2.1515822410583496,
897
+ "learning_rate": 3.363590088632085e-06,
898
+ "loss": 0.3436,
899
+ "step": 1270
900
+ },
901
+ {
902
+ "epoch": 1.3575596816976128,
903
+ "grad_norm": 2.0105717182159424,
904
+ "learning_rate": 3.334541409047408e-06,
905
+ "loss": 0.3283,
906
+ "step": 1280
907
+ },
908
+ {
909
+ "epoch": 1.3681697612732096,
910
+ "grad_norm": 1.8952791690826416,
911
+ "learning_rate": 3.3053653646702422e-06,
912
+ "loss": 0.358,
913
+ "step": 1290
914
+ },
915
+ {
916
+ "epoch": 1.3787798408488063,
917
+ "grad_norm": 1.8639928102493286,
918
+ "learning_rate": 3.276066408246487e-06,
919
+ "loss": 0.3084,
920
+ "step": 1300
921
+ },
922
+ {
923
+ "epoch": 1.3893899204244031,
924
+ "grad_norm": 2.563251256942749,
925
+ "learning_rate": 3.2466490112804484e-06,
926
+ "loss": 0.3508,
927
+ "step": 1310
928
+ },
929
+ {
930
+ "epoch": 1.4,
931
+ "grad_norm": 2.214616060256958,
932
+ "learning_rate": 3.217117663352417e-06,
933
+ "loss": 0.3215,
934
+ "step": 1320
935
+ },
936
+ {
937
+ "epoch": 1.410610079575597,
938
+ "grad_norm": 1.793468952178955,
939
+ "learning_rate": 3.187476871433478e-06,
940
+ "loss": 0.3193,
941
+ "step": 1330
942
+ },
943
+ {
944
+ "epoch": 1.4212201591511937,
945
+ "grad_norm": 2.204789638519287,
946
+ "learning_rate": 3.1577311591976766e-06,
947
+ "loss": 0.3019,
948
+ "step": 1340
949
+ },
950
+ {
951
+ "epoch": 1.4318302387267905,
952
+ "grad_norm": 2.307568311691284,
953
+ "learning_rate": 3.1278850663316307e-06,
954
+ "loss": 0.3099,
955
+ "step": 1350
956
+ },
957
+ {
958
+ "epoch": 1.4424403183023873,
959
+ "grad_norm": 2.485848903656006,
960
+ "learning_rate": 3.0979431478416987e-06,
961
+ "loss": 0.3085,
962
+ "step": 1360
963
+ },
964
+ {
965
+ "epoch": 1.453050397877984,
966
+ "grad_norm": 1.953053593635559,
967
+ "learning_rate": 3.067909973358811e-06,
968
+ "loss": 0.3211,
969
+ "step": 1370
970
+ },
971
+ {
972
+ "epoch": 1.4636604774535809,
973
+ "grad_norm": 2.2350101470947266,
974
+ "learning_rate": 3.0377901264410673e-06,
975
+ "loss": 0.3329,
976
+ "step": 1380
977
+ },
978
+ {
979
+ "epoch": 1.4742705570291776,
980
+ "grad_norm": 2.542452335357666,
981
+ "learning_rate": 3.0075882038742133e-06,
982
+ "loss": 0.3376,
983
+ "step": 1390
984
+ },
985
+ {
986
+ "epoch": 1.4848806366047747,
987
+ "grad_norm": 2.3203530311584473,
988
+ "learning_rate": 2.9773088149700923e-06,
989
+ "loss": 0.2896,
990
+ "step": 1400
991
+ },
992
+ {
993
+ "epoch": 1.4954907161803712,
994
+ "grad_norm": 1.9708584547042847,
995
+ "learning_rate": 2.9469565808631888e-06,
996
+ "loss": 0.299,
997
+ "step": 1410
998
+ },
999
+ {
1000
+ "epoch": 1.5061007957559682,
1001
+ "grad_norm": 2.63698148727417,
1002
+ "learning_rate": 2.9165361338053683e-06,
1003
+ "loss": 0.3484,
1004
+ "step": 1420
1005
+ },
1006
+ {
1007
+ "epoch": 1.516710875331565,
1008
+ "grad_norm": 2.091648578643799,
1009
+ "learning_rate": 2.886052116458918e-06,
1010
+ "loss": 0.3316,
1011
+ "step": 1430
1012
+ },
1013
+ {
1014
+ "epoch": 1.5273209549071618,
1015
+ "grad_norm": 1.955355167388916,
1016
+ "learning_rate": 2.8555091811880004e-06,
1017
+ "loss": 0.328,
1018
+ "step": 1440
1019
+ },
1020
+ {
1021
+ "epoch": 1.5379310344827586,
1022
+ "grad_norm": 1.6724951267242432,
1023
+ "learning_rate": 2.8249119893486252e-06,
1024
+ "loss": 0.3215,
1025
+ "step": 1450
1026
+ },
1027
+ {
1028
+ "epoch": 1.5485411140583554,
1029
+ "grad_norm": 2.1872570514678955,
1030
+ "learning_rate": 2.7942652105772516e-06,
1031
+ "loss": 0.3118,
1032
+ "step": 1460
1033
+ },
1034
+ {
1035
+ "epoch": 1.5591511936339524,
1036
+ "grad_norm": 3.0710208415985107,
1037
+ "learning_rate": 2.7635735220781214e-06,
1038
+ "loss": 0.2973,
1039
+ "step": 1470
1040
+ },
1041
+ {
1042
+ "epoch": 1.569761273209549,
1043
+ "grad_norm": 2.357663631439209,
1044
+ "learning_rate": 2.7328416079094412e-06,
1045
+ "loss": 0.3423,
1046
+ "step": 1480
1047
+ },
1048
+ {
1049
+ "epoch": 1.580371352785146,
1050
+ "grad_norm": 2.2559144496917725,
1051
+ "learning_rate": 2.7020741582685217e-06,
1052
+ "loss": 0.3211,
1053
+ "step": 1490
1054
+ },
1055
+ {
1056
+ "epoch": 1.5909814323607427,
1057
+ "grad_norm": 2.0730817317962646,
1058
+ "learning_rate": 2.6712758687759706e-06,
1059
+ "loss": 0.2733,
1060
+ "step": 1500
1061
+ },
1062
+ {
1063
+ "epoch": 1.6015915119363395,
1064
+ "grad_norm": 2.6119141578674316,
1065
+ "learning_rate": 2.6404514397590657e-06,
1066
+ "loss": 0.338,
1067
+ "step": 1510
1068
+ },
1069
+ {
1070
+ "epoch": 1.6122015915119363,
1071
+ "grad_norm": 2.315875768661499,
1072
+ "learning_rate": 2.6096055755344113e-06,
1073
+ "loss": 0.3124,
1074
+ "step": 1520
1075
+ },
1076
+ {
1077
+ "epoch": 1.622811671087533,
1078
+ "grad_norm": 2.2880892753601074,
1079
+ "learning_rate": 2.578742983689973e-06,
1080
+ "loss": 0.3538,
1081
+ "step": 1530
1082
+ },
1083
+ {
1084
+ "epoch": 1.6334217506631301,
1085
+ "grad_norm": 2.2615041732788086,
1086
+ "learning_rate": 2.547868374366631e-06,
1087
+ "loss": 0.3353,
1088
+ "step": 1540
1089
+ },
1090
+ {
1091
+ "epoch": 1.6440318302387267,
1092
+ "grad_norm": 1.9062315225601196,
1093
+ "learning_rate": 2.5169864595393295e-06,
1094
+ "loss": 0.302,
1095
+ "step": 1550
1096
+ },
1097
+ {
1098
+ "epoch": 1.6546419098143237,
1099
+ "grad_norm": 2.7016942501068115,
1100
+ "learning_rate": 2.4861019522979537e-06,
1101
+ "loss": 0.3124,
1102
+ "step": 1560
1103
+ },
1104
+ {
1105
+ "epoch": 1.6652519893899205,
1106
+ "grad_norm": 2.4618184566497803,
1107
+ "learning_rate": 2.455219566128034e-06,
1108
+ "loss": 0.3497,
1109
+ "step": 1570
1110
+ },
1111
+ {
1112
+ "epoch": 1.6758620689655173,
1113
+ "grad_norm": 2.8924951553344727,
1114
+ "learning_rate": 2.4243440141913905e-06,
1115
+ "loss": 0.3233,
1116
+ "step": 1580
1117
+ },
1118
+ {
1119
+ "epoch": 1.686472148541114,
1120
+ "grad_norm": 2.32255482673645,
1121
+ "learning_rate": 2.393480008606825e-06,
1122
+ "loss": 0.3067,
1123
+ "step": 1590
1124
+ },
1125
+ {
1126
+ "epoch": 1.6970822281167108,
1127
+ "grad_norm": 1.8984359502792358,
1128
+ "learning_rate": 2.3626322597309774e-06,
1129
+ "loss": 0.2893,
1130
+ "step": 1600
1131
+ },
1132
+ {
1133
+ "epoch": 1.7076923076923078,
1134
+ "grad_norm": 1.8360289335250854,
1135
+ "learning_rate": 2.331805475439445e-06,
1136
+ "loss": 0.2825,
1137
+ "step": 1610
1138
+ },
1139
+ {
1140
+ "epoch": 1.7183023872679044,
1141
+ "grad_norm": 2.331998109817505,
1142
+ "learning_rate": 2.3010043604082824e-06,
1143
+ "loss": 0.3379,
1144
+ "step": 1620
1145
+ },
1146
+ {
1147
+ "epoch": 1.7289124668435014,
1148
+ "grad_norm": 2.3304574489593506,
1149
+ "learning_rate": 2.2702336153959925e-06,
1150
+ "loss": 0.301,
1151
+ "step": 1630
1152
+ },
1153
+ {
1154
+ "epoch": 1.739522546419098,
1155
+ "grad_norm": 2.534090518951416,
1156
+ "learning_rate": 2.2394979365261134e-06,
1157
+ "loss": 0.404,
1158
+ "step": 1640
1159
+ },
1160
+ {
1161
+ "epoch": 1.750132625994695,
1162
+ "grad_norm": 2.273122549057007,
1163
+ "learning_rate": 2.208802014570507e-06,
1164
+ "loss": 0.3242,
1165
+ "step": 1650
1166
+ },
1167
+ {
1168
+ "epoch": 1.7607427055702918,
1169
+ "grad_norm": 1.8859643936157227,
1170
+ "learning_rate": 2.1781505342334775e-06,
1171
+ "loss": 0.3152,
1172
+ "step": 1660
1173
+ },
1174
+ {
1175
+ "epoch": 1.7713527851458886,
1176
+ "grad_norm": 2.567715644836426,
1177
+ "learning_rate": 2.147548173436805e-06,
1178
+ "loss": 0.3302,
1179
+ "step": 1670
1180
+ },
1181
+ {
1182
+ "epoch": 1.7819628647214856,
1183
+ "grad_norm": 2.7930519580841064,
1184
+ "learning_rate": 2.116999602605814e-06,
1185
+ "loss": 0.293,
1186
+ "step": 1680
1187
+ },
1188
+ {
1189
+ "epoch": 1.7925729442970821,
1190
+ "grad_norm": 2.646296262741089,
1191
+ "learning_rate": 2.086509483956594e-06,
1192
+ "loss": 0.2683,
1193
+ "step": 1690
1194
+ },
1195
+ {
1196
+ "epoch": 1.8031830238726791,
1197
+ "grad_norm": 2.3010053634643555,
1198
+ "learning_rate": 2.056082470784469e-06,
1199
+ "loss": 0.313,
1200
+ "step": 1700
1201
+ },
1202
+ {
1203
+ "epoch": 1.8137931034482757,
1204
+ "grad_norm": 2.3864669799804688,
1205
+ "learning_rate": 2.0257232067538213e-06,
1206
+ "loss": 0.262,
1207
+ "step": 1710
1208
+ },
1209
+ {
1210
+ "epoch": 1.8244031830238727,
1211
+ "grad_norm": 2.63028883934021,
1212
+ "learning_rate": 1.9954363251894007e-06,
1213
+ "loss": 0.3457,
1214
+ "step": 1720
1215
+ },
1216
+ {
1217
+ "epoch": 1.8350132625994695,
1218
+ "grad_norm": 2.0011484622955322,
1219
+ "learning_rate": 1.9652264483691933e-06,
1220
+ "loss": 0.2739,
1221
+ "step": 1730
1222
+ },
1223
+ {
1224
+ "epoch": 1.8456233421750663,
1225
+ "grad_norm": 2.6818690299987793,
1226
+ "learning_rate": 1.9350981868189944e-06,
1227
+ "loss": 0.3109,
1228
+ "step": 1740
1229
+ },
1230
+ {
1231
+ "epoch": 1.856233421750663,
1232
+ "grad_norm": 2.6978225708007812,
1233
+ "learning_rate": 1.9050561386087618e-06,
1234
+ "loss": 0.3269,
1235
+ "step": 1750
1236
+ },
1237
+ {
1238
+ "epoch": 1.8668435013262599,
1239
+ "grad_norm": 2.578031301498413,
1240
+ "learning_rate": 1.8751048886508711e-06,
1241
+ "loss": 0.3617,
1242
+ "step": 1760
1243
+ },
1244
+ {
1245
+ "epoch": 1.8774535809018569,
1246
+ "grad_norm": 2.5525052547454834,
1247
+ "learning_rate": 1.8452490080003888e-06,
1248
+ "loss": 0.3228,
1249
+ "step": 1770
1250
+ },
1251
+ {
1252
+ "epoch": 1.8880636604774534,
1253
+ "grad_norm": 2.1095635890960693,
1254
+ "learning_rate": 1.8154930531574521e-06,
1255
+ "loss": 0.2857,
1256
+ "step": 1780
1257
+ },
1258
+ {
1259
+ "epoch": 1.8986737400530505,
1260
+ "grad_norm": 2.3965845108032227,
1261
+ "learning_rate": 1.785841565371868e-06,
1262
+ "loss": 0.3622,
1263
+ "step": 1790
1264
+ },
1265
+ {
1266
+ "epoch": 1.9092838196286472,
1267
+ "grad_norm": 2.293715238571167,
1268
+ "learning_rate": 1.7562990699500482e-06,
1269
+ "loss": 0.3031,
1270
+ "step": 1800
1271
+ },
1272
+ {
1273
+ "epoch": 1.919893899204244,
1274
+ "grad_norm": 2.026015281677246,
1275
+ "learning_rate": 1.7268700755643708e-06,
1276
+ "loss": 0.3019,
1277
+ "step": 1810
1278
+ },
1279
+ {
1280
+ "epoch": 1.9305039787798408,
1281
+ "grad_norm": 1.7175791263580322,
1282
+ "learning_rate": 1.6975590735650812e-06,
1283
+ "loss": 0.3047,
1284
+ "step": 1820
1285
+ },
1286
+ {
1287
+ "epoch": 1.9411140583554376,
1288
+ "grad_norm": 2.0024490356445312,
1289
+ "learning_rate": 1.668370537294841e-06,
1290
+ "loss": 0.3048,
1291
+ "step": 1830
1292
+ },
1293
+ {
1294
+ "epoch": 1.9517241379310346,
1295
+ "grad_norm": 2.8226239681243896,
1296
+ "learning_rate": 1.6393089214060204e-06,
1297
+ "loss": 0.3205,
1298
+ "step": 1840
1299
+ },
1300
+ {
1301
+ "epoch": 1.9623342175066312,
1302
+ "grad_norm": 1.9452221393585205,
1303
+ "learning_rate": 1.6103786611808414e-06,
1304
+ "loss": 0.321,
1305
+ "step": 1850
1306
+ },
1307
+ {
1308
+ "epoch": 1.9729442970822282,
1309
+ "grad_norm": 2.304274320602417,
1310
+ "learning_rate": 1.5815841718544884e-06,
1311
+ "loss": 0.2954,
1312
+ "step": 1860
1313
+ },
1314
+ {
1315
+ "epoch": 1.983554376657825,
1316
+ "grad_norm": 2.502206802368164,
1317
+ "learning_rate": 1.5529298479412636e-06,
1318
+ "loss": 0.2945,
1319
+ "step": 1870
1320
+ },
1321
+ {
1322
+ "epoch": 1.9941644562334218,
1323
+ "grad_norm": 2.5796189308166504,
1324
+ "learning_rate": 1.524420062563912e-06,
1325
+ "loss": 0.3291,
1326
+ "step": 1880
1327
+ },
1328
+ {
1329
+ "epoch": 2.004244031830239,
1330
+ "grad_norm": 1.9198871850967407,
1331
+ "learning_rate": 1.4960591667862163e-06,
1332
+ "loss": 0.234,
1333
+ "step": 1890
1334
+ },
1335
+ {
1336
+ "epoch": 2.0148541114058354,
1337
+ "grad_norm": 1.7082706689834595,
1338
+ "learning_rate": 1.4678514889489464e-06,
1339
+ "loss": 0.1943,
1340
+ "step": 1900
1341
+ },
1342
+ {
1343
+ "epoch": 2.0254641909814324,
1344
+ "grad_norm": 1.8571817874908447,
1345
+ "learning_rate": 1.4398013340092864e-06,
1346
+ "loss": 0.1911,
1347
+ "step": 1910
1348
+ },
1349
+ {
1350
+ "epoch": 2.0360742705570294,
1351
+ "grad_norm": 2.454561233520508,
1352
+ "learning_rate": 1.4119129828838275e-06,
1353
+ "loss": 0.1895,
1354
+ "step": 1920
1355
+ },
1356
+ {
1357
+ "epoch": 2.046684350132626,
1358
+ "grad_norm": 2.3714683055877686,
1359
+ "learning_rate": 1.384190691795226e-06,
1360
+ "loss": 0.2177,
1361
+ "step": 1930
1362
+ },
1363
+ {
1364
+ "epoch": 2.057294429708223,
1365
+ "grad_norm": 2.1356313228607178,
1366
+ "learning_rate": 1.3566386916226373e-06,
1367
+ "loss": 0.2252,
1368
+ "step": 1940
1369
+ },
1370
+ {
1371
+ "epoch": 2.0679045092838195,
1372
+ "grad_norm": 2.446906089782715,
1373
+ "learning_rate": 1.3292611872560134e-06,
1374
+ "loss": 0.1982,
1375
+ "step": 1950
1376
+ },
1377
+ {
1378
+ "epoch": 2.0785145888594165,
1379
+ "grad_norm": 2.1040875911712646,
1380
+ "learning_rate": 1.302062356954365e-06,
1381
+ "loss": 0.1696,
1382
+ "step": 1960
1383
+ },
1384
+ {
1385
+ "epoch": 2.089124668435013,
1386
+ "grad_norm": 2.220742702484131,
1387
+ "learning_rate": 1.2750463517080922e-06,
1388
+ "loss": 0.1936,
1389
+ "step": 1970
1390
+ },
1391
+ {
1392
+ "epoch": 2.09973474801061,
1393
+ "grad_norm": 2.7784054279327393,
1394
+ "learning_rate": 1.2482172946054753e-06,
1395
+ "loss": 0.1604,
1396
+ "step": 1980
1397
+ },
1398
+ {
1399
+ "epoch": 2.110344827586207,
1400
+ "grad_norm": 2.0539498329162598,
1401
+ "learning_rate": 1.2215792802034187e-06,
1402
+ "loss": 0.2069,
1403
+ "step": 1990
1404
+ },
1405
+ {
1406
+ "epoch": 2.1209549071618037,
1407
+ "grad_norm": 1.8337138891220093,
1408
+ "learning_rate": 1.1951363739025618e-06,
1409
+ "loss": 0.1964,
1410
+ "step": 2000
1411
+ },
1412
+ {
1413
+ "epoch": 2.1315649867374007,
1414
+ "grad_norm": 1.7631642818450928,
1415
+ "learning_rate": 1.168892611326827e-06,
1416
+ "loss": 0.1871,
1417
+ "step": 2010
1418
+ },
1419
+ {
1420
+ "epoch": 2.1421750663129973,
1421
+ "grad_norm": 2.386589527130127,
1422
+ "learning_rate": 1.1428519977075136e-06,
1423
+ "loss": 0.2595,
1424
+ "step": 2020
1425
+ },
1426
+ {
1427
+ "epoch": 2.1527851458885943,
1428
+ "grad_norm": 2.553382635116577,
1429
+ "learning_rate": 1.1170185072720434e-06,
1430
+ "loss": 0.185,
1431
+ "step": 2030
1432
+ },
1433
+ {
1434
+ "epoch": 2.163395225464191,
1435
+ "grad_norm": 2.870973825454712,
1436
+ "learning_rate": 1.091396082637419e-06,
1437
+ "loss": 0.228,
1438
+ "step": 2040
1439
+ },
1440
+ {
1441
+ "epoch": 2.174005305039788,
1442
+ "grad_norm": 2.643745183944702,
1443
+ "learning_rate": 1.065988634208516e-06,
1444
+ "loss": 0.2098,
1445
+ "step": 2050
1446
+ },
1447
+ {
1448
+ "epoch": 2.184615384615385,
1449
+ "grad_norm": 2.369596481323242,
1450
+ "learning_rate": 1.0408000395812961e-06,
1451
+ "loss": 0.1982,
1452
+ "step": 2060
1453
+ },
1454
+ {
1455
+ "epoch": 2.1952254641909814,
1456
+ "grad_norm": 2.1093883514404297,
1457
+ "learning_rate": 1.0158341429510194e-06,
1458
+ "loss": 0.1844,
1459
+ "step": 2070
1460
+ },
1461
+ {
1462
+ "epoch": 2.2058355437665784,
1463
+ "grad_norm": 1.951935052871704,
1464
+ "learning_rate": 9.910947545255523e-07,
1465
+ "loss": 0.1654,
1466
+ "step": 2080
1467
+ },
1468
+ {
1469
+ "epoch": 2.216445623342175,
1470
+ "grad_norm": 2.230781078338623,
1471
+ "learning_rate": 9.665856499438744e-07,
1472
+ "loss": 0.2037,
1473
+ "step": 2090
1474
+ },
1475
+ {
1476
+ "epoch": 2.227055702917772,
1477
+ "grad_norm": 2.6240904331207275,
1478
+ "learning_rate": 9.423105696998491e-07,
1479
+ "loss": 0.2087,
1480
+ "step": 2100
1481
+ },
1482
+ {
1483
+ "epoch": 2.2376657824933686,
1484
+ "grad_norm": 1.712857723236084,
1485
+ "learning_rate": 9.182732185713633e-07,
1486
+ "loss": 0.2105,
1487
+ "step": 2110
1488
+ },
1489
+ {
1490
+ "epoch": 2.2482758620689656,
1491
+ "grad_norm": 2.036086082458496,
1492
+ "learning_rate": 8.94477265054918e-07,
1493
+ "loss": 0.2186,
1494
+ "step": 2120
1495
+ },
1496
+ {
1497
+ "epoch": 2.2588859416445626,
1498
+ "grad_norm": 2.3545398712158203,
1499
+ "learning_rate": 8.709263408057522e-07,
1500
+ "loss": 0.1879,
1501
+ "step": 2130
1502
+ },
1503
+ {
1504
+ "epoch": 2.269496021220159,
1505
+ "grad_norm": 1.9098992347717285,
1506
+ "learning_rate": 8.476240400835972e-07,
1507
+ "loss": 0.2177,
1508
+ "step": 2140
1509
+ },
1510
+ {
1511
+ "epoch": 2.280106100795756,
1512
+ "grad_norm": 2.107959270477295,
1513
+ "learning_rate": 8.245739192041311e-07,
1514
+ "loss": 0.165,
1515
+ "step": 2150
1516
+ },
1517
+ {
1518
+ "epoch": 2.2907161803713527,
1519
+ "grad_norm": 2.550719976425171,
1520
+ "learning_rate": 8.017794959962225e-07,
1521
+ "loss": 0.2018,
1522
+ "step": 2160
1523
+ },
1524
+ {
1525
+ "epoch": 2.3013262599469497,
1526
+ "grad_norm": 2.354701280593872,
1527
+ "learning_rate": 7.792442492650587e-07,
1528
+ "loss": 0.1955,
1529
+ "step": 2170
1530
+ },
1531
+ {
1532
+ "epoch": 2.3119363395225463,
1533
+ "grad_norm": 2.3547091484069824,
1534
+ "learning_rate": 7.569716182612177e-07,
1535
+ "loss": 0.1976,
1536
+ "step": 2180
1537
+ },
1538
+ {
1539
+ "epoch": 2.3225464190981433,
1540
+ "grad_norm": 1.4048022031784058,
1541
+ "learning_rate": 7.349650021557839e-07,
1542
+ "loss": 0.1685,
1543
+ "step": 2190
1544
+ },
1545
+ {
1546
+ "epoch": 2.33315649867374,
1547
+ "grad_norm": 2.568500280380249,
1548
+ "learning_rate": 7.132277595215773e-07,
1549
+ "loss": 0.1519,
1550
+ "step": 2200
1551
+ },
1552
+ {
1553
+ "epoch": 2.343766578249337,
1554
+ "grad_norm": 2.205993413925171,
1555
+ "learning_rate": 6.917632078205805e-07,
1556
+ "loss": 0.1573,
1557
+ "step": 2210
1558
+ },
1559
+ {
1560
+ "epoch": 2.3543766578249334,
1561
+ "grad_norm": 2.067505121231079,
1562
+ "learning_rate": 6.705746228976387e-07,
1563
+ "loss": 0.184,
1564
+ "step": 2220
1565
+ },
1566
+ {
1567
+ "epoch": 2.3649867374005304,
1568
+ "grad_norm": 2.4360201358795166,
1569
+ "learning_rate": 6.496652384805125e-07,
1570
+ "loss": 0.1968,
1571
+ "step": 2230
1572
+ },
1573
+ {
1574
+ "epoch": 2.3755968169761275,
1575
+ "grad_norm": 2.042179584503174,
1576
+ "learning_rate": 6.290382456863584e-07,
1577
+ "loss": 0.1846,
1578
+ "step": 2240
1579
+ },
1580
+ {
1581
+ "epoch": 2.386206896551724,
1582
+ "grad_norm": 2.849271535873413,
1583
+ "learning_rate": 6.086967925347075e-07,
1584
+ "loss": 0.1858,
1585
+ "step": 2250
1586
+ },
1587
+ {
1588
+ "epoch": 2.396816976127321,
1589
+ "grad_norm": 2.0765082836151123,
1590
+ "learning_rate": 5.88643983467033e-07,
1591
+ "loss": 0.1837,
1592
+ "step": 2260
1593
+ },
1594
+ {
1595
+ "epoch": 2.4074270557029176,
1596
+ "grad_norm": 1.9958840608596802,
1597
+ "learning_rate": 5.688828788729547e-07,
1598
+ "loss": 0.1659,
1599
+ "step": 2270
1600
+ },
1601
+ {
1602
+ "epoch": 2.4180371352785146,
1603
+ "grad_norm": 2.253602981567383,
1604
+ "learning_rate": 5.494164946231747e-07,
1605
+ "loss": 0.2095,
1606
+ "step": 2280
1607
+ },
1608
+ {
1609
+ "epoch": 2.428647214854111,
1610
+ "grad_norm": 1.5552992820739746,
1611
+ "learning_rate": 5.302478016092075e-07,
1612
+ "loss": 0.1862,
1613
+ "step": 2290
1614
+ },
1615
+ {
1616
+ "epoch": 2.439257294429708,
1617
+ "grad_norm": 2.721445322036743,
1618
+ "learning_rate": 5.113797252899728e-07,
1619
+ "loss": 0.2085,
1620
+ "step": 2300
1621
+ },
1622
+ {
1623
+ "epoch": 2.449867374005305,
1624
+ "grad_norm": 2.3488707542419434,
1625
+ "learning_rate": 4.928151452453184e-07,
1626
+ "loss": 0.1914,
1627
+ "step": 2310
1628
+ },
1629
+ {
1630
+ "epoch": 2.4604774535809018,
1631
+ "grad_norm": 2.49068021774292,
1632
+ "learning_rate": 4.745568947365542e-07,
1633
+ "loss": 0.1718,
1634
+ "step": 2320
1635
+ },
1636
+ {
1637
+ "epoch": 2.4710875331564988,
1638
+ "grad_norm": 1.4638549089431763,
1639
+ "learning_rate": 4.5660776027404654e-07,
1640
+ "loss": 0.1669,
1641
+ "step": 2330
1642
+ },
1643
+ {
1644
+ "epoch": 2.4816976127320953,
1645
+ "grad_norm": 2.288776159286499,
1646
+ "learning_rate": 4.389704811919507e-07,
1647
+ "loss": 0.1731,
1648
+ "step": 2340
1649
+ },
1650
+ {
1651
+ "epoch": 2.4923076923076923,
1652
+ "grad_norm": 2.385162115097046,
1653
+ "learning_rate": 4.216477492301455e-07,
1654
+ "loss": 0.1802,
1655
+ "step": 2350
1656
+ },
1657
+ {
1658
+ "epoch": 2.502917771883289,
1659
+ "grad_norm": 2.0100815296173096,
1660
+ "learning_rate": 4.0464220812342526e-07,
1661
+ "loss": 0.2232,
1662
+ "step": 2360
1663
+ },
1664
+ {
1665
+ "epoch": 2.513527851458886,
1666
+ "grad_norm": 1.8439091444015503,
1667
+ "learning_rate": 3.87956453198027e-07,
1668
+ "loss": 0.1432,
1669
+ "step": 2370
1670
+ },
1671
+ {
1672
+ "epoch": 2.524137931034483,
1673
+ "grad_norm": 2.3093338012695312,
1674
+ "learning_rate": 3.715930309755389e-07,
1675
+ "loss": 0.1834,
1676
+ "step": 2380
1677
+ },
1678
+ {
1679
+ "epoch": 2.5347480106100795,
1680
+ "grad_norm": 2.3250088691711426,
1681
+ "learning_rate": 3.5555443878425635e-07,
1682
+ "loss": 0.2123,
1683
+ "step": 2390
1684
+ },
1685
+ {
1686
+ "epoch": 2.5453580901856765,
1687
+ "grad_norm": 1.8003133535385132,
1688
+ "learning_rate": 3.398431243780531e-07,
1689
+ "loss": 0.2034,
1690
+ "step": 2400
1691
+ },
1692
+ {
1693
+ "epoch": 2.555968169761273,
1694
+ "grad_norm": 2.8948135375976562,
1695
+ "learning_rate": 3.2446148556281117e-07,
1696
+ "loss": 0.1778,
1697
+ "step": 2410
1698
+ },
1699
+ {
1700
+ "epoch": 2.56657824933687,
1701
+ "grad_norm": 1.8556360006332397,
1702
+ "learning_rate": 3.0941186983047543e-07,
1703
+ "loss": 0.1892,
1704
+ "step": 2420
1705
+ },
1706
+ {
1707
+ "epoch": 2.5771883289124666,
1708
+ "grad_norm": 2.771932363510132,
1709
+ "learning_rate": 2.9469657400078925e-07,
1710
+ "loss": 0.1935,
1711
+ "step": 2430
1712
+ },
1713
+ {
1714
+ "epoch": 2.5877984084880636,
1715
+ "grad_norm": 2.5325114727020264,
1716
+ "learning_rate": 2.8031784387076186e-07,
1717
+ "loss": 0.1858,
1718
+ "step": 2440
1719
+ },
1720
+ {
1721
+ "epoch": 2.5984084880636606,
1722
+ "grad_norm": 2.4069302082061768,
1723
+ "learning_rate": 2.6627787387191934e-07,
1724
+ "loss": 0.2118,
1725
+ "step": 2450
1726
+ },
1727
+ {
1728
+ "epoch": 2.609018567639257,
1729
+ "grad_norm": 2.053656816482544,
1730
+ "learning_rate": 2.5257880673540376e-07,
1731
+ "loss": 0.1929,
1732
+ "step": 2460
1733
+ },
1734
+ {
1735
+ "epoch": 2.6196286472148542,
1736
+ "grad_norm": 1.8820626735687256,
1737
+ "learning_rate": 2.392227331649527e-07,
1738
+ "loss": 0.1745,
1739
+ "step": 2470
1740
+ },
1741
+ {
1742
+ "epoch": 2.630238726790451,
1743
+ "grad_norm": 1.9418586492538452,
1744
+ "learning_rate": 2.2621169151782417e-07,
1745
+ "loss": 0.1823,
1746
+ "step": 2480
1747
+ },
1748
+ {
1749
+ "epoch": 2.640848806366048,
1750
+ "grad_norm": 2.519037961959839,
1751
+ "learning_rate": 2.1354766749371093e-07,
1752
+ "loss": 0.2037,
1753
+ "step": 2490
1754
+ },
1755
+ {
1756
+ "epoch": 2.6514588859416444,
1757
+ "grad_norm": 2.010211944580078,
1758
+ "learning_rate": 2.0123259383169031e-07,
1759
+ "loss": 0.2196,
1760
+ "step": 2500
1761
+ },
1762
+ {
1763
+ "epoch": 2.6620689655172414,
1764
+ "grad_norm": 1.9838532209396362,
1765
+ "learning_rate": 1.8926835001525257e-07,
1766
+ "loss": 0.1848,
1767
+ "step": 2510
1768
+ },
1769
+ {
1770
+ "epoch": 2.6726790450928384,
1771
+ "grad_norm": 2.3488149642944336,
1772
+ "learning_rate": 1.776567619854655e-07,
1773
+ "loss": 0.1823,
1774
+ "step": 2520
1775
+ },
1776
+ {
1777
+ "epoch": 2.683289124668435,
1778
+ "grad_norm": 2.839651584625244,
1779
+ "learning_rate": 1.6639960186230293e-07,
1780
+ "loss": 0.2039,
1781
+ "step": 2530
1782
+ },
1783
+ {
1784
+ "epoch": 2.693899204244032,
1785
+ "grad_norm": 2.050480842590332,
1786
+ "learning_rate": 1.5549858767419018e-07,
1787
+ "loss": 0.1796,
1788
+ "step": 2540
1789
+ },
1790
+ {
1791
+ "epoch": 2.7045092838196285,
1792
+ "grad_norm": 1.2738044261932373,
1793
+ "learning_rate": 1.449553830958053e-07,
1794
+ "loss": 0.1893,
1795
+ "step": 2550
1796
+ },
1797
+ {
1798
+ "epoch": 2.7151193633952255,
1799
+ "grad_norm": 1.8912787437438965,
1800
+ "learning_rate": 1.347715971941746e-07,
1801
+ "loss": 0.1947,
1802
+ "step": 2560
1803
+ },
1804
+ {
1805
+ "epoch": 2.725729442970822,
1806
+ "grad_norm": 1.8385730981826782,
1807
+ "learning_rate": 1.2494878418310234e-07,
1808
+ "loss": 0.1744,
1809
+ "step": 2570
1810
+ },
1811
+ {
1812
+ "epoch": 2.736339522546419,
1813
+ "grad_norm": 2.1071712970733643,
1814
+ "learning_rate": 1.1548844318597208e-07,
1815
+ "loss": 0.2351,
1816
+ "step": 2580
1817
+ },
1818
+ {
1819
+ "epoch": 2.746949602122016,
1820
+ "grad_norm": 2.054392099380493,
1821
+ "learning_rate": 1.0639201800695553e-07,
1822
+ "loss": 0.2245,
1823
+ "step": 2590
1824
+ },
1825
+ {
1826
+ "epoch": 2.7575596816976127,
1827
+ "grad_norm": 1.656562328338623,
1828
+ "learning_rate": 9.76608969106646e-08,
1829
+ "loss": 0.2014,
1830
+ "step": 2600
1831
+ },
1832
+ {
1833
+ "epoch": 2.7681697612732097,
1834
+ "grad_norm": 2.6887638568878174,
1835
+ "learning_rate": 8.929641241027937e-08,
1836
+ "loss": 0.1824,
1837
+ "step": 2610
1838
+ },
1839
+ {
1840
+ "epoch": 2.7787798408488062,
1841
+ "grad_norm": 2.4606659412384033,
1842
+ "learning_rate": 8.129984106418354e-08,
1843
+ "loss": 0.1706,
1844
+ "step": 2620
1845
+ },
1846
+ {
1847
+ "epoch": 2.7893899204244033,
1848
+ "grad_norm": 2.5548455715179443,
1849
+ "learning_rate": 7.3672403281142e-08,
1850
+ "loss": 0.2195,
1851
+ "step": 2630
1852
+ },
1853
+ {
1854
+ "epoch": 2.8,
1855
+ "grad_norm": 1.7952167987823486,
1856
+ "learning_rate": 6.641526313404534e-08,
1857
+ "loss": 0.1748,
1858
+ "step": 2640
1859
+ },
1860
+ {
1861
+ "epoch": 2.810610079575597,
1862
+ "grad_norm": 2.376830816268921,
1863
+ "learning_rate": 5.952952818225416e-08,
1864
+ "loss": 0.2061,
1865
+ "step": 2650
1866
+ },
1867
+ {
1868
+ "epoch": 2.821220159151194,
1869
+ "grad_norm": 1.7183632850646973,
1870
+ "learning_rate": 5.3016249302565436e-08,
1871
+ "loss": 0.1742,
1872
+ "step": 2660
1873
+ },
1874
+ {
1875
+ "epoch": 2.8318302387267904,
1876
+ "grad_norm": 2.11011004447937,
1877
+ "learning_rate": 4.6876420528833014e-08,
1878
+ "loss": 0.2082,
1879
+ "step": 2670
1880
+ },
1881
+ {
1882
+ "epoch": 2.8424403183023874,
1883
+ "grad_norm": 1.8799868822097778,
1884
+ "learning_rate": 4.111097890026089e-08,
1885
+ "loss": 0.1805,
1886
+ "step": 2680
1887
+ },
1888
+ {
1889
+ "epoch": 2.853050397877984,
1890
+ "grad_norm": 2.5171291828155518,
1891
+ "learning_rate": 3.5720804318395976e-08,
1892
+ "loss": 0.2058,
1893
+ "step": 2690
1894
+ },
1895
+ {
1896
+ "epoch": 2.863660477453581,
1897
+ "grad_norm": 2.142263650894165,
1898
+ "learning_rate": 3.0706719412839926e-08,
1899
+ "loss": 0.2027,
1900
+ "step": 2700
1901
+ },
1902
+ {
1903
+ "epoch": 2.8742705570291776,
1904
+ "grad_norm": 2.2124040126800537,
1905
+ "learning_rate": 2.6069489415703197e-08,
1906
+ "loss": 0.1941,
1907
+ "step": 2710
1908
+ },
1909
+ {
1910
+ "epoch": 2.8848806366047746,
1911
+ "grad_norm": 2.033259153366089,
1912
+ "learning_rate": 2.18098220448168e-08,
1913
+ "loss": 0.2029,
1914
+ "step": 2720
1915
+ },
1916
+ {
1917
+ "epoch": 2.8954907161803716,
1918
+ "grad_norm": 2.416912794113159,
1919
+ "learning_rate": 1.7928367395725066e-08,
1920
+ "loss": 0.2062,
1921
+ "step": 2730
1922
+ },
1923
+ {
1924
+ "epoch": 2.906100795755968,
1925
+ "grad_norm": 2.193751096725464,
1926
+ "learning_rate": 1.442571784246699e-08,
1927
+ "loss": 0.1873,
1928
+ "step": 2740
1929
+ },
1930
+ {
1931
+ "epoch": 2.916710875331565,
1932
+ "grad_norm": 1.5729731321334839,
1933
+ "learning_rate": 1.1302407947173522e-08,
1934
+ "loss": 0.1653,
1935
+ "step": 2750
1936
+ },
1937
+ {
1938
+ "epoch": 2.9273209549071617,
1939
+ "grad_norm": 1.7562044858932495,
1940
+ "learning_rate": 8.558914378481996e-09,
1941
+ "loss": 0.1743,
1942
+ "step": 2760
1943
+ },
1944
+ {
1945
+ "epoch": 2.9379310344827587,
1946
+ "grad_norm": 2.183967351913452,
1947
+ "learning_rate": 6.195655838790726e-09,
1948
+ "loss": 0.1821,
1949
+ "step": 2770
1950
+ },
1951
+ {
1952
+ "epoch": 2.9485411140583553,
1953
+ "grad_norm": 1.9312433004379272,
1954
+ "learning_rate": 4.212993000356491e-09,
1955
+ "loss": 0.1954,
1956
+ "step": 2780
1957
+ },
1958
+ {
1959
+ "epoch": 2.9591511936339523,
1960
+ "grad_norm": 2.2055087089538574,
1961
+ "learning_rate": 2.611228450250802e-09,
1962
+ "loss": 0.1925,
1963
+ "step": 2790
1964
+ },
1965
+ {
1966
+ "epoch": 2.9697612732095493,
1967
+ "grad_norm": 1.6606404781341553,
1968
+ "learning_rate": 1.3906066441798927e-09,
1969
+ "loss": 0.1805,
1970
+ "step": 2800
1971
+ },
1972
+ {
1973
+ "epoch": 2.980371352785146,
1974
+ "grad_norm": 2.594404458999634,
1975
+ "learning_rate": 5.513138691767839e-10,
1976
+ "loss": 0.2084,
1977
+ "step": 2810
1978
+ },
1979
+ {
1980
+ "epoch": 2.9909814323607424,
1981
+ "grad_norm": 2.007861375808716,
1982
+ "learning_rate": 9.347821517069477e-11,
1983
+ "loss": 0.2115,
1984
+ "step": 2820
1985
+ },
1986
+ {
1987
+ "epoch": 2.9973474801061006,
1988
+ "step": 2826,
1989
+ "total_flos": 1.0915292825780224e+17,
1990
+ "train_loss": 0.34044326600333263,
1991
+ "train_runtime": 16671.2674,
1992
+ "train_samples_per_second": 2.713,
1993
+ "train_steps_per_second": 0.17
1994
+ }
1995
+ ],
1996
+ "logging_steps": 10,
1997
+ "max_steps": 2826,
1998
+ "num_input_tokens_seen": 0,
1999
+ "num_train_epochs": 3,
2000
+ "save_steps": 943,
2001
+ "stateful_callbacks": {
2002
+ "TrainerControl": {
2003
+ "args": {
2004
+ "should_epoch_stop": false,
2005
+ "should_evaluate": false,
2006
+ "should_log": false,
2007
+ "should_save": true,
2008
+ "should_training_stop": true
2009
+ },
2010
+ "attributes": {}
2011
+ }
2012
+ },
2013
+ "total_flos": 1.0915292825780224e+17,
2014
+ "train_batch_size": 1,
2015
+ "trial_name": null,
2016
+ "trial_params": null
2017
+ }
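The records above follow the layout the Transformers Trainer writes out: one entry per logging interval with "epoch", "grad_norm", "learning_rate", "loss", and "step", then a single summary entry (train_runtime, total_flos, train_samples_per_second, ...) plus top-level fields such as "logging_steps" and "max_steps". Below is a minimal sketch of how this file could be re-plotted locally; it assumes this is the standard trainer_state.json produced by Trainer, since the filename and the "log_history" key are not visible in this excerpt.

import json

import matplotlib.pyplot as plt

# Assumption: the JSON shown above was saved as trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-interval records; the final summary entry has no "loss" key.
entries = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"loss logged every {state['logging_steps']} steps")
plt.savefig("training_loss_replot.png")  # analogous to the training_loss.png added further down

This is illustrative only; the uploaded training_loss.png further down is the curve that was actually produced during training.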
training.log ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:27d6ca1d781c71f5e68ee48cf8a625fd3eefbff277275730d357f7f6a9116727
3
+ size 7928
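training_args.bin is stored through Git LFS, so only the pointer is shown. With the Transformers Trainer this file is typically a TrainingArguments object serialized with torch.save; the following is a hedged sketch of inspecting it, assuming that is the case here.

import torch  # transformers must also be installed so the pickle can resolve its classes

# weights_only=False is needed on newer torch versions because this is a
# pickled TrainingArguments object, not a plain tensor checkpoint.
args = torch.load("training_args.bin", weights_only=False)

print(args.num_train_epochs)  # expected to line up with num_train_epochs above
print(args.logging_steps)     # expected to line up with logging_steps above
print(args.learning_rate)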
training_loss.png ADDED
vocab.json ADDED
The diff for this file is too large to render. See raw diff