pvrancx committed
Commit 2dc8576 · verified · 1 Parent(s): 832c96f

Upload 14 files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,3 +1,210 @@
- ---
- license: apache-2.0
- ---
+ ---
+ base_model: unsloth/gemma-3-270m-it
+ library_name: peft
+ pipeline_tag: text-generation
+ tags:
+ - base_model:adapter:unsloth/gemma-3-270m-it
+ - lora
+ - sft
+ - transformers
+ - trl
+ - unsloth
+ ---
+
+ # Model Card for Model ID
+
+ <!-- Provide a quick summary of what the model is/does. -->
+
+
+
+ ## Model Details
+
+ ### Model Description
+
+ <!-- Provide a longer summary of what this model is. -->
+
+
+
+ - **Developed by:** [More Information Needed]
+ - **Funded by [optional]:** [More Information Needed]
+ - **Shared by [optional]:** [More Information Needed]
+ - **Model type:** [More Information Needed]
+ - **Language(s) (NLP):** [More Information Needed]
+ - **License:** [More Information Needed]
+ - **Finetuned from model [optional]:** [More Information Needed]
+
+ ### Model Sources [optional]
+
+ <!-- Provide the basic links for the model. -->
+
+ - **Repository:** [More Information Needed]
+ - **Paper [optional]:** [More Information Needed]
+ - **Demo [optional]:** [More Information Needed]
+
+ ## Uses
+
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
+
+ ### Direct Use
+
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
+
+ [More Information Needed]
+
+ ### Downstream Use [optional]
+
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
+
+ [More Information Needed]
+
+ ### Out-of-Scope Use
+
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
+
+ [More Information Needed]
+
+ ## Bias, Risks, and Limitations
+
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
+
+ [More Information Needed]
+
+ ### Recommendations
+
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
+
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+ ## How to Get Started with the Model
+
+ Use the code below to get started with the model.
+
+ [More Information Needed]
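+
+ Until the card is filled in, the snippet below is a minimal sketch of how this LoRA adapter could be loaded on top of its base model with Transformers and PEFT. The adapter repository id is a placeholder; the base model id comes from `adapter_config.json`.
+
+ ```python
+ from peft import PeftModel
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ base_id = "unsloth/gemma-3-270m-it"        # base model named in adapter_config.json
+ adapter_id = "your-username/your-adapter"  # placeholder: replace with this repository's id
+
+ tokenizer = AutoTokenizer.from_pretrained(base_id)
+ base_model = AutoModelForCausalLM.from_pretrained(base_id)
+ model = PeftModel.from_pretrained(base_model, adapter_id)
+
+ # Build a Gemma-style chat prompt and generate a short reply.
+ messages = [{"role": "user", "content": "Hello!"}]
+ input_ids = tokenizer.apply_chat_template(
+     messages, add_generation_prompt=True, return_tensors="pt"
+ )
+ output_ids = model.generate(input_ids, max_new_tokens=64)
+ print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
+ ```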
+
+ ## Training Details
+
+ ### Training Data
+
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
+
+ [More Information Needed]
+
+ ### Training Procedure
+
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
+
+ #### Preprocessing [optional]
+
+ [More Information Needed]
+
+
+ #### Training Hyperparameters
+
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
+
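+ The full recipe is not recorded in this card. As a rough orientation only, the sketch below shows how an SFT run with the LoRA settings from `adapter_config.json` and the 5e-5 learning rate visible in `trainer_state.json` could be wired up with TRL; the dataset name and the remaining arguments are placeholders, not the actual configuration used.
+
+ ```python
+ from datasets import load_dataset
+ from peft import LoraConfig
+ from trl import SFTConfig, SFTTrainer
+
+ dataset = load_dataset("your-username/your-sft-dataset", split="train")  # placeholder dataset
+
+ # LoRA settings mirroring adapter_config.json (r=16, alpha=32, dropout=0.1).
+ peft_config = LoraConfig(
+     r=16, lora_alpha=32, lora_dropout=0.1,
+     bias="none", task_type="CAUSAL_LM",
+     target_modules=["q_proj", "k_proj", "v_proj", "o_proj",
+                     "gate_proj", "up_proj", "down_proj"],
+ )
+
+ training_args = SFTConfig(
+     output_dir="gemma-3-270m-it-sft-lora",  # placeholder output directory
+     learning_rate=5e-5,                     # matches trainer_state.json
+     per_device_train_batch_size=4,          # placeholder
+     logging_steps=1,                        # trainer_state.json logs every step
+ )
+
+ trainer = SFTTrainer(
+     model="unsloth/gemma-3-270m-it",
+     args=training_args,
+     train_dataset=dataset,
+     peft_config=peft_config,
+ )
+ trainer.train()
+ ```
+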
+ #### Speeds, Sizes, Times [optional]
+
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
+
+ [More Information Needed]
+
+ ## Evaluation
+
+ <!-- This section describes the evaluation protocols and provides the results. -->
+
+ ### Testing Data, Factors & Metrics
+
+ #### Testing Data
+
+ <!-- This should link to a Dataset Card if possible. -->
+
+ [More Information Needed]
+
+ #### Factors
+
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
+
+ [More Information Needed]
+
+ #### Metrics
+
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
+
+ [More Information Needed]
+
+ ### Results
+
+ [More Information Needed]
+
+ #### Summary
+
+
+
+ ## Model Examination [optional]
+
+ <!-- Relevant interpretability work for the model goes here -->
+
+ [More Information Needed]
+
+ ## Environmental Impact
+
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
+
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+ - **Hardware Type:** [More Information Needed]
+ - **Hours used:** [More Information Needed]
+ - **Cloud Provider:** [More Information Needed]
+ - **Compute Region:** [More Information Needed]
+ - **Carbon Emitted:** [More Information Needed]
+
+ ## Technical Specifications [optional]
+
+ ### Model Architecture and Objective
+
+ [More Information Needed]
+
+ ### Compute Infrastructure
+
+ [More Information Needed]
+
+ #### Hardware
+
+ [More Information Needed]
+
+ #### Software
+
+ [More Information Needed]
+
+ ## Citation [optional]
+
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
+
+ **BibTeX:**
+
+ [More Information Needed]
+
+ **APA:**
+
+ [More Information Needed]
+
+ ## Glossary [optional]
+
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
+
+ [More Information Needed]
+
+ ## More Information [optional]
+
+ [More Information Needed]
+
+ ## Model Card Authors [optional]
+
+ [More Information Needed]
+
+ ## Model Card Contact
+
+ [More Information Needed]
+ ### Framework versions
+
+ - PEFT 0.18.1
adapter_config.json ADDED
@@ -0,0 +1,50 @@
+ {
+ "alora_invocation_tokens": null,
+ "alpha_pattern": {},
+ "arrow_config": null,
+ "auto_mapping": {
+ "base_model_class": "Gemma3ForCausalLM",
+ "parent_library": "transformers.models.gemma3.modeling_gemma3",
+ "unsloth_fixed": true
+ },
+ "base_model_name_or_path": "unsloth/gemma-3-270m-it",
+ "bias": "none",
+ "corda_config": null,
+ "ensure_weight_tying": false,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 32,
+ "lora_bias": false,
+ "lora_dropout": 0.1,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "peft_version": "0.18.1",
+ "qalora_group_size": 16,
+ "r": 16,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "down_proj",
+ "v_proj",
+ "up_proj",
+ "o_proj",
+ "q_proj",
+ "k_proj"
+ ],
+ "target_parameters": null,
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_qalora": false,
+ "use_rslora": false
+ }
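
The block above is the committed PEFT adapter configuration. A quick way to inspect it programmatically, rather than reading the raw JSON, is sketched below; the repository id is a placeholder for this repo's actual id.

```python
from peft import PeftConfig

# Placeholder repo id: replace with this repository's actual id.
config = PeftConfig.from_pretrained("your-username/your-adapter")

print(config.peft_type)                # LORA
print(config.base_model_name_or_path)  # unsloth/gemma-3-270m-it
print(config.r, config.lora_alpha, config.lora_dropout)  # 16 32 0.1
print(sorted(config.target_modules))
```
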
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:312c39b50e2f8722dfb9cf71b93346a2efa0ad52d606de8067af55eebec1209a
+ size 15220968
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "<image_soft_token>": 262144
+ }
chat_template.jinja ADDED
@@ -0,0 +1,50 @@
+ {# Unsloth Chat template fixes #}
+ {{ bos_token }}
+ {%- if messages[0]['role'] == 'system' -%}
+ {%- if messages[0]['content'] is string -%}
+ {%- set first_user_prefix = messages[0]['content'] + '
+
+ ' -%}
+ {%- else -%}
+ {%- set first_user_prefix = messages[0]['content'][0]['text'] + '
+
+ ' -%}
+ {%- endif -%}
+ {%- set loop_messages = messages[1:] -%}
+ {%- else -%}
+ {%- set first_user_prefix = "" -%}
+ {%- set loop_messages = messages -%}
+ {%- endif -%}
+ {%- for message in loop_messages -%}
+ {%- if (message['role'] == 'user') != (loop.index0 % 2 == 0) -%}
+ {{ raise_exception("Conversation roles must alternate user/assistant/user/assistant/...") }}
+ {%- endif -%}
+ {%- if (message['role'] == 'assistant') -%}
+ {%- set role = "model" -%}
+ {%- else -%}
+ {%- set role = message['role'] -%}
+ {%- endif -%}
+ {{ '<start_of_turn>' + role + '
+ ' + (first_user_prefix if loop.first else "") }}
+ {%- if message['content'] is string -%}
+ {{ message['content'] | trim }}
+ {%- elif message['content'] is iterable -%}
+ {%- for item in message['content'] -%}
+ {%- if item['type'] == 'image' -%}
+ {{ '<start_of_image>' }}
+ {%- elif item['type'] == 'text' -%}
+ {{ item['text'] | trim }}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- elif message['content'] is defined -%}
+ {{ raise_exception("Invalid content type") }}
+ {%- endif -%}
+ {{ '<end_of_turn>
+ ' }}
+ {%- endfor -%}
+ {%- if add_generation_prompt -%}
+ {{'<start_of_turn>model
+ '}}
+ {%- endif -%}
+
+ {# Copyright 2025-present Unsloth. Apache 2.0 License. #}
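
A quick way to sanity-check this template is to render a short conversation with the tokenizer's `apply_chat_template`; the sketch below assumes the tokenizer shipped in this repository (or the base model's tokenizer) carries this Gemma-style template. The template folds a leading system message into the first user turn and maps the "assistant" role to Gemma's "model" role, wrapping turns in <start_of_turn>/<end_of_turn>.

```python
from transformers import AutoTokenizer

# Base model id taken from adapter_config.json; assumed to carry this chat template.
tokenizer = AutoTokenizer.from_pretrained("unsloth/gemma-3-270m-it")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Write a haiku about LoRA."},
]

# Render the prompt as text and append the generation prompt for the model turn.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```
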
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:82ac39cc454c2725adaace18c7698c7d5201bd38302ed700147944c3b34f4b03
+ size 8002453
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1e22af442246d4b474138f5b8f9cb9fe9a8101b3fc21d13964a4b1eefd3af6a
+ size 14645
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37181ddfcde4e5bb6a24802bb1314c1ad420a32bdea12a339b12b932577cdd86
+ size 1465
special_tokens_map.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "boi_token": "<start_of_image>",
+ "bos_token": {
+ "content": "<bos>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eoi_token": "<end_of_image>",
+ "eos_token": {
+ "content": "<end_of_turn>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "image_token": "<image_soft_token>",
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4667f2089529e8e7657cfb6d1c19910ae71ff5f28aa7ab2ff2763330affad795
+ size 33384568
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c
+ size 4689074
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
trainer_state.json ADDED
@@ -0,0 +1,4234 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 0.19515368352577656,
6
+ "eval_steps": 500,
7
+ "global_step": 600,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.0003252561392096276,
14
+ "grad_norm": 7.644000053405762,
15
+ "learning_rate": 5e-05,
16
+ "loss": 3.7348,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 0.0006505122784192552,
21
+ "grad_norm": 6.572476387023926,
22
+ "learning_rate": 5e-05,
23
+ "loss": 3.5202,
24
+ "step": 2
25
+ },
26
+ {
27
+ "epoch": 0.0009757684176288828,
28
+ "grad_norm": 5.915395736694336,
29
+ "learning_rate": 5e-05,
30
+ "loss": 3.2423,
31
+ "step": 3
32
+ },
33
+ {
34
+ "epoch": 0.0013010245568385104,
35
+ "grad_norm": 5.39382266998291,
36
+ "learning_rate": 5e-05,
37
+ "loss": 3.2308,
38
+ "step": 4
39
+ },
40
+ {
41
+ "epoch": 0.0016262806960481379,
42
+ "grad_norm": 5.474543571472168,
43
+ "learning_rate": 5e-05,
44
+ "loss": 3.3753,
45
+ "step": 5
46
+ },
47
+ {
48
+ "epoch": 0.0019515368352577655,
49
+ "grad_norm": 5.402736663818359,
50
+ "learning_rate": 5e-05,
51
+ "loss": 3.3042,
52
+ "step": 6
53
+ },
54
+ {
55
+ "epoch": 0.002276792974467393,
56
+ "grad_norm": 5.727195739746094,
57
+ "learning_rate": 5e-05,
58
+ "loss": 3.2722,
59
+ "step": 7
60
+ },
61
+ {
62
+ "epoch": 0.002602049113677021,
63
+ "grad_norm": 5.997256755828857,
64
+ "learning_rate": 5e-05,
65
+ "loss": 3.2708,
66
+ "step": 8
67
+ },
68
+ {
69
+ "epoch": 0.002927305252886648,
70
+ "grad_norm": 8.144789695739746,
71
+ "learning_rate": 5e-05,
72
+ "loss": 3.4531,
73
+ "step": 9
74
+ },
75
+ {
76
+ "epoch": 0.0032525613920962758,
77
+ "grad_norm": 11.264220237731934,
78
+ "learning_rate": 5e-05,
79
+ "loss": 3.3755,
80
+ "step": 10
81
+ },
82
+ {
83
+ "epoch": 0.0035778175313059034,
84
+ "grad_norm": 10.535292625427246,
85
+ "learning_rate": 5e-05,
86
+ "loss": 3.5809,
87
+ "step": 11
88
+ },
89
+ {
90
+ "epoch": 0.003903073670515531,
91
+ "grad_norm": 8.04301929473877,
92
+ "learning_rate": 5e-05,
93
+ "loss": 3.3787,
94
+ "step": 12
95
+ },
96
+ {
97
+ "epoch": 0.004228329809725159,
98
+ "grad_norm": 9.441449165344238,
99
+ "learning_rate": 5e-05,
100
+ "loss": 3.4919,
101
+ "step": 13
102
+ },
103
+ {
104
+ "epoch": 0.004553585948934786,
105
+ "grad_norm": 9.322367668151855,
106
+ "learning_rate": 5e-05,
107
+ "loss": 3.6405,
108
+ "step": 14
109
+ },
110
+ {
111
+ "epoch": 0.004878842088144414,
112
+ "grad_norm": 10.059698104858398,
113
+ "learning_rate": 5e-05,
114
+ "loss": 3.425,
115
+ "step": 15
116
+ },
117
+ {
118
+ "epoch": 0.005204098227354042,
119
+ "grad_norm": 11.285538673400879,
120
+ "learning_rate": 5e-05,
121
+ "loss": 3.6643,
122
+ "step": 16
123
+ },
124
+ {
125
+ "epoch": 0.0055293543665636685,
126
+ "grad_norm": 10.433178901672363,
127
+ "learning_rate": 5e-05,
128
+ "loss": 3.4485,
129
+ "step": 17
130
+ },
131
+ {
132
+ "epoch": 0.005854610505773296,
133
+ "grad_norm": 11.724845886230469,
134
+ "learning_rate": 5e-05,
135
+ "loss": 3.8379,
136
+ "step": 18
137
+ },
138
+ {
139
+ "epoch": 0.006179866644982924,
140
+ "grad_norm": 11.558403968811035,
141
+ "learning_rate": 5e-05,
142
+ "loss": 3.8067,
143
+ "step": 19
144
+ },
145
+ {
146
+ "epoch": 0.0065051227841925515,
147
+ "grad_norm": 9.274937629699707,
148
+ "learning_rate": 5e-05,
149
+ "loss": 3.5213,
150
+ "step": 20
151
+ },
152
+ {
153
+ "epoch": 0.006830378923402179,
154
+ "grad_norm": 11.487302780151367,
155
+ "learning_rate": 5e-05,
156
+ "loss": 3.8464,
157
+ "step": 21
158
+ },
159
+ {
160
+ "epoch": 0.007155635062611807,
161
+ "grad_norm": 11.910959243774414,
162
+ "learning_rate": 5e-05,
163
+ "loss": 3.7159,
164
+ "step": 22
165
+ },
166
+ {
167
+ "epoch": 0.0074808912018214345,
168
+ "grad_norm": 10.441876411437988,
169
+ "learning_rate": 5e-05,
170
+ "loss": 4.3052,
171
+ "step": 23
172
+ },
173
+ {
174
+ "epoch": 0.007806147341031062,
175
+ "grad_norm": 11.492648124694824,
176
+ "learning_rate": 5e-05,
177
+ "loss": 4.0298,
178
+ "step": 24
179
+ },
180
+ {
181
+ "epoch": 0.00813140348024069,
182
+ "grad_norm": 11.218420028686523,
183
+ "learning_rate": 5e-05,
184
+ "loss": 3.9014,
185
+ "step": 25
186
+ },
187
+ {
188
+ "epoch": 0.008456659619450317,
189
+ "grad_norm": 9.615971565246582,
190
+ "learning_rate": 5e-05,
191
+ "loss": 3.8972,
192
+ "step": 26
193
+ },
194
+ {
195
+ "epoch": 0.008781915758659945,
196
+ "grad_norm": 9.325116157531738,
197
+ "learning_rate": 5e-05,
198
+ "loss": 4.0339,
199
+ "step": 27
200
+ },
201
+ {
202
+ "epoch": 0.009107171897869573,
203
+ "grad_norm": 10.537034034729004,
204
+ "learning_rate": 5e-05,
205
+ "loss": 4.3475,
206
+ "step": 28
207
+ },
208
+ {
209
+ "epoch": 0.0094324280370792,
210
+ "grad_norm": 9.163812637329102,
211
+ "learning_rate": 5e-05,
212
+ "loss": 4.4702,
213
+ "step": 29
214
+ },
215
+ {
216
+ "epoch": 0.009757684176288828,
217
+ "grad_norm": 8.232535362243652,
218
+ "learning_rate": 5e-05,
219
+ "loss": 4.5234,
220
+ "step": 30
221
+ },
222
+ {
223
+ "epoch": 0.010082940315498456,
224
+ "grad_norm": 7.85621976852417,
225
+ "learning_rate": 5e-05,
226
+ "loss": 4.665,
227
+ "step": 31
228
+ },
229
+ {
230
+ "epoch": 0.010408196454708083,
231
+ "grad_norm": 7.693171501159668,
232
+ "learning_rate": 5e-05,
233
+ "loss": 4.5524,
234
+ "step": 32
235
+ },
236
+ {
237
+ "epoch": 0.01073345259391771,
238
+ "grad_norm": 6.418099403381348,
239
+ "learning_rate": 5e-05,
240
+ "loss": 4.3471,
241
+ "step": 33
242
+ },
243
+ {
244
+ "epoch": 0.011058708733127337,
245
+ "grad_norm": 9.231449127197266,
246
+ "learning_rate": 5e-05,
247
+ "loss": 4.5491,
248
+ "step": 34
249
+ },
250
+ {
251
+ "epoch": 0.011383964872336965,
252
+ "grad_norm": 8.351770401000977,
253
+ "learning_rate": 5e-05,
254
+ "loss": 4.6851,
255
+ "step": 35
256
+ },
257
+ {
258
+ "epoch": 0.011709221011546592,
259
+ "grad_norm": 8.412186622619629,
260
+ "learning_rate": 5e-05,
261
+ "loss": 4.4453,
262
+ "step": 36
263
+ },
264
+ {
265
+ "epoch": 0.01203447715075622,
266
+ "grad_norm": 7.4988932609558105,
267
+ "learning_rate": 5e-05,
268
+ "loss": 4.2595,
269
+ "step": 37
270
+ },
271
+ {
272
+ "epoch": 0.012359733289965848,
273
+ "grad_norm": 6.0955424308776855,
274
+ "learning_rate": 5e-05,
275
+ "loss": 4.1402,
276
+ "step": 38
277
+ },
278
+ {
279
+ "epoch": 0.012684989429175475,
280
+ "grad_norm": 5.853550434112549,
281
+ "learning_rate": 5e-05,
282
+ "loss": 4.2133,
283
+ "step": 39
284
+ },
285
+ {
286
+ "epoch": 0.013010245568385103,
287
+ "grad_norm": 6.071702480316162,
288
+ "learning_rate": 5e-05,
289
+ "loss": 3.9406,
290
+ "step": 40
291
+ },
292
+ {
293
+ "epoch": 0.01333550170759473,
294
+ "grad_norm": 5.181666374206543,
295
+ "learning_rate": 5e-05,
296
+ "loss": 3.9559,
297
+ "step": 41
298
+ },
299
+ {
300
+ "epoch": 0.013660757846804358,
301
+ "grad_norm": 5.067506313323975,
302
+ "learning_rate": 5e-05,
303
+ "loss": 4.0526,
304
+ "step": 42
305
+ },
306
+ {
307
+ "epoch": 0.013986013986013986,
308
+ "grad_norm": 4.593716621398926,
309
+ "learning_rate": 5e-05,
310
+ "loss": 4.0814,
311
+ "step": 43
312
+ },
313
+ {
314
+ "epoch": 0.014311270125223614,
315
+ "grad_norm": 5.379826545715332,
316
+ "learning_rate": 5e-05,
317
+ "loss": 4.13,
318
+ "step": 44
319
+ },
320
+ {
321
+ "epoch": 0.014636526264433241,
322
+ "grad_norm": 6.113279819488525,
323
+ "learning_rate": 5e-05,
324
+ "loss": 4.0842,
325
+ "step": 45
326
+ },
327
+ {
328
+ "epoch": 0.014961782403642869,
329
+ "grad_norm": 4.885502815246582,
330
+ "learning_rate": 5e-05,
331
+ "loss": 4.147,
332
+ "step": 46
333
+ },
334
+ {
335
+ "epoch": 0.015287038542852497,
336
+ "grad_norm": 5.065276622772217,
337
+ "learning_rate": 5e-05,
338
+ "loss": 4.0597,
339
+ "step": 47
340
+ },
341
+ {
342
+ "epoch": 0.015612294682062124,
343
+ "grad_norm": 5.306569576263428,
344
+ "learning_rate": 5e-05,
345
+ "loss": 4.4288,
346
+ "step": 48
347
+ },
348
+ {
349
+ "epoch": 0.01593755082127175,
350
+ "grad_norm": 6.738716125488281,
351
+ "learning_rate": 5e-05,
352
+ "loss": 4.2153,
353
+ "step": 49
354
+ },
355
+ {
356
+ "epoch": 0.01626280696048138,
357
+ "grad_norm": 7.264622211456299,
358
+ "learning_rate": 5e-05,
359
+ "loss": 4.3567,
360
+ "step": 50
361
+ },
362
+ {
363
+ "epoch": 0.016588063099691006,
364
+ "grad_norm": 4.799393177032471,
365
+ "learning_rate": 5e-05,
366
+ "loss": 4.1491,
367
+ "step": 51
368
+ },
369
+ {
370
+ "epoch": 0.016913319238900635,
371
+ "grad_norm": 4.5021071434021,
372
+ "learning_rate": 5e-05,
373
+ "loss": 4.0725,
374
+ "step": 52
375
+ },
376
+ {
377
+ "epoch": 0.01723857537811026,
378
+ "grad_norm": 5.524833679199219,
379
+ "learning_rate": 5e-05,
380
+ "loss": 4.405,
381
+ "step": 53
382
+ },
383
+ {
384
+ "epoch": 0.01756383151731989,
385
+ "grad_norm": 4.327210426330566,
386
+ "learning_rate": 5e-05,
387
+ "loss": 4.198,
388
+ "step": 54
389
+ },
390
+ {
391
+ "epoch": 0.017889087656529516,
392
+ "grad_norm": 4.141977787017822,
393
+ "learning_rate": 5e-05,
394
+ "loss": 4.0445,
395
+ "step": 55
396
+ },
397
+ {
398
+ "epoch": 0.018214343795739146,
399
+ "grad_norm": 4.746036529541016,
400
+ "learning_rate": 5e-05,
401
+ "loss": 4.5174,
402
+ "step": 56
403
+ },
404
+ {
405
+ "epoch": 0.01853959993494877,
406
+ "grad_norm": 5.5715837478637695,
407
+ "learning_rate": 5e-05,
408
+ "loss": 4.2292,
409
+ "step": 57
410
+ },
411
+ {
412
+ "epoch": 0.0188648560741584,
413
+ "grad_norm": 5.887129306793213,
414
+ "learning_rate": 5e-05,
415
+ "loss": 4.0795,
416
+ "step": 58
417
+ },
418
+ {
419
+ "epoch": 0.019190112213368027,
420
+ "grad_norm": 4.8050150871276855,
421
+ "learning_rate": 5e-05,
422
+ "loss": 4.1816,
423
+ "step": 59
424
+ },
425
+ {
426
+ "epoch": 0.019515368352577656,
427
+ "grad_norm": 4.179840564727783,
428
+ "learning_rate": 5e-05,
429
+ "loss": 4.337,
430
+ "step": 60
431
+ },
432
+ {
433
+ "epoch": 0.019840624491787282,
434
+ "grad_norm": 5.042575359344482,
435
+ "learning_rate": 5e-05,
436
+ "loss": 4.1466,
437
+ "step": 61
438
+ },
439
+ {
440
+ "epoch": 0.02016588063099691,
441
+ "grad_norm": 4.339786052703857,
442
+ "learning_rate": 5e-05,
443
+ "loss": 3.7189,
444
+ "step": 62
445
+ },
446
+ {
447
+ "epoch": 0.020491136770206538,
448
+ "grad_norm": 8.27956771850586,
449
+ "learning_rate": 5e-05,
450
+ "loss": 4.1767,
451
+ "step": 63
452
+ },
453
+ {
454
+ "epoch": 0.020816392909416167,
455
+ "grad_norm": 6.273040294647217,
456
+ "learning_rate": 5e-05,
457
+ "loss": 4.363,
458
+ "step": 64
459
+ },
460
+ {
461
+ "epoch": 0.021141649048625793,
462
+ "grad_norm": 7.342176914215088,
463
+ "learning_rate": 5e-05,
464
+ "loss": 4.3939,
465
+ "step": 65
466
+ },
467
+ {
468
+ "epoch": 0.02146690518783542,
469
+ "grad_norm": 6.060370445251465,
470
+ "learning_rate": 5e-05,
471
+ "loss": 4.1033,
472
+ "step": 66
473
+ },
474
+ {
475
+ "epoch": 0.021792161327045048,
476
+ "grad_norm": 6.683494567871094,
477
+ "learning_rate": 5e-05,
478
+ "loss": 3.9883,
479
+ "step": 67
480
+ },
481
+ {
482
+ "epoch": 0.022117417466254674,
483
+ "grad_norm": 6.418432712554932,
484
+ "learning_rate": 5e-05,
485
+ "loss": 4.1472,
486
+ "step": 68
487
+ },
488
+ {
489
+ "epoch": 0.022442673605464303,
490
+ "grad_norm": 6.342174053192139,
491
+ "learning_rate": 5e-05,
492
+ "loss": 4.414,
493
+ "step": 69
494
+ },
495
+ {
496
+ "epoch": 0.02276792974467393,
497
+ "grad_norm": 10.055042266845703,
498
+ "learning_rate": 5e-05,
499
+ "loss": 4.1925,
500
+ "step": 70
501
+ },
502
+ {
503
+ "epoch": 0.02309318588388356,
504
+ "grad_norm": 6.6624932289123535,
505
+ "learning_rate": 5e-05,
506
+ "loss": 4.1111,
507
+ "step": 71
508
+ },
509
+ {
510
+ "epoch": 0.023418442023093185,
511
+ "grad_norm": 8.38736343383789,
512
+ "learning_rate": 5e-05,
513
+ "loss": 4.5028,
514
+ "step": 72
515
+ },
516
+ {
517
+ "epoch": 0.023743698162302814,
518
+ "grad_norm": 8.479351997375488,
519
+ "learning_rate": 5e-05,
520
+ "loss": 4.2832,
521
+ "step": 73
522
+ },
523
+ {
524
+ "epoch": 0.02406895430151244,
525
+ "grad_norm": 8.613444328308105,
526
+ "learning_rate": 5e-05,
527
+ "loss": 4.5189,
528
+ "step": 74
529
+ },
530
+ {
531
+ "epoch": 0.02439421044072207,
532
+ "grad_norm": 6.932406425476074,
533
+ "learning_rate": 5e-05,
534
+ "loss": 4.1228,
535
+ "step": 75
536
+ },
537
+ {
538
+ "epoch": 0.024719466579931695,
539
+ "grad_norm": 5.989908695220947,
540
+ "learning_rate": 5e-05,
541
+ "loss": 4.0289,
542
+ "step": 76
543
+ },
544
+ {
545
+ "epoch": 0.025044722719141325,
546
+ "grad_norm": 5.118892192840576,
547
+ "learning_rate": 5e-05,
548
+ "loss": 3.8781,
549
+ "step": 77
550
+ },
551
+ {
552
+ "epoch": 0.02536997885835095,
553
+ "grad_norm": 5.232855796813965,
554
+ "learning_rate": 5e-05,
555
+ "loss": 4.2731,
556
+ "step": 78
557
+ },
558
+ {
559
+ "epoch": 0.02569523499756058,
560
+ "grad_norm": 4.9437103271484375,
561
+ "learning_rate": 5e-05,
562
+ "loss": 3.1985,
563
+ "step": 79
564
+ },
565
+ {
566
+ "epoch": 0.026020491136770206,
567
+ "grad_norm": 7.929747581481934,
568
+ "learning_rate": 5e-05,
569
+ "loss": 4.1925,
570
+ "step": 80
571
+ },
572
+ {
573
+ "epoch": 0.026345747275979835,
574
+ "grad_norm": 9.323763847351074,
575
+ "learning_rate": 5e-05,
576
+ "loss": 4.2925,
577
+ "step": 81
578
+ },
579
+ {
580
+ "epoch": 0.02667100341518946,
581
+ "grad_norm": 6.18411111831665,
582
+ "learning_rate": 5e-05,
583
+ "loss": 4.0956,
584
+ "step": 82
585
+ },
586
+ {
587
+ "epoch": 0.02699625955439909,
588
+ "grad_norm": 4.424221038818359,
589
+ "learning_rate": 5e-05,
590
+ "loss": 4.1803,
591
+ "step": 83
592
+ },
593
+ {
594
+ "epoch": 0.027321515693608717,
595
+ "grad_norm": 6.044656276702881,
596
+ "learning_rate": 5e-05,
597
+ "loss": 4.0551,
598
+ "step": 84
599
+ },
600
+ {
601
+ "epoch": 0.027646771832818346,
602
+ "grad_norm": 9.380243301391602,
603
+ "learning_rate": 5e-05,
604
+ "loss": 4.2753,
605
+ "step": 85
606
+ },
607
+ {
608
+ "epoch": 0.027972027972027972,
609
+ "grad_norm": 10.203062057495117,
610
+ "learning_rate": 5e-05,
611
+ "loss": 4.5481,
612
+ "step": 86
613
+ },
614
+ {
615
+ "epoch": 0.028297284111237598,
616
+ "grad_norm": 12.21309757232666,
617
+ "learning_rate": 5e-05,
618
+ "loss": 4.2921,
619
+ "step": 87
620
+ },
621
+ {
622
+ "epoch": 0.028622540250447227,
623
+ "grad_norm": 7.666494369506836,
624
+ "learning_rate": 5e-05,
625
+ "loss": 4.2344,
626
+ "step": 88
627
+ },
628
+ {
629
+ "epoch": 0.028947796389656853,
630
+ "grad_norm": 5.388767719268799,
631
+ "learning_rate": 5e-05,
632
+ "loss": 4.3914,
633
+ "step": 89
634
+ },
635
+ {
636
+ "epoch": 0.029273052528866483,
637
+ "grad_norm": 7.598433971405029,
638
+ "learning_rate": 5e-05,
639
+ "loss": 4.242,
640
+ "step": 90
641
+ },
642
+ {
643
+ "epoch": 0.02959830866807611,
644
+ "grad_norm": 11.387866020202637,
645
+ "learning_rate": 5e-05,
646
+ "loss": 4.0014,
647
+ "step": 91
648
+ },
649
+ {
650
+ "epoch": 0.029923564807285738,
651
+ "grad_norm": 10.232786178588867,
652
+ "learning_rate": 5e-05,
653
+ "loss": 4.025,
654
+ "step": 92
655
+ },
656
+ {
657
+ "epoch": 0.030248820946495364,
658
+ "grad_norm": 9.431289672851562,
659
+ "learning_rate": 5e-05,
660
+ "loss": 4.2743,
661
+ "step": 93
662
+ },
663
+ {
664
+ "epoch": 0.030574077085704993,
665
+ "grad_norm": 6.723931789398193,
666
+ "learning_rate": 5e-05,
667
+ "loss": 4.111,
668
+ "step": 94
669
+ },
670
+ {
671
+ "epoch": 0.03089933322491462,
672
+ "grad_norm": 5.171010971069336,
673
+ "learning_rate": 5e-05,
674
+ "loss": 3.8832,
675
+ "step": 95
676
+ },
677
+ {
678
+ "epoch": 0.03122458936412425,
679
+ "grad_norm": 7.667996883392334,
680
+ "learning_rate": 5e-05,
681
+ "loss": 4.1839,
682
+ "step": 96
683
+ },
684
+ {
685
+ "epoch": 0.031549845503333875,
686
+ "grad_norm": 9.853692054748535,
687
+ "learning_rate": 5e-05,
688
+ "loss": 4.2596,
689
+ "step": 97
690
+ },
691
+ {
692
+ "epoch": 0.0318751016425435,
693
+ "grad_norm": 9.22080135345459,
694
+ "learning_rate": 5e-05,
695
+ "loss": 3.7551,
696
+ "step": 98
697
+ },
698
+ {
699
+ "epoch": 0.03220035778175313,
700
+ "grad_norm": 8.230822563171387,
701
+ "learning_rate": 5e-05,
702
+ "loss": 4.1223,
703
+ "step": 99
704
+ },
705
+ {
706
+ "epoch": 0.03252561392096276,
707
+ "grad_norm": 4.844912052154541,
708
+ "learning_rate": 5e-05,
709
+ "loss": 3.9264,
710
+ "step": 100
711
+ },
712
+ {
713
+ "epoch": 0.032850870060172385,
714
+ "grad_norm": 5.341676712036133,
715
+ "learning_rate": 5e-05,
716
+ "loss": 4.2083,
717
+ "step": 101
718
+ },
719
+ {
720
+ "epoch": 0.03317612619938201,
721
+ "grad_norm": 10.346305847167969,
722
+ "learning_rate": 5e-05,
723
+ "loss": 4.6745,
724
+ "step": 102
725
+ },
726
+ {
727
+ "epoch": 0.033501382338591644,
728
+ "grad_norm": 5.822200298309326,
729
+ "learning_rate": 5e-05,
730
+ "loss": 3.9329,
731
+ "step": 103
732
+ },
733
+ {
734
+ "epoch": 0.03382663847780127,
735
+ "grad_norm": 4.412308692932129,
736
+ "learning_rate": 5e-05,
737
+ "loss": 4.2581,
738
+ "step": 104
739
+ },
740
+ {
741
+ "epoch": 0.034151894617010896,
742
+ "grad_norm": 6.643288612365723,
743
+ "learning_rate": 5e-05,
744
+ "loss": 4.0638,
745
+ "step": 105
746
+ },
747
+ {
748
+ "epoch": 0.03447715075622052,
749
+ "grad_norm": 4.771259784698486,
750
+ "learning_rate": 5e-05,
751
+ "loss": 3.9147,
752
+ "step": 106
753
+ },
754
+ {
755
+ "epoch": 0.034802406895430155,
756
+ "grad_norm": 4.471869945526123,
757
+ "learning_rate": 5e-05,
758
+ "loss": 3.8011,
759
+ "step": 107
760
+ },
761
+ {
762
+ "epoch": 0.03512766303463978,
763
+ "grad_norm": 6.949775218963623,
764
+ "learning_rate": 5e-05,
765
+ "loss": 4.3229,
766
+ "step": 108
767
+ },
768
+ {
769
+ "epoch": 0.035452919173849406,
770
+ "grad_norm": 5.095446586608887,
771
+ "learning_rate": 5e-05,
772
+ "loss": 3.616,
773
+ "step": 109
774
+ },
775
+ {
776
+ "epoch": 0.03577817531305903,
777
+ "grad_norm": 6.592041015625,
778
+ "learning_rate": 5e-05,
779
+ "loss": 3.7697,
780
+ "step": 110
781
+ },
782
+ {
783
+ "epoch": 0.03610343145226866,
784
+ "grad_norm": 7.455766677856445,
785
+ "learning_rate": 5e-05,
786
+ "loss": 4.0375,
787
+ "step": 111
788
+ },
789
+ {
790
+ "epoch": 0.03642868759147829,
791
+ "grad_norm": 4.540219306945801,
792
+ "learning_rate": 5e-05,
793
+ "loss": 3.7951,
794
+ "step": 112
795
+ },
796
+ {
797
+ "epoch": 0.03675394373068792,
798
+ "grad_norm": 5.230220794677734,
799
+ "learning_rate": 5e-05,
800
+ "loss": 4.0304,
801
+ "step": 113
802
+ },
803
+ {
804
+ "epoch": 0.03707919986989754,
805
+ "grad_norm": 5.179874420166016,
806
+ "learning_rate": 5e-05,
807
+ "loss": 4.0191,
808
+ "step": 114
809
+ },
810
+ {
811
+ "epoch": 0.03740445600910717,
812
+ "grad_norm": 6.374222755432129,
813
+ "learning_rate": 5e-05,
814
+ "loss": 4.4934,
815
+ "step": 115
816
+ },
817
+ {
818
+ "epoch": 0.0377297121483168,
819
+ "grad_norm": 6.96058988571167,
820
+ "learning_rate": 5e-05,
821
+ "loss": 4.1446,
822
+ "step": 116
823
+ },
824
+ {
825
+ "epoch": 0.03805496828752643,
826
+ "grad_norm": 6.299279689788818,
827
+ "learning_rate": 5e-05,
828
+ "loss": 3.9077,
829
+ "step": 117
830
+ },
831
+ {
832
+ "epoch": 0.038380224426736054,
833
+ "grad_norm": 6.169437408447266,
834
+ "learning_rate": 5e-05,
835
+ "loss": 4.1457,
836
+ "step": 118
837
+ },
838
+ {
839
+ "epoch": 0.03870548056594568,
840
+ "grad_norm": 5.159611701965332,
841
+ "learning_rate": 5e-05,
842
+ "loss": 3.7067,
843
+ "step": 119
844
+ },
845
+ {
846
+ "epoch": 0.03903073670515531,
847
+ "grad_norm": 6.676630973815918,
848
+ "learning_rate": 5e-05,
849
+ "loss": 4.0493,
850
+ "step": 120
851
+ },
852
+ {
853
+ "epoch": 0.03935599284436494,
854
+ "grad_norm": 6.488524436950684,
855
+ "learning_rate": 5e-05,
856
+ "loss": 4.109,
857
+ "step": 121
858
+ },
859
+ {
860
+ "epoch": 0.039681248983574564,
861
+ "grad_norm": 6.670077800750732,
862
+ "learning_rate": 5e-05,
863
+ "loss": 3.6645,
864
+ "step": 122
865
+ },
866
+ {
867
+ "epoch": 0.04000650512278419,
868
+ "grad_norm": 6.173693656921387,
869
+ "learning_rate": 5e-05,
870
+ "loss": 4.4163,
871
+ "step": 123
872
+ },
873
+ {
874
+ "epoch": 0.04033176126199382,
875
+ "grad_norm": 6.306183815002441,
876
+ "learning_rate": 5e-05,
877
+ "loss": 3.9382,
878
+ "step": 124
879
+ },
880
+ {
881
+ "epoch": 0.04065701740120345,
882
+ "grad_norm": 6.007297039031982,
883
+ "learning_rate": 5e-05,
884
+ "loss": 3.956,
885
+ "step": 125
886
+ },
887
+ {
888
+ "epoch": 0.040982273540413075,
889
+ "grad_norm": 6.0243730545043945,
890
+ "learning_rate": 5e-05,
891
+ "loss": 3.9613,
892
+ "step": 126
893
+ },
894
+ {
895
+ "epoch": 0.0413075296796227,
896
+ "grad_norm": 5.6909871101379395,
897
+ "learning_rate": 5e-05,
898
+ "loss": 3.9213,
899
+ "step": 127
900
+ },
901
+ {
902
+ "epoch": 0.041632785818832334,
903
+ "grad_norm": 5.5652265548706055,
904
+ "learning_rate": 5e-05,
905
+ "loss": 3.9325,
906
+ "step": 128
907
+ },
908
+ {
909
+ "epoch": 0.04195804195804196,
910
+ "grad_norm": 7.6173272132873535,
911
+ "learning_rate": 5e-05,
912
+ "loss": 4.0422,
913
+ "step": 129
914
+ },
915
+ {
916
+ "epoch": 0.042283298097251586,
917
+ "grad_norm": 10.900376319885254,
918
+ "learning_rate": 5e-05,
919
+ "loss": 3.9173,
920
+ "step": 130
921
+ },
922
+ {
923
+ "epoch": 0.04260855423646121,
924
+ "grad_norm": 12.899847984313965,
925
+ "learning_rate": 5e-05,
926
+ "loss": 4.0328,
927
+ "step": 131
928
+ },
929
+ {
930
+ "epoch": 0.04293381037567084,
931
+ "grad_norm": 11.928502082824707,
932
+ "learning_rate": 5e-05,
933
+ "loss": 3.9763,
934
+ "step": 132
935
+ },
936
+ {
937
+ "epoch": 0.04325906651488047,
938
+ "grad_norm": 8.4597749710083,
939
+ "learning_rate": 5e-05,
940
+ "loss": 4.0895,
941
+ "step": 133
942
+ },
943
+ {
944
+ "epoch": 0.043584322654090096,
945
+ "grad_norm": 5.162694931030273,
946
+ "learning_rate": 5e-05,
947
+ "loss": 4.1564,
948
+ "step": 134
949
+ },
950
+ {
951
+ "epoch": 0.04390957879329972,
952
+ "grad_norm": 13.066299438476562,
953
+ "learning_rate": 5e-05,
954
+ "loss": 4.1774,
955
+ "step": 135
956
+ },
957
+ {
958
+ "epoch": 0.04423483493250935,
959
+ "grad_norm": 14.013510704040527,
960
+ "learning_rate": 5e-05,
961
+ "loss": 3.5203,
962
+ "step": 136
963
+ },
964
+ {
965
+ "epoch": 0.04456009107171898,
966
+ "grad_norm": 15.885542869567871,
967
+ "learning_rate": 5e-05,
968
+ "loss": 4.1405,
969
+ "step": 137
970
+ },
971
+ {
972
+ "epoch": 0.04488534721092861,
973
+ "grad_norm": 7.15226411819458,
974
+ "learning_rate": 5e-05,
975
+ "loss": 4.043,
976
+ "step": 138
977
+ },
978
+ {
979
+ "epoch": 0.04521060335013823,
980
+ "grad_norm": 5.1085686683654785,
981
+ "learning_rate": 5e-05,
982
+ "loss": 3.7764,
983
+ "step": 139
984
+ },
985
+ {
986
+ "epoch": 0.04553585948934786,
987
+ "grad_norm": 6.9343390464782715,
988
+ "learning_rate": 5e-05,
989
+ "loss": 4.2056,
990
+ "step": 140
991
+ },
992
+ {
993
+ "epoch": 0.04586111562855749,
994
+ "grad_norm": 11.624869346618652,
995
+ "learning_rate": 5e-05,
996
+ "loss": 3.7097,
997
+ "step": 141
998
+ },
999
+ {
1000
+ "epoch": 0.04618637176776712,
1001
+ "grad_norm": 12.546487808227539,
1002
+ "learning_rate": 5e-05,
1003
+ "loss": 4.2067,
1004
+ "step": 142
1005
+ },
1006
+ {
1007
+ "epoch": 0.046511627906976744,
1008
+ "grad_norm": 5.573001861572266,
1009
+ "learning_rate": 5e-05,
1010
+ "loss": 3.6916,
1011
+ "step": 143
1012
+ },
1013
+ {
1014
+ "epoch": 0.04683688404618637,
1015
+ "grad_norm": 6.0869140625,
1016
+ "learning_rate": 5e-05,
1017
+ "loss": 4.1239,
1018
+ "step": 144
1019
+ },
1020
+ {
1021
+ "epoch": 0.047162140185396,
1022
+ "grad_norm": 7.625162124633789,
1023
+ "learning_rate": 5e-05,
1024
+ "loss": 3.9154,
1025
+ "step": 145
1026
+ },
1027
+ {
1028
+ "epoch": 0.04748739632460563,
1029
+ "grad_norm": 5.8476762771606445,
1030
+ "learning_rate": 5e-05,
1031
+ "loss": 3.9673,
1032
+ "step": 146
1033
+ },
1034
+ {
1035
+ "epoch": 0.047812652463815254,
1036
+ "grad_norm": 6.097865104675293,
1037
+ "learning_rate": 5e-05,
1038
+ "loss": 3.994,
1039
+ "step": 147
1040
+ },
1041
+ {
1042
+ "epoch": 0.04813790860302488,
1043
+ "grad_norm": 5.200497150421143,
1044
+ "learning_rate": 5e-05,
1045
+ "loss": 4.1332,
1046
+ "step": 148
1047
+ },
1048
+ {
1049
+ "epoch": 0.04846316474223451,
1050
+ "grad_norm": 8.92606258392334,
1051
+ "learning_rate": 5e-05,
1052
+ "loss": 4.1005,
1053
+ "step": 149
1054
+ },
1055
+ {
1056
+ "epoch": 0.04878842088144414,
1057
+ "grad_norm": 5.433960437774658,
1058
+ "learning_rate": 5e-05,
1059
+ "loss": 4.099,
1060
+ "step": 150
1061
+ },
1062
+ {
1063
+ "epoch": 0.049113677020653765,
1064
+ "grad_norm": 4.350966453552246,
1065
+ "learning_rate": 5e-05,
1066
+ "loss": 3.7492,
1067
+ "step": 151
1068
+ },
1069
+ {
1070
+ "epoch": 0.04943893315986339,
1071
+ "grad_norm": 8.3677978515625,
1072
+ "learning_rate": 5e-05,
1073
+ "loss": 3.8048,
1074
+ "step": 152
1075
+ },
1076
+ {
1077
+ "epoch": 0.04976418929907302,
1078
+ "grad_norm": 10.74728012084961,
1079
+ "learning_rate": 5e-05,
1080
+ "loss": 4.3122,
1081
+ "step": 153
1082
+ },
1083
+ {
1084
+ "epoch": 0.05008944543828265,
1085
+ "grad_norm": 8.89576530456543,
1086
+ "learning_rate": 5e-05,
1087
+ "loss": 4.432,
1088
+ "step": 154
1089
+ },
1090
+ {
1091
+ "epoch": 0.050414701577492275,
1092
+ "grad_norm": 6.710874080657959,
1093
+ "learning_rate": 5e-05,
1094
+ "loss": 3.9353,
1095
+ "step": 155
1096
+ },
1097
+ {
1098
+ "epoch": 0.0507399577167019,
1099
+ "grad_norm": 10.725092887878418,
1100
+ "learning_rate": 5e-05,
1101
+ "loss": 4.2776,
1102
+ "step": 156
1103
+ },
1104
+ {
1105
+ "epoch": 0.05106521385591153,
1106
+ "grad_norm": 11.6733980178833,
1107
+ "learning_rate": 5e-05,
1108
+ "loss": 4.1354,
1109
+ "step": 157
1110
+ },
1111
+ {
1112
+ "epoch": 0.05139046999512116,
1113
+ "grad_norm": 9.846784591674805,
1114
+ "learning_rate": 5e-05,
1115
+ "loss": 3.9646,
1116
+ "step": 158
1117
+ },
1118
+ {
1119
+ "epoch": 0.051715726134330786,
1120
+ "grad_norm": 8.646893501281738,
1121
+ "learning_rate": 5e-05,
1122
+ "loss": 3.9413,
1123
+ "step": 159
1124
+ },
1125
+ {
1126
+ "epoch": 0.05204098227354041,
1127
+ "grad_norm": 7.0959062576293945,
1128
+ "learning_rate": 5e-05,
1129
+ "loss": 4.0745,
1130
+ "step": 160
1131
+ },
1132
+ {
1133
+ "epoch": 0.05236623841275004,
1134
+ "grad_norm": 7.378256797790527,
1135
+ "learning_rate": 5e-05,
1136
+ "loss": 4.1712,
1137
+ "step": 161
1138
+ },
1139
+ {
1140
+ "epoch": 0.05269149455195967,
1141
+ "grad_norm": 7.3625640869140625,
1142
+ "learning_rate": 5e-05,
1143
+ "loss": 3.9466,
1144
+ "step": 162
1145
+ },
1146
+ {
1147
+ "epoch": 0.0530167506911693,
1148
+ "grad_norm": 4.918821811676025,
1149
+ "learning_rate": 5e-05,
1150
+ "loss": 3.8243,
1151
+ "step": 163
1152
+ },
1153
+ {
1154
+ "epoch": 0.05334200683037892,
1155
+ "grad_norm": 6.653145790100098,
1156
+ "learning_rate": 5e-05,
1157
+ "loss": 3.8895,
1158
+ "step": 164
1159
+ },
1160
+ {
1161
+ "epoch": 0.05366726296958855,
1162
+ "grad_norm": 8.699049949645996,
1163
+ "learning_rate": 5e-05,
1164
+ "loss": 4.1705,
1165
+ "step": 165
1166
+ },
1167
+ {
1168
+ "epoch": 0.05399251910879818,
1169
+ "grad_norm": 7.855594158172607,
1170
+ "learning_rate": 5e-05,
1171
+ "loss": 3.6343,
1172
+ "step": 166
1173
+ },
1174
+ {
1175
+ "epoch": 0.05431777524800781,
1176
+ "grad_norm": 5.597055435180664,
1177
+ "learning_rate": 5e-05,
1178
+ "loss": 3.7043,
1179
+ "step": 167
1180
+ },
1181
+ {
1182
+ "epoch": 0.05464303138721743,
1183
+ "grad_norm": 9.524121284484863,
1184
+ "learning_rate": 5e-05,
1185
+ "loss": 3.8942,
1186
+ "step": 168
1187
+ },
1188
+ {
1189
+ "epoch": 0.05496828752642706,
1190
+ "grad_norm": 5.389048099517822,
1191
+ "learning_rate": 5e-05,
1192
+ "loss": 3.4256,
1193
+ "step": 169
1194
+ },
1195
+ {
1196
+ "epoch": 0.05529354366563669,
1197
+ "grad_norm": 5.568866729736328,
1198
+ "learning_rate": 5e-05,
1199
+ "loss": 3.9784,
1200
+ "step": 170
1201
+ },
1202
+ {
1203
+ "epoch": 0.05561879980484632,
1204
+ "grad_norm": 12.199308395385742,
1205
+ "learning_rate": 5e-05,
1206
+ "loss": 3.7894,
1207
+ "step": 171
1208
+ },
1209
+ {
1210
+ "epoch": 0.055944055944055944,
1211
+ "grad_norm": 15.115793228149414,
1212
+ "learning_rate": 5e-05,
1213
+ "loss": 3.6162,
1214
+ "step": 172
1215
+ },
1216
+ {
1217
+ "epoch": 0.05626931208326557,
1218
+ "grad_norm": 5.6520538330078125,
1219
+ "learning_rate": 5e-05,
1220
+ "loss": 3.8633,
1221
+ "step": 173
1222
+ },
1223
+ {
1224
+ "epoch": 0.056594568222475196,
1225
+ "grad_norm": 5.969440937042236,
1226
+ "learning_rate": 5e-05,
1227
+ "loss": 4.0392,
1228
+ "step": 174
1229
+ },
1230
+ {
1231
+ "epoch": 0.05691982436168483,
1232
+ "grad_norm": 8.82860279083252,
1233
+ "learning_rate": 5e-05,
1234
+ "loss": 4.1596,
1235
+ "step": 175
1236
+ },
1237
+ {
1238
+ "epoch": 0.057245080500894455,
1239
+ "grad_norm": 8.133511543273926,
1240
+ "learning_rate": 5e-05,
1241
+ "loss": 4.3075,
1242
+ "step": 176
1243
+ },
1244
+ {
1245
+ "epoch": 0.05757033664010408,
1246
+ "grad_norm": 5.794802665710449,
1247
+ "learning_rate": 5e-05,
1248
+ "loss": 3.7082,
1249
+ "step": 177
1250
+ },
1251
+ {
1252
+ "epoch": 0.057895592779313707,
1253
+ "grad_norm": 6.0018744468688965,
1254
+ "learning_rate": 5e-05,
1255
+ "loss": 3.67,
1256
+ "step": 178
1257
+ },
1258
+ {
1259
+ "epoch": 0.05822084891852334,
1260
+ "grad_norm": 9.123400688171387,
1261
+ "learning_rate": 5e-05,
1262
+ "loss": 3.6293,
1263
+ "step": 179
1264
+ },
1265
+ {
1266
+ "epoch": 0.058546105057732965,
1267
+ "grad_norm": 12.262410163879395,
1268
+ "learning_rate": 5e-05,
1269
+ "loss": 4.0337,
1270
+ "step": 180
1271
+ },
1272
+ {
1273
+ "epoch": 0.05887136119694259,
1274
+ "grad_norm": 5.367374897003174,
1275
+ "learning_rate": 5e-05,
1276
+ "loss": 3.7243,
1277
+ "step": 181
1278
+ },
1279
+ {
1280
+ "epoch": 0.05919661733615222,
1281
+ "grad_norm": 5.942975997924805,
1282
+ "learning_rate": 5e-05,
1283
+ "loss": 3.691,
1284
+ "step": 182
1285
+ },
1286
+ {
1287
+ "epoch": 0.05952187347536185,
1288
+ "grad_norm": 5.772192001342773,
1289
+ "learning_rate": 5e-05,
1290
+ "loss": 3.8556,
1291
+ "step": 183
1292
+ },
1293
+ {
1294
+ "epoch": 0.059847129614571476,
1295
+ "grad_norm": 6.091885566711426,
1296
+ "learning_rate": 5e-05,
1297
+ "loss": 3.8396,
1298
+ "step": 184
1299
+ },
1300
+ {
1301
+ "epoch": 0.0601723857537811,
1302
+ "grad_norm": 6.4458231925964355,
1303
+ "learning_rate": 5e-05,
1304
+ "loss": 4.3947,
1305
+ "step": 185
1306
+ },
1307
+ {
1308
+ "epoch": 0.06049764189299073,
1309
+ "grad_norm": 6.378884315490723,
1310
+ "learning_rate": 5e-05,
1311
+ "loss": 4.375,
1312
+ "step": 186
1313
+ },
1314
+ {
1315
+ "epoch": 0.06082289803220036,
1316
+ "grad_norm": 7.179290294647217,
1317
+ "learning_rate": 5e-05,
1318
+ "loss": 3.6237,
1319
+ "step": 187
1320
+ },
1321
+ {
1322
+ "epoch": 0.06114815417140999,
1323
+ "grad_norm": 5.786200046539307,
1324
+ "learning_rate": 5e-05,
1325
+ "loss": 4.2066,
1326
+ "step": 188
1327
+ },
1328
+ {
1329
+ "epoch": 0.06147341031061961,
1330
+ "grad_norm": 5.186939239501953,
1331
+ "learning_rate": 5e-05,
1332
+ "loss": 3.9935,
1333
+ "step": 189
1334
+ },
1335
+ {
1336
+ "epoch": 0.06179866644982924,
1337
+ "grad_norm": 4.02333402633667,
1338
+ "learning_rate": 5e-05,
1339
+ "loss": 3.6273,
1340
+ "step": 190
1341
+ },
1342
+ {
1343
+ "epoch": 0.06212392258903887,
1344
+ "grad_norm": 6.622715473175049,
1345
+ "learning_rate": 5e-05,
1346
+ "loss": 4.2028,
1347
+ "step": 191
1348
+ },
1349
+ {
1350
+ "epoch": 0.0624491787282485,
1351
+ "grad_norm": 9.464071273803711,
1352
+ "learning_rate": 5e-05,
1353
+ "loss": 4.0506,
1354
+ "step": 192
1355
+ },
1356
+ {
1357
+ "epoch": 0.06277443486745812,
1358
+ "grad_norm": 6.995242595672607,
1359
+ "learning_rate": 5e-05,
1360
+ "loss": 4.1047,
1361
+ "step": 193
1362
+ },
1363
+ {
1364
+ "epoch": 0.06309969100666775,
1365
+ "grad_norm": 9.435445785522461,
1366
+ "learning_rate": 5e-05,
1367
+ "loss": 3.8718,
1368
+ "step": 194
1369
+ },
1370
+ {
1371
+ "epoch": 0.06342494714587738,
1372
+ "grad_norm": 7.273919582366943,
1373
+ "learning_rate": 5e-05,
1374
+ "loss": 3.9176,
1375
+ "step": 195
1376
+ },
1377
+ {
1378
+ "epoch": 0.063750203285087,
1379
+ "grad_norm": 6.896090030670166,
1380
+ "learning_rate": 5e-05,
1381
+ "loss": 3.8408,
1382
+ "step": 196
1383
+ },
1384
+ {
1385
+ "epoch": 0.06407545942429663,
1386
+ "grad_norm": 7.282253265380859,
1387
+ "learning_rate": 5e-05,
1388
+ "loss": 3.6903,
1389
+ "step": 197
1390
+ },
1391
+ {
1392
+ "epoch": 0.06440071556350627,
1393
+ "grad_norm": 9.39031982421875,
1394
+ "learning_rate": 5e-05,
1395
+ "loss": 3.9139,
1396
+ "step": 198
1397
+ },
1398
+ {
1399
+ "epoch": 0.06472597170271589,
1400
+ "grad_norm": 7.485379695892334,
1401
+ "learning_rate": 5e-05,
1402
+ "loss": 3.8522,
1403
+ "step": 199
1404
+ },
1405
+ {
1406
+ "epoch": 0.06505122784192552,
1407
+ "grad_norm": 7.848803997039795,
1408
+ "learning_rate": 5e-05,
1409
+ "loss": 3.879,
1410
+ "step": 200
1411
+ },
1412
+ {
1413
+ "epoch": 0.06537648398113514,
1414
+ "grad_norm": 7.829058647155762,
1415
+ "learning_rate": 5e-05,
1416
+ "loss": 4.0283,
1417
+ "step": 201
1418
+ },
1419
+ {
1420
+ "epoch": 0.06570174012034477,
1421
+ "grad_norm": 8.984028816223145,
1422
+ "learning_rate": 5e-05,
1423
+ "loss": 3.8663,
1424
+ "step": 202
1425
+ },
1426
+ {
1427
+ "epoch": 0.0660269962595544,
1428
+ "grad_norm": 7.604732513427734,
1429
+ "learning_rate": 5e-05,
1430
+ "loss": 3.7871,
1431
+ "step": 203
1432
+ },
1433
+ {
1434
+ "epoch": 0.06635225239876402,
1435
+ "grad_norm": 6.779748916625977,
1436
+ "learning_rate": 5e-05,
1437
+ "loss": 3.9423,
1438
+ "step": 204
1439
+ },
1440
+ {
1441
+ "epoch": 0.06667750853797365,
1442
+ "grad_norm": 8.93659782409668,
1443
+ "learning_rate": 5e-05,
1444
+ "loss": 4.0208,
1445
+ "step": 205
1446
+ },
1447
+ {
1448
+ "epoch": 0.06700276467718329,
1449
+ "grad_norm": 6.093626022338867,
1450
+ "learning_rate": 5e-05,
1451
+ "loss": 3.8828,
1452
+ "step": 206
1453
+ },
1454
+ {
1455
+ "epoch": 0.06732802081639291,
1456
+ "grad_norm": 6.105995178222656,
1457
+ "learning_rate": 5e-05,
1458
+ "loss": 4.0983,
1459
+ "step": 207
1460
+ },
1461
+ {
1462
+ "epoch": 0.06765327695560254,
1463
+ "grad_norm": 7.379316329956055,
1464
+ "learning_rate": 5e-05,
1465
+ "loss": 4.0493,
1466
+ "step": 208
1467
+ },
1468
+ {
1469
+ "epoch": 0.06797853309481217,
1470
+ "grad_norm": 6.404873847961426,
1471
+ "learning_rate": 5e-05,
1472
+ "loss": 3.9271,
1473
+ "step": 209
1474
+ },
1475
+ {
1476
+ "epoch": 0.06830378923402179,
1477
+ "grad_norm": 7.560967445373535,
1478
+ "learning_rate": 5e-05,
1479
+ "loss": 3.8633,
1480
+ "step": 210
1481
+ },
1482
+ {
1483
+ "epoch": 0.06862904537323142,
1484
+ "grad_norm": 6.042522430419922,
1485
+ "learning_rate": 5e-05,
1486
+ "loss": 3.7389,
1487
+ "step": 211
1488
+ },
1489
+ {
1490
+ "epoch": 0.06895430151244104,
1491
+ "grad_norm": 6.4881367683410645,
1492
+ "learning_rate": 5e-05,
1493
+ "loss": 3.8196,
1494
+ "step": 212
1495
+ },
1496
+ {
1497
+ "epoch": 0.06927955765165067,
1498
+ "grad_norm": 6.52613639831543,
1499
+ "learning_rate": 5e-05,
1500
+ "loss": 3.7737,
1501
+ "step": 213
1502
+ },
1503
+ {
1504
+ "epoch": 0.06960481379086031,
1505
+ "grad_norm": 4.999444007873535,
1506
+ "learning_rate": 5e-05,
1507
+ "loss": 3.8722,
1508
+ "step": 214
1509
+ },
1510
+ {
1511
+ "epoch": 0.06993006993006994,
1512
+ "grad_norm": 7.060845851898193,
1513
+ "learning_rate": 5e-05,
1514
+ "loss": 3.774,
1515
+ "step": 215
1516
+ },
1517
+ {
1518
+ "epoch": 0.07025532606927956,
1519
+ "grad_norm": 8.545415878295898,
1520
+ "learning_rate": 5e-05,
1521
+ "loss": 3.8479,
1522
+ "step": 216
1523
+ },
1524
+ {
1525
+ "epoch": 0.07058058220848919,
1526
+ "grad_norm": 7.625663757324219,
1527
+ "learning_rate": 5e-05,
1528
+ "loss": 3.5068,
1529
+ "step": 217
1530
+ },
1531
+ {
1532
+ "epoch": 0.07090583834769881,
1533
+ "grad_norm": 7.191437244415283,
1534
+ "learning_rate": 5e-05,
1535
+ "loss": 3.7885,
1536
+ "step": 218
1537
+ },
1538
+ {
1539
+ "epoch": 0.07123109448690844,
1540
+ "grad_norm": 7.386499881744385,
1541
+ "learning_rate": 5e-05,
1542
+ "loss": 3.9494,
1543
+ "step": 219
1544
+ },
1545
+ {
1546
+ "epoch": 0.07155635062611806,
1547
+ "grad_norm": 5.616601943969727,
1548
+ "learning_rate": 5e-05,
1549
+ "loss": 3.8093,
1550
+ "step": 220
1551
+ },
1552
+ {
1553
+ "epoch": 0.07188160676532769,
1554
+ "grad_norm": 6.17822265625,
1555
+ "learning_rate": 5e-05,
1556
+ "loss": 3.8981,
1557
+ "step": 221
1558
+ },
1559
+ {
1560
+ "epoch": 0.07220686290453732,
1561
+ "grad_norm": 6.138426303863525,
1562
+ "learning_rate": 5e-05,
1563
+ "loss": 4.0091,
1564
+ "step": 222
1565
+ },
1566
+ {
1567
+ "epoch": 0.07253211904374696,
1568
+ "grad_norm": 6.6297831535339355,
1569
+ "learning_rate": 5e-05,
1570
+ "loss": 3.9291,
1571
+ "step": 223
1572
+ },
1573
+ {
1574
+ "epoch": 0.07285737518295658,
1575
+ "grad_norm": 6.557385444641113,
1576
+ "learning_rate": 5e-05,
1577
+ "loss": 3.8338,
1578
+ "step": 224
1579
+ },
1580
+ {
1581
+ "epoch": 0.07318263132216621,
1582
+ "grad_norm": 6.9579291343688965,
1583
+ "learning_rate": 5e-05,
1584
+ "loss": 3.8158,
1585
+ "step": 225
1586
+ },
1587
+ {
1588
+ "epoch": 0.07350788746137583,
1589
+ "grad_norm": 7.129207611083984,
1590
+ "learning_rate": 5e-05,
1591
+ "loss": 4.1731,
1592
+ "step": 226
1593
+ },
1594
+ {
1595
+ "epoch": 0.07383314360058546,
1596
+ "grad_norm": 6.645360946655273,
1597
+ "learning_rate": 5e-05,
1598
+ "loss": 3.6926,
1599
+ "step": 227
1600
+ },
1601
+ {
1602
+ "epoch": 0.07415839973979509,
1603
+ "grad_norm": 8.101895332336426,
1604
+ "learning_rate": 5e-05,
1605
+ "loss": 4.3064,
1606
+ "step": 228
1607
+ },
1608
+ {
1609
+ "epoch": 0.07448365587900471,
1610
+ "grad_norm": 7.812802791595459,
1611
+ "learning_rate": 5e-05,
1612
+ "loss": 3.7983,
1613
+ "step": 229
1614
+ },
1615
+ {
1616
+ "epoch": 0.07480891201821434,
1617
+ "grad_norm": 7.278988838195801,
1618
+ "learning_rate": 5e-05,
1619
+ "loss": 3.9539,
1620
+ "step": 230
1621
+ },
1622
+ {
1623
+ "epoch": 0.07513416815742398,
1624
+ "grad_norm": 7.909803867340088,
1625
+ "learning_rate": 5e-05,
1626
+ "loss": 3.5888,
1627
+ "step": 231
1628
+ },
1629
+ {
1630
+ "epoch": 0.0754594242966336,
1631
+ "grad_norm": 5.668457984924316,
1632
+ "learning_rate": 5e-05,
1633
+ "loss": 3.8219,
1634
+ "step": 232
1635
+ },
1636
+ {
1637
+ "epoch": 0.07578468043584323,
1638
+ "grad_norm": 6.159639358520508,
1639
+ "learning_rate": 5e-05,
1640
+ "loss": 4.0184,
1641
+ "step": 233
1642
+ },
1643
+ {
1644
+ "epoch": 0.07610993657505286,
1645
+ "grad_norm": 6.18869161605835,
1646
+ "learning_rate": 5e-05,
1647
+ "loss": 3.7595,
1648
+ "step": 234
1649
+ },
1650
+ {
1651
+ "epoch": 0.07643519271426248,
1652
+ "grad_norm": 5.471868991851807,
1653
+ "learning_rate": 5e-05,
1654
+ "loss": 3.9409,
1655
+ "step": 235
1656
+ },
1657
+ {
1658
+ "epoch": 0.07676044885347211,
1659
+ "grad_norm": 7.921130180358887,
1660
+ "learning_rate": 5e-05,
1661
+ "loss": 4.1452,
1662
+ "step": 236
1663
+ },
1664
+ {
1665
+ "epoch": 0.07708570499268173,
1666
+ "grad_norm": 6.49941349029541,
1667
+ "learning_rate": 5e-05,
1668
+ "loss": 3.8261,
1669
+ "step": 237
1670
+ },
1671
+ {
1672
+ "epoch": 0.07741096113189136,
1673
+ "grad_norm": 10.190372467041016,
1674
+ "learning_rate": 5e-05,
1675
+ "loss": 3.6134,
1676
+ "step": 238
1677
+ },
1678
+ {
1679
+ "epoch": 0.07773621727110099,
1680
+ "grad_norm": 5.995229244232178,
1681
+ "learning_rate": 5e-05,
1682
+ "loss": 3.7563,
1683
+ "step": 239
1684
+ },
1685
+ {
1686
+ "epoch": 0.07806147341031063,
1687
+ "grad_norm": 8.94497299194336,
1688
+ "learning_rate": 5e-05,
1689
+ "loss": 3.5379,
1690
+ "step": 240
1691
+ },
1692
+ {
1693
+ "epoch": 0.07838672954952025,
1694
+ "grad_norm": 10.990089416503906,
1695
+ "learning_rate": 5e-05,
1696
+ "loss": 3.9176,
1697
+ "step": 241
1698
+ },
1699
+ {
1700
+ "epoch": 0.07871198568872988,
1701
+ "grad_norm": 7.899653434753418,
1702
+ "learning_rate": 5e-05,
1703
+ "loss": 3.7961,
1704
+ "step": 242
1705
+ },
1706
+ {
1707
+ "epoch": 0.0790372418279395,
1708
+ "grad_norm": 7.264082908630371,
1709
+ "learning_rate": 5e-05,
1710
+ "loss": 3.5957,
1711
+ "step": 243
1712
+ },
1713
+ {
1714
+ "epoch": 0.07936249796714913,
1715
+ "grad_norm": 5.855433940887451,
1716
+ "learning_rate": 5e-05,
1717
+ "loss": 3.5913,
1718
+ "step": 244
1719
+ },
1720
+ {
1721
+ "epoch": 0.07968775410635875,
1722
+ "grad_norm": 6.854794979095459,
1723
+ "learning_rate": 5e-05,
1724
+ "loss": 3.6167,
1725
+ "step": 245
1726
+ },
1727
+ {
1728
+ "epoch": 0.08001301024556838,
1729
+ "grad_norm": 7.06243896484375,
1730
+ "learning_rate": 5e-05,
1731
+ "loss": 3.7909,
1732
+ "step": 246
1733
+ },
1734
+ {
1735
+ "epoch": 0.080338266384778,
1736
+ "grad_norm": 8.033863067626953,
1737
+ "learning_rate": 5e-05,
1738
+ "loss": 4.046,
1739
+ "step": 247
1740
+ },
1741
+ {
1742
+ "epoch": 0.08066352252398765,
1743
+ "grad_norm": 6.078402519226074,
1744
+ "learning_rate": 5e-05,
1745
+ "loss": 3.3269,
1746
+ "step": 248
1747
+ },
1748
+ {
1749
+ "epoch": 0.08098877866319727,
1750
+ "grad_norm": 9.511942863464355,
1751
+ "learning_rate": 5e-05,
1752
+ "loss": 3.5749,
1753
+ "step": 249
1754
+ },
1755
+ {
1756
+ "epoch": 0.0813140348024069,
1757
+ "grad_norm": 9.74225902557373,
1758
+ "learning_rate": 5e-05,
1759
+ "loss": 3.9162,
1760
+ "step": 250
1761
+ },
1762
+ {
1763
+ "epoch": 0.08163929094161652,
1764
+ "grad_norm": 6.432509422302246,
1765
+ "learning_rate": 5e-05,
1766
+ "loss": 3.8754,
1767
+ "step": 251
1768
+ },
1769
+ {
1770
+ "epoch": 0.08196454708082615,
1771
+ "grad_norm": 7.885379314422607,
1772
+ "learning_rate": 5e-05,
1773
+ "loss": 3.7214,
1774
+ "step": 252
1775
+ },
1776
+ {
1777
+ "epoch": 0.08228980322003578,
1778
+ "grad_norm": 11.552560806274414,
1779
+ "learning_rate": 5e-05,
1780
+ "loss": 3.7367,
1781
+ "step": 253
1782
+ },
1783
+ {
1784
+ "epoch": 0.0826150593592454,
1785
+ "grad_norm": 9.054500579833984,
1786
+ "learning_rate": 5e-05,
1787
+ "loss": 3.5476,
1788
+ "step": 254
1789
+ },
1790
+ {
1791
+ "epoch": 0.08294031549845503,
1792
+ "grad_norm": 5.916128635406494,
1793
+ "learning_rate": 5e-05,
1794
+ "loss": 3.695,
1795
+ "step": 255
1796
+ },
1797
+ {
1798
+ "epoch": 0.08326557163766467,
1799
+ "grad_norm": 10.65311050415039,
1800
+ "learning_rate": 5e-05,
1801
+ "loss": 4.1597,
1802
+ "step": 256
1803
+ },
1804
+ {
1805
+ "epoch": 0.0835908277768743,
1806
+ "grad_norm": 13.63244342803955,
1807
+ "learning_rate": 5e-05,
1808
+ "loss": 3.5796,
1809
+ "step": 257
1810
+ },
1811
+ {
1812
+ "epoch": 0.08391608391608392,
1813
+ "grad_norm": 10.830595970153809,
1814
+ "learning_rate": 5e-05,
1815
+ "loss": 3.4825,
1816
+ "step": 258
1817
+ },
1818
+ {
1819
+ "epoch": 0.08424134005529355,
1820
+ "grad_norm": 5.9953718185424805,
1821
+ "learning_rate": 5e-05,
1822
+ "loss": 3.792,
1823
+ "step": 259
1824
+ },
1825
+ {
1826
+ "epoch": 0.08456659619450317,
1827
+ "grad_norm": 12.76282787322998,
1828
+ "learning_rate": 5e-05,
1829
+ "loss": 3.5532,
1830
+ "step": 260
1831
+ },
1832
+ {
1833
+ "epoch": 0.0848918523337128,
1834
+ "grad_norm": 18.605255126953125,
1835
+ "learning_rate": 5e-05,
1836
+ "loss": 3.7788,
1837
+ "step": 261
1838
+ },
1839
+ {
1840
+ "epoch": 0.08521710847292242,
1841
+ "grad_norm": 12.753776550292969,
1842
+ "learning_rate": 5e-05,
1843
+ "loss": 3.7542,
1844
+ "step": 262
1845
+ },
1846
+ {
1847
+ "epoch": 0.08554236461213205,
1848
+ "grad_norm": 7.8098673820495605,
1849
+ "learning_rate": 5e-05,
1850
+ "loss": 4.4529,
1851
+ "step": 263
1852
+ },
1853
+ {
1854
+ "epoch": 0.08586762075134168,
1855
+ "grad_norm": 9.642732620239258,
1856
+ "learning_rate": 5e-05,
1857
+ "loss": 3.8515,
1858
+ "step": 264
1859
+ },
1860
+ {
1861
+ "epoch": 0.08619287689055131,
1862
+ "grad_norm": 5.820125102996826,
1863
+ "learning_rate": 5e-05,
1864
+ "loss": 4.3286,
1865
+ "step": 265
1866
+ },
1867
+ {
1868
+ "epoch": 0.08651813302976094,
1869
+ "grad_norm": 9.613585472106934,
1870
+ "learning_rate": 5e-05,
1871
+ "loss": 3.9569,
1872
+ "step": 266
1873
+ },
1874
+ {
1875
+ "epoch": 0.08684338916897057,
1876
+ "grad_norm": 9.211997985839844,
1877
+ "learning_rate": 5e-05,
1878
+ "loss": 3.9244,
1879
+ "step": 267
1880
+ },
1881
+ {
1882
+ "epoch": 0.08716864530818019,
1883
+ "grad_norm": 6.351746559143066,
1884
+ "learning_rate": 5e-05,
1885
+ "loss": 3.6584,
1886
+ "step": 268
1887
+ },
1888
+ {
1889
+ "epoch": 0.08749390144738982,
1890
+ "grad_norm": 6.802426815032959,
1891
+ "learning_rate": 5e-05,
1892
+ "loss": 3.8361,
1893
+ "step": 269
1894
+ },
1895
+ {
1896
+ "epoch": 0.08781915758659944,
1897
+ "grad_norm": 7.498976707458496,
1898
+ "learning_rate": 5e-05,
1899
+ "loss": 3.6589,
1900
+ "step": 270
1901
+ },
1902
+ {
1903
+ "epoch": 0.08814441372580907,
1904
+ "grad_norm": 6.058091163635254,
1905
+ "learning_rate": 5e-05,
1906
+ "loss": 3.6699,
1907
+ "step": 271
1908
+ },
1909
+ {
1910
+ "epoch": 0.0884696698650187,
1911
+ "grad_norm": 5.347617149353027,
1912
+ "learning_rate": 5e-05,
1913
+ "loss": 3.586,
1914
+ "step": 272
1915
+ },
1916
+ {
1917
+ "epoch": 0.08879492600422834,
1918
+ "grad_norm": 6.720355033874512,
1919
+ "learning_rate": 5e-05,
1920
+ "loss": 3.9508,
1921
+ "step": 273
1922
+ },
1923
+ {
1924
+ "epoch": 0.08912018214343796,
1925
+ "grad_norm": 5.8187174797058105,
1926
+ "learning_rate": 5e-05,
1927
+ "loss": 4.1922,
1928
+ "step": 274
1929
+ },
1930
+ {
1931
+ "epoch": 0.08944543828264759,
1932
+ "grad_norm": 4.981271743774414,
1933
+ "learning_rate": 5e-05,
1934
+ "loss": 3.5997,
1935
+ "step": 275
1936
+ },
1937
+ {
1938
+ "epoch": 0.08977069442185721,
1939
+ "grad_norm": 6.238826751708984,
1940
+ "learning_rate": 5e-05,
1941
+ "loss": 3.6929,
1942
+ "step": 276
1943
+ },
1944
+ {
1945
+ "epoch": 0.09009595056106684,
1946
+ "grad_norm": 7.073620796203613,
1947
+ "learning_rate": 5e-05,
1948
+ "loss": 3.7813,
1949
+ "step": 277
1950
+ },
1951
+ {
1952
+ "epoch": 0.09042120670027647,
1953
+ "grad_norm": 7.445998668670654,
1954
+ "learning_rate": 5e-05,
1955
+ "loss": 3.5475,
1956
+ "step": 278
1957
+ },
1958
+ {
1959
+ "epoch": 0.09074646283948609,
1960
+ "grad_norm": 6.120103359222412,
1961
+ "learning_rate": 5e-05,
1962
+ "loss": 3.7499,
1963
+ "step": 279
1964
+ },
1965
+ {
1966
+ "epoch": 0.09107171897869572,
1967
+ "grad_norm": 8.138337135314941,
1968
+ "learning_rate": 5e-05,
1969
+ "loss": 3.8753,
1970
+ "step": 280
1971
+ },
1972
+ {
1973
+ "epoch": 0.09139697511790534,
1974
+ "grad_norm": 7.516664981842041,
1975
+ "learning_rate": 5e-05,
1976
+ "loss": 3.6093,
1977
+ "step": 281
1978
+ },
1979
+ {
1980
+ "epoch": 0.09172223125711498,
1981
+ "grad_norm": 5.630866050720215,
1982
+ "learning_rate": 5e-05,
1983
+ "loss": 3.7269,
1984
+ "step": 282
1985
+ },
1986
+ {
1987
+ "epoch": 0.09204748739632461,
1988
+ "grad_norm": 5.958463668823242,
1989
+ "learning_rate": 5e-05,
1990
+ "loss": 3.7448,
1991
+ "step": 283
1992
+ },
1993
+ {
1994
+ "epoch": 0.09237274353553424,
1995
+ "grad_norm": 6.990318298339844,
1996
+ "learning_rate": 5e-05,
1997
+ "loss": 3.6994,
1998
+ "step": 284
1999
+ },
2000
+ {
2001
+ "epoch": 0.09269799967474386,
2002
+ "grad_norm": 6.364505767822266,
2003
+ "learning_rate": 5e-05,
2004
+ "loss": 3.5316,
2005
+ "step": 285
2006
+ },
2007
+ {
2008
+ "epoch": 0.09302325581395349,
2009
+ "grad_norm": 6.308237552642822,
2010
+ "learning_rate": 5e-05,
2011
+ "loss": 4.1899,
2012
+ "step": 286
2013
+ },
2014
+ {
2015
+ "epoch": 0.09334851195316311,
2016
+ "grad_norm": 8.585831642150879,
2017
+ "learning_rate": 5e-05,
2018
+ "loss": 3.7078,
2019
+ "step": 287
2020
+ },
2021
+ {
2022
+ "epoch": 0.09367376809237274,
2023
+ "grad_norm": 6.02251672744751,
2024
+ "learning_rate": 5e-05,
2025
+ "loss": 3.7081,
2026
+ "step": 288
2027
+ },
2028
+ {
2029
+ "epoch": 0.09399902423158236,
2030
+ "grad_norm": 6.891519546508789,
2031
+ "learning_rate": 5e-05,
2032
+ "loss": 3.9432,
2033
+ "step": 289
2034
+ },
2035
+ {
2036
+ "epoch": 0.094324280370792,
2037
+ "grad_norm": 8.65449047088623,
2038
+ "learning_rate": 5e-05,
2039
+ "loss": 3.4704,
2040
+ "step": 290
2041
+ },
2042
+ {
2043
+ "epoch": 0.09464953651000163,
2044
+ "grad_norm": 6.133912563323975,
2045
+ "learning_rate": 5e-05,
2046
+ "loss": 3.8847,
2047
+ "step": 291
2048
+ },
2049
+ {
2050
+ "epoch": 0.09497479264921126,
2051
+ "grad_norm": 6.619656085968018,
2052
+ "learning_rate": 5e-05,
2053
+ "loss": 3.5818,
2054
+ "step": 292
2055
+ },
2056
+ {
2057
+ "epoch": 0.09530004878842088,
2058
+ "grad_norm": 9.706931114196777,
2059
+ "learning_rate": 5e-05,
2060
+ "loss": 3.6839,
2061
+ "step": 293
2062
+ },
2063
+ {
2064
+ "epoch": 0.09562530492763051,
2065
+ "grad_norm": 6.43947172164917,
2066
+ "learning_rate": 5e-05,
2067
+ "loss": 4.0691,
2068
+ "step": 294
2069
+ },
2070
+ {
2071
+ "epoch": 0.09595056106684013,
2072
+ "grad_norm": 7.45628547668457,
2073
+ "learning_rate": 5e-05,
2074
+ "loss": 3.9546,
2075
+ "step": 295
2076
+ },
2077
+ {
2078
+ "epoch": 0.09627581720604976,
2079
+ "grad_norm": 9.464739799499512,
2080
+ "learning_rate": 5e-05,
2081
+ "loss": 3.8893,
2082
+ "step": 296
2083
+ },
2084
+ {
2085
+ "epoch": 0.09660107334525939,
2086
+ "grad_norm": 9.263232231140137,
2087
+ "learning_rate": 5e-05,
2088
+ "loss": 4.3718,
2089
+ "step": 297
2090
+ },
2091
+ {
2092
+ "epoch": 0.09692632948446903,
2093
+ "grad_norm": 6.793147087097168,
2094
+ "learning_rate": 5e-05,
2095
+ "loss": 3.7036,
2096
+ "step": 298
2097
+ },
2098
+ {
2099
+ "epoch": 0.09725158562367865,
2100
+ "grad_norm": 12.16869831085205,
2101
+ "learning_rate": 5e-05,
2102
+ "loss": 4.2924,
2103
+ "step": 299
2104
+ },
2105
+ {
2106
+ "epoch": 0.09757684176288828,
2107
+ "grad_norm": 10.058348655700684,
2108
+ "learning_rate": 5e-05,
2109
+ "loss": 3.852,
2110
+ "step": 300
2111
+ },
2112
+ {
2113
+ "epoch": 0.0979020979020979,
2114
+ "grad_norm": 5.966858386993408,
2115
+ "learning_rate": 5e-05,
2116
+ "loss": 3.7514,
2117
+ "step": 301
2118
+ },
2119
+ {
2120
+ "epoch": 0.09822735404130753,
2121
+ "grad_norm": 9.075318336486816,
2122
+ "learning_rate": 5e-05,
2123
+ "loss": 3.6808,
2124
+ "step": 302
2125
+ },
2126
+ {
2127
+ "epoch": 0.09855261018051716,
2128
+ "grad_norm": 11.372644424438477,
2129
+ "learning_rate": 5e-05,
2130
+ "loss": 4.0671,
2131
+ "step": 303
2132
+ },
2133
+ {
2134
+ "epoch": 0.09887786631972678,
2135
+ "grad_norm": 12.080697059631348,
2136
+ "learning_rate": 5e-05,
2137
+ "loss": 3.2312,
2138
+ "step": 304
2139
+ },
2140
+ {
2141
+ "epoch": 0.09920312245893641,
2142
+ "grad_norm": 5.336820125579834,
2143
+ "learning_rate": 5e-05,
2144
+ "loss": 3.5947,
2145
+ "step": 305
2146
+ },
2147
+ {
2148
+ "epoch": 0.09952837859814603,
2149
+ "grad_norm": 7.266972064971924,
2150
+ "learning_rate": 5e-05,
2151
+ "loss": 3.1227,
2152
+ "step": 306
2153
+ },
2154
+ {
2155
+ "epoch": 0.09985363473735567,
2156
+ "grad_norm": 6.334729194641113,
2157
+ "learning_rate": 5e-05,
2158
+ "loss": 3.6156,
2159
+ "step": 307
2160
+ },
2161
+ {
2162
+ "epoch": 0.1001788908765653,
2163
+ "grad_norm": 5.643017768859863,
2164
+ "learning_rate": 5e-05,
2165
+ "loss": 3.6258,
2166
+ "step": 308
2167
+ },
2168
+ {
2169
+ "epoch": 0.10050414701577493,
2170
+ "grad_norm": 5.640905380249023,
2171
+ "learning_rate": 5e-05,
2172
+ "loss": 3.9051,
2173
+ "step": 309
2174
+ },
2175
+ {
2176
+ "epoch": 0.10082940315498455,
2177
+ "grad_norm": 6.985749244689941,
2178
+ "learning_rate": 5e-05,
2179
+ "loss": 3.6725,
2180
+ "step": 310
2181
+ },
2182
+ {
2183
+ "epoch": 0.10115465929419418,
2184
+ "grad_norm": 6.893199443817139,
2185
+ "learning_rate": 5e-05,
2186
+ "loss": 3.4541,
2187
+ "step": 311
2188
+ },
2189
+ {
2190
+ "epoch": 0.1014799154334038,
2191
+ "grad_norm": 6.803256511688232,
2192
+ "learning_rate": 5e-05,
2193
+ "loss": 3.0762,
2194
+ "step": 312
2195
+ },
2196
+ {
2197
+ "epoch": 0.10180517157261343,
2198
+ "grad_norm": 8.491405487060547,
2199
+ "learning_rate": 5e-05,
2200
+ "loss": 3.719,
2201
+ "step": 313
2202
+ },
2203
+ {
2204
+ "epoch": 0.10213042771182305,
2205
+ "grad_norm": 5.912895202636719,
2206
+ "learning_rate": 5e-05,
2207
+ "loss": 3.6613,
2208
+ "step": 314
2209
+ },
2210
+ {
2211
+ "epoch": 0.1024556838510327,
2212
+ "grad_norm": 6.211380958557129,
2213
+ "learning_rate": 5e-05,
2214
+ "loss": 3.632,
2215
+ "step": 315
2216
+ },
2217
+ {
2218
+ "epoch": 0.10278093999024232,
2219
+ "grad_norm": 8.083343505859375,
2220
+ "learning_rate": 5e-05,
2221
+ "loss": 3.5327,
2222
+ "step": 316
2223
+ },
2224
+ {
2225
+ "epoch": 0.10310619612945195,
2226
+ "grad_norm": 8.091614723205566,
2227
+ "learning_rate": 5e-05,
2228
+ "loss": 3.8166,
2229
+ "step": 317
2230
+ },
2231
+ {
2232
+ "epoch": 0.10343145226866157,
2233
+ "grad_norm": 5.631373405456543,
2234
+ "learning_rate": 5e-05,
2235
+ "loss": 3.6482,
2236
+ "step": 318
2237
+ },
2238
+ {
2239
+ "epoch": 0.1037567084078712,
2240
+ "grad_norm": 12.532264709472656,
2241
+ "learning_rate": 5e-05,
2242
+ "loss": 3.5283,
2243
+ "step": 319
2244
+ },
2245
+ {
2246
+ "epoch": 0.10408196454708082,
2247
+ "grad_norm": 5.990050315856934,
2248
+ "learning_rate": 5e-05,
2249
+ "loss": 3.9679,
2250
+ "step": 320
2251
+ },
2252
+ {
2253
+ "epoch": 0.10440722068629045,
2254
+ "grad_norm": 6.2988667488098145,
2255
+ "learning_rate": 5e-05,
2256
+ "loss": 3.8285,
2257
+ "step": 321
2258
+ },
2259
+ {
2260
+ "epoch": 0.10473247682550008,
2261
+ "grad_norm": 8.320550918579102,
2262
+ "learning_rate": 5e-05,
2263
+ "loss": 4.0295,
2264
+ "step": 322
2265
+ },
2266
+ {
2267
+ "epoch": 0.10505773296470972,
2268
+ "grad_norm": 6.640725612640381,
2269
+ "learning_rate": 5e-05,
2270
+ "loss": 3.5094,
2271
+ "step": 323
2272
+ },
2273
+ {
2274
+ "epoch": 0.10538298910391934,
2275
+ "grad_norm": 6.340143203735352,
2276
+ "learning_rate": 5e-05,
2277
+ "loss": 3.6144,
2278
+ "step": 324
2279
+ },
2280
+ {
2281
+ "epoch": 0.10570824524312897,
2282
+ "grad_norm": 7.403520584106445,
2283
+ "learning_rate": 5e-05,
2284
+ "loss": 3.9676,
2285
+ "step": 325
2286
+ },
2287
+ {
2288
+ "epoch": 0.1060335013823386,
2289
+ "grad_norm": 7.462515354156494,
2290
+ "learning_rate": 5e-05,
2291
+ "loss": 3.7215,
2292
+ "step": 326
2293
+ },
2294
+ {
2295
+ "epoch": 0.10635875752154822,
2296
+ "grad_norm": 5.612910747528076,
2297
+ "learning_rate": 5e-05,
2298
+ "loss": 3.5211,
2299
+ "step": 327
2300
+ },
2301
+ {
2302
+ "epoch": 0.10668401366075785,
2303
+ "grad_norm": 7.502828598022461,
2304
+ "learning_rate": 5e-05,
2305
+ "loss": 4.0133,
2306
+ "step": 328
2307
+ },
2308
+ {
2309
+ "epoch": 0.10700926979996747,
2310
+ "grad_norm": 8.6078462600708,
2311
+ "learning_rate": 5e-05,
2312
+ "loss": 4.1303,
2313
+ "step": 329
2314
+ },
2315
+ {
2316
+ "epoch": 0.1073345259391771,
2317
+ "grad_norm": 9.176727294921875,
2318
+ "learning_rate": 5e-05,
2319
+ "loss": 3.8978,
2320
+ "step": 330
2321
+ },
2322
+ {
2323
+ "epoch": 0.10765978207838672,
2324
+ "grad_norm": 10.041065216064453,
2325
+ "learning_rate": 5e-05,
2326
+ "loss": 4.0287,
2327
+ "step": 331
2328
+ },
2329
+ {
2330
+ "epoch": 0.10798503821759636,
2331
+ "grad_norm": 9.741332054138184,
2332
+ "learning_rate": 5e-05,
2333
+ "loss": 3.5463,
2334
+ "step": 332
2335
+ },
2336
+ {
2337
+ "epoch": 0.10831029435680599,
2338
+ "grad_norm": 7.8142499923706055,
2339
+ "learning_rate": 5e-05,
2340
+ "loss": 3.7777,
2341
+ "step": 333
2342
+ },
2343
+ {
2344
+ "epoch": 0.10863555049601561,
2345
+ "grad_norm": 8.65985107421875,
2346
+ "learning_rate": 5e-05,
2347
+ "loss": 3.7169,
2348
+ "step": 334
2349
+ },
2350
+ {
2351
+ "epoch": 0.10896080663522524,
2352
+ "grad_norm": 9.955862998962402,
2353
+ "learning_rate": 5e-05,
2354
+ "loss": 3.4762,
2355
+ "step": 335
2356
+ },
2357
+ {
2358
+ "epoch": 0.10928606277443487,
2359
+ "grad_norm": 8.422538757324219,
2360
+ "learning_rate": 5e-05,
2361
+ "loss": 3.8524,
2362
+ "step": 336
2363
+ },
2364
+ {
2365
+ "epoch": 0.10961131891364449,
2366
+ "grad_norm": 6.849399089813232,
2367
+ "learning_rate": 5e-05,
2368
+ "loss": 3.5617,
2369
+ "step": 337
2370
+ },
2371
+ {
2372
+ "epoch": 0.10993657505285412,
2373
+ "grad_norm": 10.709142684936523,
2374
+ "learning_rate": 5e-05,
2375
+ "loss": 3.8691,
2376
+ "step": 338
2377
+ },
2378
+ {
2379
+ "epoch": 0.11026183119206374,
2380
+ "grad_norm": 6.636946678161621,
2381
+ "learning_rate": 5e-05,
2382
+ "loss": 3.9301,
2383
+ "step": 339
2384
+ },
2385
+ {
2386
+ "epoch": 0.11058708733127338,
2387
+ "grad_norm": 7.364269256591797,
2388
+ "learning_rate": 5e-05,
2389
+ "loss": 3.485,
2390
+ "step": 340
2391
+ },
2392
+ {
2393
+ "epoch": 0.11091234347048301,
2394
+ "grad_norm": 12.705086708068848,
2395
+ "learning_rate": 5e-05,
2396
+ "loss": 3.8413,
2397
+ "step": 341
2398
+ },
2399
+ {
2400
+ "epoch": 0.11123759960969264,
2401
+ "grad_norm": 6.6930928230285645,
2402
+ "learning_rate": 5e-05,
2403
+ "loss": 3.8912,
2404
+ "step": 342
2405
+ },
2406
+ {
2407
+ "epoch": 0.11156285574890226,
2408
+ "grad_norm": 6.823209285736084,
2409
+ "learning_rate": 5e-05,
2410
+ "loss": 4.2082,
2411
+ "step": 343
2412
+ },
2413
+ {
2414
+ "epoch": 0.11188811188811189,
2415
+ "grad_norm": 9.133801460266113,
2416
+ "learning_rate": 5e-05,
2417
+ "loss": 4.0229,
2418
+ "step": 344
2419
+ },
2420
+ {
2421
+ "epoch": 0.11221336802732151,
2422
+ "grad_norm": 7.471242904663086,
2423
+ "learning_rate": 5e-05,
2424
+ "loss": 3.6206,
2425
+ "step": 345
2426
+ },
2427
+ {
2428
+ "epoch": 0.11253862416653114,
2429
+ "grad_norm": 7.450990676879883,
2430
+ "learning_rate": 5e-05,
2431
+ "loss": 3.7254,
2432
+ "step": 346
2433
+ },
2434
+ {
2435
+ "epoch": 0.11286388030574077,
2436
+ "grad_norm": 7.53968620300293,
2437
+ "learning_rate": 5e-05,
2438
+ "loss": 4.0496,
2439
+ "step": 347
2440
+ },
2441
+ {
2442
+ "epoch": 0.11318913644495039,
2443
+ "grad_norm": 12.383916854858398,
2444
+ "learning_rate": 5e-05,
2445
+ "loss": 3.6815,
2446
+ "step": 348
2447
+ },
2448
+ {
2449
+ "epoch": 0.11351439258416003,
2450
+ "grad_norm": 8.754898071289062,
2451
+ "learning_rate": 5e-05,
2452
+ "loss": 3.5597,
2453
+ "step": 349
2454
+ },
2455
+ {
2456
+ "epoch": 0.11383964872336966,
2457
+ "grad_norm": 7.65074348449707,
2458
+ "learning_rate": 5e-05,
2459
+ "loss": 4.0558,
2460
+ "step": 350
2461
+ },
2462
+ {
2463
+ "epoch": 0.11416490486257928,
2464
+ "grad_norm": 6.735880374908447,
2465
+ "learning_rate": 5e-05,
2466
+ "loss": 3.5702,
2467
+ "step": 351
2468
+ },
2469
+ {
2470
+ "epoch": 0.11449016100178891,
2471
+ "grad_norm": 6.371280670166016,
2472
+ "learning_rate": 5e-05,
2473
+ "loss": 3.7325,
2474
+ "step": 352
2475
+ },
2476
+ {
2477
+ "epoch": 0.11481541714099854,
2478
+ "grad_norm": 6.2961745262146,
2479
+ "learning_rate": 5e-05,
2480
+ "loss": 3.8299,
2481
+ "step": 353
2482
+ },
2483
+ {
2484
+ "epoch": 0.11514067328020816,
2485
+ "grad_norm": 8.073019027709961,
2486
+ "learning_rate": 5e-05,
2487
+ "loss": 3.77,
2488
+ "step": 354
2489
+ },
2490
+ {
2491
+ "epoch": 0.11546592941941779,
2492
+ "grad_norm": 7.552728176116943,
2493
+ "learning_rate": 5e-05,
2494
+ "loss": 3.6974,
2495
+ "step": 355
2496
+ },
2497
+ {
2498
+ "epoch": 0.11579118555862741,
2499
+ "grad_norm": 6.595133304595947,
2500
+ "learning_rate": 5e-05,
2501
+ "loss": 3.6737,
2502
+ "step": 356
2503
+ },
2504
+ {
2505
+ "epoch": 0.11611644169783705,
2506
+ "grad_norm": 7.287491321563721,
2507
+ "learning_rate": 5e-05,
2508
+ "loss": 3.5135,
2509
+ "step": 357
2510
+ },
2511
+ {
2512
+ "epoch": 0.11644169783704668,
2513
+ "grad_norm": 8.068704605102539,
2514
+ "learning_rate": 5e-05,
2515
+ "loss": 3.9036,
2516
+ "step": 358
2517
+ },
2518
+ {
2519
+ "epoch": 0.1167669539762563,
2520
+ "grad_norm": 6.8040618896484375,
2521
+ "learning_rate": 5e-05,
2522
+ "loss": 3.8034,
2523
+ "step": 359
2524
+ },
2525
+ {
2526
+ "epoch": 0.11709221011546593,
2527
+ "grad_norm": 9.113652229309082,
2528
+ "learning_rate": 5e-05,
2529
+ "loss": 3.4709,
2530
+ "step": 360
2531
+ },
2532
+ {
2533
+ "epoch": 0.11741746625467556,
2534
+ "grad_norm": 8.15011978149414,
2535
+ "learning_rate": 5e-05,
2536
+ "loss": 4.111,
2537
+ "step": 361
2538
+ },
2539
+ {
2540
+ "epoch": 0.11774272239388518,
2541
+ "grad_norm": 6.63869047164917,
2542
+ "learning_rate": 5e-05,
2543
+ "loss": 3.8713,
2544
+ "step": 362
2545
+ },
2546
+ {
2547
+ "epoch": 0.11806797853309481,
2548
+ "grad_norm": 6.785707473754883,
2549
+ "learning_rate": 5e-05,
2550
+ "loss": 3.1996,
2551
+ "step": 363
2552
+ },
2553
+ {
2554
+ "epoch": 0.11839323467230443,
2555
+ "grad_norm": 7.099983215332031,
2556
+ "learning_rate": 5e-05,
2557
+ "loss": 3.4236,
2558
+ "step": 364
2559
+ },
2560
+ {
2561
+ "epoch": 0.11871849081151407,
2562
+ "grad_norm": 7.014822006225586,
2563
+ "learning_rate": 5e-05,
2564
+ "loss": 3.7598,
2565
+ "step": 365
2566
+ },
2567
+ {
2568
+ "epoch": 0.1190437469507237,
2569
+ "grad_norm": 7.138816833496094,
2570
+ "learning_rate": 5e-05,
2571
+ "loss": 3.6681,
2572
+ "step": 366
2573
+ },
2574
+ {
2575
+ "epoch": 0.11936900308993333,
2576
+ "grad_norm": 6.563411235809326,
2577
+ "learning_rate": 5e-05,
2578
+ "loss": 3.82,
2579
+ "step": 367
2580
+ },
2581
+ {
2582
+ "epoch": 0.11969425922914295,
2583
+ "grad_norm": 6.389061450958252,
2584
+ "learning_rate": 5e-05,
2585
+ "loss": 3.7305,
2586
+ "step": 368
2587
+ },
2588
+ {
2589
+ "epoch": 0.12001951536835258,
2590
+ "grad_norm": 8.009288787841797,
2591
+ "learning_rate": 5e-05,
2592
+ "loss": 3.9673,
2593
+ "step": 369
2594
+ },
2595
+ {
2596
+ "epoch": 0.1203447715075622,
2597
+ "grad_norm": 6.436244964599609,
2598
+ "learning_rate": 5e-05,
2599
+ "loss": 3.6934,
2600
+ "step": 370
2601
+ },
2602
+ {
2603
+ "epoch": 0.12067002764677183,
2604
+ "grad_norm": 7.818999767303467,
2605
+ "learning_rate": 5e-05,
2606
+ "loss": 3.8564,
2607
+ "step": 371
2608
+ },
2609
+ {
2610
+ "epoch": 0.12099528378598146,
2611
+ "grad_norm": 7.891193866729736,
2612
+ "learning_rate": 5e-05,
2613
+ "loss": 3.9254,
2614
+ "step": 372
2615
+ },
2616
+ {
2617
+ "epoch": 0.12132053992519108,
2618
+ "grad_norm": 8.381048202514648,
2619
+ "learning_rate": 5e-05,
2620
+ "loss": 3.8966,
2621
+ "step": 373
2622
+ },
2623
+ {
2624
+ "epoch": 0.12164579606440072,
2625
+ "grad_norm": 7.328115940093994,
2626
+ "learning_rate": 5e-05,
2627
+ "loss": 3.4804,
2628
+ "step": 374
2629
+ },
2630
+ {
2631
+ "epoch": 0.12197105220361035,
2632
+ "grad_norm": 7.0525922775268555,
2633
+ "learning_rate": 5e-05,
2634
+ "loss": 4.0177,
2635
+ "step": 375
2636
+ },
2637
+ {
2638
+ "epoch": 0.12229630834281997,
2639
+ "grad_norm": 7.212350845336914,
2640
+ "learning_rate": 5e-05,
2641
+ "loss": 3.4131,
2642
+ "step": 376
2643
+ },
2644
+ {
2645
+ "epoch": 0.1226215644820296,
2646
+ "grad_norm": 6.808897972106934,
2647
+ "learning_rate": 5e-05,
2648
+ "loss": 3.5174,
2649
+ "step": 377
2650
+ },
2651
+ {
2652
+ "epoch": 0.12294682062123923,
2653
+ "grad_norm": 7.094473838806152,
2654
+ "learning_rate": 5e-05,
2655
+ "loss": 3.7852,
2656
+ "step": 378
2657
+ },
2658
+ {
2659
+ "epoch": 0.12327207676044885,
2660
+ "grad_norm": 6.998628616333008,
2661
+ "learning_rate": 5e-05,
2662
+ "loss": 3.8094,
2663
+ "step": 379
2664
+ },
2665
+ {
2666
+ "epoch": 0.12359733289965848,
2667
+ "grad_norm": 7.043560028076172,
2668
+ "learning_rate": 5e-05,
2669
+ "loss": 3.6862,
2670
+ "step": 380
2671
+ },
2672
+ {
2673
+ "epoch": 0.1239225890388681,
2674
+ "grad_norm": 6.198429107666016,
2675
+ "learning_rate": 5e-05,
2676
+ "loss": 3.5462,
2677
+ "step": 381
2678
+ },
2679
+ {
2680
+ "epoch": 0.12424784517807774,
2681
+ "grad_norm": 6.5926513671875,
2682
+ "learning_rate": 5e-05,
2683
+ "loss": 3.9851,
2684
+ "step": 382
2685
+ },
2686
+ {
2687
+ "epoch": 0.12457310131728737,
2688
+ "grad_norm": 5.893482208251953,
2689
+ "learning_rate": 5e-05,
2690
+ "loss": 3.6685,
2691
+ "step": 383
2692
+ },
2693
+ {
2694
+ "epoch": 0.124898357456497,
2695
+ "grad_norm": 5.886164665222168,
2696
+ "learning_rate": 5e-05,
2697
+ "loss": 3.7367,
2698
+ "step": 384
2699
+ },
2700
+ {
2701
+ "epoch": 0.1252236135957066,
2702
+ "grad_norm": 7.275190353393555,
2703
+ "learning_rate": 5e-05,
2704
+ "loss": 3.4105,
2705
+ "step": 385
2706
+ },
2707
+ {
2708
+ "epoch": 0.12554886973491625,
2709
+ "grad_norm": 8.864086151123047,
2710
+ "learning_rate": 5e-05,
2711
+ "loss": 4.0736,
2712
+ "step": 386
2713
+ },
2714
+ {
2715
+ "epoch": 0.1258741258741259,
2716
+ "grad_norm": 9.517216682434082,
2717
+ "learning_rate": 5e-05,
2718
+ "loss": 3.6961,
2719
+ "step": 387
2720
+ },
2721
+ {
2722
+ "epoch": 0.1261993820133355,
2723
+ "grad_norm": 8.437984466552734,
2724
+ "learning_rate": 5e-05,
2725
+ "loss": 4.1894,
2726
+ "step": 388
2727
+ },
2728
+ {
2729
+ "epoch": 0.12652463815254514,
2730
+ "grad_norm": 9.38377571105957,
2731
+ "learning_rate": 5e-05,
2732
+ "loss": 3.9029,
2733
+ "step": 389
2734
+ },
2735
+ {
2736
+ "epoch": 0.12684989429175475,
2737
+ "grad_norm": 8.621910095214844,
2738
+ "learning_rate": 5e-05,
2739
+ "loss": 3.9334,
2740
+ "step": 390
2741
+ },
2742
+ {
2743
+ "epoch": 0.1271751504309644,
2744
+ "grad_norm": 7.772785186767578,
2745
+ "learning_rate": 5e-05,
2746
+ "loss": 3.4678,
2747
+ "step": 391
2748
+ },
2749
+ {
2750
+ "epoch": 0.127500406570174,
2751
+ "grad_norm": 8.752019882202148,
2752
+ "learning_rate": 5e-05,
2753
+ "loss": 3.62,
2754
+ "step": 392
2755
+ },
2756
+ {
2757
+ "epoch": 0.12782566270938364,
2758
+ "grad_norm": 7.4593706130981445,
2759
+ "learning_rate": 5e-05,
2760
+ "loss": 3.907,
2761
+ "step": 393
2762
+ },
2763
+ {
2764
+ "epoch": 0.12815091884859325,
2765
+ "grad_norm": 7.014523983001709,
2766
+ "learning_rate": 5e-05,
2767
+ "loss": 3.7242,
2768
+ "step": 394
2769
+ },
2770
+ {
2771
+ "epoch": 0.1284761749878029,
2772
+ "grad_norm": 7.254335403442383,
2773
+ "learning_rate": 5e-05,
2774
+ "loss": 3.8927,
2775
+ "step": 395
2776
+ },
2777
+ {
2778
+ "epoch": 0.12880143112701253,
2779
+ "grad_norm": 7.555474281311035,
2780
+ "learning_rate": 5e-05,
2781
+ "loss": 4.3168,
2782
+ "step": 396
2783
+ },
2784
+ {
2785
+ "epoch": 0.12912668726622215,
2786
+ "grad_norm": 11.899949073791504,
2787
+ "learning_rate": 5e-05,
2788
+ "loss": 3.6753,
2789
+ "step": 397
2790
+ },
2791
+ {
2792
+ "epoch": 0.12945194340543179,
2793
+ "grad_norm": 11.901144027709961,
2794
+ "learning_rate": 5e-05,
2795
+ "loss": 3.7524,
2796
+ "step": 398
2797
+ },
2798
+ {
2799
+ "epoch": 0.1297771995446414,
2800
+ "grad_norm": 9.584845542907715,
2801
+ "learning_rate": 5e-05,
2802
+ "loss": 3.4015,
2803
+ "step": 399
2804
+ },
2805
+ {
2806
+ "epoch": 0.13010245568385104,
2807
+ "grad_norm": 12.348978042602539,
2808
+ "learning_rate": 5e-05,
2809
+ "loss": 3.6966,
2810
+ "step": 400
2811
+ },
2812
+ {
2813
+ "epoch": 0.13042771182306065,
2814
+ "grad_norm": 12.886831283569336,
2815
+ "learning_rate": 5e-05,
2816
+ "loss": 4.5981,
2817
+ "step": 401
2818
+ },
2819
+ {
2820
+ "epoch": 0.1307529679622703,
2821
+ "grad_norm": 7.066255569458008,
2822
+ "learning_rate": 5e-05,
2823
+ "loss": 3.6408,
2824
+ "step": 402
2825
+ },
2826
+ {
2827
+ "epoch": 0.13107822410147993,
2828
+ "grad_norm": 7.1310014724731445,
2829
+ "learning_rate": 5e-05,
2830
+ "loss": 3.7652,
2831
+ "step": 403
2832
+ },
2833
+ {
2834
+ "epoch": 0.13140348024068954,
2835
+ "grad_norm": 7.658654689788818,
2836
+ "learning_rate": 5e-05,
2837
+ "loss": 3.9908,
2838
+ "step": 404
2839
+ },
2840
+ {
2841
+ "epoch": 0.13172873637989918,
2842
+ "grad_norm": 9.139669418334961,
2843
+ "learning_rate": 5e-05,
2844
+ "loss": 3.7055,
2845
+ "step": 405
2846
+ },
2847
+ {
2848
+ "epoch": 0.1320539925191088,
2849
+ "grad_norm": 7.406591892242432,
2850
+ "learning_rate": 5e-05,
2851
+ "loss": 3.9435,
2852
+ "step": 406
2853
+ },
2854
+ {
2855
+ "epoch": 0.13237924865831843,
2856
+ "grad_norm": 7.888886451721191,
2857
+ "learning_rate": 5e-05,
2858
+ "loss": 3.7047,
2859
+ "step": 407
2860
+ },
2861
+ {
2862
+ "epoch": 0.13270450479752804,
2863
+ "grad_norm": 6.58457088470459,
2864
+ "learning_rate": 5e-05,
2865
+ "loss": 3.5882,
2866
+ "step": 408
2867
+ },
2868
+ {
2869
+ "epoch": 0.13302976093673768,
2870
+ "grad_norm": 6.361485958099365,
2871
+ "learning_rate": 5e-05,
2872
+ "loss": 3.6571,
2873
+ "step": 409
2874
+ },
2875
+ {
2876
+ "epoch": 0.1333550170759473,
2877
+ "grad_norm": 10.977415084838867,
2878
+ "learning_rate": 5e-05,
2879
+ "loss": 3.9192,
2880
+ "step": 410
2881
+ },
2882
+ {
2883
+ "epoch": 0.13368027321515694,
2884
+ "grad_norm": 8.509581565856934,
2885
+ "learning_rate": 5e-05,
2886
+ "loss": 3.7566,
2887
+ "step": 411
2888
+ },
2889
+ {
2890
+ "epoch": 0.13400552935436658,
2891
+ "grad_norm": 7.781307220458984,
2892
+ "learning_rate": 5e-05,
2893
+ "loss": 4.1816,
2894
+ "step": 412
2895
+ },
2896
+ {
2897
+ "epoch": 0.1343307854935762,
2898
+ "grad_norm": 7.275979518890381,
2899
+ "learning_rate": 5e-05,
2900
+ "loss": 3.2031,
2901
+ "step": 413
2902
+ },
2903
+ {
2904
+ "epoch": 0.13465604163278583,
2905
+ "grad_norm": 7.543152332305908,
2906
+ "learning_rate": 5e-05,
2907
+ "loss": 3.9525,
2908
+ "step": 414
2909
+ },
2910
+ {
2911
+ "epoch": 0.13498129777199544,
2912
+ "grad_norm": 9.093851089477539,
2913
+ "learning_rate": 5e-05,
2914
+ "loss": 3.9126,
2915
+ "step": 415
2916
+ },
2917
+ {
2918
+ "epoch": 0.13530655391120508,
2919
+ "grad_norm": 12.760071754455566,
2920
+ "learning_rate": 5e-05,
2921
+ "loss": 3.7383,
2922
+ "step": 416
2923
+ },
2924
+ {
2925
+ "epoch": 0.1356318100504147,
2926
+ "grad_norm": 8.025795936584473,
2927
+ "learning_rate": 5e-05,
2928
+ "loss": 3.5959,
2929
+ "step": 417
2930
+ },
2931
+ {
2932
+ "epoch": 0.13595706618962433,
2933
+ "grad_norm": 7.363426685333252,
2934
+ "learning_rate": 5e-05,
2935
+ "loss": 3.7238,
2936
+ "step": 418
2937
+ },
2938
+ {
2939
+ "epoch": 0.13628232232883394,
2940
+ "grad_norm": 8.554017066955566,
2941
+ "learning_rate": 5e-05,
2942
+ "loss": 3.3142,
2943
+ "step": 419
2944
+ },
2945
+ {
2946
+ "epoch": 0.13660757846804358,
2947
+ "grad_norm": 7.679929256439209,
2948
+ "learning_rate": 5e-05,
2949
+ "loss": 3.6632,
2950
+ "step": 420
2951
+ },
2952
+ {
2953
+ "epoch": 0.13693283460725322,
2954
+ "grad_norm": 7.977594375610352,
2955
+ "learning_rate": 5e-05,
2956
+ "loss": 3.3469,
2957
+ "step": 421
2958
+ },
2959
+ {
2960
+ "epoch": 0.13725809074646284,
2961
+ "grad_norm": 7.612384796142578,
2962
+ "learning_rate": 5e-05,
2963
+ "loss": 3.6385,
2964
+ "step": 422
2965
+ },
2966
+ {
2967
+ "epoch": 0.13758334688567248,
2968
+ "grad_norm": 9.067974090576172,
2969
+ "learning_rate": 5e-05,
2970
+ "loss": 3.7935,
2971
+ "step": 423
2972
+ },
2973
+ {
2974
+ "epoch": 0.1379086030248821,
2975
+ "grad_norm": 9.513571739196777,
2976
+ "learning_rate": 5e-05,
2977
+ "loss": 4.0498,
2978
+ "step": 424
2979
+ },
2980
+ {
2981
+ "epoch": 0.13823385916409173,
2982
+ "grad_norm": 10.234723091125488,
2983
+ "learning_rate": 5e-05,
2984
+ "loss": 3.9196,
2985
+ "step": 425
2986
+ },
2987
+ {
2988
+ "epoch": 0.13855911530330134,
2989
+ "grad_norm": 8.635189056396484,
2990
+ "learning_rate": 5e-05,
2991
+ "loss": 3.8482,
2992
+ "step": 426
2993
+ },
2994
+ {
2995
+ "epoch": 0.13888437144251098,
2996
+ "grad_norm": 10.970733642578125,
2997
+ "learning_rate": 5e-05,
2998
+ "loss": 3.765,
2999
+ "step": 427
3000
+ },
3001
+ {
3002
+ "epoch": 0.13920962758172062,
3003
+ "grad_norm": 9.9661226272583,
3004
+ "learning_rate": 5e-05,
3005
+ "loss": 3.939,
3006
+ "step": 428
3007
+ },
3008
+ {
3009
+ "epoch": 0.13953488372093023,
3010
+ "grad_norm": 9.381548881530762,
3011
+ "learning_rate": 5e-05,
3012
+ "loss": 3.749,
3013
+ "step": 429
3014
+ },
3015
+ {
3016
+ "epoch": 0.13986013986013987,
3017
+ "grad_norm": 8.287264823913574,
3018
+ "learning_rate": 5e-05,
3019
+ "loss": 3.7818,
3020
+ "step": 430
3021
+ },
3022
+ {
3023
+ "epoch": 0.14018539599934948,
3024
+ "grad_norm": 8.97390365600586,
3025
+ "learning_rate": 5e-05,
3026
+ "loss": 4.0317,
3027
+ "step": 431
3028
+ },
3029
+ {
3030
+ "epoch": 0.14051065213855912,
3031
+ "grad_norm": 7.877195835113525,
3032
+ "learning_rate": 5e-05,
3033
+ "loss": 3.5584,
3034
+ "step": 432
3035
+ },
3036
+ {
3037
+ "epoch": 0.14083590827776873,
3038
+ "grad_norm": 9.581697463989258,
3039
+ "learning_rate": 5e-05,
3040
+ "loss": 3.4095,
3041
+ "step": 433
3042
+ },
3043
+ {
3044
+ "epoch": 0.14116116441697837,
3045
+ "grad_norm": 17.695627212524414,
3046
+ "learning_rate": 5e-05,
3047
+ "loss": 4.3531,
3048
+ "step": 434
3049
+ },
3050
+ {
3051
+ "epoch": 0.141486420556188,
3052
+ "grad_norm": 8.333785057067871,
3053
+ "learning_rate": 5e-05,
3054
+ "loss": 3.3827,
3055
+ "step": 435
3056
+ },
3057
+ {
3058
+ "epoch": 0.14181167669539763,
3059
+ "grad_norm": 7.970407009124756,
3060
+ "learning_rate": 5e-05,
3061
+ "loss": 3.5483,
3062
+ "step": 436
3063
+ },
3064
+ {
3065
+ "epoch": 0.14213693283460727,
3066
+ "grad_norm": 9.061053276062012,
3067
+ "learning_rate": 5e-05,
3068
+ "loss": 3.7439,
3069
+ "step": 437
3070
+ },
3071
+ {
3072
+ "epoch": 0.14246218897381688,
3073
+ "grad_norm": 6.50039005279541,
3074
+ "learning_rate": 5e-05,
3075
+ "loss": 3.9181,
3076
+ "step": 438
3077
+ },
3078
+ {
3079
+ "epoch": 0.14278744511302652,
3080
+ "grad_norm": 9.928549766540527,
3081
+ "learning_rate": 5e-05,
3082
+ "loss": 3.5832,
3083
+ "step": 439
3084
+ },
3085
+ {
3086
+ "epoch": 0.14311270125223613,
3087
+ "grad_norm": 12.250447273254395,
3088
+ "learning_rate": 5e-05,
3089
+ "loss": 3.4639,
3090
+ "step": 440
3091
+ },
3092
+ {
3093
+ "epoch": 0.14343795739144577,
3094
+ "grad_norm": 8.427464485168457,
3095
+ "learning_rate": 5e-05,
3096
+ "loss": 3.4289,
3097
+ "step": 441
3098
+ },
3099
+ {
3100
+ "epoch": 0.14376321353065538,
3101
+ "grad_norm": 12.150249481201172,
3102
+ "learning_rate": 5e-05,
3103
+ "loss": 3.8382,
3104
+ "step": 442
3105
+ },
3106
+ {
3107
+ "epoch": 0.14408846966986502,
3108
+ "grad_norm": 7.406175136566162,
3109
+ "learning_rate": 5e-05,
3110
+ "loss": 3.9814,
3111
+ "step": 443
3112
+ },
3113
+ {
3114
+ "epoch": 0.14441372580907463,
3115
+ "grad_norm": 6.988471031188965,
3116
+ "learning_rate": 5e-05,
3117
+ "loss": 4.0547,
3118
+ "step": 444
3119
+ },
3120
+ {
3121
+ "epoch": 0.14473898194828427,
3122
+ "grad_norm": 12.318148612976074,
3123
+ "learning_rate": 5e-05,
3124
+ "loss": 3.9433,
3125
+ "step": 445
3126
+ },
3127
+ {
3128
+ "epoch": 0.1450642380874939,
3129
+ "grad_norm": 10.347890853881836,
3130
+ "learning_rate": 5e-05,
3131
+ "loss": 3.7052,
3132
+ "step": 446
3133
+ },
3134
+ {
3135
+ "epoch": 0.14538949422670353,
3136
+ "grad_norm": 9.213937759399414,
3137
+ "learning_rate": 5e-05,
3138
+ "loss": 3.9906,
3139
+ "step": 447
3140
+ },
3141
+ {
3142
+ "epoch": 0.14571475036591316,
3143
+ "grad_norm": 10.614618301391602,
3144
+ "learning_rate": 5e-05,
3145
+ "loss": 3.276,
3146
+ "step": 448
3147
+ },
3148
+ {
3149
+ "epoch": 0.14604000650512278,
3150
+ "grad_norm": 11.268338203430176,
3151
+ "learning_rate": 5e-05,
3152
+ "loss": 3.5563,
3153
+ "step": 449
3154
+ },
3155
+ {
3156
+ "epoch": 0.14636526264433242,
3157
+ "grad_norm": 8.418384552001953,
3158
+ "learning_rate": 5e-05,
3159
+ "loss": 3.4222,
3160
+ "step": 450
3161
+ },
3162
+ {
3163
+ "epoch": 0.14669051878354203,
3164
+ "grad_norm": 12.904454231262207,
3165
+ "learning_rate": 5e-05,
3166
+ "loss": 3.7737,
3167
+ "step": 451
3168
+ },
3169
+ {
3170
+ "epoch": 0.14701577492275167,
3171
+ "grad_norm": 9.767146110534668,
3172
+ "learning_rate": 5e-05,
3173
+ "loss": 4.0424,
3174
+ "step": 452
3175
+ },
3176
+ {
3177
+ "epoch": 0.1473410310619613,
3178
+ "grad_norm": 8.333344459533691,
3179
+ "learning_rate": 5e-05,
3180
+ "loss": 3.7574,
3181
+ "step": 453
3182
+ },
3183
+ {
3184
+ "epoch": 0.14766628720117092,
3185
+ "grad_norm": 11.907800674438477,
3186
+ "learning_rate": 5e-05,
3187
+ "loss": 3.7266,
3188
+ "step": 454
3189
+ },
3190
+ {
3191
+ "epoch": 0.14799154334038056,
3192
+ "grad_norm": 9.306441307067871,
3193
+ "learning_rate": 5e-05,
3194
+ "loss": 3.8885,
3195
+ "step": 455
3196
+ },
3197
+ {
3198
+ "epoch": 0.14831679947959017,
3199
+ "grad_norm": 10.486589431762695,
3200
+ "learning_rate": 5e-05,
3201
+ "loss": 3.7777,
3202
+ "step": 456
3203
+ },
3204
+ {
3205
+ "epoch": 0.1486420556187998,
3206
+ "grad_norm": 12.291946411132812,
3207
+ "learning_rate": 5e-05,
3208
+ "loss": 3.6718,
3209
+ "step": 457
3210
+ },
3211
+ {
3212
+ "epoch": 0.14896731175800942,
3213
+ "grad_norm": 12.743392944335938,
3214
+ "learning_rate": 5e-05,
3215
+ "loss": 3.6533,
3216
+ "step": 458
3217
+ },
3218
+ {
3219
+ "epoch": 0.14929256789721906,
3220
+ "grad_norm": 9.82790470123291,
3221
+ "learning_rate": 5e-05,
3222
+ "loss": 3.8719,
3223
+ "step": 459
3224
+ },
3225
+ {
3226
+ "epoch": 0.14961782403642868,
3227
+ "grad_norm": 11.749984741210938,
3228
+ "learning_rate": 5e-05,
3229
+ "loss": 3.495,
3230
+ "step": 460
3231
+ },
3232
+ {
3233
+ "epoch": 0.14994308017563832,
3234
+ "grad_norm": 11.35322380065918,
3235
+ "learning_rate": 5e-05,
3236
+ "loss": 4.0119,
3237
+ "step": 461
3238
+ },
3239
+ {
3240
+ "epoch": 0.15026833631484796,
3241
+ "grad_norm": 7.1181159019470215,
3242
+ "learning_rate": 5e-05,
3243
+ "loss": 3.6675,
3244
+ "step": 462
3245
+ },
3246
+ {
3247
+ "epoch": 0.15059359245405757,
3248
+ "grad_norm": 11.3037109375,
3249
+ "learning_rate": 5e-05,
3250
+ "loss": 3.5125,
3251
+ "step": 463
3252
+ },
3253
+ {
3254
+ "epoch": 0.1509188485932672,
3255
+ "grad_norm": 10.003439903259277,
3256
+ "learning_rate": 5e-05,
3257
+ "loss": 3.2109,
3258
+ "step": 464
3259
+ },
3260
+ {
3261
+ "epoch": 0.15124410473247682,
3262
+ "grad_norm": 7.561882019042969,
3263
+ "learning_rate": 5e-05,
3264
+ "loss": 3.566,
3265
+ "step": 465
3266
+ },
3267
+ {
3268
+ "epoch": 0.15156936087168646,
3269
+ "grad_norm": 7.8044586181640625,
3270
+ "learning_rate": 5e-05,
3271
+ "loss": 3.5393,
3272
+ "step": 466
3273
+ },
3274
+ {
3275
+ "epoch": 0.15189461701089607,
3276
+ "grad_norm": 11.766450881958008,
3277
+ "learning_rate": 5e-05,
3278
+ "loss": 3.7168,
3279
+ "step": 467
3280
+ },
3281
+ {
3282
+ "epoch": 0.1522198731501057,
3283
+ "grad_norm": 7.827965259552002,
3284
+ "learning_rate": 5e-05,
3285
+ "loss": 3.7292,
3286
+ "step": 468
3287
+ },
3288
+ {
3289
+ "epoch": 0.15254512928931532,
3290
+ "grad_norm": 9.4542236328125,
3291
+ "learning_rate": 5e-05,
3292
+ "loss": 3.4709,
3293
+ "step": 469
3294
+ },
3295
+ {
3296
+ "epoch": 0.15287038542852496,
3297
+ "grad_norm": 8.595137596130371,
3298
+ "learning_rate": 5e-05,
3299
+ "loss": 4.3444,
3300
+ "step": 470
3301
+ },
3302
+ {
3303
+ "epoch": 0.1531956415677346,
3304
+ "grad_norm": 8.952139854431152,
3305
+ "learning_rate": 5e-05,
3306
+ "loss": 4.3924,
3307
+ "step": 471
3308
+ },
3309
+ {
3310
+ "epoch": 0.15352089770694421,
3311
+ "grad_norm": 9.41843318939209,
3312
+ "learning_rate": 5e-05,
3313
+ "loss": 3.7794,
3314
+ "step": 472
3315
+ },
3316
+ {
3317
+ "epoch": 0.15384615384615385,
3318
+ "grad_norm": 7.201237201690674,
3319
+ "learning_rate": 5e-05,
3320
+ "loss": 3.3527,
3321
+ "step": 473
3322
+ },
3323
+ {
3324
+ "epoch": 0.15417140998536347,
3325
+ "grad_norm": 9.496088981628418,
3326
+ "learning_rate": 5e-05,
3327
+ "loss": 3.3554,
3328
+ "step": 474
3329
+ },
3330
+ {
3331
+ "epoch": 0.1544966661245731,
3332
+ "grad_norm": 8.370413780212402,
3333
+ "learning_rate": 5e-05,
3334
+ "loss": 3.8943,
3335
+ "step": 475
3336
+ },
3337
+ {
3338
+ "epoch": 0.15482192226378272,
3339
+ "grad_norm": 8.910099029541016,
3340
+ "learning_rate": 5e-05,
3341
+ "loss": 3.5867,
3342
+ "step": 476
3343
+ },
3344
+ {
3345
+ "epoch": 0.15514717840299236,
3346
+ "grad_norm": 7.085155963897705,
3347
+ "learning_rate": 5e-05,
3348
+ "loss": 3.3267,
3349
+ "step": 477
3350
+ },
3351
+ {
3352
+ "epoch": 0.15547243454220197,
3353
+ "grad_norm": 8.49357795715332,
3354
+ "learning_rate": 5e-05,
3355
+ "loss": 3.5925,
3356
+ "step": 478
3357
+ },
3358
+ {
3359
+ "epoch": 0.1557976906814116,
3360
+ "grad_norm": 7.811654567718506,
3361
+ "learning_rate": 5e-05,
3362
+ "loss": 3.9464,
3363
+ "step": 479
3364
+ },
3365
+ {
3366
+ "epoch": 0.15612294682062125,
3367
+ "grad_norm": 9.111326217651367,
3368
+ "learning_rate": 5e-05,
3369
+ "loss": 3.5547,
3370
+ "step": 480
3371
+ },
3372
+ {
3373
+ "epoch": 0.15644820295983086,
3374
+ "grad_norm": 7.44144344329834,
3375
+ "learning_rate": 5e-05,
3376
+ "loss": 3.9226,
3377
+ "step": 481
3378
+ },
3379
+ {
3380
+ "epoch": 0.1567734590990405,
3381
+ "grad_norm": 9.64633846282959,
3382
+ "learning_rate": 5e-05,
3383
+ "loss": 3.887,
3384
+ "step": 482
3385
+ },
3386
+ {
3387
+ "epoch": 0.15709871523825011,
3388
+ "grad_norm": 11.60362720489502,
3389
+ "learning_rate": 5e-05,
3390
+ "loss": 3.7125,
3391
+ "step": 483
3392
+ },
3393
+ {
3394
+ "epoch": 0.15742397137745975,
3395
+ "grad_norm": 7.746142864227295,
3396
+ "learning_rate": 5e-05,
3397
+ "loss": 3.9722,
3398
+ "step": 484
3399
+ },
3400
+ {
3401
+ "epoch": 0.15774922751666937,
3402
+ "grad_norm": 7.080160140991211,
3403
+ "learning_rate": 5e-05,
3404
+ "loss": 3.1951,
3405
+ "step": 485
3406
+ },
3407
+ {
3408
+ "epoch": 0.158074483655879,
3409
+ "grad_norm": 17.081741333007812,
3410
+ "learning_rate": 5e-05,
3411
+ "loss": 3.7901,
3412
+ "step": 486
3413
+ },
3414
+ {
3415
+ "epoch": 0.15839973979508865,
3416
+ "grad_norm": 7.917695999145508,
3417
+ "learning_rate": 5e-05,
3418
+ "loss": 3.5357,
3419
+ "step": 487
3420
+ },
3421
+ {
3422
+ "epoch": 0.15872499593429826,
3423
+ "grad_norm": 8.854647636413574,
3424
+ "learning_rate": 5e-05,
3425
+ "loss": 3.7515,
3426
+ "step": 488
3427
+ },
3428
+ {
3429
+ "epoch": 0.1590502520735079,
3430
+ "grad_norm": 9.028191566467285,
3431
+ "learning_rate": 5e-05,
3432
+ "loss": 3.5516,
3433
+ "step": 489
3434
+ },
3435
+ {
3436
+ "epoch": 0.1593755082127175,
3437
+ "grad_norm": 10.008574485778809,
3438
+ "learning_rate": 5e-05,
3439
+ "loss": 3.6189,
3440
+ "step": 490
3441
+ },
3442
+ {
3443
+ "epoch": 0.15970076435192715,
3444
+ "grad_norm": 10.596325874328613,
3445
+ "learning_rate": 5e-05,
3446
+ "loss": 3.4306,
3447
+ "step": 491
3448
+ },
3449
+ {
3450
+ "epoch": 0.16002602049113676,
3451
+ "grad_norm": 12.258661270141602,
3452
+ "learning_rate": 5e-05,
3453
+ "loss": 3.5577,
3454
+ "step": 492
3455
+ },
3456
+ {
3457
+ "epoch": 0.1603512766303464,
3458
+ "grad_norm": 11.506673812866211,
3459
+ "learning_rate": 5e-05,
3460
+ "loss": 3.9079,
3461
+ "step": 493
3462
+ },
3463
+ {
3464
+ "epoch": 0.160676532769556,
3465
+ "grad_norm": 7.967488765716553,
3466
+ "learning_rate": 5e-05,
3467
+ "loss": 3.6066,
3468
+ "step": 494
3469
+ },
3470
+ {
3471
+ "epoch": 0.16100178890876565,
3472
+ "grad_norm": 11.729570388793945,
3473
+ "learning_rate": 5e-05,
3474
+ "loss": 3.9043,
3475
+ "step": 495
3476
+ },
3477
+ {
3478
+ "epoch": 0.1613270450479753,
3479
+ "grad_norm": 8.846243858337402,
3480
+ "learning_rate": 5e-05,
3481
+ "loss": 3.8228,
3482
+ "step": 496
3483
+ },
3484
+ {
3485
+ "epoch": 0.1616523011871849,
3486
+ "grad_norm": 12.016153335571289,
3487
+ "learning_rate": 5e-05,
3488
+ "loss": 3.9159,
3489
+ "step": 497
3490
+ },
3491
+ {
3492
+ "epoch": 0.16197755732639454,
3493
+ "grad_norm": 8.154533386230469,
3494
+ "learning_rate": 5e-05,
3495
+ "loss": 3.7158,
3496
+ "step": 498
3497
+ },
3498
+ {
3499
+ "epoch": 0.16230281346560416,
3500
+ "grad_norm": 11.766820907592773,
3501
+ "learning_rate": 5e-05,
3502
+ "loss": 3.5189,
3503
+ "step": 499
3504
+ },
3505
+ {
3506
+ "epoch": 0.1626280696048138,
3507
+ "grad_norm": 11.467671394348145,
3508
+ "learning_rate": 5e-05,
3509
+ "loss": 3.8887,
3510
+ "step": 500
3511
+ },
3512
+ {
3513
+ "epoch": 0.1629533257440234,
3514
+ "grad_norm": 7.477685451507568,
3515
+ "learning_rate": 5e-05,
3516
+ "loss": 4.0159,
3517
+ "step": 501
3518
+ },
3519
+ {
3520
+ "epoch": 0.16327858188323305,
3521
+ "grad_norm": 6.95028018951416,
3522
+ "learning_rate": 5e-05,
3523
+ "loss": 3.7446,
3524
+ "step": 502
3525
+ },
3526
+ {
3527
+ "epoch": 0.16360383802244266,
3528
+ "grad_norm": 11.298054695129395,
3529
+ "learning_rate": 5e-05,
3530
+ "loss": 3.692,
3531
+ "step": 503
3532
+ },
3533
+ {
3534
+ "epoch": 0.1639290941616523,
3535
+ "grad_norm": 7.807789325714111,
3536
+ "learning_rate": 5e-05,
3537
+ "loss": 3.6819,
3538
+ "step": 504
3539
+ },
3540
+ {
3541
+ "epoch": 0.16425435030086194,
3542
+ "grad_norm": 7.94120454788208,
3543
+ "learning_rate": 5e-05,
3544
+ "loss": 3.6853,
3545
+ "step": 505
3546
+ },
3547
+ {
3548
+ "epoch": 0.16457960644007155,
3549
+ "grad_norm": 8.74360466003418,
3550
+ "learning_rate": 5e-05,
3551
+ "loss": 3.5674,
3552
+ "step": 506
3553
+ },
3554
+ {
3555
+ "epoch": 0.1649048625792812,
3556
+ "grad_norm": 8.877306938171387,
3557
+ "learning_rate": 5e-05,
3558
+ "loss": 3.5393,
3559
+ "step": 507
3560
+ },
3561
+ {
3562
+ "epoch": 0.1652301187184908,
3563
+ "grad_norm": 7.502864837646484,
3564
+ "learning_rate": 5e-05,
3565
+ "loss": 3.6376,
3566
+ "step": 508
3567
+ },
3568
+ {
3569
+ "epoch": 0.16555537485770044,
3570
+ "grad_norm": 11.325250625610352,
3571
+ "learning_rate": 5e-05,
3572
+ "loss": 3.7114,
3573
+ "step": 509
3574
+ },
3575
+ {
3576
+ "epoch": 0.16588063099691006,
3577
+ "grad_norm": 9.918580055236816,
3578
+ "learning_rate": 5e-05,
3579
+ "loss": 3.783,
3580
+ "step": 510
3581
+ },
3582
+ {
3583
+ "epoch": 0.1662058871361197,
3584
+ "grad_norm": 8.940899848937988,
3585
+ "learning_rate": 5e-05,
3586
+ "loss": 3.707,
3587
+ "step": 511
3588
+ },
3589
+ {
3590
+ "epoch": 0.16653114327532934,
3591
+ "grad_norm": 17.020418167114258,
3592
+ "learning_rate": 5e-05,
3593
+ "loss": 4.319,
3594
+ "step": 512
3595
+ },
3596
+ {
3597
+ "epoch": 0.16685639941453895,
3598
+ "grad_norm": 10.722935676574707,
3599
+ "learning_rate": 5e-05,
3600
+ "loss": 3.4215,
3601
+ "step": 513
3602
+ },
3603
+ {
3604
+ "epoch": 0.1671816555537486,
3605
+ "grad_norm": 9.579489707946777,
3606
+ "learning_rate": 5e-05,
3607
+ "loss": 2.835,
3608
+ "step": 514
3609
+ },
3610
+ {
3611
+ "epoch": 0.1675069116929582,
3612
+ "grad_norm": 8.158295631408691,
3613
+ "learning_rate": 5e-05,
3614
+ "loss": 2.78,
3615
+ "step": 515
3616
+ },
3617
+ {
3618
+ "epoch": 0.16783216783216784,
3619
+ "grad_norm": 8.238765716552734,
3620
+ "learning_rate": 5e-05,
3621
+ "loss": 3.4628,
3622
+ "step": 516
3623
+ },
3624
+ {
3625
+ "epoch": 0.16815742397137745,
3626
+ "grad_norm": 8.580034255981445,
3627
+ "learning_rate": 5e-05,
3628
+ "loss": 3.9686,
3629
+ "step": 517
3630
+ },
3631
+ {
3632
+ "epoch": 0.1684826801105871,
3633
+ "grad_norm": 8.966435432434082,
3634
+ "learning_rate": 5e-05,
3635
+ "loss": 3.4936,
3636
+ "step": 518
3637
+ },
3638
+ {
3639
+ "epoch": 0.1688079362497967,
3640
+ "grad_norm": 6.970677375793457,
3641
+ "learning_rate": 5e-05,
3642
+ "loss": 3.4287,
3643
+ "step": 519
3644
+ },
3645
+ {
3646
+ "epoch": 0.16913319238900634,
3647
+ "grad_norm": 7.750168323516846,
3648
+ "learning_rate": 5e-05,
3649
+ "loss": 3.3396,
3650
+ "step": 520
3651
+ },
3652
+ {
3653
+ "epoch": 0.16945844852821598,
3654
+ "grad_norm": 7.394572734832764,
3655
+ "learning_rate": 5e-05,
3656
+ "loss": 3.6622,
3657
+ "step": 521
3658
+ },
3659
+ {
3660
+ "epoch": 0.1697837046674256,
3661
+ "grad_norm": 6.781547546386719,
3662
+ "learning_rate": 5e-05,
3663
+ "loss": 3.5704,
3664
+ "step": 522
3665
+ },
3666
+ {
3667
+ "epoch": 0.17010896080663523,
3668
+ "grad_norm": 11.887150764465332,
3669
+ "learning_rate": 5e-05,
3670
+ "loss": 3.4968,
3671
+ "step": 523
3672
+ },
3673
+ {
3674
+ "epoch": 0.17043421694584485,
3675
+ "grad_norm": 7.665548324584961,
3676
+ "learning_rate": 5e-05,
3677
+ "loss": 3.6475,
3678
+ "step": 524
3679
+ },
3680
+ {
3681
+ "epoch": 0.1707594730850545,
3682
+ "grad_norm": 8.977829933166504,
3683
+ "learning_rate": 5e-05,
3684
+ "loss": 2.6397,
3685
+ "step": 525
3686
+ },
3687
+ {
3688
+ "epoch": 0.1710847292242641,
3689
+ "grad_norm": 9.76028060913086,
3690
+ "learning_rate": 5e-05,
3691
+ "loss": 3.6625,
3692
+ "step": 526
3693
+ },
3694
+ {
3695
+ "epoch": 0.17140998536347374,
3696
+ "grad_norm": 9.248437881469727,
3697
+ "learning_rate": 5e-05,
3698
+ "loss": 3.5199,
3699
+ "step": 527
3700
+ },
3701
+ {
3702
+ "epoch": 0.17173524150268335,
3703
+ "grad_norm": 9.724522590637207,
3704
+ "learning_rate": 5e-05,
3705
+ "loss": 3.7095,
3706
+ "step": 528
3707
+ },
3708
+ {
3709
+ "epoch": 0.172060497641893,
3710
+ "grad_norm": 8.21666431427002,
3711
+ "learning_rate": 5e-05,
3712
+ "loss": 3.6802,
3713
+ "step": 529
3714
+ },
3715
+ {
3716
+ "epoch": 0.17238575378110263,
3717
+ "grad_norm": 7.362802505493164,
3718
+ "learning_rate": 5e-05,
3719
+ "loss": 3.6511,
3720
+ "step": 530
3721
+ },
3722
+ {
3723
+ "epoch": 0.17271100992031224,
3724
+ "grad_norm": 7.2599334716796875,
3725
+ "learning_rate": 5e-05,
3726
+ "loss": 3.1097,
3727
+ "step": 531
3728
+ },
3729
+ {
3730
+ "epoch": 0.17303626605952188,
3731
+ "grad_norm": 9.250597953796387,
3732
+ "learning_rate": 5e-05,
3733
+ "loss": 3.608,
3734
+ "step": 532
3735
+ },
3736
+ {
3737
+ "epoch": 0.1733615221987315,
3738
+ "grad_norm": 8.88681411743164,
3739
+ "learning_rate": 5e-05,
3740
+ "loss": 3.8212,
3741
+ "step": 533
3742
+ },
3743
+ {
3744
+ "epoch": 0.17368677833794113,
3745
+ "grad_norm": 11.290247917175293,
3746
+ "learning_rate": 5e-05,
3747
+ "loss": 3.3286,
3748
+ "step": 534
3749
+ },
3750
+ {
3751
+ "epoch": 0.17401203447715075,
3752
+ "grad_norm": 10.88404369354248,
3753
+ "learning_rate": 5e-05,
3754
+ "loss": 3.5241,
3755
+ "step": 535
3756
+ },
3757
+ {
3758
+ "epoch": 0.17433729061636039,
3759
+ "grad_norm": 9.20648193359375,
3760
+ "learning_rate": 5e-05,
3761
+ "loss": 3.6005,
3762
+ "step": 536
3763
+ },
3764
+ {
3765
+ "epoch": 0.17466254675557003,
3766
+ "grad_norm": 8.10119342803955,
3767
+ "learning_rate": 5e-05,
3768
+ "loss": 3.5739,
3769
+ "step": 537
3770
+ },
3771
+ {
3772
+ "epoch": 0.17498780289477964,
3773
+ "grad_norm": 10.661712646484375,
3774
+ "learning_rate": 5e-05,
3775
+ "loss": 3.4837,
3776
+ "step": 538
3777
+ },
3778
+ {
3779
+ "epoch": 0.17531305903398928,
3780
+ "grad_norm": 10.739490509033203,
3781
+ "learning_rate": 5e-05,
3782
+ "loss": 3.6787,
3783
+ "step": 539
3784
+ },
3785
+ {
3786
+ "epoch": 0.1756383151731989,
3787
+ "grad_norm": 9.183284759521484,
3788
+ "learning_rate": 5e-05,
3789
+ "loss": 3.6174,
3790
+ "step": 540
3791
+ },
3792
+ {
3793
+ "epoch": 0.17596357131240853,
3794
+ "grad_norm": 8.37816047668457,
3795
+ "learning_rate": 5e-05,
3796
+ "loss": 3.5964,
3797
+ "step": 541
3798
+ },
3799
+ {
3800
+ "epoch": 0.17628882745161814,
3801
+ "grad_norm": 10.69176959991455,
3802
+ "learning_rate": 5e-05,
3803
+ "loss": 3.8804,
3804
+ "step": 542
3805
+ },
3806
+ {
3807
+ "epoch": 0.17661408359082778,
3808
+ "grad_norm": 8.007463455200195,
3809
+ "learning_rate": 5e-05,
3810
+ "loss": 3.8485,
3811
+ "step": 543
3812
+ },
3813
+ {
3814
+ "epoch": 0.1769393397300374,
3815
+ "grad_norm": 8.839487075805664,
3816
+ "learning_rate": 5e-05,
3817
+ "loss": 3.6356,
3818
+ "step": 544
3819
+ },
3820
+ {
3821
+ "epoch": 0.17726459586924703,
3822
+ "grad_norm": 11.773893356323242,
3823
+ "learning_rate": 5e-05,
3824
+ "loss": 3.8846,
3825
+ "step": 545
3826
+ },
3827
+ {
3828
+ "epoch": 0.17758985200845667,
3829
+ "grad_norm": 11.021324157714844,
3830
+ "learning_rate": 5e-05,
3831
+ "loss": 3.8369,
3832
+ "step": 546
3833
+ },
3834
+ {
3835
+ "epoch": 0.17791510814766628,
3836
+ "grad_norm": 10.33259391784668,
3837
+ "learning_rate": 5e-05,
3838
+ "loss": 3.392,
3839
+ "step": 547
3840
+ },
3841
+ {
3842
+ "epoch": 0.17824036428687592,
3843
+ "grad_norm": 8.52408504486084,
3844
+ "learning_rate": 5e-05,
3845
+ "loss": 3.8102,
3846
+ "step": 548
3847
+ },
3848
+ {
3849
+ "epoch": 0.17856562042608554,
3850
+ "grad_norm": 9.792526245117188,
3851
+ "learning_rate": 5e-05,
3852
+ "loss": 3.9087,
3853
+ "step": 549
3854
+ },
3855
+ {
3856
+ "epoch": 0.17889087656529518,
3857
+ "grad_norm": 9.989365577697754,
3858
+ "learning_rate": 5e-05,
3859
+ "loss": 3.8268,
3860
+ "step": 550
3861
+ },
3862
+ {
3863
+ "epoch": 0.1792161327045048,
3864
+ "grad_norm": 9.42458438873291,
3865
+ "learning_rate": 5e-05,
3866
+ "loss": 3.7863,
3867
+ "step": 551
3868
+ },
3869
+ {
3870
+ "epoch": 0.17954138884371443,
3871
+ "grad_norm": 8.195592880249023,
3872
+ "learning_rate": 5e-05,
3873
+ "loss": 3.7507,
3874
+ "step": 552
3875
+ },
3876
+ {
3877
+ "epoch": 0.17986664498292404,
3878
+ "grad_norm": 9.999628067016602,
3879
+ "learning_rate": 5e-05,
3880
+ "loss": 3.742,
3881
+ "step": 553
3882
+ },
3883
+ {
3884
+ "epoch": 0.18019190112213368,
3885
+ "grad_norm": 8.54041576385498,
3886
+ "learning_rate": 5e-05,
3887
+ "loss": 4.4209,
3888
+ "step": 554
3889
+ },
3890
+ {
3891
+ "epoch": 0.18051715726134332,
3892
+ "grad_norm": 8.870122909545898,
3893
+ "learning_rate": 5e-05,
3894
+ "loss": 3.2489,
3895
+ "step": 555
3896
+ },
3897
+ {
3898
+ "epoch": 0.18084241340055293,
3899
+ "grad_norm": 10.103729248046875,
3900
+ "learning_rate": 5e-05,
3901
+ "loss": 3.5182,
3902
+ "step": 556
3903
+ },
3904
+ {
3905
+ "epoch": 0.18116766953976257,
3906
+ "grad_norm": 9.698139190673828,
3907
+ "learning_rate": 5e-05,
3908
+ "loss": 3.7934,
3909
+ "step": 557
3910
+ },
3911
+ {
3912
+ "epoch": 0.18149292567897218,
3913
+ "grad_norm": 9.132758140563965,
3914
+ "learning_rate": 5e-05,
3915
+ "loss": 3.3561,
3916
+ "step": 558
3917
+ },
3918
+ {
3919
+ "epoch": 0.18181818181818182,
3920
+ "grad_norm": 8.661752700805664,
3921
+ "learning_rate": 5e-05,
3922
+ "loss": 3.5775,
3923
+ "step": 559
3924
+ },
3925
+ {
3926
+ "epoch": 0.18214343795739144,
3927
+ "grad_norm": 9.332806587219238,
3928
+ "learning_rate": 5e-05,
3929
+ "loss": 3.0548,
3930
+ "step": 560
3931
+ },
3932
+ {
3933
+ "epoch": 0.18246869409660108,
3934
+ "grad_norm": 9.24280071258545,
3935
+ "learning_rate": 5e-05,
3936
+ "loss": 4.0208,
3937
+ "step": 561
3938
+ },
3939
+ {
3940
+ "epoch": 0.1827939502358107,
3941
+ "grad_norm": 9.924572944641113,
3942
+ "learning_rate": 5e-05,
3943
+ "loss": 3.6914,
3944
+ "step": 562
3945
+ },
3946
+ {
3947
+ "epoch": 0.18311920637502033,
3948
+ "grad_norm": 9.362936973571777,
3949
+ "learning_rate": 5e-05,
3950
+ "loss": 3.9219,
3951
+ "step": 563
3952
+ },
3953
+ {
3954
+ "epoch": 0.18344446251422997,
3955
+ "grad_norm": 12.603287696838379,
3956
+ "learning_rate": 5e-05,
3957
+ "loss": 3.4585,
3958
+ "step": 564
3959
+ },
3960
+ {
3961
+ "epoch": 0.18376971865343958,
3962
+ "grad_norm": 10.904036521911621,
3963
+ "learning_rate": 5e-05,
3964
+ "loss": 3.3266,
3965
+ "step": 565
3966
+ },
3967
+ {
3968
+ "epoch": 0.18409497479264922,
3969
+ "grad_norm": 9.598895072937012,
3970
+ "learning_rate": 5e-05,
3971
+ "loss": 3.6607,
3972
+ "step": 566
3973
+ },
3974
+ {
3975
+ "epoch": 0.18442023093185883,
3976
+ "grad_norm": 8.842279434204102,
3977
+ "learning_rate": 5e-05,
3978
+ "loss": 3.5674,
3979
+ "step": 567
3980
+ },
3981
+ {
3982
+ "epoch": 0.18474548707106847,
3983
+ "grad_norm": 9.379899024963379,
3984
+ "learning_rate": 5e-05,
3985
+ "loss": 3.6743,
3986
+ "step": 568
3987
+ },
3988
+ {
3989
+ "epoch": 0.18507074321027808,
3990
+ "grad_norm": 9.745834350585938,
3991
+ "learning_rate": 5e-05,
3992
+ "loss": 4.0652,
3993
+ "step": 569
3994
+ },
3995
+ {
3996
+ "epoch": 0.18539599934948772,
3997
+ "grad_norm": 8.990086555480957,
3998
+ "learning_rate": 5e-05,
3999
+ "loss": 3.637,
4000
+ "step": 570
4001
+ },
4002
+ {
4003
+ "epoch": 0.18572125548869736,
4004
+ "grad_norm": 8.382301330566406,
4005
+ "learning_rate": 5e-05,
4006
+ "loss": 3.967,
4007
+ "step": 571
4008
+ },
4009
+ {
4010
+ "epoch": 0.18604651162790697,
4011
+ "grad_norm": 8.533965110778809,
4012
+ "learning_rate": 5e-05,
4013
+ "loss": 3.4941,
4014
+ "step": 572
4015
+ },
4016
+ {
4017
+ "epoch": 0.18637176776711661,
4018
+ "grad_norm": 9.823786735534668,
4019
+ "learning_rate": 5e-05,
4020
+ "loss": 3.7345,
4021
+ "step": 573
4022
+ },
4023
+ {
4024
+ "epoch": 0.18669702390632623,
4025
+ "grad_norm": 7.766260147094727,
4026
+ "learning_rate": 5e-05,
4027
+ "loss": 3.6831,
4028
+ "step": 574
4029
+ },
4030
+ {
4031
+ "epoch": 0.18702228004553587,
4032
+ "grad_norm": 8.095032691955566,
4033
+ "learning_rate": 5e-05,
4034
+ "loss": 3.4701,
4035
+ "step": 575
4036
+ },
4037
+ {
4038
+ "epoch": 0.18734753618474548,
4039
+ "grad_norm": 11.641885757446289,
4040
+ "learning_rate": 5e-05,
4041
+ "loss": 4.0934,
4042
+ "step": 576
4043
+ },
4044
+ {
4045
+ "epoch": 0.18767279232395512,
4046
+ "grad_norm": 9.155062675476074,
4047
+ "learning_rate": 5e-05,
4048
+ "loss": 3.5356,
4049
+ "step": 577
4050
+ },
4051
+ {
4052
+ "epoch": 0.18799804846316473,
4053
+ "grad_norm": 8.703105926513672,
4054
+ "learning_rate": 5e-05,
4055
+ "loss": 3.8144,
4056
+ "step": 578
4057
+ },
4058
+ {
4059
+ "epoch": 0.18832330460237437,
4060
+ "grad_norm": 9.528350830078125,
4061
+ "learning_rate": 5e-05,
4062
+ "loss": 3.2073,
4063
+ "step": 579
4064
+ },
4065
+ {
4066
+ "epoch": 0.188648560741584,
4067
+ "grad_norm": 9.156220436096191,
4068
+ "learning_rate": 5e-05,
4069
+ "loss": 3.777,
4070
+ "step": 580
4071
+ },
4072
+ {
4073
+ "epoch": 0.18897381688079362,
4074
+ "grad_norm": 8.443305015563965,
4075
+ "learning_rate": 5e-05,
4076
+ "loss": 3.7239,
4077
+ "step": 581
4078
+ },
4079
+ {
4080
+ "epoch": 0.18929907302000326,
4081
+ "grad_norm": 7.838225841522217,
4082
+ "learning_rate": 5e-05,
4083
+ "loss": 3.4467,
4084
+ "step": 582
4085
+ },
4086
+ {
4087
+ "epoch": 0.18962432915921287,
4088
+ "grad_norm": 7.3834757804870605,
4089
+ "learning_rate": 5e-05,
4090
+ "loss": 3.5151,
4091
+ "step": 583
4092
+ },
4093
+ {
4094
+ "epoch": 0.1899495852984225,
4095
+ "grad_norm": 9.460673332214355,
4096
+ "learning_rate": 5e-05,
4097
+ "loss": 3.4287,
4098
+ "step": 584
4099
+ },
4100
+ {
4101
+ "epoch": 0.19027484143763213,
4102
+ "grad_norm": 8.232035636901855,
4103
+ "learning_rate": 5e-05,
4104
+ "loss": 4.2533,
4105
+ "step": 585
4106
+ },
4107
+ {
4108
+ "epoch": 0.19060009757684176,
4109
+ "grad_norm": 12.586129188537598,
4110
+ "learning_rate": 5e-05,
4111
+ "loss": 3.7747,
4112
+ "step": 586
4113
+ },
4114
+ {
4115
+ "epoch": 0.19092535371605138,
4116
+ "grad_norm": 8.150300979614258,
4117
+ "learning_rate": 5e-05,
4118
+ "loss": 3.9724,
4119
+ "step": 587
4120
+ },
4121
+ {
4122
+ "epoch": 0.19125060985526102,
4123
+ "grad_norm": 8.529426574707031,
4124
+ "learning_rate": 5e-05,
4125
+ "loss": 4.6377,
4126
+ "step": 588
4127
+ },
4128
+ {
4129
+ "epoch": 0.19157586599447066,
4130
+ "grad_norm": 7.794090747833252,
4131
+ "learning_rate": 5e-05,
4132
+ "loss": 3.2597,
4133
+ "step": 589
4134
+ },
4135
+ {
4136
+ "epoch": 0.19190112213368027,
4137
+ "grad_norm": 8.038799285888672,
4138
+ "learning_rate": 5e-05,
4139
+ "loss": 3.9826,
4140
+ "step": 590
4141
+ },
4142
+ {
4143
+ "epoch": 0.1922263782728899,
4144
+ "grad_norm": 7.855754852294922,
4145
+ "learning_rate": 5e-05,
4146
+ "loss": 3.7777,
4147
+ "step": 591
4148
+ },
4149
+ {
4150
+ "epoch": 0.19255163441209952,
4151
+ "grad_norm": 8.140380859375,
4152
+ "learning_rate": 5e-05,
4153
+ "loss": 3.443,
4154
+ "step": 592
4155
+ },
4156
+ {
4157
+ "epoch": 0.19287689055130916,
4158
+ "grad_norm": 9.79352855682373,
4159
+ "learning_rate": 5e-05,
4160
+ "loss": 3.7066,
4161
+ "step": 593
4162
+ },
4163
+ {
4164
+ "epoch": 0.19320214669051877,
4165
+ "grad_norm": 13.119344711303711,
4166
+ "learning_rate": 5e-05,
4167
+ "loss": 3.9336,
4168
+ "step": 594
4169
+ },
4170
+ {
4171
+ "epoch": 0.1935274028297284,
4172
+ "grad_norm": 8.955309867858887,
4173
+ "learning_rate": 5e-05,
4174
+ "loss": 3.3643,
4175
+ "step": 595
4176
+ },
4177
+ {
4178
+ "epoch": 0.19385265896893805,
4179
+ "grad_norm": 8.016314506530762,
4180
+ "learning_rate": 5e-05,
4181
+ "loss": 3.7039,
4182
+ "step": 596
4183
+ },
4184
+ {
4185
+ "epoch": 0.19417791510814766,
4186
+ "grad_norm": 8.888792991638184,
4187
+ "learning_rate": 5e-05,
4188
+ "loss": 3.6356,
4189
+ "step": 597
4190
+ },
4191
+ {
4192
+ "epoch": 0.1945031712473573,
4193
+ "grad_norm": 9.017245292663574,
4194
+ "learning_rate": 5e-05,
4195
+ "loss": 3.6173,
4196
+ "step": 598
4197
+ },
4198
+ {
4199
+ "epoch": 0.19482842738656692,
4200
+ "grad_norm": 9.05187702178955,
4201
+ "learning_rate": 5e-05,
4202
+ "loss": 3.7156,
4203
+ "step": 599
4204
+ },
4205
+ {
4206
+ "epoch": 0.19515368352577656,
4207
+ "grad_norm": 8.980157852172852,
4208
+ "learning_rate": 5e-05,
4209
+ "loss": 3.5519,
4210
+ "step": 600
4211
+ }
4212
+ ],
4213
+ "logging_steps": 1,
4214
+ "max_steps": 1000,
4215
+ "num_input_tokens_seen": 0,
4216
+ "num_train_epochs": 1,
4217
+ "save_steps": 200,
4218
+ "stateful_callbacks": {
4219
+ "TrainerControl": {
4220
+ "args": {
4221
+ "should_epoch_stop": false,
4222
+ "should_evaluate": false,
4223
+ "should_log": false,
4224
+ "should_save": true,
4225
+ "should_training_stop": false
4226
+ },
4227
+ "attributes": {}
4228
+ }
4229
+ },
4230
+ "total_flos": 2792420391095808.0,
4231
+ "train_batch_size": 2,
4232
+ "trial_name": null,
4233
+ "trial_params": null
4234
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d9611fcc8a11cd9297c049391b24f1b3632e0428e0fe840486010c9f32b9a8af
3
+ size 6481
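
Note: the `trainer_state.json` entries added above follow the standard 🤗 Transformers checkpoint layout, with one record per optimizer step (`logging_steps: 1`) inside the `log_history` list, while `training_args.bin` is stored as a Git LFS pointer. Below is a minimal sketch of how one might inspect the logged loss curve; it assumes the file has been downloaded locally as `trainer_state.json` (the local path, and the choice to print every 100th step, are assumptions for illustration, not part of this commit).

```python
import json

# Load the trainer state written by the Hugging Face Trainer checkpoint.
# "trainer_state.json" is assumed to be a locally downloaded copy of the
# file added in this commit.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each logging entry in log_history carries epoch, grad_norm,
# learning_rate, loss and step, as seen in the diff above.
records = [e for e in state["log_history"] if "loss" in e]

# Print a coarse summary of the training loss every 100 logged steps.
for entry in records:
    if entry["step"] % 100 == 0:
        print(f"step {entry['step']:4d}  loss {entry['loss']:.4f}")
```
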