Automatic Speech Recognition
Transformers
TensorBoard
Safetensors
Malayalam
whisper
malayalam
indic-asr
fine-tuned
Instructions to use adalat-ai/whisper-medium-ml-rmft with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use adalat-ai/whisper-medium-ml-rmft with Transformers:
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("automatic-speech-recognition", model="adalat-ai/whisper-medium-ml-rmft")

# Load model directly
from transformers import AutoProcessor, AutoModelForSpeechSeq2Seq

processor = AutoProcessor.from_pretrained("adalat-ai/whisper-medium-ml-rmft")
model = AutoModelForSpeechSeq2Seq.from_pretrained("adalat-ai/whisper-medium-ml-rmft")

- Notebooks
- Google Colab
- Kaggle
| { | |
| "best_global_step": 2448, | |
| "best_metric": 0.0535544753074646, | |
| "best_model_checkpoint": "./models/whisper-medium-reverse-ml-mft-1-1-1/checkpoint-2448", | |
| "epoch": 4.0, | |
| "eval_steps": 816, | |
| "global_step": 3264, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0, | |
| "eval_loss": 0.15549185872077942, | |
| "eval_runtime": 37.6186, | |
| "eval_samples_per_second": 133.338, | |
| "eval_steps_per_second": 1.063, | |
| "step": 0 | |
| }, | |
| { | |
| "epoch": 0.0024509803921568627, | |
| "grad_norm": 1.5625, | |
| "learning_rate": 3.067484662576688e-08, | |
| "loss": 0.124, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.004901960784313725, | |
| "grad_norm": 2.015625, | |
| "learning_rate": 9.202453987730062e-08, | |
| "loss": 0.1223, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.007352941176470588, | |
| "grad_norm": 1.359375, | |
| "learning_rate": 1.5337423312883438e-07, | |
| "loss": 0.1117, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.00980392156862745, | |
| "grad_norm": 1.5234375, | |
| "learning_rate": 2.1472392638036812e-07, | |
| "loss": 0.1288, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.012254901960784314, | |
| "grad_norm": 1.5234375, | |
| "learning_rate": 2.760736196319019e-07, | |
| "loss": 0.1241, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.014705882352941176, | |
| "grad_norm": 2.078125, | |
| "learning_rate": 3.3742331288343563e-07, | |
| "loss": 0.1258, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.01715686274509804, | |
| "grad_norm": 1.578125, | |
| "learning_rate": 3.9877300613496937e-07, | |
| "loss": 0.1465, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.0196078431372549, | |
| "grad_norm": 1.4921875, | |
| "learning_rate": 4.601226993865031e-07, | |
| "loss": 0.1402, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.022058823529411766, | |
| "grad_norm": 1.640625, | |
| "learning_rate": 5.214723926380368e-07, | |
| "loss": 0.1325, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.024509803921568627, | |
| "grad_norm": 1.6328125, | |
| "learning_rate": 5.828220858895705e-07, | |
| "loss": 0.1395, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.02696078431372549, | |
| "grad_norm": 1.671875, | |
| "learning_rate": 6.441717791411044e-07, | |
| "loss": 0.1311, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.029411764705882353, | |
| "grad_norm": 1.546875, | |
| "learning_rate": 7.05521472392638e-07, | |
| "loss": 0.1303, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.031862745098039214, | |
| "grad_norm": 1.703125, | |
| "learning_rate": 7.668711656441719e-07, | |
| "loss": 0.1327, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.03431372549019608, | |
| "grad_norm": 1.3984375, | |
| "learning_rate": 8.282208588957055e-07, | |
| "loss": 0.1223, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.03676470588235294, | |
| "grad_norm": 1.6171875, | |
| "learning_rate": 8.895705521472393e-07, | |
| "loss": 0.1348, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.0392156862745098, | |
| "grad_norm": 1.515625, | |
| "learning_rate": 9.509202453987732e-07, | |
| "loss": 0.1251, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.041666666666666664, | |
| "grad_norm": 1.5703125, | |
| "learning_rate": 1.0122699386503068e-06, | |
| "loss": 0.1224, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.04411764705882353, | |
| "grad_norm": 1.6484375, | |
| "learning_rate": 1.0736196319018406e-06, | |
| "loss": 0.1309, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.04656862745098039, | |
| "grad_norm": 1.5859375, | |
| "learning_rate": 1.1349693251533743e-06, | |
| "loss": 0.1334, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.049019607843137254, | |
| "grad_norm": 1.875, | |
| "learning_rate": 1.196319018404908e-06, | |
| "loss": 0.1313, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.051470588235294115, | |
| "grad_norm": 1.78125, | |
| "learning_rate": 1.2576687116564418e-06, | |
| "loss": 0.1291, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.05392156862745098, | |
| "grad_norm": 1.59375, | |
| "learning_rate": 1.3190184049079755e-06, | |
| "loss": 0.1183, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.056372549019607844, | |
| "grad_norm": 1.9765625, | |
| "learning_rate": 1.3803680981595095e-06, | |
| "loss": 0.1433, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.058823529411764705, | |
| "grad_norm": 1.6484375, | |
| "learning_rate": 1.441717791411043e-06, | |
| "loss": 0.1311, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.061274509803921566, | |
| "grad_norm": 1.5390625, | |
| "learning_rate": 1.5030674846625768e-06, | |
| "loss": 0.1187, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.06372549019607843, | |
| "grad_norm": 1.4609375, | |
| "learning_rate": 1.5644171779141107e-06, | |
| "loss": 0.1273, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.0661764705882353, | |
| "grad_norm": 1.546875, | |
| "learning_rate": 1.6257668711656445e-06, | |
| "loss": 0.1245, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.06862745098039216, | |
| "grad_norm": 1.3828125, | |
| "learning_rate": 1.687116564417178e-06, | |
| "loss": 0.1278, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.07107843137254902, | |
| "grad_norm": 1.484375, | |
| "learning_rate": 1.7484662576687117e-06, | |
| "loss": 0.1252, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.07352941176470588, | |
| "grad_norm": 1.4609375, | |
| "learning_rate": 1.8098159509202457e-06, | |
| "loss": 0.1266, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.07598039215686274, | |
| "grad_norm": 1.3984375, | |
| "learning_rate": 1.8711656441717794e-06, | |
| "loss": 0.1221, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.0784313725490196, | |
| "grad_norm": 1.4296875, | |
| "learning_rate": 1.932515337423313e-06, | |
| "loss": 0.1215, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.08088235294117647, | |
| "grad_norm": 1.609375, | |
| "learning_rate": 1.9938650306748465e-06, | |
| "loss": 0.1428, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.08333333333333333, | |
| "grad_norm": 1.515625, | |
| "learning_rate": 2.0552147239263804e-06, | |
| "loss": 0.1162, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.0857843137254902, | |
| "grad_norm": 1.6171875, | |
| "learning_rate": 2.1165644171779144e-06, | |
| "loss": 0.1228, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.08823529411764706, | |
| "grad_norm": 1.546875, | |
| "learning_rate": 2.177914110429448e-06, | |
| "loss": 0.1333, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.09068627450980392, | |
| "grad_norm": 1.8203125, | |
| "learning_rate": 2.239263803680982e-06, | |
| "loss": 0.1173, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.09313725490196079, | |
| "grad_norm": 1.59375, | |
| "learning_rate": 2.3006134969325154e-06, | |
| "loss": 0.1169, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.09558823529411764, | |
| "grad_norm": 1.3359375, | |
| "learning_rate": 2.3619631901840493e-06, | |
| "loss": 0.1206, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.09803921568627451, | |
| "grad_norm": 1.515625, | |
| "learning_rate": 2.423312883435583e-06, | |
| "loss": 0.127, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.10049019607843138, | |
| "grad_norm": 1.4375, | |
| "learning_rate": 2.484662576687117e-06, | |
| "loss": 0.1158, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.10294117647058823, | |
| "grad_norm": 1.4375, | |
| "learning_rate": 2.5460122699386504e-06, | |
| "loss": 0.1368, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.1053921568627451, | |
| "grad_norm": 1.21875, | |
| "learning_rate": 2.6073619631901843e-06, | |
| "loss": 0.1087, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.10784313725490197, | |
| "grad_norm": 1.5546875, | |
| "learning_rate": 2.6687116564417183e-06, | |
| "loss": 0.1097, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.11029411764705882, | |
| "grad_norm": 1.4609375, | |
| "learning_rate": 2.7300613496932514e-06, | |
| "loss": 0.1185, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.11274509803921569, | |
| "grad_norm": 1.328125, | |
| "learning_rate": 2.7914110429447853e-06, | |
| "loss": 0.1084, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.11519607843137254, | |
| "grad_norm": 1.046875, | |
| "learning_rate": 2.8527607361963193e-06, | |
| "loss": 0.1217, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.11764705882352941, | |
| "grad_norm": 1.1015625, | |
| "learning_rate": 2.914110429447853e-06, | |
| "loss": 0.1096, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.12009803921568628, | |
| "grad_norm": 1.5546875, | |
| "learning_rate": 2.9754601226993867e-06, | |
| "loss": 0.1076, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.12254901960784313, | |
| "grad_norm": 0.921875, | |
| "learning_rate": 3.0368098159509207e-06, | |
| "loss": 0.0991, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.125, | |
| "grad_norm": 1.1171875, | |
| "learning_rate": 3.0981595092024542e-06, | |
| "loss": 0.1098, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.12745098039215685, | |
| "grad_norm": 1.25, | |
| "learning_rate": 3.159509202453988e-06, | |
| "loss": 0.1084, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.12990196078431374, | |
| "grad_norm": 1.265625, | |
| "learning_rate": 3.2208588957055213e-06, | |
| "loss": 0.1047, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.1323529411764706, | |
| "grad_norm": 1.296875, | |
| "learning_rate": 3.2822085889570552e-06, | |
| "loss": 0.1058, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.13480392156862744, | |
| "grad_norm": 1.1640625, | |
| "learning_rate": 3.343558282208589e-06, | |
| "loss": 0.0949, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.13725490196078433, | |
| "grad_norm": 1.4140625, | |
| "learning_rate": 3.4049079754601227e-06, | |
| "loss": 0.1214, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.13970588235294118, | |
| "grad_norm": 1.3046875, | |
| "learning_rate": 3.4662576687116567e-06, | |
| "loss": 0.1041, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.14215686274509803, | |
| "grad_norm": 1.3203125, | |
| "learning_rate": 3.5276073619631906e-06, | |
| "loss": 0.1127, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.14460784313725492, | |
| "grad_norm": 0.91796875, | |
| "learning_rate": 3.588957055214724e-06, | |
| "loss": 0.0946, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.14705882352941177, | |
| "grad_norm": 1.0703125, | |
| "learning_rate": 3.650306748466258e-06, | |
| "loss": 0.0931, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.14950980392156862, | |
| "grad_norm": 1.296875, | |
| "learning_rate": 3.711656441717792e-06, | |
| "loss": 0.0935, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.15196078431372548, | |
| "grad_norm": 1.109375, | |
| "learning_rate": 3.773006134969325e-06, | |
| "loss": 0.1132, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.15441176470588236, | |
| "grad_norm": 0.94140625, | |
| "learning_rate": 3.834355828220859e-06, | |
| "loss": 0.107, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.1568627450980392, | |
| "grad_norm": 1.171875, | |
| "learning_rate": 3.8957055214723935e-06, | |
| "loss": 0.0869, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.15931372549019607, | |
| "grad_norm": 0.9609375, | |
| "learning_rate": 3.957055214723927e-06, | |
| "loss": 0.0927, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.16176470588235295, | |
| "grad_norm": 0.9375, | |
| "learning_rate": 4.0184049079754606e-06, | |
| "loss": 0.1006, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.1642156862745098, | |
| "grad_norm": 0.8515625, | |
| "learning_rate": 4.079754601226994e-06, | |
| "loss": 0.09, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.16666666666666666, | |
| "grad_norm": 0.72265625, | |
| "learning_rate": 4.141104294478528e-06, | |
| "loss": 0.0873, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.16911764705882354, | |
| "grad_norm": 0.96484375, | |
| "learning_rate": 4.202453987730062e-06, | |
| "loss": 0.0933, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.1715686274509804, | |
| "grad_norm": 0.88671875, | |
| "learning_rate": 4.2638036809815955e-06, | |
| "loss": 0.0946, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.17401960784313725, | |
| "grad_norm": 0.671875, | |
| "learning_rate": 4.325153374233129e-06, | |
| "loss": 0.09, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.17647058823529413, | |
| "grad_norm": 0.65625, | |
| "learning_rate": 4.3865030674846634e-06, | |
| "loss": 0.0832, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.17892156862745098, | |
| "grad_norm": 0.66796875, | |
| "learning_rate": 4.447852760736197e-06, | |
| "loss": 0.093, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.18137254901960784, | |
| "grad_norm": 0.74609375, | |
| "learning_rate": 4.5092024539877305e-06, | |
| "loss": 0.0851, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.18382352941176472, | |
| "grad_norm": 1.0078125, | |
| "learning_rate": 4.570552147239265e-06, | |
| "loss": 0.0938, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.18627450980392157, | |
| "grad_norm": 0.828125, | |
| "learning_rate": 4.6319018404907975e-06, | |
| "loss": 0.081, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.18872549019607843, | |
| "grad_norm": 0.6875, | |
| "learning_rate": 4.693251533742332e-06, | |
| "loss": 0.0719, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.19117647058823528, | |
| "grad_norm": 0.58203125, | |
| "learning_rate": 4.7546012269938654e-06, | |
| "loss": 0.0779, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.19362745098039216, | |
| "grad_norm": 0.72265625, | |
| "learning_rate": 4.815950920245399e-06, | |
| "loss": 0.0804, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.19607843137254902, | |
| "grad_norm": 0.671875, | |
| "learning_rate": 4.877300613496933e-06, | |
| "loss": 0.0862, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.19852941176470587, | |
| "grad_norm": 0.63671875, | |
| "learning_rate": 4.938650306748467e-06, | |
| "loss": 0.0898, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.20098039215686275, | |
| "grad_norm": 0.62890625, | |
| "learning_rate": 5e-06, | |
| "loss": 0.0732, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.2034313725490196, | |
| "grad_norm": 0.58984375, | |
| "learning_rate": 5.061349693251534e-06, | |
| "loss": 0.076, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.20588235294117646, | |
| "grad_norm": 0.62109375, | |
| "learning_rate": 5.122699386503068e-06, | |
| "loss": 0.0797, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.20833333333333334, | |
| "grad_norm": 0.5234375, | |
| "learning_rate": 5.184049079754602e-06, | |
| "loss": 0.078, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.2107843137254902, | |
| "grad_norm": 0.515625, | |
| "learning_rate": 5.245398773006135e-06, | |
| "loss": 0.0729, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.21323529411764705, | |
| "grad_norm": 0.57421875, | |
| "learning_rate": 5.30674846625767e-06, | |
| "loss": 0.0786, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.21568627450980393, | |
| "grad_norm": 0.640625, | |
| "learning_rate": 5.368098159509203e-06, | |
| "loss": 0.0819, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.2181372549019608, | |
| "grad_norm": 0.51171875, | |
| "learning_rate": 5.429447852760736e-06, | |
| "loss": 0.0771, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.22058823529411764, | |
| "grad_norm": 0.482421875, | |
| "learning_rate": 5.490797546012271e-06, | |
| "loss": 0.073, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.22303921568627452, | |
| "grad_norm": 0.43359375, | |
| "learning_rate": 5.552147239263804e-06, | |
| "loss": 0.088, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.22549019607843138, | |
| "grad_norm": 0.53515625, | |
| "learning_rate": 5.613496932515337e-06, | |
| "loss": 0.0837, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.22794117647058823, | |
| "grad_norm": 0.443359375, | |
| "learning_rate": 5.674846625766872e-06, | |
| "loss": 0.0732, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.23039215686274508, | |
| "grad_norm": 0.44921875, | |
| "learning_rate": 5.736196319018405e-06, | |
| "loss": 0.0789, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.23284313725490197, | |
| "grad_norm": 0.455078125, | |
| "learning_rate": 5.797546012269939e-06, | |
| "loss": 0.0711, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.23529411764705882, | |
| "grad_norm": 0.57421875, | |
| "learning_rate": 5.858895705521472e-06, | |
| "loss": 0.0696, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.23774509803921567, | |
| "grad_norm": 0.5703125, | |
| "learning_rate": 5.920245398773007e-06, | |
| "loss": 0.0713, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.24019607843137256, | |
| "grad_norm": 0.52734375, | |
| "learning_rate": 5.98159509202454e-06, | |
| "loss": 0.0816, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.2426470588235294, | |
| "grad_norm": 0.515625, | |
| "learning_rate": 6.042944785276074e-06, | |
| "loss": 0.0706, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.24509803921568626, | |
| "grad_norm": 0.455078125, | |
| "learning_rate": 6.104294478527608e-06, | |
| "loss": 0.0752, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.24754901960784315, | |
| "grad_norm": 0.453125, | |
| "learning_rate": 6.165644171779142e-06, | |
| "loss": 0.0723, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "grad_norm": 0.470703125, | |
| "learning_rate": 6.226993865030675e-06, | |
| "loss": 0.0759, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.25245098039215685, | |
| "grad_norm": 0.3984375, | |
| "learning_rate": 6.28834355828221e-06, | |
| "loss": 0.0746, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.2549019607843137, | |
| "grad_norm": 0.4375, | |
| "learning_rate": 6.349693251533743e-06, | |
| "loss": 0.0705, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.25735294117647056, | |
| "grad_norm": 0.50390625, | |
| "learning_rate": 6.411042944785276e-06, | |
| "loss": 0.0756, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.25980392156862747, | |
| "grad_norm": 0.435546875, | |
| "learning_rate": 6.472392638036811e-06, | |
| "loss": 0.0709, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.2622549019607843, | |
| "grad_norm": 0.5078125, | |
| "learning_rate": 6.533742331288344e-06, | |
| "loss": 0.0698, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.2647058823529412, | |
| "grad_norm": 0.5234375, | |
| "learning_rate": 6.595092024539877e-06, | |
| "loss": 0.0693, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.26715686274509803, | |
| "grad_norm": 0.419921875, | |
| "learning_rate": 6.656441717791412e-06, | |
| "loss": 0.0643, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.2696078431372549, | |
| "grad_norm": 0.46484375, | |
| "learning_rate": 6.717791411042945e-06, | |
| "loss": 0.0713, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.27205882352941174, | |
| "grad_norm": 0.4140625, | |
| "learning_rate": 6.779141104294479e-06, | |
| "loss": 0.0777, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.27450980392156865, | |
| "grad_norm": 0.4453125, | |
| "learning_rate": 6.840490797546013e-06, | |
| "loss": 0.0596, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.2769607843137255, | |
| "grad_norm": 0.373046875, | |
| "learning_rate": 6.901840490797547e-06, | |
| "loss": 0.0658, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.27941176470588236, | |
| "grad_norm": 0.384765625, | |
| "learning_rate": 6.96319018404908e-06, | |
| "loss": 0.0708, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.2818627450980392, | |
| "grad_norm": 0.3828125, | |
| "learning_rate": 7.0245398773006145e-06, | |
| "loss": 0.0759, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.28431372549019607, | |
| "grad_norm": 0.498046875, | |
| "learning_rate": 7.085889570552148e-06, | |
| "loss": 0.0671, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.2867647058823529, | |
| "grad_norm": 0.3359375, | |
| "learning_rate": 7.1472392638036816e-06, | |
| "loss": 0.0716, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.28921568627450983, | |
| "grad_norm": 0.37890625, | |
| "learning_rate": 7.208588957055215e-06, | |
| "loss": 0.0642, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.2916666666666667, | |
| "grad_norm": 0.361328125, | |
| "learning_rate": 7.2699386503067495e-06, | |
| "loss": 0.0593, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.29411764705882354, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 7.331288343558283e-06, | |
| "loss": 0.0593, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.2965686274509804, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 7.392638036809816e-06, | |
| "loss": 0.0623, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.29901960784313725, | |
| "grad_norm": 0.46484375, | |
| "learning_rate": 7.453987730061351e-06, | |
| "loss": 0.0677, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.3014705882352941, | |
| "grad_norm": 0.419921875, | |
| "learning_rate": 7.5153374233128836e-06, | |
| "loss": 0.0648, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.30392156862745096, | |
| "grad_norm": 0.44921875, | |
| "learning_rate": 7.576687116564417e-06, | |
| "loss": 0.0739, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.30637254901960786, | |
| "grad_norm": 0.345703125, | |
| "learning_rate": 7.638036809815951e-06, | |
| "loss": 0.0659, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.3088235294117647, | |
| "grad_norm": 0.416015625, | |
| "learning_rate": 7.699386503067485e-06, | |
| "loss": 0.0663, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.3112745098039216, | |
| "grad_norm": 0.388671875, | |
| "learning_rate": 7.760736196319019e-06, | |
| "loss": 0.064, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.3137254901960784, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 7.822085889570554e-06, | |
| "loss": 0.0592, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.3161764705882353, | |
| "grad_norm": 0.40234375, | |
| "learning_rate": 7.883435582822087e-06, | |
| "loss": 0.0644, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.31862745098039214, | |
| "grad_norm": 0.337890625, | |
| "learning_rate": 7.944785276073619e-06, | |
| "loss": 0.0621, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.32107843137254904, | |
| "grad_norm": 0.3828125, | |
| "learning_rate": 8.006134969325154e-06, | |
| "loss": 0.0574, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.3235294117647059, | |
| "grad_norm": 0.341796875, | |
| "learning_rate": 8.067484662576688e-06, | |
| "loss": 0.065, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.32598039215686275, | |
| "grad_norm": 0.33984375, | |
| "learning_rate": 8.128834355828221e-06, | |
| "loss": 0.0619, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.3284313725490196, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 8.190184049079755e-06, | |
| "loss": 0.057, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.33088235294117646, | |
| "grad_norm": 0.361328125, | |
| "learning_rate": 8.251533742331288e-06, | |
| "loss": 0.0614, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.3333333333333333, | |
| "grad_norm": 0.33984375, | |
| "learning_rate": 8.312883435582822e-06, | |
| "loss": 0.0685, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.33578431372549017, | |
| "grad_norm": 0.357421875, | |
| "learning_rate": 8.374233128834357e-06, | |
| "loss": 0.0658, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.3382352941176471, | |
| "grad_norm": 0.373046875, | |
| "learning_rate": 8.43558282208589e-06, | |
| "loss": 0.0642, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.34068627450980393, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 8.496932515337424e-06, | |
| "loss": 0.0739, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.3431372549019608, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 8.558282208588958e-06, | |
| "loss": 0.0586, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.34558823529411764, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 8.619631901840491e-06, | |
| "loss": 0.0611, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.3480392156862745, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 8.680981595092025e-06, | |
| "loss": 0.0572, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.35049019607843135, | |
| "grad_norm": 0.416015625, | |
| "learning_rate": 8.742331288343558e-06, | |
| "loss": 0.0679, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.35294117647058826, | |
| "grad_norm": 0.369140625, | |
| "learning_rate": 8.803680981595094e-06, | |
| "loss": 0.0594, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.3553921568627451, | |
| "grad_norm": 0.4296875, | |
| "learning_rate": 8.865030674846627e-06, | |
| "loss": 0.0672, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.35784313725490197, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 8.926380368098159e-06, | |
| "loss": 0.0566, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.3602941176470588, | |
| "grad_norm": 0.3515625, | |
| "learning_rate": 8.987730061349694e-06, | |
| "loss": 0.0636, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.3627450980392157, | |
| "grad_norm": 0.373046875, | |
| "learning_rate": 9.049079754601228e-06, | |
| "loss": 0.0688, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.36519607843137253, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.110429447852761e-06, | |
| "loss": 0.0617, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.36764705882352944, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 9.171779141104295e-06, | |
| "loss": 0.0597, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.3700980392156863, | |
| "grad_norm": 0.408203125, | |
| "learning_rate": 9.233128834355828e-06, | |
| "loss": 0.07, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.37254901960784315, | |
| "grad_norm": 0.34765625, | |
| "learning_rate": 9.294478527607362e-06, | |
| "loss": 0.0594, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.375, | |
| "grad_norm": 0.43359375, | |
| "learning_rate": 9.355828220858897e-06, | |
| "loss": 0.0539, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.37745098039215685, | |
| "grad_norm": 0.3359375, | |
| "learning_rate": 9.41717791411043e-06, | |
| "loss": 0.0658, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.3799019607843137, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 9.478527607361964e-06, | |
| "loss": 0.0618, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.38235294117647056, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 9.539877300613498e-06, | |
| "loss": 0.055, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.38480392156862747, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 9.601226993865031e-06, | |
| "loss": 0.0583, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.3872549019607843, | |
| "grad_norm": 0.326171875, | |
| "learning_rate": 9.662576687116565e-06, | |
| "loss": 0.0599, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.3897058823529412, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 9.7239263803681e-06, | |
| "loss": 0.0604, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.39215686274509803, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 9.785276073619633e-06, | |
| "loss": 0.0556, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.3946078431372549, | |
| "grad_norm": 0.380859375, | |
| "learning_rate": 9.846625766871167e-06, | |
| "loss": 0.0643, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.39705882352941174, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 9.9079754601227e-06, | |
| "loss": 0.0572, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.39950980392156865, | |
| "grad_norm": 0.400390625, | |
| "learning_rate": 9.969325153374234e-06, | |
| "loss": 0.0597, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.4019607843137255, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 9.999997141513604e-06, | |
| "loss": 0.0509, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.40441176470588236, | |
| "grad_norm": 0.396484375, | |
| "learning_rate": 9.999974273642042e-06, | |
| "loss": 0.0603, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.4068627450980392, | |
| "grad_norm": 0.365234375, | |
| "learning_rate": 9.999928538003506e-06, | |
| "loss": 0.0644, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.40931372549019607, | |
| "grad_norm": 0.326171875, | |
| "learning_rate": 9.999859934807169e-06, | |
| "loss": 0.0587, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.4117647058823529, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.999768464366794e-06, | |
| "loss": 0.0519, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.41421568627450983, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 9.99965412710073e-06, | |
| "loss": 0.0508, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.4166666666666667, | |
| "grad_norm": 0.349609375, | |
| "learning_rate": 9.999516923531906e-06, | |
| "loss": 0.0645, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.41911764705882354, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.999356854287834e-06, | |
| "loss": 0.0569, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.4215686274509804, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 9.999173920100601e-06, | |
| "loss": 0.0594, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.42401960784313725, | |
| "grad_norm": 0.349609375, | |
| "learning_rate": 9.998968121806873e-06, | |
| "loss": 0.0564, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.4264705882352941, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 9.998739460347882e-06, | |
| "loss": 0.0573, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.42892156862745096, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.99848793676943e-06, | |
| "loss": 0.0607, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.43137254901960786, | |
| "grad_norm": 0.390625, | |
| "learning_rate": 9.99821355222188e-06, | |
| "loss": 0.0602, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.4338235294117647, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.99791630796015e-06, | |
| "loss": 0.0617, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.4362745098039216, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 9.997596205343709e-06, | |
| "loss": 0.0565, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.4387254901960784, | |
| "grad_norm": 0.333984375, | |
| "learning_rate": 9.997253245836573e-06, | |
| "loss": 0.0524, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.4411764705882353, | |
| "grad_norm": 0.388671875, | |
| "learning_rate": 9.996887431007291e-06, | |
| "loss": 0.0569, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.44362745098039214, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 9.996498762528947e-06, | |
| "loss": 0.0564, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.44607843137254904, | |
| "grad_norm": 0.341796875, | |
| "learning_rate": 9.996087242179147e-06, | |
| "loss": 0.0526, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.4485294117647059, | |
| "grad_norm": 0.330078125, | |
| "learning_rate": 9.995652871840006e-06, | |
| "loss": 0.0547, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.45098039215686275, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 9.995195653498157e-06, | |
| "loss": 0.0581, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.4534313725490196, | |
| "grad_norm": 0.43359375, | |
| "learning_rate": 9.99471558924472e-06, | |
| "loss": 0.0563, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.45588235294117646, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 9.994212681275306e-06, | |
| "loss": 0.0611, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.4583333333333333, | |
| "grad_norm": 0.365234375, | |
| "learning_rate": 9.993686931890003e-06, | |
| "loss": 0.0602, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.46078431372549017, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 9.993138343493364e-06, | |
| "loss": 0.057, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.4632352941176471, | |
| "grad_norm": 0.36328125, | |
| "learning_rate": 9.992566918594406e-06, | |
| "loss": 0.0579, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.46568627450980393, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 9.99197265980658e-06, | |
| "loss": 0.0554, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.4681372549019608, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 9.991355569847776e-06, | |
| "loss": 0.0613, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.47058823529411764, | |
| "grad_norm": 0.408203125, | |
| "learning_rate": 9.990715651540303e-06, | |
| "loss": 0.0643, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.4730392156862745, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.990052907810875e-06, | |
| "loss": 0.0585, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.47549019607843135, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.989367341690602e-06, | |
| "loss": 0.0549, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.47794117647058826, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 9.988658956314973e-06, | |
| "loss": 0.0599, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.4803921568627451, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 9.987927754923844e-06, | |
| "loss": 0.0556, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.48284313725490197, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.98717374086142e-06, | |
| "loss": 0.0487, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.4852941176470588, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.986396917576244e-06, | |
| "loss": 0.0517, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.4877450980392157, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 9.985597288621174e-06, | |
| "loss": 0.0453, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.49019607843137253, | |
| "grad_norm": 0.35546875, | |
| "learning_rate": 9.984774857653377e-06, | |
| "loss": 0.053, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.49264705882352944, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 9.983929628434305e-06, | |
| "loss": 0.0526, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.4950980392156863, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 9.983061604829676e-06, | |
| "loss": 0.0523, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.49754901960784315, | |
| "grad_norm": 0.326171875, | |
| "learning_rate": 9.982170790809464e-06, | |
| "loss": 0.0576, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.35546875, | |
| "learning_rate": 9.981257190447875e-06, | |
| "loss": 0.0548, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.5024509803921569, | |
| "grad_norm": 0.333984375, | |
| "learning_rate": 9.980320807923333e-06, | |
| "loss": 0.0572, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.5049019607843137, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 9.979361647518453e-06, | |
| "loss": 0.0579, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.5073529411764706, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 9.978379713620029e-06, | |
| "loss": 0.057, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.5098039215686274, | |
| "grad_norm": 0.33203125, | |
| "learning_rate": 9.977375010719013e-06, | |
| "loss": 0.0476, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.5122549019607843, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.976347543410487e-06, | |
| "loss": 0.0529, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.5147058823529411, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.975297316393658e-06, | |
| "loss": 0.0525, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.5171568627450981, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.974224334471814e-06, | |
| "loss": 0.0576, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.5196078431372549, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 9.973128602552324e-06, | |
| "loss": 0.055, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.5220588235294118, | |
| "grad_norm": 0.33203125, | |
| "learning_rate": 9.9720101256466e-06, | |
| "loss": 0.0532, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.5245098039215687, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 9.970868908870087e-06, | |
| "loss": 0.0497, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.5269607843137255, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 9.969704957442221e-06, | |
| "loss": 0.0518, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.5294117647058824, | |
| "grad_norm": 0.345703125, | |
| "learning_rate": 9.968518276686428e-06, | |
| "loss": 0.0649, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.5318627450980392, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 9.96730887203008e-06, | |
| "loss": 0.052, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.5343137254901961, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 9.966076749004485e-06, | |
| "loss": 0.0547, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.5367647058823529, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.964821913244853e-06, | |
| "loss": 0.0528, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.5392156862745098, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.96354437049027e-06, | |
| "loss": 0.0589, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.5416666666666666, | |
| "grad_norm": 0.337890625, | |
| "learning_rate": 9.962244126583678e-06, | |
| "loss": 0.0528, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.5441176470588235, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 9.960921187471841e-06, | |
| "loss": 0.0583, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.5465686274509803, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 9.959575559205323e-06, | |
| "loss": 0.0564, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.5490196078431373, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 9.958207247938458e-06, | |
| "loss": 0.0524, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.5514705882352942, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 9.956816259929322e-06, | |
| "loss": 0.0497, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.553921568627451, | |
| "grad_norm": 0.31640625, | |
| "learning_rate": 9.955402601539709e-06, | |
| "loss": 0.0584, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.5563725490196079, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 9.953966279235092e-06, | |
| "loss": 0.0583, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.5588235294117647, | |
| "grad_norm": 0.392578125, | |
| "learning_rate": 9.952507299584602e-06, | |
| "loss": 0.0558, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.5612745098039216, | |
| "grad_norm": 0.36328125, | |
| "learning_rate": 9.951025669260996e-06, | |
| "loss": 0.0661, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.5637254901960784, | |
| "grad_norm": 0.3046875, | |
| "learning_rate": 9.949521395040623e-06, | |
| "loss": 0.0573, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.5661764705882353, | |
| "grad_norm": 0.33203125, | |
| "learning_rate": 9.947994483803394e-06, | |
| "loss": 0.0563, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.5686274509803921, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 9.94644494253276e-06, | |
| "loss": 0.0546, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.571078431372549, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 9.944872778315663e-06, | |
| "loss": 0.0572, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.5735294117647058, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.94327799834252e-06, | |
| "loss": 0.0544, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.5759803921568627, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 9.94166060990718e-06, | |
| "loss": 0.0691, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.5784313725490197, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 9.940020620406891e-06, | |
| "loss": 0.0527, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.5808823529411765, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 9.938358037342272e-06, | |
| "loss": 0.0504, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.5833333333333334, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 9.936672868317277e-06, | |
| "loss": 0.0522, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.5857843137254902, | |
| "grad_norm": 0.3046875, | |
| "learning_rate": 9.934965121039153e-06, | |
| "loss": 0.0522, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.5882352941176471, | |
| "grad_norm": 0.376953125, | |
| "learning_rate": 9.933234803318415e-06, | |
| "loss": 0.0601, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.5906862745098039, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 9.931481923068804e-06, | |
| "loss": 0.0559, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.5931372549019608, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 9.92970648830725e-06, | |
| "loss": 0.0563, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.5955882352941176, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.927908507153847e-06, | |
| "loss": 0.0461, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.5980392156862745, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 9.926087987831794e-06, | |
| "loss": 0.0592, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.6004901960784313, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.92424493866738e-06, | |
| "loss": 0.0543, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.6029411764705882, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 9.922379368089928e-06, | |
| "loss": 0.0559, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.6053921568627451, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 9.920491284631773e-06, | |
| "loss": 0.0522, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.6078431372549019, | |
| "grad_norm": 0.353515625, | |
| "learning_rate": 9.918580696928206e-06, | |
| "loss": 0.0608, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.6102941176470589, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 9.91664761371745e-06, | |
| "loss": 0.0463, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.6127450980392157, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.914692043840608e-06, | |
| "loss": 0.0498, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.6151960784313726, | |
| "grad_norm": 0.330078125, | |
| "learning_rate": 9.912713996241633e-06, | |
| "loss": 0.0551, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.6176470588235294, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 9.91071347996727e-06, | |
| "loss": 0.051, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.6200980392156863, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.90869050416704e-06, | |
| "loss": 0.0488, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.6225490196078431, | |
| "grad_norm": 0.34375, | |
| "learning_rate": 9.906645078093179e-06, | |
| "loss": 0.0553, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.625, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 9.904577211100597e-06, | |
| "loss": 0.0564, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.6274509803921569, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.902486912646846e-06, | |
| "loss": 0.0543, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.6299019607843137, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 9.900374192292064e-06, | |
| "loss": 0.0581, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.6323529411764706, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 9.898239059698943e-06, | |
| "loss": 0.0488, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.6348039215686274, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.896081524632673e-06, | |
| "loss": 0.0515, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.6372549019607843, | |
| "grad_norm": 0.369140625, | |
| "learning_rate": 9.893901596960906e-06, | |
| "loss": 0.0544, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.6397058823529411, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 9.891699286653714e-06, | |
| "loss": 0.0545, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.6421568627450981, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 9.88947460378353e-06, | |
| "loss": 0.0492, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.6446078431372549, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 9.88722755852511e-06, | |
| "loss": 0.0574, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.6470588235294118, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 9.884958161155492e-06, | |
| "loss": 0.0476, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.6495098039215687, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 9.882666422053935e-06, | |
| "loss": 0.0595, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.6519607843137255, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.880352351701888e-06, | |
| "loss": 0.0532, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.6544117647058824, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 9.87801596068293e-06, | |
| "loss": 0.0572, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.6568627450980392, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.875657259682722e-06, | |
| "loss": 0.0546, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.6593137254901961, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.873276259488967e-06, | |
| "loss": 0.0476, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.6617647058823529, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.87087297099135e-06, | |
| "loss": 0.0527, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.6642156862745098, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 9.868447405181499e-06, | |
| "loss": 0.0609, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 9.865999573152922e-06, | |
| "loss": 0.0515, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.6691176470588235, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 9.86352948610097e-06, | |
| "loss": 0.0524, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.6715686274509803, | |
| "grad_norm": 0.349609375, | |
| "learning_rate": 9.861037155322777e-06, | |
| "loss": 0.0505, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.6740196078431373, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.858522592217208e-06, | |
| "loss": 0.0483, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.6764705882352942, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 9.85598580828481e-06, | |
| "loss": 0.0517, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.678921568627451, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.85342681512776e-06, | |
| "loss": 0.0534, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.6813725490196079, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 9.85084562444981e-06, | |
| "loss": 0.06, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.6838235294117647, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 9.848242248056234e-06, | |
| "loss": 0.0473, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.6862745098039216, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 9.845616697853775e-06, | |
| "loss": 0.0521, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.6887254901960784, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 9.84296898585059e-06, | |
| "loss": 0.048, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.6911764705882353, | |
| "grad_norm": 0.330078125, | |
| "learning_rate": 9.84029912415619e-06, | |
| "loss": 0.0514, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.6936274509803921, | |
| "grad_norm": 0.353515625, | |
| "learning_rate": 9.837607124981397e-06, | |
| "loss": 0.0521, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.696078431372549, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 9.834893000638272e-06, | |
| "loss": 0.0531, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.6985294117647058, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.832156763540075e-06, | |
| "loss": 0.0529, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.7009803921568627, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 9.829398426201196e-06, | |
| "loss": 0.0605, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.7034313725490197, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 9.826618001237101e-06, | |
| "loss": 0.0535, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.7058823529411765, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 9.82381550136428e-06, | |
| "loss": 0.055, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.7083333333333334, | |
| "grad_norm": 0.35546875, | |
| "learning_rate": 9.82099093940018e-06, | |
| "loss": 0.0475, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.7107843137254902, | |
| "grad_norm": 0.361328125, | |
| "learning_rate": 9.818144328263154e-06, | |
| "loss": 0.0548, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.7132352941176471, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 9.815275680972396e-06, | |
| "loss": 0.0499, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.7156862745098039, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 9.812385010647885e-06, | |
| "loss": 0.0556, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.7181372549019608, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.809472330510323e-06, | |
| "loss": 0.0541, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.7205882352941176, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 9.80653765388108e-06, | |
| "loss": 0.0499, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.7230392156862745, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 9.803580994182122e-06, | |
| "loss": 0.0482, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.7254901960784313, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.800602364935962e-06, | |
| "loss": 0.0518, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.7279411764705882, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 9.797601779765589e-06, | |
| "loss": 0.0477, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.7303921568627451, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 9.794579252394409e-06, | |
| "loss": 0.0519, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.7328431372549019, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 9.791534796646185e-06, | |
| "loss": 0.0512, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.7352941176470589, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 9.788468426444968e-06, | |
| "loss": 0.0554, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.7377450980392157, | |
| "grad_norm": 0.357421875, | |
| "learning_rate": 9.78538015581504e-06, | |
| "loss": 0.0569, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.7401960784313726, | |
| "grad_norm": 0.322265625, | |
| "learning_rate": 9.782269998880842e-06, | |
| "loss": 0.0492, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.7426470588235294, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.779137969866918e-06, | |
| "loss": 0.0534, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.7450980392156863, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 9.775984083097844e-06, | |
| "loss": 0.0565, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.7475490196078431, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.772808352998163e-06, | |
| "loss": 0.0548, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.769610794092319e-06, | |
| "loss": 0.0485, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.7524509803921569, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 9.766391421004598e-06, | |
| "loss": 0.0514, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.7549019607843137, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.763150248459044e-06, | |
| "loss": 0.0534, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.7573529411764706, | |
| "grad_norm": 0.373046875, | |
| "learning_rate": 9.759887291279415e-06, | |
| "loss": 0.0626, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.7598039215686274, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 9.756602564389094e-06, | |
| "loss": 0.0476, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.7622549019607843, | |
| "grad_norm": 0.373046875, | |
| "learning_rate": 9.753296082811033e-06, | |
| "loss": 0.0474, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.7647058823529411, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 9.749967861667677e-06, | |
| "loss": 0.0523, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.7671568627450981, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 9.746617916180906e-06, | |
| "loss": 0.0527, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.7696078431372549, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.74324626167195e-06, | |
| "loss": 0.0479, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.7720588235294118, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.739852913561332e-06, | |
| "loss": 0.0435, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.7745098039215687, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 9.73643788736879e-06, | |
| "loss": 0.0523, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.7769607843137255, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 9.73300119871321e-06, | |
| "loss": 0.0582, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.7794117647058824, | |
| "grad_norm": 0.34765625, | |
| "learning_rate": 9.729542863312548e-06, | |
| "loss": 0.0585, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.7818627450980392, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.726062896983774e-06, | |
| "loss": 0.047, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.7843137254901961, | |
| "grad_norm": 0.35546875, | |
| "learning_rate": 9.722561315642776e-06, | |
| "loss": 0.0505, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.7867647058823529, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 9.719038135304306e-06, | |
| "loss": 0.0518, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.7892156862745098, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 9.715493372081904e-06, | |
| "loss": 0.0509, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.7916666666666666, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 9.711927042187812e-06, | |
| "loss": 0.0477, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.7941176470588235, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 9.70833916193292e-06, | |
| "loss": 0.0509, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.7965686274509803, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 9.704729747726669e-06, | |
| "loss": 0.044, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.7990196078431373, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 9.701098816076995e-06, | |
| "loss": 0.045, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.8014705882352942, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 9.697446383590245e-06, | |
| "loss": 0.053, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.803921568627451, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 9.693772466971097e-06, | |
| "loss": 0.0545, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.8063725490196079, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 9.690077083022493e-06, | |
| "loss": 0.056, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.8088235294117647, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 9.686360248645554e-06, | |
| "loss": 0.0502, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.8112745098039216, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 9.682621980839511e-06, | |
| "loss": 0.0468, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.8137254901960784, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.678862296701615e-06, | |
| "loss": 0.0548, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.8161764705882353, | |
| "grad_norm": 0.427734375, | |
| "learning_rate": 9.675081213427076e-06, | |
| "loss": 0.0557, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.8186274509803921, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.671278748308963e-06, | |
| "loss": 0.0479, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.821078431372549, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 9.667454918738148e-06, | |
| "loss": 0.0502, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.8235294117647058, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.663609742203208e-06, | |
| "loss": 0.0558, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.8259803921568627, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 9.659743236290353e-06, | |
| "loss": 0.0487, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.8284313725490197, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 9.655855418683345e-06, | |
| "loss": 0.0469, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.8308823529411765, | |
| "grad_norm": 0.3046875, | |
| "learning_rate": 9.651946307163417e-06, | |
| "loss": 0.0528, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.8333333333333334, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 9.648015919609194e-06, | |
| "loss": 0.0471, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.8357843137254902, | |
| "grad_norm": 0.3359375, | |
| "learning_rate": 9.644064273996603e-06, | |
| "loss": 0.0477, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.8382352941176471, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.6400913883988e-06, | |
| "loss": 0.0486, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.8406862745098039, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.636097280986081e-06, | |
| "loss": 0.0511, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.8431372549019608, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 9.632081970025808e-06, | |
| "loss": 0.0479, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.8455882352941176, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 9.62804547388231e-06, | |
| "loss": 0.0471, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.8480392156862745, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 9.623987811016814e-06, | |
| "loss": 0.0528, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.8504901960784313, | |
| "grad_norm": 0.333984375, | |
| "learning_rate": 9.619908999987356e-06, | |
| "loss": 0.0469, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.8529411764705882, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 9.615809059448687e-06, | |
| "loss": 0.0438, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.8553921568627451, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 9.611688008152205e-06, | |
| "loss": 0.0521, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.8578431372549019, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 9.607545864945851e-06, | |
| "loss": 0.056, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.8602941176470589, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.60338264877404e-06, | |
| "loss": 0.0495, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.8627450980392157, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 9.599198378677559e-06, | |
| "loss": 0.0487, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.8651960784313726, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 9.59499307379349e-06, | |
| "loss": 0.0465, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.8676470588235294, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 9.590766753355117e-06, | |
| "loss": 0.0472, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.8700980392156863, | |
| "grad_norm": 0.31640625, | |
| "learning_rate": 9.586519436691843e-06, | |
| "loss": 0.049, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.8725490196078431, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 9.582251143229098e-06, | |
| "loss": 0.049, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.875, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 9.577961892488247e-06, | |
| "loss": 0.0497, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.8774509803921569, | |
| "grad_norm": 0.365234375, | |
| "learning_rate": 9.573651704086512e-06, | |
| "loss": 0.0498, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.8799019607843137, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 9.569320597736871e-06, | |
| "loss": 0.0567, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.8823529411764706, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 9.564968593247971e-06, | |
| "loss": 0.0489, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.8848039215686274, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 9.56059571052404e-06, | |
| "loss": 0.047, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.8872549019607843, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 9.556201969564789e-06, | |
| "loss": 0.0522, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.8897058823529411, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.551787390465336e-06, | |
| "loss": 0.0497, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.8921568627450981, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.547351993416099e-06, | |
| "loss": 0.0476, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.8946078431372549, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 9.542895798702702e-06, | |
| "loss": 0.0538, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.8970588235294118, | |
| "grad_norm": 0.373046875, | |
| "learning_rate": 9.538418826705899e-06, | |
| "loss": 0.0461, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.8995098039215687, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.533921097901462e-06, | |
| "loss": 0.0531, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.9019607843137255, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 9.529402632860104e-06, | |
| "loss": 0.0551, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.9044117647058824, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 9.524863452247368e-06, | |
| "loss": 0.0506, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.9068627450980392, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 9.52030357682355e-06, | |
| "loss": 0.0508, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.9093137254901961, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.515723027443587e-06, | |
| "loss": 0.0518, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.9117647058823529, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.511121825056978e-06, | |
| "loss": 0.0495, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.9142156862745098, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.506499990707672e-06, | |
| "loss": 0.0547, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.9166666666666666, | |
| "grad_norm": 0.326171875, | |
| "learning_rate": 9.501857545533987e-06, | |
| "loss": 0.0501, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.9191176470588235, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 9.497194510768502e-06, | |
| "loss": 0.0517, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.9215686274509803, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 9.492510907737966e-06, | |
| "loss": 0.0451, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.9240196078431373, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 9.487806757863194e-06, | |
| "loss": 0.0568, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.9264705882352942, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 9.483082082658984e-06, | |
| "loss": 0.0525, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.928921568627451, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 9.478336903733996e-06, | |
| "loss": 0.0542, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.9313725490196079, | |
| "grad_norm": 0.3359375, | |
| "learning_rate": 9.473571242790672e-06, | |
| "loss": 0.0537, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.9338235294117647, | |
| "grad_norm": 0.25, | |
| "learning_rate": 9.468785121625131e-06, | |
| "loss": 0.0552, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.9362745098039216, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.463978562127065e-06, | |
| "loss": 0.0458, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.9387254901960784, | |
| "grad_norm": 0.322265625, | |
| "learning_rate": 9.459151586279643e-06, | |
| "loss": 0.0495, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.9411764705882353, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 9.454304216159411e-06, | |
| "loss": 0.0512, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.9436274509803921, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.44943647393619e-06, | |
| "loss": 0.052, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.946078431372549, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.444548381872972e-06, | |
| "loss": 0.0532, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.9485294117647058, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.439639962325822e-06, | |
| "loss": 0.049, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.9509803921568627, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 9.434711237743776e-06, | |
| "loss": 0.0489, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.9534313725490197, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 9.429762230668734e-06, | |
| "loss": 0.0497, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.9558823529411765, | |
| "grad_norm": 0.333984375, | |
| "learning_rate": 9.42479296373536e-06, | |
| "loss": 0.059, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.9583333333333334, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.41980345967098e-06, | |
| "loss": 0.0496, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.9607843137254902, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.414793741295472e-06, | |
| "loss": 0.0512, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.9632352941176471, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.409763831521173e-06, | |
| "loss": 0.0541, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.9656862745098039, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 9.404713753352757e-06, | |
| "loss": 0.0495, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.9681372549019608, | |
| "grad_norm": 0.34375, | |
| "learning_rate": 9.39964352988715e-06, | |
| "loss": 0.0551, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.9705882352941176, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 9.394553184313407e-06, | |
| "loss": 0.0507, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.9730392156862745, | |
| "grad_norm": 0.25, | |
| "learning_rate": 9.389442739912612e-06, | |
| "loss": 0.0433, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.9754901960784313, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 9.38431222005778e-06, | |
| "loss": 0.0549, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.9779411764705882, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 9.379161648213737e-06, | |
| "loss": 0.0508, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.9803921568627451, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 9.373991047937017e-06, | |
| "loss": 0.0481, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.9828431372549019, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 9.368800442875763e-06, | |
| "loss": 0.0484, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.9852941176470589, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 9.363589856769602e-06, | |
| "loss": 0.0429, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.9877450980392157, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 9.358359313449553e-06, | |
| "loss": 0.0476, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.9901960784313726, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 9.353108836837907e-06, | |
| "loss": 0.0594, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.9926470588235294, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.347838450948123e-06, | |
| "loss": 0.0462, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.9950980392156863, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 9.342548179884716e-06, | |
| "loss": 0.057, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.9975490196078431, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 9.337238047843148e-06, | |
| "loss": 0.0555, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.431640625, | |
| "learning_rate": 9.331908079109718e-06, | |
| "loss": 0.0484, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.05779758095741272, | |
| "eval_runtime": 37.3876, | |
| "eval_samples_per_second": 134.162, | |
| "eval_steps_per_second": 1.07, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.0024509803921569, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 9.326558298061446e-06, | |
| "loss": 0.0465, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.0049019607843137, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 9.321188729165967e-06, | |
| "loss": 0.0449, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.0073529411764706, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 9.315799396981417e-06, | |
| "loss": 0.0447, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.0098039215686274, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 9.310390326156324e-06, | |
| "loss": 0.0533, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.0122549019607843, | |
| "grad_norm": 0.322265625, | |
| "learning_rate": 9.304961541429487e-06, | |
| "loss": 0.0496, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.0147058823529411, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 9.299513067629872e-06, | |
| "loss": 0.0439, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.017156862745098, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.294044929676493e-06, | |
| "loss": 0.0585, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.0196078431372548, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 9.2885571525783e-06, | |
| "loss": 0.0562, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.0220588235294117, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.283049761434059e-06, | |
| "loss": 0.0531, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.0245098039215685, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 9.277522781432254e-06, | |
| "loss": 0.0523, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.0269607843137254, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 9.271976237850951e-06, | |
| "loss": 0.0488, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.0294117647058822, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 9.26641015605769e-06, | |
| "loss": 0.0551, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.031862745098039, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 9.260824561509378e-06, | |
| "loss": 0.0507, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.0343137254901962, | |
| "grad_norm": 0.25, | |
| "learning_rate": 9.255219479752163e-06, | |
| "loss": 0.0472, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.036764705882353, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 9.249594936421312e-06, | |
| "loss": 0.0521, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.0392156862745099, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.24395095724111e-06, | |
| "loss": 0.0497, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.0416666666666667, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.238287568024732e-06, | |
| "loss": 0.0485, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.0441176470588236, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 9.23260479467412e-06, | |
| "loss": 0.0501, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.0465686274509804, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 9.226902663179877e-06, | |
| "loss": 0.0515, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.0490196078431373, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.221181199621141e-06, | |
| "loss": 0.0521, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.0514705882352942, | |
| "grad_norm": 0.353515625, | |
| "learning_rate": 9.215440430165463e-06, | |
| "loss": 0.0476, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.053921568627451, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 9.209680381068698e-06, | |
| "loss": 0.044, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.0563725490196079, | |
| "grad_norm": 0.36328125, | |
| "learning_rate": 9.203901078674868e-06, | |
| "loss": 0.0551, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.0588235294117647, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 9.198102549416062e-06, | |
| "loss": 0.0482, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.0612745098039216, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 9.192284819812298e-06, | |
| "loss": 0.0454, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.0637254901960784, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 9.18644791647141e-06, | |
| "loss": 0.0524, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.0661764705882353, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 9.180591866088924e-06, | |
| "loss": 0.0502, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.0686274509803921, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 9.174716695447935e-06, | |
| "loss": 0.0509, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.071078431372549, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 9.168822431418992e-06, | |
| "loss": 0.0496, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.0735294117647058, | |
| "grad_norm": 0.2177734375, | |
| "learning_rate": 9.162909100959962e-06, | |
| "loss": 0.0521, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.0759803921568627, | |
| "grad_norm": 0.25, | |
| "learning_rate": 9.156976731115919e-06, | |
| "loss": 0.0529, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.0784313725490196, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.15102534901901e-06, | |
| "loss": 0.0494, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.0808823529411764, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 9.145054981888342e-06, | |
| "loss": 0.0589, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.0833333333333333, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 9.139065657029845e-06, | |
| "loss": 0.0437, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.0857843137254901, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 9.13305740183616e-06, | |
| "loss": 0.0472, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.088235294117647, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 9.127030243786502e-06, | |
| "loss": 0.0539, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.0906862745098038, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 9.120984210446544e-06, | |
| "loss": 0.0469, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.093137254901961, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 9.114919329468283e-06, | |
| "loss": 0.0478, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.0955882352941178, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 9.108835628589919e-06, | |
| "loss": 0.0506, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.0980392156862746, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 9.102733135635727e-06, | |
| "loss": 0.0557, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.1004901960784315, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 9.096611878515926e-06, | |
| "loss": 0.0482, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.1029411764705883, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 9.09047188522656e-06, | |
| "loss": 0.0565, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.1053921568627452, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 9.084313183849356e-06, | |
| "loss": 0.0445, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 1.107843137254902, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 9.078135802551611e-06, | |
| "loss": 0.0456, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 1.1102941176470589, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 9.071939769586054e-06, | |
| "loss": 0.0526, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 1.1127450980392157, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 9.065725113290718e-06, | |
| "loss": 0.0469, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 1.1151960784313726, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 9.059491862088811e-06, | |
| "loss": 0.0567, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.1176470588235294, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 9.053240044488587e-06, | |
| "loss": 0.0513, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 1.1200980392156863, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 9.046969689083215e-06, | |
| "loss": 0.048, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 1.1225490196078431, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.040680824550648e-06, | |
| "loss": 0.0469, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 1.125, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 9.034373479653491e-06, | |
| "loss": 0.0491, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 1.1274509803921569, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 9.028047683238872e-06, | |
| "loss": 0.0483, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.1299019607843137, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 9.021703464238308e-06, | |
| "loss": 0.0507, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 1.1323529411764706, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 9.01534085166757e-06, | |
| "loss": 0.051, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 1.1348039215686274, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 9.008959874626559e-06, | |
| "loss": 0.0472, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 1.1372549019607843, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 9.002560562299164e-06, | |
| "loss": 0.0558, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 1.1397058823529411, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 8.99614294395313e-06, | |
| "loss": 0.0471, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 1.142156862745098, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 8.989707048939931e-06, | |
| "loss": 0.059, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 1.1446078431372548, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 8.983252906694626e-06, | |
| "loss": 0.0441, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 1.1470588235294117, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 8.97678054673573e-06, | |
| "loss": 0.0446, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 1.1495098039215685, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 8.970289998665083e-06, | |
| "loss": 0.0477, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 1.1519607843137254, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 8.963781292167701e-06, | |
| "loss": 0.0545, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 1.1544117647058822, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 8.957254457011659e-06, | |
| "loss": 0.0551, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 1.156862745098039, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 8.950709523047936e-06, | |
| "loss": 0.0434, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 1.159313725490196, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 8.944146520210293e-06, | |
| "loss": 0.0482, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 1.161764705882353, | |
| "grad_norm": 0.33203125, | |
| "learning_rate": 8.937565478515127e-06, | |
| "loss": 0.0514, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 1.1642156862745099, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 8.930966428061339e-06, | |
| "loss": 0.0465, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 1.1666666666666667, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.924349399030194e-06, | |
| "loss": 0.0482, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 1.1691176470588236, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 8.917714421685183e-06, | |
| "loss": 0.048, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 1.1715686274509804, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 8.911061526371886e-06, | |
| "loss": 0.0531, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 1.1740196078431373, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 8.90439074351783e-06, | |
| "loss": 0.0488, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 1.1764705882352942, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 8.897702103632355e-06, | |
| "loss": 0.0469, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 1.178921568627451, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 8.89099563730647e-06, | |
| "loss": 0.0516, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 1.1813725490196079, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 8.884271375212714e-06, | |
| "loss": 0.0484, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 1.1838235294117647, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 8.877529348105019e-06, | |
| "loss": 0.0523, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 1.1862745098039216, | |
| "grad_norm": 0.20703125, | |
| "learning_rate": 8.87076958681856e-06, | |
| "loss": 0.045, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 1.1887254901960784, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 8.86399212226963e-06, | |
| "loss": 0.042, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 1.1911764705882353, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 8.857196985455481e-06, | |
| "loss": 0.047, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 1.1936274509803921, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 8.850384207454195e-06, | |
| "loss": 0.0444, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 1.196078431372549, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 8.84355381942454e-06, | |
| "loss": 0.0487, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 1.1985294117647058, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 8.836705852605814e-06, | |
| "loss": 0.0532, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 1.2009803921568627, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 8.829840338317726e-06, | |
| "loss": 0.0459, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 1.2034313725490196, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 8.822957307960232e-06, | |
| "loss": 0.0443, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 1.2058823529411764, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 8.8160567930134e-06, | |
| "loss": 0.0516, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 1.2083333333333333, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 8.809138825037268e-06, | |
| "loss": 0.0471, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 1.2107843137254901, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 8.80220343567169e-06, | |
| "loss": 0.0471, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 1.213235294117647, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 8.795250656636207e-06, | |
| "loss": 0.0466, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 1.215686274509804, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 8.788280519729891e-06, | |
| "loss": 0.0518, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 1.218137254901961, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 8.781293056831192e-06, | |
| "loss": 0.0503, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 1.2205882352941178, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 8.774288299897816e-06, | |
| "loss": 0.0483, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 1.2230392156862746, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 8.767266280966551e-06, | |
| "loss": 0.0529, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 1.2254901960784315, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 8.760227032153147e-06, | |
| "loss": 0.0516, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.2279411764705883, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 8.753170585652148e-06, | |
| "loss": 0.0481, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 1.2303921568627452, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 8.746096973736755e-06, | |
| "loss": 0.0508, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 1.232843137254902, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.739006228758675e-06, | |
| "loss": 0.0481, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 1.2352941176470589, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 8.731898383147976e-06, | |
| "loss": 0.0459, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 1.2377450980392157, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 8.724773469412936e-06, | |
| "loss": 0.0461, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.2401960784313726, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 8.7176315201399e-06, | |
| "loss": 0.0507, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 1.2426470588235294, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 8.710472567993117e-06, | |
| "loss": 0.0436, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 1.2450980392156863, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 8.70329664571461e-06, | |
| "loss": 0.05, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 1.2475490196078431, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 8.696103786124009e-06, | |
| "loss": 0.0487, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 8.68889402211841e-06, | |
| "loss": 0.0485, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 1.2524509803921569, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 8.681667386672223e-06, | |
| "loss": 0.0495, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 1.2549019607843137, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 8.674423912837025e-06, | |
| "loss": 0.0495, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 1.2573529411764706, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 8.667163633741397e-06, | |
| "loss": 0.0503, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 1.2598039215686274, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 8.659886582590787e-06, | |
| "loss": 0.0484, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 1.2622549019607843, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 8.652592792667346e-06, | |
| "loss": 0.049, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 1.2647058823529411, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 8.645282297329785e-06, | |
| "loss": 0.0481, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 1.267156862745098, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 8.637955130013213e-06, | |
| "loss": 0.0451, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 1.2696078431372548, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 8.630611324228996e-06, | |
| "loss": 0.0491, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 1.2720588235294117, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 8.623250913564598e-06, | |
| "loss": 0.0513, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 1.2745098039215685, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 8.615873931683417e-06, | |
| "loss": 0.0434, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.2769607843137254, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 8.608480412324652e-06, | |
| "loss": 0.0473, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 1.2794117647058822, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 8.60107038930313e-06, | |
| "loss": 0.05, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 1.281862745098039, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 8.593643896509163e-06, | |
| "loss": 0.0536, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 1.284313725490196, | |
| "grad_norm": 0.25, | |
| "learning_rate": 8.586200967908385e-06, | |
| "loss": 0.0482, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 1.2867647058823528, | |
| "grad_norm": 0.25, | |
| "learning_rate": 8.578741637541605e-06, | |
| "loss": 0.0498, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 1.2892156862745099, | |
| "grad_norm": 0.208984375, | |
| "learning_rate": 8.571265939524642e-06, | |
| "loss": 0.0461, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 1.2916666666666667, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 8.563773908048177e-06, | |
| "loss": 0.0439, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 1.2941176470588236, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 8.556265577377593e-06, | |
| "loss": 0.0433, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 1.2965686274509804, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 8.548740981852816e-06, | |
| "loss": 0.0462, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 1.2990196078431373, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 8.541200155888166e-06, | |
| "loss": 0.051, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 1.3014705882352942, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 8.533643133972186e-06, | |
| "loss": 0.0463, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 1.303921568627451, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 8.5260699506675e-06, | |
| "loss": 0.0513, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 1.3063725490196079, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 8.518480640610644e-06, | |
| "loss": 0.0478, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 1.3088235294117647, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 8.510875238511911e-06, | |
| "loss": 0.0498, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 1.3112745098039216, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 8.503253779155193e-06, | |
| "loss": 0.047, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 1.3137254901960784, | |
| "grad_norm": 0.205078125, | |
| "learning_rate": 8.495616297397819e-06, | |
| "loss": 0.0447, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 1.3161764705882353, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 8.487962828170403e-06, | |
| "loss": 0.048, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 1.3186274509803921, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 8.480293406476671e-06, | |
| "loss": 0.0481, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 1.321078431372549, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 8.472608067393317e-06, | |
| "loss": 0.0429, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 1.3235294117647058, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 8.464906846069827e-06, | |
| "loss": 0.0497, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 1.3259803921568627, | |
| "grad_norm": 0.2275390625, | |
| "learning_rate": 8.457189777728333e-06, | |
| "loss": 0.046, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 1.3284313725490196, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 8.449456897663438e-06, | |
| "loss": 0.0416, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 1.3308823529411764, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 8.441708241242062e-06, | |
| "loss": 0.0463, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 1.3333333333333333, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 8.433943843903286e-06, | |
| "loss": 0.0512, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 1.3357843137254901, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 8.426163741158175e-06, | |
| "loss": 0.0481, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 1.3382352941176472, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 8.41836796858963e-06, | |
| "loss": 0.0483, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 1.340686274509804, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 8.410556561852212e-06, | |
| "loss": 0.0557, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 1.343137254901961, | |
| "grad_norm": 0.2177734375, | |
| "learning_rate": 8.402729556671995e-06, | |
| "loss": 0.045, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 1.3455882352941178, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.39488698884639e-06, | |
| "loss": 0.0488, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 1.3480392156862746, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 8.38702889424398e-06, | |
| "loss": 0.0443, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 1.3504901960784315, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 8.379155308804368e-06, | |
| "loss": 0.0494, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 1.3529411764705883, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 8.371266268538002e-06, | |
| "loss": 0.0453, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 1.3553921568627452, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 8.363361809526013e-06, | |
| "loss": 0.053, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 1.357843137254902, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 8.355441967920056e-06, | |
| "loss": 0.045, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 1.3602941176470589, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 8.347506779942136e-06, | |
| "loss": 0.0507, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 1.3627450980392157, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 8.33955628188444e-06, | |
| "loss": 0.0524, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 1.3651960784313726, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 8.331590510109188e-06, | |
| "loss": 0.0456, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 1.3676470588235294, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 8.323609501048446e-06, | |
| "loss": 0.0459, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 1.3700980392156863, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 8.315613291203977e-06, | |
| "loss": 0.0533, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 1.3725490196078431, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 8.30760191714706e-06, | |
| "loss": 0.0475, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 1.375, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 8.299575415518327e-06, | |
| "loss": 0.0435, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 1.3774509803921569, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 8.291533823027607e-06, | |
| "loss": 0.0504, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 1.3799019607843137, | |
| "grad_norm": 0.2294921875, | |
| "learning_rate": 8.283477176453738e-06, | |
| "loss": 0.0497, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 1.3823529411764706, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.275405512644412e-06, | |
| "loss": 0.0457, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 1.3848039215686274, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 8.267318868516007e-06, | |
| "loss": 0.0482, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 1.3872549019607843, | |
| "grad_norm": 0.25, | |
| "learning_rate": 8.259217281053411e-06, | |
| "loss": 0.0475, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 1.3897058823529411, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 8.25110078730986e-06, | |
| "loss": 0.0485, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 1.392156862745098, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 8.242969424406757e-06, | |
| "loss": 0.0452, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 1.3946078431372548, | |
| "grad_norm": 0.31640625, | |
| "learning_rate": 8.234823229533519e-06, | |
| "loss": 0.0516, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 1.3970588235294117, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.226662239947396e-06, | |
| "loss": 0.0467, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 1.3995098039215685, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.2184864929733e-06, | |
| "loss": 0.0482, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 1.4019607843137254, | |
| "grad_norm": 0.203125, | |
| "learning_rate": 8.21029602600364e-06, | |
| "loss": 0.0426, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 1.4044117647058822, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 8.202090876498144e-06, | |
| "loss": 0.0472, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 1.406862745098039, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 8.193871081983698e-06, | |
| "loss": 0.051, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 1.409313725490196, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 8.185636680054161e-06, | |
| "loss": 0.0483, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 1.4117647058823528, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 8.177387708370208e-06, | |
| "loss": 0.0428, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 1.4142156862745099, | |
| "grad_norm": 0.208984375, | |
| "learning_rate": 8.169124204659142e-06, | |
| "loss": 0.0431, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 1.4166666666666667, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.160846206714734e-06, | |
| "loss": 0.0534, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 1.4191176470588236, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 8.152553752397043e-06, | |
| "loss": 0.0481, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 1.4215686274509804, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 8.14424687963225e-06, | |
| "loss": 0.049, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 1.4240196078431373, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 8.135925626412474e-06, | |
| "loss": 0.0469, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 1.4264705882352942, | |
| "grad_norm": 0.197265625, | |
| "learning_rate": 8.127590030795608e-06, | |
| "loss": 0.0484, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 1.428921568627451, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 8.11924013090514e-06, | |
| "loss": 0.0497, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 1.4313725490196079, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 8.110875964929976e-06, | |
| "loss": 0.0496, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 1.4338235294117647, | |
| "grad_norm": 0.25, | |
| "learning_rate": 8.102497571124276e-06, | |
| "loss": 0.0516, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 1.4362745098039216, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 8.09410498780727e-06, | |
| "loss": 0.0475, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 1.4387254901960784, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 8.085698253363081e-06, | |
| "loss": 0.0439, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 1.4411764705882353, | |
| "grad_norm": 0.36328125, | |
| "learning_rate": 8.077277406240556e-06, | |
| "loss": 0.0481, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 1.4436274509803921, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 8.068842484953086e-06, | |
| "loss": 0.0456, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 1.446078431372549, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 8.060393528078435e-06, | |
| "loss": 0.0421, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 1.4485294117647058, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 8.051930574258554e-06, | |
| "loss": 0.0456, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 1.4509803921568627, | |
| "grad_norm": 0.2138671875, | |
| "learning_rate": 8.043453662199416e-06, | |
| "loss": 0.0496, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 1.4534313725490196, | |
| "grad_norm": 0.390625, | |
| "learning_rate": 8.03496283067083e-06, | |
| "loss": 0.0451, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 1.4558823529411764, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 8.026458118506264e-06, | |
| "loss": 0.0507, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 1.4583333333333333, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 8.017939564602679e-06, | |
| "loss": 0.0482, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 1.4607843137254901, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 8.009407207920333e-06, | |
| "loss": 0.0466, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 1.4632352941176472, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 8.000861087482615e-06, | |
| "loss": 0.0468, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 1.465686274509804, | |
| "grad_norm": 0.197265625, | |
| "learning_rate": 7.992301242375866e-06, | |
| "loss": 0.0461, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 1.468137254901961, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 7.983727711749194e-06, | |
| "loss": 0.0508, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 1.4705882352941178, | |
| "grad_norm": 0.33984375, | |
| "learning_rate": 7.975140534814304e-06, | |
| "loss": 0.0545, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 1.4730392156862746, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 7.96653975084531e-06, | |
| "loss": 0.0489, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 1.4754901960784315, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 7.957925399178556e-06, | |
| "loss": 0.0472, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 1.4779411764705883, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 7.949297519212444e-06, | |
| "loss": 0.0494, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 1.4803921568627452, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 7.940656150407247e-06, | |
| "loss": 0.048, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 1.482843137254902, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 7.932001332284929e-06, | |
| "loss": 0.042, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 1.4852941176470589, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 7.923333104428967e-06, | |
| "loss": 0.0434, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 1.4877450980392157, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 7.91465150648417e-06, | |
| "loss": 0.0374, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 1.4901960784313726, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 7.90595657815649e-06, | |
| "loss": 0.0457, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 1.4926470588235294, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 7.897248359212852e-06, | |
| "loss": 0.0445, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 1.4950980392156863, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 7.888526889480967e-06, | |
| "loss": 0.045, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 1.4975490196078431, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 7.879792208849146e-06, | |
| "loss": 0.0496, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 7.871044357266124e-06, | |
| "loss": 0.0465, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 1.5024509803921569, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 7.862283374740874e-06, | |
| "loss": 0.0486, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 1.5049019607843137, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 7.85350930134242e-06, | |
| "loss": 0.0485, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 1.5073529411764706, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 7.844722177199663e-06, | |
| "loss": 0.0483, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 1.5098039215686274, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 7.835922042501194e-06, | |
| "loss": 0.0404, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 1.5122549019607843, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 7.827108937495102e-06, | |
| "loss": 0.0465, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 1.5147058823529411, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 7.818282902488806e-06, | |
| "loss": 0.0448, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 1.517156862745098, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 7.809443977848849e-06, | |
| "loss": 0.0504, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 1.5196078431372548, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 7.800592204000739e-06, | |
| "loss": 0.0478, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 1.5220588235294117, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 7.791727621428741e-06, | |
| "loss": 0.0464, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 1.5245098039215685, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 7.782850270675705e-06, | |
| "loss": 0.0435, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 1.5269607843137254, | |
| "grad_norm": 0.205078125, | |
| "learning_rate": 7.773960192342882e-06, | |
| "loss": 0.0448, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 1.5294117647058822, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 7.765057427089726e-06, | |
| "loss": 0.0533, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 1.531862745098039, | |
| "grad_norm": 0.2080078125, | |
| "learning_rate": 7.75614201563372e-06, | |
| "loss": 0.0448, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 1.534313725490196, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 7.74721399875018e-06, | |
| "loss": 0.0453, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 1.5367647058823528, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 7.73827341727208e-06, | |
| "loss": 0.0458, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 1.5392156862745097, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 7.729320312089862e-06, | |
| "loss": 0.0506, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 1.5416666666666665, | |
| "grad_norm": 0.3359375, | |
| "learning_rate": 7.720354724151237e-06, | |
| "loss": 0.0467, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 1.5441176470588234, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 7.71137669446101e-06, | |
| "loss": 0.0498, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 1.5465686274509802, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 7.702386264080894e-06, | |
| "loss": 0.0486, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 1.5490196078431373, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 7.69338347412931e-06, | |
| "loss": 0.0438, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 1.5514705882352942, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 7.684368365781213e-06, | |
| "loss": 0.0425, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 1.553921568627451, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 7.675340980267896e-06, | |
| "loss": 0.0506, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 1.5563725490196079, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 7.666301358876798e-06, | |
| "loss": 0.0495, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 1.5588235294117647, | |
| "grad_norm": 0.3515625, | |
| "learning_rate": 7.657249542951324e-06, | |
| "loss": 0.048, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 1.5612745098039216, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 7.648185573890651e-06, | |
| "loss": 0.0555, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 1.5637254901960784, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 7.639109493149537e-06, | |
| "loss": 0.0495, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 1.5661764705882353, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 7.630021342238139e-06, | |
| "loss": 0.0475, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 1.5686274509803921, | |
| "grad_norm": 0.201171875, | |
| "learning_rate": 7.620921162721811e-06, | |
| "loss": 0.0466, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 1.571078431372549, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 7.611808996220923e-06, | |
| "loss": 0.0502, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 1.5735294117647058, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 7.602684884410674e-06, | |
| "loss": 0.0471, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 1.5759803921568627, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 7.593548869020887e-06, | |
| "loss": 0.0589, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 1.5784313725490198, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 7.584400991835834e-06, | |
| "loss": 0.0461, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 1.5808823529411766, | |
| "grad_norm": 0.2138671875, | |
| "learning_rate": 7.575241294694033e-06, | |
| "loss": 0.0449, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 1.5833333333333335, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 7.566069819488066e-06, | |
| "loss": 0.0461, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 1.5857843137254903, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 7.556886608164378e-06, | |
| "loss": 0.0442, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 1.5882352941176472, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 7.547691702723095e-06, | |
| "loss": 0.0499, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 1.590686274509804, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 7.53848514521782e-06, | |
| "loss": 0.0484, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 1.593137254901961, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 7.529266977755456e-06, | |
| "loss": 0.0489, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 1.5955882352941178, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 7.520037242496e-06, | |
| "loss": 0.0407, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 1.5980392156862746, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 7.510795981652356e-06, | |
| "loss": 0.0511, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 1.6004901960784315, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 7.501543237490141e-06, | |
| "loss": 0.0471, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 1.6029411764705883, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 7.4922790523274935e-06, | |
| "loss": 0.0495, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 1.6053921568627452, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 7.483003468534874e-06, | |
| "loss": 0.0457, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 1.607843137254902, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 7.473716528534883e-06, | |
| "loss": 0.0532, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 1.6102941176470589, | |
| "grad_norm": 0.185546875, | |
| "learning_rate": 7.46441827480205e-06, | |
| "loss": 0.0408, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 1.6127450980392157, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 7.455108749862658e-06, | |
| "loss": 0.0447, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 1.6151960784313726, | |
| "grad_norm": 0.31640625, | |
| "learning_rate": 7.445787996294532e-06, | |
| "loss": 0.0484, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 1.6176470588235294, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 7.43645605672686e-06, | |
| "loss": 0.0446, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 1.6200980392156863, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 7.4271129738399805e-06, | |
| "loss": 0.0424, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 1.6225490196078431, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 7.4177587903652035e-06, | |
| "loss": 0.0482, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 1.625, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 7.4083935490846096e-06, | |
| "loss": 0.0497, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 1.6274509803921569, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 7.399017292830848e-06, | |
| "loss": 0.048, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 1.6299019607843137, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 7.389630064486946e-06, | |
| "loss": 0.0504, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 1.6323529411764706, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 7.380231906986117e-06, | |
| "loss": 0.0433, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 1.6348039215686274, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 7.370822863311558e-06, | |
| "loss": 0.0457, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 1.6372549019607843, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 7.361402976496251e-06, | |
| "loss": 0.0469, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 1.6397058823529411, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 7.351972289622775e-06, | |
| "loss": 0.0489, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 1.642156862745098, | |
| "grad_norm": 0.2177734375, | |
| "learning_rate": 7.342530845823103e-06, | |
| "loss": 0.0441, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 1.6446078431372548, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 7.333078688278402e-06, | |
| "loss": 0.052, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 1.6470588235294117, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 7.323615860218844e-06, | |
| "loss": 0.0411, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 1.6495098039215685, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 7.314142404923399e-06, | |
| "loss": 0.0522, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 1.6519607843137254, | |
| "grad_norm": 0.25, | |
| "learning_rate": 7.3046583657196445e-06, | |
| "loss": 0.0477, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 1.6544117647058822, | |
| "grad_norm": 0.25, | |
| "learning_rate": 7.295163785983563e-06, | |
| "loss": 0.0489, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 1.656862745098039, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 7.2856587091393475e-06, | |
| "loss": 0.0485, | |
| "step": 1352 | |
| }, | |
| { | |
| "epoch": 1.659313725490196, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 7.276143178659195e-06, | |
| "loss": 0.0426, | |
| "step": 1354 | |
| }, | |
| { | |
| "epoch": 1.6617647058823528, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 7.2666172380631184e-06, | |
| "loss": 0.0461, | |
| "step": 1356 | |
| }, | |
| { | |
| "epoch": 1.6642156862745097, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 7.257080930918737e-06, | |
| "loss": 0.0538, | |
| "step": 1358 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 7.2475343008410884e-06, | |
| "loss": 0.0455, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 1.6691176470588234, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 7.237977391492419e-06, | |
| "loss": 0.047, | |
| "step": 1362 | |
| }, | |
| { | |
| "epoch": 1.6715686274509802, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 7.228410246581987e-06, | |
| "loss": 0.0446, | |
| "step": 1364 | |
| }, | |
| { | |
| "epoch": 1.6740196078431373, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 7.2188329098658685e-06, | |
| "loss": 0.0431, | |
| "step": 1366 | |
| }, | |
| { | |
| "epoch": 1.6764705882352942, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 7.2092454251467445e-06, | |
| "loss": 0.0461, | |
| "step": 1368 | |
| }, | |
| { | |
| "epoch": 1.678921568627451, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 7.199647836273721e-06, | |
| "loss": 0.048, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 1.6813725490196079, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 7.190040187142103e-06, | |
| "loss": 0.0535, | |
| "step": 1372 | |
| }, | |
| { | |
| "epoch": 1.6838235294117647, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 7.180422521693217e-06, | |
| "loss": 0.0424, | |
| "step": 1374 | |
| }, | |
| { | |
| "epoch": 1.6862745098039216, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 7.1707948839141935e-06, | |
| "loss": 0.0458, | |
| "step": 1376 | |
| }, | |
| { | |
| "epoch": 1.6887254901960784, | |
| "grad_norm": 0.2021484375, | |
| "learning_rate": 7.161157317837775e-06, | |
| "loss": 0.0425, | |
| "step": 1378 | |
| }, | |
| { | |
| "epoch": 1.6911764705882353, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 7.1515098675421125e-06, | |
| "loss": 0.0454, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 1.6936274509803921, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 7.1418525771505605e-06, | |
| "loss": 0.0455, | |
| "step": 1382 | |
| }, | |
| { | |
| "epoch": 1.696078431372549, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 7.13218549083148e-06, | |
| "loss": 0.0472, | |
| "step": 1384 | |
| }, | |
| { | |
| "epoch": 1.6985294117647058, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 7.122508652798033e-06, | |
| "loss": 0.0471, | |
| "step": 1386 | |
| }, | |
| { | |
| "epoch": 1.7009803921568627, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 7.112822107307986e-06, | |
| "loss": 0.053, | |
| "step": 1388 | |
| }, | |
| { | |
| "epoch": 1.7034313725490198, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 7.1031258986634965e-06, | |
| "loss": 0.0471, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 1.7058823529411766, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 7.093420071210922e-06, | |
| "loss": 0.0484, | |
| "step": 1392 | |
| }, | |
| { | |
| "epoch": 1.7083333333333335, | |
| "grad_norm": 0.345703125, | |
| "learning_rate": 7.083704669340611e-06, | |
| "loss": 0.0427, | |
| "step": 1394 | |
| }, | |
| { | |
| "epoch": 1.7107843137254903, | |
| "grad_norm": 0.330078125, | |
| "learning_rate": 7.0739797374867e-06, | |
| "loss": 0.0481, | |
| "step": 1396 | |
| }, | |
| { | |
| "epoch": 1.7132352941176472, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 7.0642453201269145e-06, | |
| "loss": 0.0447, | |
| "step": 1398 | |
| }, | |
| { | |
| "epoch": 1.715686274509804, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 7.054501461782361e-06, | |
| "loss": 0.0496, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 1.718137254901961, | |
| "grad_norm": 0.25, | |
| "learning_rate": 7.044748207017327e-06, | |
| "loss": 0.0495, | |
| "step": 1402 | |
| }, | |
| { | |
| "epoch": 1.7205882352941178, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 7.034985600439068e-06, | |
| "loss": 0.0454, | |
| "step": 1404 | |
| }, | |
| { | |
| "epoch": 1.7230392156862746, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 7.0252136866976205e-06, | |
| "loss": 0.0425, | |
| "step": 1406 | |
| }, | |
| { | |
| "epoch": 1.7254901960784315, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 7.015432510485584e-06, | |
| "loss": 0.0465, | |
| "step": 1408 | |
| }, | |
| { | |
| "epoch": 1.7279411764705883, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 7.005642116537919e-06, | |
| "loss": 0.0435, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 1.7303921568627452, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 6.9958425496317435e-06, | |
| "loss": 0.047, | |
| "step": 1412 | |
| }, | |
| { | |
| "epoch": 1.732843137254902, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 6.986033854586134e-06, | |
| "loss": 0.0468, | |
| "step": 1414 | |
| }, | |
| { | |
| "epoch": 1.7352941176470589, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 6.9762160762619116e-06, | |
| "loss": 0.0502, | |
| "step": 1416 | |
| }, | |
| { | |
| "epoch": 1.7377450980392157, | |
| "grad_norm": 0.326171875, | |
| "learning_rate": 6.966389259561438e-06, | |
| "loss": 0.0503, | |
| "step": 1418 | |
| }, | |
| { | |
| "epoch": 1.7401960784313726, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 6.956553449428418e-06, | |
| "loss": 0.0445, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 1.7426470588235294, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 6.946708690847684e-06, | |
| "loss": 0.0468, | |
| "step": 1422 | |
| }, | |
| { | |
| "epoch": 1.7450980392156863, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 6.936855028844996e-06, | |
| "loss": 0.0507, | |
| "step": 1424 | |
| }, | |
| { | |
| "epoch": 1.7475490196078431, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 6.926992508486839e-06, | |
| "loss": 0.0485, | |
| "step": 1426 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 6.9171211748802055e-06, | |
| "loss": 0.0434, | |
| "step": 1428 | |
| }, | |
| { | |
| "epoch": 1.7524509803921569, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 6.907241073172399e-06, | |
| "loss": 0.0466, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.7549019607843137, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 6.897352248550828e-06, | |
| "loss": 0.0483, | |
| "step": 1432 | |
| }, | |
| { | |
| "epoch": 1.7573529411764706, | |
| "grad_norm": 0.34765625, | |
| "learning_rate": 6.887454746242787e-06, | |
| "loss": 0.0554, | |
| "step": 1434 | |
| }, | |
| { | |
| "epoch": 1.7598039215686274, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 6.87754861151527e-06, | |
| "loss": 0.0427, | |
| "step": 1436 | |
| }, | |
| { | |
| "epoch": 1.7622549019607843, | |
| "grad_norm": 0.373046875, | |
| "learning_rate": 6.867633889674743e-06, | |
| "loss": 0.0433, | |
| "step": 1438 | |
| }, | |
| { | |
| "epoch": 1.7647058823529411, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 6.857710626066949e-06, | |
| "loss": 0.0472, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 1.767156862745098, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 6.847778866076697e-06, | |
| "loss": 0.0476, | |
| "step": 1442 | |
| }, | |
| { | |
| "epoch": 1.7696078431372548, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 6.837838655127659e-06, | |
| "loss": 0.0434, | |
| "step": 1444 | |
| }, | |
| { | |
| "epoch": 1.7720588235294117, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 6.827890038682151e-06, | |
| "loss": 0.0394, | |
| "step": 1446 | |
| }, | |
| { | |
| "epoch": 1.7745098039215685, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 6.817933062240937e-06, | |
| "loss": 0.0475, | |
| "step": 1448 | |
| }, | |
| { | |
| "epoch": 1.7769607843137254, | |
| "grad_norm": 0.25, | |
| "learning_rate": 6.8079677713430136e-06, | |
| "loss": 0.052, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 1.7794117647058822, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 6.797994211565405e-06, | |
| "loss": 0.053, | |
| "step": 1452 | |
| }, | |
| { | |
| "epoch": 1.781862745098039, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 6.788012428522956e-06, | |
| "loss": 0.0425, | |
| "step": 1454 | |
| }, | |
| { | |
| "epoch": 1.784313725490196, | |
| "grad_norm": 0.322265625, | |
| "learning_rate": 6.7780224678681194e-06, | |
| "loss": 0.0459, | |
| "step": 1456 | |
| }, | |
| { | |
| "epoch": 1.7867647058823528, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 6.768024375290747e-06, | |
| "loss": 0.0468, | |
| "step": 1458 | |
| }, | |
| { | |
| "epoch": 1.7892156862745097, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 6.758018196517885e-06, | |
| "loss": 0.0462, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 1.7916666666666665, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 6.7480039773135605e-06, | |
| "loss": 0.043, | |
| "step": 1462 | |
| }, | |
| { | |
| "epoch": 1.7941176470588234, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 6.737981763478578e-06, | |
| "loss": 0.0459, | |
| "step": 1464 | |
| }, | |
| { | |
| "epoch": 1.7965686274509802, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 6.727951600850302e-06, | |
| "loss": 0.0403, | |
| "step": 1466 | |
| }, | |
| { | |
| "epoch": 1.7990196078431373, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 6.717913535302453e-06, | |
| "loss": 0.0412, | |
| "step": 1468 | |
| }, | |
| { | |
| "epoch": 1.8014705882352942, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 6.7078676127448965e-06, | |
| "loss": 0.0482, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 1.803921568627451, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 6.697813879123429e-06, | |
| "loss": 0.0502, | |
| "step": 1472 | |
| }, | |
| { | |
| "epoch": 1.8063725490196079, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 6.687752380419579e-06, | |
| "loss": 0.0505, | |
| "step": 1474 | |
| }, | |
| { | |
| "epoch": 1.8088235294117647, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 6.677683162650383e-06, | |
| "loss": 0.0459, | |
| "step": 1476 | |
| }, | |
| { | |
| "epoch": 1.8112745098039216, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 6.667606271868183e-06, | |
| "loss": 0.0423, | |
| "step": 1478 | |
| }, | |
| { | |
| "epoch": 1.8137254901960784, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 6.657521754160415e-06, | |
| "loss": 0.0494, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 1.8161764705882353, | |
| "grad_norm": 0.40625, | |
| "learning_rate": 6.6474296556493945e-06, | |
| "loss": 0.0509, | |
| "step": 1482 | |
| }, | |
| { | |
| "epoch": 1.8186274509803921, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 6.637330022492112e-06, | |
| "loss": 0.0441, | |
| "step": 1484 | |
| }, | |
| { | |
| "epoch": 1.821078431372549, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 6.627222900880017e-06, | |
| "loss": 0.0464, | |
| "step": 1486 | |
| }, | |
| { | |
| "epoch": 1.8235294117647058, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 6.617108337038807e-06, | |
| "loss": 0.0501, | |
| "step": 1488 | |
| }, | |
| { | |
| "epoch": 1.8259803921568627, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 6.606986377228216e-06, | |
| "loss": 0.0446, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 1.8284313725490198, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 6.59685706774181e-06, | |
| "loss": 0.0437, | |
| "step": 1492 | |
| }, | |
| { | |
| "epoch": 1.8308823529411766, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 6.586720454906762e-06, | |
| "loss": 0.0482, | |
| "step": 1494 | |
| }, | |
| { | |
| "epoch": 1.8333333333333335, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 6.576576585083651e-06, | |
| "loss": 0.0433, | |
| "step": 1496 | |
| }, | |
| { | |
| "epoch": 1.8357843137254903, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 6.566425504666249e-06, | |
| "loss": 0.0439, | |
| "step": 1498 | |
| }, | |
| { | |
| "epoch": 1.8382352941176472, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 6.556267260081297e-06, | |
| "loss": 0.0443, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 1.840686274509804, | |
| "grad_norm": 0.25, | |
| "learning_rate": 6.5461018977883135e-06, | |
| "loss": 0.0464, | |
| "step": 1502 | |
| }, | |
| { | |
| "epoch": 1.843137254901961, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 6.535929464279364e-06, | |
| "loss": 0.0442, | |
| "step": 1504 | |
| }, | |
| { | |
| "epoch": 1.8455882352941178, | |
| "grad_norm": 0.1865234375, | |
| "learning_rate": 6.525750006078854e-06, | |
| "loss": 0.0436, | |
| "step": 1506 | |
| }, | |
| { | |
| "epoch": 1.8480392156862746, | |
| "grad_norm": 0.322265625, | |
| "learning_rate": 6.515563569743318e-06, | |
| "loss": 0.0483, | |
| "step": 1508 | |
| }, | |
| { | |
| "epoch": 1.8504901960784315, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 6.50537020186121e-06, | |
| "loss": 0.0428, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 1.8529411764705883, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 6.495169949052679e-06, | |
| "loss": 0.0403, | |
| "step": 1512 | |
| }, | |
| { | |
| "epoch": 1.8553921568627452, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 6.484962857969366e-06, | |
| "loss": 0.0483, | |
| "step": 1514 | |
| }, | |
| { | |
| "epoch": 1.857843137254902, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 6.474748975294189e-06, | |
| "loss": 0.0505, | |
| "step": 1516 | |
| }, | |
| { | |
| "epoch": 1.8602941176470589, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 6.4645283477411235e-06, | |
| "loss": 0.0454, | |
| "step": 1518 | |
| }, | |
| { | |
| "epoch": 1.8627450980392157, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 6.454301022054999e-06, | |
| "loss": 0.0447, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 1.8651960784313726, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 6.444067045011275e-06, | |
| "loss": 0.0431, | |
| "step": 1522 | |
| }, | |
| { | |
| "epoch": 1.8676470588235294, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 6.433826463415832e-06, | |
| "loss": 0.0441, | |
| "step": 1524 | |
| }, | |
| { | |
| "epoch": 1.8700980392156863, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 6.423579324104758e-06, | |
| "loss": 0.0449, | |
| "step": 1526 | |
| }, | |
| { | |
| "epoch": 1.8725490196078431, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 6.413325673944133e-06, | |
| "loss": 0.0447, | |
| "step": 1528 | |
| }, | |
| { | |
| "epoch": 1.875, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 6.403065559829816e-06, | |
| "loss": 0.0451, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 1.8774509803921569, | |
| "grad_norm": 0.341796875, | |
| "learning_rate": 6.392799028687227e-06, | |
| "loss": 0.0458, | |
| "step": 1532 | |
| }, | |
| { | |
| "epoch": 1.8799019607843137, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 6.382526127471136e-06, | |
| "loss": 0.0521, | |
| "step": 1534 | |
| }, | |
| { | |
| "epoch": 1.8823529411764706, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 6.372246903165445e-06, | |
| "loss": 0.0452, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 1.8848039215686274, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 6.361961402782981e-06, | |
| "loss": 0.044, | |
| "step": 1538 | |
| }, | |
| { | |
| "epoch": 1.8872549019607843, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 6.35166967336527e-06, | |
| "loss": 0.048, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.8897058823529411, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 6.3413717619823265e-06, | |
| "loss": 0.0458, | |
| "step": 1542 | |
| }, | |
| { | |
| "epoch": 1.892156862745098, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 6.331067715732443e-06, | |
| "loss": 0.0446, | |
| "step": 1544 | |
| }, | |
| { | |
| "epoch": 1.8946078431372548, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 6.320757581741965e-06, | |
| "loss": 0.0498, | |
| "step": 1546 | |
| }, | |
| { | |
| "epoch": 1.8970588235294117, | |
| "grad_norm": 0.345703125, | |
| "learning_rate": 6.310441407165085e-06, | |
| "loss": 0.0429, | |
| "step": 1548 | |
| }, | |
| { | |
| "epoch": 1.8995098039215685, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 6.300119239183621e-06, | |
| "loss": 0.0485, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 1.9019607843137254, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 6.289791125006801e-06, | |
| "loss": 0.0508, | |
| "step": 1552 | |
| }, | |
| { | |
| "epoch": 1.9044117647058822, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 6.279457111871052e-06, | |
| "loss": 0.0462, | |
| "step": 1554 | |
| }, | |
| { | |
| "epoch": 1.906862745098039, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 6.269117247039775e-06, | |
| "loss": 0.0474, | |
| "step": 1556 | |
| }, | |
| { | |
| "epoch": 1.909313725490196, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 6.258771577803141e-06, | |
| "loss": 0.0479, | |
| "step": 1558 | |
| }, | |
| { | |
| "epoch": 1.9117647058823528, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 6.248420151477862e-06, | |
| "loss": 0.0465, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 1.9142156862745097, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 6.238063015406982e-06, | |
| "loss": 0.0503, | |
| "step": 1562 | |
| }, | |
| { | |
| "epoch": 1.9166666666666665, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 6.227700216959661e-06, | |
| "loss": 0.0465, | |
| "step": 1564 | |
| }, | |
| { | |
| "epoch": 1.9191176470588234, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 6.2173318035309525e-06, | |
| "loss": 0.0469, | |
| "step": 1566 | |
| }, | |
| { | |
| "epoch": 1.9215686274509802, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 6.206957822541596e-06, | |
| "loss": 0.0418, | |
| "step": 1568 | |
| }, | |
| { | |
| "epoch": 1.9240196078431373, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 6.1965783214377895e-06, | |
| "loss": 0.0524, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 1.9264705882352942, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 6.1861933476909805e-06, | |
| "loss": 0.048, | |
| "step": 1572 | |
| }, | |
| { | |
| "epoch": 1.928921568627451, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 6.175802948797645e-06, | |
| "loss": 0.0505, | |
| "step": 1574 | |
| }, | |
| { | |
| "epoch": 1.9313725490196079, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 6.165407172279073e-06, | |
| "loss": 0.0501, | |
| "step": 1576 | |
| }, | |
| { | |
| "epoch": 1.9338235294117647, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 6.1550060656811464e-06, | |
| "loss": 0.0513, | |
| "step": 1578 | |
| }, | |
| { | |
| "epoch": 1.9362745098039216, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 6.144599676574128e-06, | |
| "loss": 0.0424, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 1.9387254901960784, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 6.1341880525524365e-06, | |
| "loss": 0.0455, | |
| "step": 1582 | |
| }, | |
| { | |
| "epoch": 1.9411764705882353, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 6.123771241234437e-06, | |
| "loss": 0.0462, | |
| "step": 1584 | |
| }, | |
| { | |
| "epoch": 1.9436274509803921, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 6.113349290262216e-06, | |
| "loss": 0.0476, | |
| "step": 1586 | |
| }, | |
| { | |
| "epoch": 1.946078431372549, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 6.1029222473013705e-06, | |
| "loss": 0.0487, | |
| "step": 1588 | |
| }, | |
| { | |
| "epoch": 1.9485294117647058, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 6.0924901600407825e-06, | |
| "loss": 0.0456, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 1.9509803921568627, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 6.082053076192405e-06, | |
| "loss": 0.0454, | |
| "step": 1592 | |
| }, | |
| { | |
| "epoch": 1.9534313725490198, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 6.071611043491044e-06, | |
| "loss": 0.0459, | |
| "step": 1594 | |
| }, | |
| { | |
| "epoch": 1.9558823529411766, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 6.061164109694139e-06, | |
| "loss": 0.0554, | |
| "step": 1596 | |
| }, | |
| { | |
| "epoch": 1.9583333333333335, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 6.050712322581546e-06, | |
| "loss": 0.0463, | |
| "step": 1598 | |
| }, | |
| { | |
| "epoch": 1.9607843137254903, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 6.040255729955318e-06, | |
| "loss": 0.0478, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 1.9632352941176472, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 6.029794379639484e-06, | |
| "loss": 0.0493, | |
| "step": 1602 | |
| }, | |
| { | |
| "epoch": 1.965686274509804, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 6.019328319479836e-06, | |
| "loss": 0.0458, | |
| "step": 1604 | |
| }, | |
| { | |
| "epoch": 1.968137254901961, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 6.008857597343704e-06, | |
| "loss": 0.0509, | |
| "step": 1606 | |
| }, | |
| { | |
| "epoch": 1.9705882352941178, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 5.998382261119744e-06, | |
| "loss": 0.0473, | |
| "step": 1608 | |
| }, | |
| { | |
| "epoch": 1.9730392156862746, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 5.987902358717709e-06, | |
| "loss": 0.0406, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 1.9754901960784315, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 5.97741793806824e-06, | |
| "loss": 0.0508, | |
| "step": 1612 | |
| }, | |
| { | |
| "epoch": 1.9779411764705883, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 5.966929047122641e-06, | |
| "loss": 0.0469, | |
| "step": 1614 | |
| }, | |
| { | |
| "epoch": 1.9803921568627452, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 5.956435733852662e-06, | |
| "loss": 0.0452, | |
| "step": 1616 | |
| }, | |
| { | |
| "epoch": 1.982843137254902, | |
| "grad_norm": 0.3046875, | |
| "learning_rate": 5.945938046250277e-06, | |
| "loss": 0.0452, | |
| "step": 1618 | |
| }, | |
| { | |
| "epoch": 1.9852941176470589, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 5.935436032327471e-06, | |
| "loss": 0.0404, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 1.9877450980392157, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 5.924929740116008e-06, | |
| "loss": 0.045, | |
| "step": 1622 | |
| }, | |
| { | |
| "epoch": 1.9901960784313726, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 5.914419217667228e-06, | |
| "loss": 0.0556, | |
| "step": 1624 | |
| }, | |
| { | |
| "epoch": 1.9926470588235294, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 5.903904513051812e-06, | |
| "loss": 0.0429, | |
| "step": 1626 | |
| }, | |
| { | |
| "epoch": 1.9950980392156863, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 5.893385674359572e-06, | |
| "loss": 0.0531, | |
| "step": 1628 | |
| }, | |
| { | |
| "epoch": 1.9975490196078431, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 5.882862749699223e-06, | |
| "loss": 0.051, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.408203125, | |
| "learning_rate": 5.872335787198173e-06, | |
| "loss": 0.045, | |
| "step": 1632 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 0.05398834869265556, | |
| "eval_runtime": 37.4119, | |
| "eval_samples_per_second": 134.075, | |
| "eval_steps_per_second": 1.069, | |
| "step": 1632 | |
| }, | |
| { | |
| "epoch": 2.002450980392157, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 5.861804835002292e-06, | |
| "loss": 0.0441, | |
| "step": 1634 | |
| }, | |
| { | |
| "epoch": 2.0049019607843137, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 5.8512699412757045e-06, | |
| "loss": 0.0421, | |
| "step": 1636 | |
| }, | |
| { | |
| "epoch": 2.0073529411764706, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 5.840731154200554e-06, | |
| "loss": 0.0417, | |
| "step": 1638 | |
| }, | |
| { | |
| "epoch": 2.0098039215686274, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 5.830188521976794e-06, | |
| "loss": 0.0498, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 2.0122549019607843, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 5.819642092821964e-06, | |
| "loss": 0.0463, | |
| "step": 1642 | |
| }, | |
| { | |
| "epoch": 2.014705882352941, | |
| "grad_norm": 0.1953125, | |
| "learning_rate": 5.809091914970971e-06, | |
| "loss": 0.0412, | |
| "step": 1644 | |
| }, | |
| { | |
| "epoch": 2.017156862745098, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 5.798538036675863e-06, | |
| "loss": 0.054, | |
| "step": 1646 | |
| }, | |
| { | |
| "epoch": 2.019607843137255, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 5.787980506205615e-06, | |
| "loss": 0.0529, | |
| "step": 1648 | |
| }, | |
| { | |
| "epoch": 2.0220588235294117, | |
| "grad_norm": 0.25, | |
| "learning_rate": 5.777419371845905e-06, | |
| "loss": 0.0492, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 2.0245098039215685, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 5.766854681898893e-06, | |
| "loss": 0.049, | |
| "step": 1652 | |
| }, | |
| { | |
| "epoch": 2.0269607843137254, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 5.7562864846830016e-06, | |
| "loss": 0.0455, | |
| "step": 1654 | |
| }, | |
| { | |
| "epoch": 2.0294117647058822, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 5.745714828532691e-06, | |
| "loss": 0.0518, | |
| "step": 1656 | |
| }, | |
| { | |
| "epoch": 2.031862745098039, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 5.73513976179825e-06, | |
| "loss": 0.0477, | |
| "step": 1658 | |
| }, | |
| { | |
| "epoch": 2.034313725490196, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 5.724561332845553e-06, | |
| "loss": 0.0445, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 2.036764705882353, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 5.713979590055864e-06, | |
| "loss": 0.0483, | |
| "step": 1662 | |
| }, | |
| { | |
| "epoch": 2.0392156862745097, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 5.703394581825595e-06, | |
| "loss": 0.0468, | |
| "step": 1664 | |
| }, | |
| { | |
| "epoch": 2.0416666666666665, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 5.6928063565660955e-06, | |
| "loss": 0.046, | |
| "step": 1666 | |
| }, | |
| { | |
| "epoch": 2.0441176470588234, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 5.68221496270343e-06, | |
| "loss": 0.0468, | |
| "step": 1668 | |
| }, | |
| { | |
| "epoch": 2.0465686274509802, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 5.671620448678148e-06, | |
| "loss": 0.0487, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 2.049019607843137, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 5.661022862945081e-06, | |
| "loss": 0.0492, | |
| "step": 1672 | |
| }, | |
| { | |
| "epoch": 2.051470588235294, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 5.650422253973099e-06, | |
| "loss": 0.0446, | |
| "step": 1674 | |
| }, | |
| { | |
| "epoch": 2.053921568627451, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 5.639818670244904e-06, | |
| "loss": 0.0416, | |
| "step": 1676 | |
| }, | |
| { | |
| "epoch": 2.0563725490196076, | |
| "grad_norm": 0.35546875, | |
| "learning_rate": 5.6292121602567995e-06, | |
| "loss": 0.0519, | |
| "step": 1678 | |
| }, | |
| { | |
| "epoch": 2.0588235294117645, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 5.6186027725184775e-06, | |
| "loss": 0.0453, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 2.0612745098039214, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 5.607990555552788e-06, | |
| "loss": 0.043, | |
| "step": 1682 | |
| }, | |
| { | |
| "epoch": 2.063725490196078, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 5.597375557895522e-06, | |
| "loss": 0.0492, | |
| "step": 1684 | |
| }, | |
| { | |
| "epoch": 2.0661764705882355, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 5.586757828095187e-06, | |
| "loss": 0.0473, | |
| "step": 1686 | |
| }, | |
| { | |
| "epoch": 2.0686274509803924, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 5.576137414712788e-06, | |
| "loss": 0.0479, | |
| "step": 1688 | |
| }, | |
| { | |
| "epoch": 2.071078431372549, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 5.565514366321602e-06, | |
| "loss": 0.0467, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 2.073529411764706, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 5.5548887315069575e-06, | |
| "loss": 0.0491, | |
| "step": 1692 | |
| }, | |
| { | |
| "epoch": 2.075980392156863, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 5.544260558866015e-06, | |
| "loss": 0.0503, | |
| "step": 1694 | |
| }, | |
| { | |
| "epoch": 2.0784313725490198, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 5.533629897007536e-06, | |
| "loss": 0.0465, | |
| "step": 1696 | |
| }, | |
| { | |
| "epoch": 2.0808823529411766, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 5.522996794551673e-06, | |
| "loss": 0.0551, | |
| "step": 1698 | |
| }, | |
| { | |
| "epoch": 2.0833333333333335, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 5.512361300129738e-06, | |
| "loss": 0.0409, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 2.0857843137254903, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 5.501723462383982e-06, | |
| "loss": 0.0442, | |
| "step": 1702 | |
| }, | |
| { | |
| "epoch": 2.088235294117647, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 5.491083329967375e-06, | |
| "loss": 0.0508, | |
| "step": 1704 | |
| }, | |
| { | |
| "epoch": 2.090686274509804, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 5.48044095154338e-06, | |
| "loss": 0.0447, | |
| "step": 1706 | |
| }, | |
| { | |
| "epoch": 2.093137254901961, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 5.469796375785734e-06, | |
| "loss": 0.0452, | |
| "step": 1708 | |
| }, | |
| { | |
| "epoch": 2.0955882352941178, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 5.459149651378224e-06, | |
| "loss": 0.0475, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 2.0980392156862746, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 5.4485008270144615e-06, | |
| "loss": 0.0526, | |
| "step": 1712 | |
| }, | |
| { | |
| "epoch": 2.1004901960784315, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 5.437849951397665e-06, | |
| "loss": 0.0454, | |
| "step": 1714 | |
| }, | |
| { | |
| "epoch": 2.1029411764705883, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 5.427197073240433e-06, | |
| "loss": 0.0528, | |
| "step": 1716 | |
| }, | |
| { | |
| "epoch": 2.105392156862745, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 5.416542241264524e-06, | |
| "loss": 0.0421, | |
| "step": 1718 | |
| }, | |
| { | |
| "epoch": 2.107843137254902, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 5.405885504200631e-06, | |
| "loss": 0.0435, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 2.110294117647059, | |
| "grad_norm": 0.2138671875, | |
| "learning_rate": 5.395226910788162e-06, | |
| "loss": 0.0495, | |
| "step": 1722 | |
| }, | |
| { | |
| "epoch": 2.1127450980392157, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 5.384566509775011e-06, | |
| "loss": 0.0441, | |
| "step": 1724 | |
| }, | |
| { | |
| "epoch": 2.1151960784313726, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 5.373904349917343e-06, | |
| "loss": 0.054, | |
| "step": 1726 | |
| }, | |
| { | |
| "epoch": 2.1176470588235294, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 5.3632404799793706e-06, | |
| "loss": 0.049, | |
| "step": 1728 | |
| }, | |
| { | |
| "epoch": 2.1200980392156863, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 5.3525749487331184e-06, | |
| "loss": 0.0453, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 2.122549019607843, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 5.341907804958215e-06, | |
| "loss": 0.0444, | |
| "step": 1732 | |
| }, | |
| { | |
| "epoch": 2.125, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 5.331239097441664e-06, | |
| "loss": 0.0462, | |
| "step": 1734 | |
| }, | |
| { | |
| "epoch": 2.127450980392157, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 5.320568874977618e-06, | |
| "loss": 0.0457, | |
| "step": 1736 | |
| }, | |
| { | |
| "epoch": 2.1299019607843137, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 5.3098971863671646e-06, | |
| "loss": 0.0482, | |
| "step": 1738 | |
| }, | |
| { | |
| "epoch": 2.1323529411764706, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 5.299224080418088e-06, | |
| "loss": 0.0485, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 2.1348039215686274, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 5.288549605944662e-06, | |
| "loss": 0.0448, | |
| "step": 1742 | |
| }, | |
| { | |
| "epoch": 2.1372549019607843, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 5.277873811767415e-06, | |
| "loss": 0.0524, | |
| "step": 1744 | |
| }, | |
| { | |
| "epoch": 2.139705882352941, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 5.267196746712914e-06, | |
| "loss": 0.0443, | |
| "step": 1746 | |
| }, | |
| { | |
| "epoch": 2.142156862745098, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 5.256518459613538e-06, | |
| "loss": 0.056, | |
| "step": 1748 | |
| }, | |
| { | |
| "epoch": 2.144607843137255, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 5.245838999307253e-06, | |
| "loss": 0.0417, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 2.1470588235294117, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 5.2351584146373936e-06, | |
| "loss": 0.0425, | |
| "step": 1752 | |
| }, | |
| { | |
| "epoch": 2.1495098039215685, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 5.224476754452433e-06, | |
| "loss": 0.0456, | |
| "step": 1754 | |
| }, | |
| { | |
| "epoch": 2.1519607843137254, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 5.21379406760577e-06, | |
| "loss": 0.0509, | |
| "step": 1756 | |
| }, | |
| { | |
| "epoch": 2.1544117647058822, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 5.203110402955491e-06, | |
| "loss": 0.0522, | |
| "step": 1758 | |
| }, | |
| { | |
| "epoch": 2.156862745098039, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 5.19242580936416e-06, | |
| "loss": 0.0412, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 2.159313725490196, | |
| "grad_norm": 0.208984375, | |
| "learning_rate": 5.181740335698586e-06, | |
| "loss": 0.046, | |
| "step": 1762 | |
| }, | |
| { | |
| "epoch": 2.161764705882353, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 5.171054030829605e-06, | |
| "loss": 0.0488, | |
| "step": 1764 | |
| }, | |
| { | |
| "epoch": 2.1642156862745097, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 5.1603669436318584e-06, | |
| "loss": 0.0445, | |
| "step": 1766 | |
| }, | |
| { | |
| "epoch": 2.1666666666666665, | |
| "grad_norm": 0.25, | |
| "learning_rate": 5.1496791229835555e-06, | |
| "loss": 0.0461, | |
| "step": 1768 | |
| }, | |
| { | |
| "epoch": 2.1691176470588234, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 5.1389906177662705e-06, | |
| "loss": 0.0457, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 2.1715686274509802, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 5.128301476864703e-06, | |
| "loss": 0.0505, | |
| "step": 1772 | |
| }, | |
| { | |
| "epoch": 2.174019607843137, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 5.1176117491664615e-06, | |
| "loss": 0.0464, | |
| "step": 1774 | |
| }, | |
| { | |
| "epoch": 2.176470588235294, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 5.106921483561837e-06, | |
| "loss": 0.0447, | |
| "step": 1776 | |
| }, | |
| { | |
| "epoch": 2.178921568627451, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 5.096230728943583e-06, | |
| "loss": 0.0488, | |
| "step": 1778 | |
| }, | |
| { | |
| "epoch": 2.1813725490196076, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 5.085539534206687e-06, | |
| "loss": 0.0458, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 2.1838235294117645, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 5.07484794824815e-06, | |
| "loss": 0.0502, | |
| "step": 1782 | |
| }, | |
| { | |
| "epoch": 2.186274509803922, | |
| "grad_norm": 0.208984375, | |
| "learning_rate": 5.064156019966765e-06, | |
| "loss": 0.043, | |
| "step": 1784 | |
| }, | |
| { | |
| "epoch": 2.188725490196078, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 5.053463798262888e-06, | |
| "loss": 0.0401, | |
| "step": 1786 | |
| }, | |
| { | |
| "epoch": 2.1911764705882355, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 5.0427713320382164e-06, | |
| "loss": 0.0451, | |
| "step": 1788 | |
| }, | |
| { | |
| "epoch": 2.1936274509803924, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 5.032078670195567e-06, | |
| "loss": 0.0423, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 2.196078431372549, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 5.021385861638651e-06, | |
| "loss": 0.0457, | |
| "step": 1792 | |
| }, | |
| { | |
| "epoch": 2.198529411764706, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 5.010692955271853e-06, | |
| "loss": 0.0504, | |
| "step": 1794 | |
| }, | |
| { | |
| "epoch": 2.200980392156863, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 5e-06, | |
| "loss": 0.0438, | |
| "step": 1796 | |
| }, | |
| { | |
| "epoch": 2.2034313725490198, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 4.989307044728148e-06, | |
| "loss": 0.0419, | |
| "step": 1798 | |
| }, | |
| { | |
| "epoch": 2.2058823529411766, | |
| "grad_norm": 0.20703125, | |
| "learning_rate": 4.9786141383613505e-06, | |
| "loss": 0.0493, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 2.2083333333333335, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 4.967921329804434e-06, | |
| "loss": 0.0446, | |
| "step": 1802 | |
| }, | |
| { | |
| "epoch": 2.2107843137254903, | |
| "grad_norm": 0.203125, | |
| "learning_rate": 4.957228667961786e-06, | |
| "loss": 0.0452, | |
| "step": 1804 | |
| }, | |
| { | |
| "epoch": 2.213235294117647, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 4.946536201737114e-06, | |
| "loss": 0.0446, | |
| "step": 1806 | |
| }, | |
| { | |
| "epoch": 2.215686274509804, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 4.935843980033235e-06, | |
| "loss": 0.0496, | |
| "step": 1808 | |
| }, | |
| { | |
| "epoch": 2.218137254901961, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 4.9251520517518515e-06, | |
| "loss": 0.0483, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 2.2205882352941178, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 4.914460465793314e-06, | |
| "loss": 0.0463, | |
| "step": 1812 | |
| }, | |
| { | |
| "epoch": 2.2230392156862746, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 4.903769271056419e-06, | |
| "loss": 0.0501, | |
| "step": 1814 | |
| }, | |
| { | |
| "epoch": 2.2254901960784315, | |
| "grad_norm": 0.25, | |
| "learning_rate": 4.893078516438164e-06, | |
| "loss": 0.0488, | |
| "step": 1816 | |
| }, | |
| { | |
| "epoch": 2.2279411764705883, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 4.882388250833539e-06, | |
| "loss": 0.046, | |
| "step": 1818 | |
| }, | |
| { | |
| "epoch": 2.230392156862745, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 4.871698523135298e-06, | |
| "loss": 0.0485, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 2.232843137254902, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 4.86100938223373e-06, | |
| "loss": 0.046, | |
| "step": 1822 | |
| }, | |
| { | |
| "epoch": 2.235294117647059, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 4.850320877016446e-06, | |
| "loss": 0.044, | |
| "step": 1824 | |
| }, | |
| { | |
| "epoch": 2.2377450980392157, | |
| "grad_norm": 0.2294921875, | |
| "learning_rate": 4.839633056368144e-06, | |
| "loss": 0.0439, | |
| "step": 1826 | |
| }, | |
| { | |
| "epoch": 2.2401960784313726, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 4.828945969170396e-06, | |
| "loss": 0.0481, | |
| "step": 1828 | |
| }, | |
| { | |
| "epoch": 2.2426470588235294, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 4.818259664301416e-06, | |
| "loss": 0.0412, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 2.2450980392156863, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 4.807574190635842e-06, | |
| "loss": 0.0474, | |
| "step": 1832 | |
| }, | |
| { | |
| "epoch": 2.247549019607843, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 4.796889597044511e-06, | |
| "loss": 0.0465, | |
| "step": 1834 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 4.786205932394232e-06, | |
| "loss": 0.0461, | |
| "step": 1836 | |
| }, | |
| { | |
| "epoch": 2.252450980392157, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 4.775523245547567e-06, | |
| "loss": 0.0475, | |
| "step": 1838 | |
| }, | |
| { | |
| "epoch": 2.2549019607843137, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 4.764841585362609e-06, | |
| "loss": 0.0475, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 2.2573529411764706, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 4.754161000692748e-06, | |
| "loss": 0.0479, | |
| "step": 1842 | |
| }, | |
| { | |
| "epoch": 2.2598039215686274, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 4.743481540386463e-06, | |
| "loss": 0.0462, | |
| "step": 1844 | |
| }, | |
| { | |
| "epoch": 2.2622549019607843, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 4.732803253287087e-06, | |
| "loss": 0.0473, | |
| "step": 1846 | |
| }, | |
| { | |
| "epoch": 2.264705882352941, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 4.722126188232586e-06, | |
| "loss": 0.0462, | |
| "step": 1848 | |
| }, | |
| { | |
| "epoch": 2.267156862745098, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 4.711450394055341e-06, | |
| "loss": 0.0434, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 2.269607843137255, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 4.700775919581914e-06, | |
| "loss": 0.0471, | |
| "step": 1852 | |
| }, | |
| { | |
| "epoch": 2.2720588235294117, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 4.690102813632836e-06, | |
| "loss": 0.0486, | |
| "step": 1854 | |
| }, | |
| { | |
| "epoch": 2.2745098039215685, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 4.679431125022383e-06, | |
| "loss": 0.0419, | |
| "step": 1856 | |
| }, | |
| { | |
| "epoch": 2.2769607843137254, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 4.668760902558338e-06, | |
| "loss": 0.0455, | |
| "step": 1858 | |
| }, | |
| { | |
| "epoch": 2.2794117647058822, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 4.658092195041788e-06, | |
| "loss": 0.0479, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 2.281862745098039, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 4.647425051266884e-06, | |
| "loss": 0.0514, | |
| "step": 1862 | |
| }, | |
| { | |
| "epoch": 2.284313725490196, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 4.63675952002063e-06, | |
| "loss": 0.0463, | |
| "step": 1864 | |
| }, | |
| { | |
| "epoch": 2.286764705882353, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 4.6260956500826574e-06, | |
| "loss": 0.0475, | |
| "step": 1866 | |
| }, | |
| { | |
| "epoch": 2.2892156862745097, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 4.615433490224991e-06, | |
| "loss": 0.0444, | |
| "step": 1868 | |
| }, | |
| { | |
| "epoch": 2.2916666666666665, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 4.604773089211842e-06, | |
| "loss": 0.0423, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 2.2941176470588234, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 4.594114495799371e-06, | |
| "loss": 0.0417, | |
| "step": 1872 | |
| }, | |
| { | |
| "epoch": 2.2965686274509802, | |
| "grad_norm": 0.208984375, | |
| "learning_rate": 4.583457758735477e-06, | |
| "loss": 0.0444, | |
| "step": 1874 | |
| }, | |
| { | |
| "epoch": 2.299019607843137, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 4.572802926759568e-06, | |
| "loss": 0.0492, | |
| "step": 1876 | |
| }, | |
| { | |
| "epoch": 2.301470588235294, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 4.562150048602336e-06, | |
| "loss": 0.0442, | |
| "step": 1878 | |
| }, | |
| { | |
| "epoch": 2.303921568627451, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 4.55149917298554e-06, | |
| "loss": 0.049, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 2.306372549019608, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 4.540850348621777e-06, | |
| "loss": 0.0458, | |
| "step": 1882 | |
| }, | |
| { | |
| "epoch": 2.3088235294117645, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 4.530203624214267e-06, | |
| "loss": 0.0481, | |
| "step": 1884 | |
| }, | |
| { | |
| "epoch": 2.311274509803922, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 4.519559048456622e-06, | |
| "loss": 0.0453, | |
| "step": 1886 | |
| }, | |
| { | |
| "epoch": 2.313725490196078, | |
| "grad_norm": 0.1982421875, | |
| "learning_rate": 4.508916670032627e-06, | |
| "loss": 0.0429, | |
| "step": 1888 | |
| }, | |
| { | |
| "epoch": 2.3161764705882355, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 4.4982765376160205e-06, | |
| "loss": 0.0461, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 2.318627450980392, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 4.487638699870264e-06, | |
| "loss": 0.0467, | |
| "step": 1892 | |
| }, | |
| { | |
| "epoch": 2.321078431372549, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 4.477003205448328e-06, | |
| "loss": 0.0412, | |
| "step": 1894 | |
| }, | |
| { | |
| "epoch": 2.323529411764706, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 4.466370102992466e-06, | |
| "loss": 0.0479, | |
| "step": 1896 | |
| }, | |
| { | |
| "epoch": 2.325980392156863, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 4.455739441133987e-06, | |
| "loss": 0.0444, | |
| "step": 1898 | |
| }, | |
| { | |
| "epoch": 2.3284313725490198, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 4.4451112684930424e-06, | |
| "loss": 0.0399, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 2.3308823529411766, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 4.4344856336784e-06, | |
| "loss": 0.0445, | |
| "step": 1902 | |
| }, | |
| { | |
| "epoch": 2.3333333333333335, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 4.423862585287213e-06, | |
| "loss": 0.049, | |
| "step": 1904 | |
| }, | |
| { | |
| "epoch": 2.3357843137254903, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 4.413242171904815e-06, | |
| "loss": 0.0461, | |
| "step": 1906 | |
| }, | |
| { | |
| "epoch": 2.338235294117647, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 4.402624442104479e-06, | |
| "loss": 0.0466, | |
| "step": 1908 | |
| }, | |
| { | |
| "epoch": 2.340686274509804, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 4.392009444447213e-06, | |
| "loss": 0.0535, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 2.343137254901961, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 4.381397227481523e-06, | |
| "loss": 0.0433, | |
| "step": 1912 | |
| }, | |
| { | |
| "epoch": 2.3455882352941178, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 4.3707878397432004e-06, | |
| "loss": 0.0469, | |
| "step": 1914 | |
| }, | |
| { | |
| "epoch": 2.3480392156862746, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 4.360181329755099e-06, | |
| "loss": 0.0427, | |
| "step": 1916 | |
| }, | |
| { | |
| "epoch": 2.3504901960784315, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 4.349577746026902e-06, | |
| "loss": 0.047, | |
| "step": 1918 | |
| }, | |
| { | |
| "epoch": 2.3529411764705883, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 4.33897713705492e-06, | |
| "loss": 0.0433, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 2.355392156862745, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 4.328379551321853e-06, | |
| "loss": 0.051, | |
| "step": 1922 | |
| }, | |
| { | |
| "epoch": 2.357843137254902, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 4.317785037296573e-06, | |
| "loss": 0.0435, | |
| "step": 1924 | |
| }, | |
| { | |
| "epoch": 2.360294117647059, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 4.307193643433907e-06, | |
| "loss": 0.0487, | |
| "step": 1926 | |
| }, | |
| { | |
| "epoch": 2.3627450980392157, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 4.296605418174406e-06, | |
| "loss": 0.0503, | |
| "step": 1928 | |
| }, | |
| { | |
| "epoch": 2.3651960784313726, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 4.286020409944137e-06, | |
| "loss": 0.0436, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 2.3676470588235294, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 4.2754386671544475e-06, | |
| "loss": 0.0443, | |
| "step": 1932 | |
| }, | |
| { | |
| "epoch": 2.3700980392156863, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 4.264860238201752e-06, | |
| "loss": 0.0509, | |
| "step": 1934 | |
| }, | |
| { | |
| "epoch": 2.372549019607843, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 4.2542851714673095e-06, | |
| "loss": 0.0458, | |
| "step": 1936 | |
| }, | |
| { | |
| "epoch": 2.375, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 4.243713515317001e-06, | |
| "loss": 0.0419, | |
| "step": 1938 | |
| }, | |
| { | |
| "epoch": 2.377450980392157, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 4.233145318101109e-06, | |
| "loss": 0.0483, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 2.3799019607843137, | |
| "grad_norm": 0.2275390625, | |
| "learning_rate": 4.222580628154097e-06, | |
| "loss": 0.0479, | |
| "step": 1942 | |
| }, | |
| { | |
| "epoch": 2.3823529411764706, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 4.2120194937943856e-06, | |
| "loss": 0.0443, | |
| "step": 1944 | |
| }, | |
| { | |
| "epoch": 2.3848039215686274, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 4.20146196332414e-06, | |
| "loss": 0.0468, | |
| "step": 1946 | |
| }, | |
| { | |
| "epoch": 2.3872549019607843, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 4.190908085029031e-06, | |
| "loss": 0.0457, | |
| "step": 1948 | |
| }, | |
| { | |
| "epoch": 2.389705882352941, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 4.180357907178037e-06, | |
| "loss": 0.0468, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 2.392156862745098, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 4.1698114780232085e-06, | |
| "loss": 0.0438, | |
| "step": 1952 | |
| }, | |
| { | |
| "epoch": 2.394607843137255, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 4.159268845799448e-06, | |
| "loss": 0.0496, | |
| "step": 1954 | |
| }, | |
| { | |
| "epoch": 2.3970588235294117, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 4.148730058724298e-06, | |
| "loss": 0.0451, | |
| "step": 1956 | |
| }, | |
| { | |
| "epoch": 2.3995098039215685, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 4.1381951649977085e-06, | |
| "loss": 0.0465, | |
| "step": 1958 | |
| }, | |
| { | |
| "epoch": 2.4019607843137254, | |
| "grad_norm": 0.2021484375, | |
| "learning_rate": 4.127664212801828e-06, | |
| "loss": 0.0416, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 2.4044117647058822, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 4.117137250300779e-06, | |
| "loss": 0.0453, | |
| "step": 1962 | |
| }, | |
| { | |
| "epoch": 2.406862745098039, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 4.10661432564043e-06, | |
| "loss": 0.0491, | |
| "step": 1964 | |
| }, | |
| { | |
| "epoch": 2.409313725490196, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 4.096095486948189e-06, | |
| "loss": 0.0466, | |
| "step": 1966 | |
| }, | |
| { | |
| "epoch": 2.411764705882353, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 4.085580782332773e-06, | |
| "loss": 0.0414, | |
| "step": 1968 | |
| }, | |
| { | |
| "epoch": 2.4142156862745097, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 4.075070259883992e-06, | |
| "loss": 0.042, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 2.4166666666666665, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 4.064563967672533e-06, | |
| "loss": 0.0517, | |
| "step": 1972 | |
| }, | |
| { | |
| "epoch": 2.4191176470588234, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 4.054061953749724e-06, | |
| "loss": 0.0467, | |
| "step": 1974 | |
| }, | |
| { | |
| "epoch": 2.4215686274509802, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 4.043564266147339e-06, | |
| "loss": 0.0477, | |
| "step": 1976 | |
| }, | |
| { | |
| "epoch": 2.424019607843137, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 4.033070952877362e-06, | |
| "loss": 0.0455, | |
| "step": 1978 | |
| }, | |
| { | |
| "epoch": 2.426470588235294, | |
| "grad_norm": 0.1982421875, | |
| "learning_rate": 4.022582061931761e-06, | |
| "loss": 0.0471, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 2.428921568627451, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 4.0120976412822935e-06, | |
| "loss": 0.0481, | |
| "step": 1982 | |
| }, | |
| { | |
| "epoch": 2.431372549019608, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 4.0016177388802575e-06, | |
| "loss": 0.0481, | |
| "step": 1984 | |
| }, | |
| { | |
| "epoch": 2.4338235294117645, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 3.991142402656296e-06, | |
| "loss": 0.0499, | |
| "step": 1986 | |
| }, | |
| { | |
| "epoch": 2.436274509803922, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 3.980671680520166e-06, | |
| "loss": 0.0461, | |
| "step": 1988 | |
| }, | |
| { | |
| "epoch": 2.438725490196078, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 3.970205620360517e-06, | |
| "loss": 0.0425, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 2.4411764705882355, | |
| "grad_norm": 0.349609375, | |
| "learning_rate": 3.959744270044684e-06, | |
| "loss": 0.0468, | |
| "step": 1992 | |
| }, | |
| { | |
| "epoch": 2.443627450980392, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 3.949287677418456e-06, | |
| "loss": 0.0442, | |
| "step": 1994 | |
| }, | |
| { | |
| "epoch": 2.446078431372549, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 3.938835890305862e-06, | |
| "loss": 0.0406, | |
| "step": 1996 | |
| }, | |
| { | |
| "epoch": 2.448529411764706, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 3.928388956508958e-06, | |
| "loss": 0.0442, | |
| "step": 1998 | |
| }, | |
| { | |
| "epoch": 2.450980392156863, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 3.917946923807596e-06, | |
| "loss": 0.0482, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 2.4534313725490198, | |
| "grad_norm": 0.380859375, | |
| "learning_rate": 3.90750983995922e-06, | |
| "loss": 0.0432, | |
| "step": 2002 | |
| }, | |
| { | |
| "epoch": 2.4558823529411766, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 3.89707775269863e-06, | |
| "loss": 0.0492, | |
| "step": 2004 | |
| }, | |
| { | |
| "epoch": 2.4583333333333335, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 3.886650709737784e-06, | |
| "loss": 0.0465, | |
| "step": 2006 | |
| }, | |
| { | |
| "epoch": 2.4607843137254903, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 3.876228758765566e-06, | |
| "loss": 0.0451, | |
| "step": 2008 | |
| }, | |
| { | |
| "epoch": 2.463235294117647, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 3.865811947447565e-06, | |
| "loss": 0.0451, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 2.465686274509804, | |
| "grad_norm": 0.19140625, | |
| "learning_rate": 3.8554003234258754e-06, | |
| "loss": 0.0447, | |
| "step": 2012 | |
| }, | |
| { | |
| "epoch": 2.468137254901961, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 3.844993934318854e-06, | |
| "loss": 0.0492, | |
| "step": 2014 | |
| }, | |
| { | |
| "epoch": 2.4705882352941178, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 3.8345928277209275e-06, | |
| "loss": 0.0531, | |
| "step": 2016 | |
| }, | |
| { | |
| "epoch": 2.4730392156862746, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 3.824197051202356e-06, | |
| "loss": 0.0472, | |
| "step": 2018 | |
| }, | |
| { | |
| "epoch": 2.4754901960784315, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 3.81380665230902e-06, | |
| "loss": 0.0461, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 2.4779411764705883, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 3.803421678562213e-06, | |
| "loss": 0.0479, | |
| "step": 2022 | |
| }, | |
| { | |
| "epoch": 2.480392156862745, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 3.7930421774584057e-06, | |
| "loss": 0.0467, | |
| "step": 2024 | |
| }, | |
| { | |
| "epoch": 2.482843137254902, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 3.7826681964690484e-06, | |
| "loss": 0.0409, | |
| "step": 2026 | |
| }, | |
| { | |
| "epoch": 2.485294117647059, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 3.7722997830403417e-06, | |
| "loss": 0.0421, | |
| "step": 2028 | |
| }, | |
| { | |
| "epoch": 2.4877450980392157, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 3.7619369845930195e-06, | |
| "loss": 0.0364, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 2.4901960784313726, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 3.7515798485221407e-06, | |
| "loss": 0.0444, | |
| "step": 2032 | |
| }, | |
| { | |
| "epoch": 2.4926470588235294, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 3.7412284221968596e-06, | |
| "loss": 0.0432, | |
| "step": 2034 | |
| }, | |
| { | |
| "epoch": 2.4950980392156863, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 3.7308827529602243e-06, | |
| "loss": 0.0438, | |
| "step": 2036 | |
| }, | |
| { | |
| "epoch": 2.497549019607843, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 3.72054288812895e-06, | |
| "loss": 0.0482, | |
| "step": 2038 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 3.7102088749931998e-06, | |
| "loss": 0.0454, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 2.502450980392157, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 3.69988076081638e-06, | |
| "loss": 0.0473, | |
| "step": 2042 | |
| }, | |
| { | |
| "epoch": 2.5049019607843137, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.6895585928349165e-06, | |
| "loss": 0.0471, | |
| "step": 2044 | |
| }, | |
| { | |
| "epoch": 2.5073529411764706, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 3.679242418258036e-06, | |
| "loss": 0.047, | |
| "step": 2046 | |
| }, | |
| { | |
| "epoch": 2.5098039215686274, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 3.668932284267559e-06, | |
| "loss": 0.0392, | |
| "step": 2048 | |
| }, | |
| { | |
| "epoch": 2.5122549019607843, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 3.6586282380176748e-06, | |
| "loss": 0.0453, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 2.514705882352941, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 3.648330326634731e-06, | |
| "loss": 0.0435, | |
| "step": 2052 | |
| }, | |
| { | |
| "epoch": 2.517156862745098, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 3.6380385972170195e-06, | |
| "loss": 0.0492, | |
| "step": 2054 | |
| }, | |
| { | |
| "epoch": 2.519607843137255, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.6277530968345552e-06, | |
| "loss": 0.0465, | |
| "step": 2056 | |
| }, | |
| { | |
| "epoch": 2.5220588235294117, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 3.617473872528867e-06, | |
| "loss": 0.0452, | |
| "step": 2058 | |
| }, | |
| { | |
| "epoch": 2.5245098039215685, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.6072009713127743e-06, | |
| "loss": 0.0424, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 2.5269607843137254, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 3.5969344401701844e-06, | |
| "loss": 0.0437, | |
| "step": 2062 | |
| }, | |
| { | |
| "epoch": 2.5294117647058822, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 3.586674326055868e-06, | |
| "loss": 0.0511, | |
| "step": 2064 | |
| }, | |
| { | |
| "epoch": 2.531862745098039, | |
| "grad_norm": 0.208984375, | |
| "learning_rate": 3.5764206758952426e-06, | |
| "loss": 0.0437, | |
| "step": 2066 | |
| }, | |
| { | |
| "epoch": 2.534313725490196, | |
| "grad_norm": 0.2177734375, | |
| "learning_rate": 3.56617353658417e-06, | |
| "loss": 0.044, | |
| "step": 2068 | |
| }, | |
| { | |
| "epoch": 2.536764705882353, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 3.5559329549887268e-06, | |
| "loss": 0.0447, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 2.5392156862745097, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 3.5456989779450014e-06, | |
| "loss": 0.0493, | |
| "step": 2072 | |
| }, | |
| { | |
| "epoch": 2.5416666666666665, | |
| "grad_norm": 0.322265625, | |
| "learning_rate": 3.5354716522588774e-06, | |
| "loss": 0.0456, | |
| "step": 2074 | |
| }, | |
| { | |
| "epoch": 2.5441176470588234, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 3.5252510247058124e-06, | |
| "loss": 0.0484, | |
| "step": 2076 | |
| }, | |
| { | |
| "epoch": 2.5465686274509802, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 3.515037142030636e-06, | |
| "loss": 0.0474, | |
| "step": 2078 | |
| }, | |
| { | |
| "epoch": 2.549019607843137, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 3.5048300509473228e-06, | |
| "loss": 0.0424, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 2.5514705882352944, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 3.4946297981387913e-06, | |
| "loss": 0.0412, | |
| "step": 2082 | |
| }, | |
| { | |
| "epoch": 2.553921568627451, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 3.484436430256683e-06, | |
| "loss": 0.0492, | |
| "step": 2084 | |
| }, | |
| { | |
| "epoch": 2.556372549019608, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 3.474249993921147e-06, | |
| "loss": 0.048, | |
| "step": 2086 | |
| }, | |
| { | |
| "epoch": 2.5588235294117645, | |
| "grad_norm": 0.349609375, | |
| "learning_rate": 3.4640705357206382e-06, | |
| "loss": 0.0468, | |
| "step": 2088 | |
| }, | |
| { | |
| "epoch": 2.561274509803922, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 3.4538981022116873e-06, | |
| "loss": 0.0536, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 2.563725490196078, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 3.4437327399187025e-06, | |
| "loss": 0.0481, | |
| "step": 2092 | |
| }, | |
| { | |
| "epoch": 2.5661764705882355, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 3.4335744953337542e-06, | |
| "loss": 0.0461, | |
| "step": 2094 | |
| }, | |
| { | |
| "epoch": 2.568627450980392, | |
| "grad_norm": 0.2021484375, | |
| "learning_rate": 3.423423414916349e-06, | |
| "loss": 0.0454, | |
| "step": 2096 | |
| }, | |
| { | |
| "epoch": 2.571078431372549, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 3.4132795450932395e-06, | |
| "loss": 0.0491, | |
| "step": 2098 | |
| }, | |
| { | |
| "epoch": 2.5735294117647056, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 3.4031429322581917e-06, | |
| "loss": 0.0458, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 2.575980392156863, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 3.3930136227717843e-06, | |
| "loss": 0.057, | |
| "step": 2102 | |
| }, | |
| { | |
| "epoch": 2.5784313725490198, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 3.382891662961195e-06, | |
| "loss": 0.0449, | |
| "step": 2104 | |
| }, | |
| { | |
| "epoch": 2.5808823529411766, | |
| "grad_norm": 0.2138671875, | |
| "learning_rate": 3.372777099119985e-06, | |
| "loss": 0.0439, | |
| "step": 2106 | |
| }, | |
| { | |
| "epoch": 2.5833333333333335, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 3.3626699775078884e-06, | |
| "loss": 0.045, | |
| "step": 2108 | |
| }, | |
| { | |
| "epoch": 2.5857843137254903, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 3.3525703443506063e-06, | |
| "loss": 0.0429, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 2.588235294117647, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 3.342478245839587e-06, | |
| "loss": 0.0482, | |
| "step": 2112 | |
| }, | |
| { | |
| "epoch": 2.590686274509804, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.3323937281318184e-06, | |
| "loss": 0.0474, | |
| "step": 2114 | |
| }, | |
| { | |
| "epoch": 2.593137254901961, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 3.322316837349618e-06, | |
| "loss": 0.0478, | |
| "step": 2116 | |
| }, | |
| { | |
| "epoch": 2.5955882352941178, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 3.312247619580421e-06, | |
| "loss": 0.0397, | |
| "step": 2118 | |
| }, | |
| { | |
| "epoch": 2.5980392156862746, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 3.302186120876572e-06, | |
| "loss": 0.0496, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 2.6004901960784315, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 3.292132387255106e-06, | |
| "loss": 0.0459, | |
| "step": 2122 | |
| }, | |
| { | |
| "epoch": 2.6029411764705883, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 3.2820864646975493e-06, | |
| "loss": 0.0485, | |
| "step": 2124 | |
| }, | |
| { | |
| "epoch": 2.605392156862745, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 3.2720483991497e-06, | |
| "loss": 0.0446, | |
| "step": 2126 | |
| }, | |
| { | |
| "epoch": 2.607843137254902, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 3.2620182365214224e-06, | |
| "loss": 0.052, | |
| "step": 2128 | |
| }, | |
| { | |
| "epoch": 2.610294117647059, | |
| "grad_norm": 0.1884765625, | |
| "learning_rate": 3.251996022686441e-06, | |
| "loss": 0.0399, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 2.6127450980392157, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 3.241981803482116e-06, | |
| "loss": 0.0438, | |
| "step": 2132 | |
| }, | |
| { | |
| "epoch": 2.6151960784313726, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 3.2319756247092552e-06, | |
| "loss": 0.0472, | |
| "step": 2134 | |
| }, | |
| { | |
| "epoch": 2.6176470588235294, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 3.221977532131882e-06, | |
| "loss": 0.0434, | |
| "step": 2136 | |
| }, | |
| { | |
| "epoch": 2.6200980392156863, | |
| "grad_norm": 0.2294921875, | |
| "learning_rate": 3.2119875714770433e-06, | |
| "loss": 0.0415, | |
| "step": 2138 | |
| }, | |
| { | |
| "epoch": 2.622549019607843, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 3.2020057884345963e-06, | |
| "loss": 0.0468, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 2.625, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 3.1920322286569877e-06, | |
| "loss": 0.0485, | |
| "step": 2142 | |
| }, | |
| { | |
| "epoch": 2.627450980392157, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.182066937759066e-06, | |
| "loss": 0.0469, | |
| "step": 2144 | |
| }, | |
| { | |
| "epoch": 2.6299019607843137, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 3.172109961317851e-06, | |
| "loss": 0.0489, | |
| "step": 2146 | |
| }, | |
| { | |
| "epoch": 2.6323529411764706, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 3.1621613448723414e-06, | |
| "loss": 0.0424, | |
| "step": 2148 | |
| }, | |
| { | |
| "epoch": 2.6348039215686274, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 3.152221133923303e-06, | |
| "loss": 0.0446, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 2.6372549019607843, | |
| "grad_norm": 0.337890625, | |
| "learning_rate": 3.1422893739330524e-06, | |
| "loss": 0.0455, | |
| "step": 2152 | |
| }, | |
| { | |
| "epoch": 2.639705882352941, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 3.132366110325259e-06, | |
| "loss": 0.048, | |
| "step": 2154 | |
| }, | |
| { | |
| "epoch": 2.642156862745098, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 3.122451388484732e-06, | |
| "loss": 0.0433, | |
| "step": 2156 | |
| }, | |
| { | |
| "epoch": 2.644607843137255, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 3.112545253757213e-06, | |
| "loss": 0.051, | |
| "step": 2158 | |
| }, | |
| { | |
| "epoch": 2.6470588235294117, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 3.102647751449174e-06, | |
| "loss": 0.04, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 2.6495098039215685, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 3.0927589268276013e-06, | |
| "loss": 0.0509, | |
| "step": 2162 | |
| }, | |
| { | |
| "epoch": 2.6519607843137254, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 3.082878825119796e-06, | |
| "loss": 0.0466, | |
| "step": 2164 | |
| }, | |
| { | |
| "epoch": 2.6544117647058822, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.073007491513162e-06, | |
| "loss": 0.0473, | |
| "step": 2166 | |
| }, | |
| { | |
| "epoch": 2.656862745098039, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.063144971155004e-06, | |
| "loss": 0.0474, | |
| "step": 2168 | |
| }, | |
| { | |
| "epoch": 2.659313725490196, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 3.0532913091523187e-06, | |
| "loss": 0.0417, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 2.661764705882353, | |
| "grad_norm": 0.2177734375, | |
| "learning_rate": 3.043446550571584e-06, | |
| "loss": 0.045, | |
| "step": 2172 | |
| }, | |
| { | |
| "epoch": 2.6642156862745097, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 3.0336107404385632e-06, | |
| "loss": 0.0525, | |
| "step": 2174 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 3.0237839237380905e-06, | |
| "loss": 0.0445, | |
| "step": 2176 | |
| }, | |
| { | |
| "epoch": 2.6691176470588234, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 3.013966145413866e-06, | |
| "loss": 0.0461, | |
| "step": 2178 | |
| }, | |
| { | |
| "epoch": 2.6715686274509802, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 3.004157450368258e-06, | |
| "loss": 0.0436, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 2.674019607843137, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 2.994357883462083e-06, | |
| "loss": 0.0423, | |
| "step": 2182 | |
| }, | |
| { | |
| "epoch": 2.6764705882352944, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 2.9845674895144165e-06, | |
| "loss": 0.0453, | |
| "step": 2184 | |
| }, | |
| { | |
| "epoch": 2.678921568627451, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 2.9747863133023803e-06, | |
| "loss": 0.047, | |
| "step": 2186 | |
| }, | |
| { | |
| "epoch": 2.681372549019608, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 2.9650143995609326e-06, | |
| "loss": 0.0524, | |
| "step": 2188 | |
| }, | |
| { | |
| "epoch": 2.6838235294117645, | |
| "grad_norm": 0.201171875, | |
| "learning_rate": 2.9552517929826763e-06, | |
| "loss": 0.0415, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 2.686274509803922, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 2.945498538217639e-06, | |
| "loss": 0.0448, | |
| "step": 2192 | |
| }, | |
| { | |
| "epoch": 2.688725490196078, | |
| "grad_norm": 0.1953125, | |
| "learning_rate": 2.935754679873085e-06, | |
| "loss": 0.0415, | |
| "step": 2194 | |
| }, | |
| { | |
| "epoch": 2.6911764705882355, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 2.9260202625133016e-06, | |
| "loss": 0.0444, | |
| "step": 2196 | |
| }, | |
| { | |
| "epoch": 2.693627450980392, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 2.9162953306593895e-06, | |
| "loss": 0.0444, | |
| "step": 2198 | |
| }, | |
| { | |
| "epoch": 2.696078431372549, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 2.9065799287890797e-06, | |
| "loss": 0.0462, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 2.6985294117647056, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 2.896874101336504e-06, | |
| "loss": 0.0462, | |
| "step": 2202 | |
| }, | |
| { | |
| "epoch": 2.700980392156863, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 2.8871778926920136e-06, | |
| "loss": 0.0518, | |
| "step": 2204 | |
| }, | |
| { | |
| "epoch": 2.7034313725490198, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 2.8774913472019666e-06, | |
| "loss": 0.0461, | |
| "step": 2206 | |
| }, | |
| { | |
| "epoch": 2.7058823529411766, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 2.86781450916852e-06, | |
| "loss": 0.0475, | |
| "step": 2208 | |
| }, | |
| { | |
| "epoch": 2.7083333333333335, | |
| "grad_norm": 0.337890625, | |
| "learning_rate": 2.858147422849441e-06, | |
| "loss": 0.0418, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 2.7107843137254903, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 2.8484901324578883e-06, | |
| "loss": 0.047, | |
| "step": 2212 | |
| }, | |
| { | |
| "epoch": 2.713235294117647, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 2.838842682162224e-06, | |
| "loss": 0.0437, | |
| "step": 2214 | |
| }, | |
| { | |
| "epoch": 2.715686274509804, | |
| "grad_norm": 0.3046875, | |
| "learning_rate": 2.8292051160858077e-06, | |
| "loss": 0.0485, | |
| "step": 2216 | |
| }, | |
| { | |
| "epoch": 2.718137254901961, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 2.819577478306783e-06, | |
| "loss": 0.0486, | |
| "step": 2218 | |
| }, | |
| { | |
| "epoch": 2.7205882352941178, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 2.809959812857898e-06, | |
| "loss": 0.0446, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 2.7230392156862746, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 2.800352163726281e-06, | |
| "loss": 0.0414, | |
| "step": 2222 | |
| }, | |
| { | |
| "epoch": 2.7254901960784315, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 2.790754574853255e-06, | |
| "loss": 0.0457, | |
| "step": 2224 | |
| }, | |
| { | |
| "epoch": 2.7279411764705883, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 2.781167090134135e-06, | |
| "loss": 0.0427, | |
| "step": 2226 | |
| }, | |
| { | |
| "epoch": 2.730392156862745, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 2.7715897534180136e-06, | |
| "loss": 0.046, | |
| "step": 2228 | |
| }, | |
| { | |
| "epoch": 2.732843137254902, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 2.7620226085075835e-06, | |
| "loss": 0.046, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 2.735294117647059, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 2.752465699158913e-06, | |
| "loss": 0.0494, | |
| "step": 2232 | |
| }, | |
| { | |
| "epoch": 2.7377450980392157, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 2.7429190690812636e-06, | |
| "loss": 0.0493, | |
| "step": 2234 | |
| }, | |
| { | |
| "epoch": 2.7401960784313726, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 2.733382761936884e-06, | |
| "loss": 0.0437, | |
| "step": 2236 | |
| }, | |
| { | |
| "epoch": 2.7426470588235294, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 2.723856821340806e-06, | |
| "loss": 0.0458, | |
| "step": 2238 | |
| }, | |
| { | |
| "epoch": 2.7450980392156863, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 2.714341290860655e-06, | |
| "loss": 0.0496, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 2.747549019607843, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 2.7048362140164373e-06, | |
| "loss": 0.0475, | |
| "step": 2242 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "grad_norm": 0.2294921875, | |
| "learning_rate": 2.6953416342803564e-06, | |
| "loss": 0.0425, | |
| "step": 2244 | |
| }, | |
| { | |
| "epoch": 2.752450980392157, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 2.685857595076603e-06, | |
| "loss": 0.0458, | |
| "step": 2246 | |
| }, | |
| { | |
| "epoch": 2.7549019607843137, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 2.6763841397811576e-06, | |
| "loss": 0.0474, | |
| "step": 2248 | |
| }, | |
| { | |
| "epoch": 2.7573529411764706, | |
| "grad_norm": 0.3515625, | |
| "learning_rate": 2.6669213117215985e-06, | |
| "loss": 0.054, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 2.7598039215686274, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 2.657469154176899e-06, | |
| "loss": 0.0418, | |
| "step": 2252 | |
| }, | |
| { | |
| "epoch": 2.7622549019607843, | |
| "grad_norm": 0.380859375, | |
| "learning_rate": 2.6480277103772255e-06, | |
| "loss": 0.0426, | |
| "step": 2254 | |
| }, | |
| { | |
| "epoch": 2.764705882352941, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 2.638597023503751e-06, | |
| "loss": 0.0463, | |
| "step": 2256 | |
| }, | |
| { | |
| "epoch": 2.767156862745098, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 2.6291771366884445e-06, | |
| "loss": 0.0469, | |
| "step": 2258 | |
| }, | |
| { | |
| "epoch": 2.769607843137255, | |
| "grad_norm": 0.2294921875, | |
| "learning_rate": 2.6197680930138834e-06, | |
| "loss": 0.0426, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 2.7720588235294117, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 2.6103699355130562e-06, | |
| "loss": 0.0387, | |
| "step": 2262 | |
| }, | |
| { | |
| "epoch": 2.7745098039215685, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 2.600982707169154e-06, | |
| "loss": 0.0466, | |
| "step": 2264 | |
| }, | |
| { | |
| "epoch": 2.7769607843137254, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 2.5916064509153925e-06, | |
| "loss": 0.0509, | |
| "step": 2266 | |
| }, | |
| { | |
| "epoch": 2.7794117647058822, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 2.582241209634797e-06, | |
| "loss": 0.052, | |
| "step": 2268 | |
| }, | |
| { | |
| "epoch": 2.781862745098039, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 2.5728870261600203e-06, | |
| "loss": 0.0418, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 2.784313725490196, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 2.563543943273143e-06, | |
| "loss": 0.0451, | |
| "step": 2272 | |
| }, | |
| { | |
| "epoch": 2.786764705882353, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 2.5542120037054684e-06, | |
| "loss": 0.046, | |
| "step": 2274 | |
| }, | |
| { | |
| "epoch": 2.7892156862745097, | |
| "grad_norm": 0.2236328125, | |
| "learning_rate": 2.5448912501373458e-06, | |
| "loss": 0.0454, | |
| "step": 2276 | |
| }, | |
| { | |
| "epoch": 2.7916666666666665, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 2.535581725197952e-06, | |
| "loss": 0.0423, | |
| "step": 2278 | |
| }, | |
| { | |
| "epoch": 2.7941176470588234, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 2.5262834714651195e-06, | |
| "loss": 0.045, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 2.7965686274509802, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 2.5169965314651283e-06, | |
| "loss": 0.0396, | |
| "step": 2282 | |
| }, | |
| { | |
| "epoch": 2.799019607843137, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 2.5077209476725094e-06, | |
| "loss": 0.0405, | |
| "step": 2284 | |
| }, | |
| { | |
| "epoch": 2.8014705882352944, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 2.498456762509862e-06, | |
| "loss": 0.0474, | |
| "step": 2286 | |
| }, | |
| { | |
| "epoch": 2.803921568627451, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 2.4892040183476462e-06, | |
| "loss": 0.0494, | |
| "step": 2288 | |
| }, | |
| { | |
| "epoch": 2.806372549019608, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 2.4799627575040014e-06, | |
| "loss": 0.0496, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 2.8088235294117645, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 2.4707330222445465e-06, | |
| "loss": 0.0451, | |
| "step": 2292 | |
| }, | |
| { | |
| "epoch": 2.811274509803922, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 2.4615148547821815e-06, | |
| "loss": 0.0415, | |
| "step": 2294 | |
| }, | |
| { | |
| "epoch": 2.813725490196078, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 2.4523082972769075e-06, | |
| "loss": 0.0486, | |
| "step": 2296 | |
| }, | |
| { | |
| "epoch": 2.8161764705882355, | |
| "grad_norm": 0.388671875, | |
| "learning_rate": 2.443113391835624e-06, | |
| "loss": 0.0501, | |
| "step": 2298 | |
| }, | |
| { | |
| "epoch": 2.818627450980392, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 2.433930180511936e-06, | |
| "loss": 0.0436, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 2.821078431372549, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 2.4247587053059674e-06, | |
| "loss": 0.0457, | |
| "step": 2302 | |
| }, | |
| { | |
| "epoch": 2.8235294117647056, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 2.4155990081641685e-06, | |
| "loss": 0.0493, | |
| "step": 2304 | |
| }, | |
| { | |
| "epoch": 2.825980392156863, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 2.4064511309791143e-06, | |
| "loss": 0.0439, | |
| "step": 2306 | |
| }, | |
| { | |
| "epoch": 2.8284313725490198, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 2.3973151155893276e-06, | |
| "loss": 0.0431, | |
| "step": 2308 | |
| }, | |
| { | |
| "epoch": 2.8308823529411766, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 2.3881910037790784e-06, | |
| "loss": 0.0474, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 2.8333333333333335, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 2.3790788372781915e-06, | |
| "loss": 0.0427, | |
| "step": 2312 | |
| }, | |
| { | |
| "epoch": 2.8357843137254903, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 2.3699786577618622e-06, | |
| "loss": 0.0432, | |
| "step": 2314 | |
| }, | |
| { | |
| "epoch": 2.838235294117647, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 2.360890506850464e-06, | |
| "loss": 0.0435, | |
| "step": 2316 | |
| }, | |
| { | |
| "epoch": 2.840686274509804, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 2.3518144261093495e-06, | |
| "loss": 0.0456, | |
| "step": 2318 | |
| }, | |
| { | |
| "epoch": 2.843137254901961, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 2.342750457048675e-06, | |
| "loss": 0.0435, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 2.8455882352941178, | |
| "grad_norm": 0.185546875, | |
| "learning_rate": 2.3336986411232025e-06, | |
| "loss": 0.043, | |
| "step": 2322 | |
| }, | |
| { | |
| "epoch": 2.8480392156862746, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 2.3246590197321038e-06, | |
| "loss": 0.0476, | |
| "step": 2324 | |
| }, | |
| { | |
| "epoch": 2.8504901960784315, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 2.3156316342187853e-06, | |
| "loss": 0.0421, | |
| "step": 2326 | |
| }, | |
| { | |
| "epoch": 2.8529411764705883, | |
| "grad_norm": 0.2080078125, | |
| "learning_rate": 2.3066165258706904e-06, | |
| "loss": 0.0397, | |
| "step": 2328 | |
| }, | |
| { | |
| "epoch": 2.855392156862745, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 2.297613735919107e-06, | |
| "loss": 0.0476, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 2.857843137254902, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 2.2886233055389914e-06, | |
| "loss": 0.0498, | |
| "step": 2332 | |
| }, | |
| { | |
| "epoch": 2.860294117647059, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 2.279645275848764e-06, | |
| "loss": 0.0447, | |
| "step": 2334 | |
| }, | |
| { | |
| "epoch": 2.8627450980392157, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 2.270679687910138e-06, | |
| "loss": 0.044, | |
| "step": 2336 | |
| }, | |
| { | |
| "epoch": 2.8651960784313726, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 2.26172658272792e-06, | |
| "loss": 0.0425, | |
| "step": 2338 | |
| }, | |
| { | |
| "epoch": 2.8676470588235294, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 2.2527860012498216e-06, | |
| "loss": 0.0436, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 2.8700980392156863, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 2.243857984366284e-06, | |
| "loss": 0.0443, | |
| "step": 2342 | |
| }, | |
| { | |
| "epoch": 2.872549019607843, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 2.2349425729102752e-06, | |
| "loss": 0.044, | |
| "step": 2344 | |
| }, | |
| { | |
| "epoch": 2.875, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 2.2260398076571176e-06, | |
| "loss": 0.0444, | |
| "step": 2346 | |
| }, | |
| { | |
| "epoch": 2.877450980392157, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 2.217149729324295e-06, | |
| "loss": 0.0452, | |
| "step": 2348 | |
| }, | |
| { | |
| "epoch": 2.8799019607843137, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 2.2082723785712603e-06, | |
| "loss": 0.0513, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 2.8823529411764706, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 2.199407795999263e-06, | |
| "loss": 0.0445, | |
| "step": 2352 | |
| }, | |
| { | |
| "epoch": 2.8848039215686274, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 2.190556022151152e-06, | |
| "loss": 0.0435, | |
| "step": 2354 | |
| }, | |
| { | |
| "epoch": 2.8872549019607843, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 2.1817170975111957e-06, | |
| "loss": 0.0473, | |
| "step": 2356 | |
| }, | |
| { | |
| "epoch": 2.889705882352941, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 2.172891062504899e-06, | |
| "loss": 0.0452, | |
| "step": 2358 | |
| }, | |
| { | |
| "epoch": 2.892156862745098, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 2.164077957498807e-06, | |
| "loss": 0.0441, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 2.894607843137255, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 2.1552778228003383e-06, | |
| "loss": 0.0492, | |
| "step": 2362 | |
| }, | |
| { | |
| "epoch": 2.8970588235294117, | |
| "grad_norm": 0.34765625, | |
| "learning_rate": 2.146490698657582e-06, | |
| "loss": 0.0424, | |
| "step": 2364 | |
| }, | |
| { | |
| "epoch": 2.8995098039215685, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 2.1377166252591276e-06, | |
| "loss": 0.0477, | |
| "step": 2366 | |
| }, | |
| { | |
| "epoch": 2.9019607843137254, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 2.128955642733877e-06, | |
| "loss": 0.0501, | |
| "step": 2368 | |
| }, | |
| { | |
| "epoch": 2.9044117647058822, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 2.120207791150854e-06, | |
| "loss": 0.0454, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 2.906862745098039, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 2.1114731105190352e-06, | |
| "loss": 0.0468, | |
| "step": 2372 | |
| }, | |
| { | |
| "epoch": 2.909313725490196, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 2.102751640787149e-06, | |
| "loss": 0.0472, | |
| "step": 2374 | |
| }, | |
| { | |
| "epoch": 2.911764705882353, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 2.0940434218435115e-06, | |
| "loss": 0.046, | |
| "step": 2376 | |
| }, | |
| { | |
| "epoch": 2.9142156862745097, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 2.085348493515833e-06, | |
| "loss": 0.0496, | |
| "step": 2378 | |
| }, | |
| { | |
| "epoch": 2.9166666666666665, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 2.0766668955710334e-06, | |
| "loss": 0.0459, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 2.9191176470588234, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 2.0679986677150714e-06, | |
| "loss": 0.0461, | |
| "step": 2382 | |
| }, | |
| { | |
| "epoch": 2.9215686274509802, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 2.059343849592755e-06, | |
| "loss": 0.0413, | |
| "step": 2384 | |
| }, | |
| { | |
| "epoch": 2.924019607843137, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 2.050702480787557e-06, | |
| "loss": 0.0517, | |
| "step": 2386 | |
| }, | |
| { | |
| "epoch": 2.9264705882352944, | |
| "grad_norm": 0.3203125, | |
| "learning_rate": 2.0420746008214463e-06, | |
| "loss": 0.0474, | |
| "step": 2388 | |
| }, | |
| { | |
| "epoch": 2.928921568627451, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 2.033460249154692e-06, | |
| "loss": 0.0499, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 2.931372549019608, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 2.0248594651856964e-06, | |
| "loss": 0.0495, | |
| "step": 2392 | |
| }, | |
| { | |
| "epoch": 2.9338235294117645, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 2.0162722882508072e-06, | |
| "loss": 0.0507, | |
| "step": 2394 | |
| }, | |
| { | |
| "epoch": 2.936274509803922, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 2.0076987576241365e-06, | |
| "loss": 0.0418, | |
| "step": 2396 | |
| }, | |
| { | |
| "epoch": 2.938725490196078, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 1.9991389125173882e-06, | |
| "loss": 0.0449, | |
| "step": 2398 | |
| }, | |
| { | |
| "epoch": 2.9411764705882355, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 1.9905927920796704e-06, | |
| "loss": 0.0453, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 2.943627450980392, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 1.9820604353973226e-06, | |
| "loss": 0.047, | |
| "step": 2402 | |
| }, | |
| { | |
| "epoch": 2.946078431372549, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 1.973541881493738e-06, | |
| "loss": 0.048, | |
| "step": 2404 | |
| }, | |
| { | |
| "epoch": 2.9485294117647056, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 1.965037169329173e-06, | |
| "loss": 0.045, | |
| "step": 2406 | |
| }, | |
| { | |
| "epoch": 2.950980392156863, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 1.9565463378005873e-06, | |
| "loss": 0.0448, | |
| "step": 2408 | |
| }, | |
| { | |
| "epoch": 2.9534313725490198, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 1.9480694257414486e-06, | |
| "loss": 0.0453, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 2.9558823529411766, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 1.9396064719215675e-06, | |
| "loss": 0.0548, | |
| "step": 2412 | |
| }, | |
| { | |
| "epoch": 2.9583333333333335, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 1.9311575150469146e-06, | |
| "loss": 0.0458, | |
| "step": 2414 | |
| }, | |
| { | |
| "epoch": 2.9607843137254903, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.9227225937594463e-06, | |
| "loss": 0.0473, | |
| "step": 2416 | |
| }, | |
| { | |
| "epoch": 2.963235294117647, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 1.9143017466369203e-06, | |
| "loss": 0.0485, | |
| "step": 2418 | |
| }, | |
| { | |
| "epoch": 2.965686274509804, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 1.90589501219273e-06, | |
| "loss": 0.0453, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 2.968137254901961, | |
| "grad_norm": 0.33984375, | |
| "learning_rate": 1.8975024288757238e-06, | |
| "loss": 0.0502, | |
| "step": 2422 | |
| }, | |
| { | |
| "epoch": 2.9705882352941178, | |
| "grad_norm": 0.2236328125, | |
| "learning_rate": 1.889124035070024e-06, | |
| "loss": 0.0468, | |
| "step": 2424 | |
| }, | |
| { | |
| "epoch": 2.9730392156862746, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 1.8807598690948614e-06, | |
| "loss": 0.0402, | |
| "step": 2426 | |
| }, | |
| { | |
| "epoch": 2.9754901960784315, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 1.8724099692043934e-06, | |
| "loss": 0.0502, | |
| "step": 2428 | |
| }, | |
| { | |
| "epoch": 2.9779411764705883, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 1.8640743735875266e-06, | |
| "loss": 0.0463, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 2.980392156862745, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 1.8557531203677504e-06, | |
| "loss": 0.0447, | |
| "step": 2432 | |
| }, | |
| { | |
| "epoch": 2.982843137254902, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 1.8474462476029575e-06, | |
| "loss": 0.0447, | |
| "step": 2434 | |
| }, | |
| { | |
| "epoch": 2.985294117647059, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 1.8391537932852677e-06, | |
| "loss": 0.0399, | |
| "step": 2436 | |
| }, | |
| { | |
| "epoch": 2.9877450980392157, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 1.8308757953408601e-06, | |
| "loss": 0.0445, | |
| "step": 2438 | |
| }, | |
| { | |
| "epoch": 2.9901960784313726, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 1.8226122916297933e-06, | |
| "loss": 0.055, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 2.9926470588235294, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 1.8143633199458383e-06, | |
| "loss": 0.0424, | |
| "step": 2442 | |
| }, | |
| { | |
| "epoch": 2.9950980392156863, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 1.806128918016304e-06, | |
| "loss": 0.0524, | |
| "step": 2444 | |
| }, | |
| { | |
| "epoch": 2.997549019607843, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.7979091235018564e-06, | |
| "loss": 0.0503, | |
| "step": 2446 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.40234375, | |
| "learning_rate": 1.7897039739963607e-06, | |
| "loss": 0.0443, | |
| "step": 2448 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_loss": 0.0535544753074646, | |
| "eval_runtime": 37.6568, | |
| "eval_samples_per_second": 133.203, | |
| "eval_steps_per_second": 1.062, | |
| "step": 2448 | |
| }, | |
| { | |
| "epoch": 3.002450980392157, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 1.7815135070267008e-06, | |
| "loss": 0.0437, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 3.0049019607843137, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 1.7733377600526041e-06, | |
| "loss": 0.0417, | |
| "step": 2452 | |
| }, | |
| { | |
| "epoch": 3.0073529411764706, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 1.7651767704664819e-06, | |
| "loss": 0.0412, | |
| "step": 2454 | |
| }, | |
| { | |
| "epoch": 3.0098039215686274, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 1.7570305755932438e-06, | |
| "loss": 0.0494, | |
| "step": 2456 | |
| }, | |
| { | |
| "epoch": 3.0122549019607843, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 1.748899212690141e-06, | |
| "loss": 0.0458, | |
| "step": 2458 | |
| }, | |
| { | |
| "epoch": 3.014705882352941, | |
| "grad_norm": 0.1943359375, | |
| "learning_rate": 1.7407827189465893e-06, | |
| "loss": 0.0407, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 3.017156862745098, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 1.7326811314839931e-06, | |
| "loss": 0.0533, | |
| "step": 2462 | |
| }, | |
| { | |
| "epoch": 3.019607843137255, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 1.7245944873555893e-06, | |
| "loss": 0.0524, | |
| "step": 2464 | |
| }, | |
| { | |
| "epoch": 3.0220588235294117, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 1.716522823546264e-06, | |
| "loss": 0.0486, | |
| "step": 2466 | |
| }, | |
| { | |
| "epoch": 3.0245098039215685, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 1.7084661769723942e-06, | |
| "loss": 0.0484, | |
| "step": 2468 | |
| }, | |
| { | |
| "epoch": 3.0269607843137254, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 1.700424584481674e-06, | |
| "loss": 0.045, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 3.0294117647058822, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 1.6923980828529424e-06, | |
| "loss": 0.0513, | |
| "step": 2472 | |
| }, | |
| { | |
| "epoch": 3.031862745098039, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 1.6843867087960252e-06, | |
| "loss": 0.0471, | |
| "step": 2474 | |
| }, | |
| { | |
| "epoch": 3.034313725490196, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 1.6763904989515545e-06, | |
| "loss": 0.0441, | |
| "step": 2476 | |
| }, | |
| { | |
| "epoch": 3.036764705882353, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 1.6684094898908131e-06, | |
| "loss": 0.0477, | |
| "step": 2478 | |
| }, | |
| { | |
| "epoch": 3.0392156862745097, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 1.6604437181155613e-06, | |
| "loss": 0.0463, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 3.0416666666666665, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 1.652493220057866e-06, | |
| "loss": 0.0456, | |
| "step": 2482 | |
| }, | |
| { | |
| "epoch": 3.0441176470588234, | |
| "grad_norm": 0.208984375, | |
| "learning_rate": 1.6445580320799441e-06, | |
| "loss": 0.0463, | |
| "step": 2484 | |
| }, | |
| { | |
| "epoch": 3.0465686274509802, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 1.636638190473987e-06, | |
| "loss": 0.0483, | |
| "step": 2486 | |
| }, | |
| { | |
| "epoch": 3.049019607843137, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 1.628733731461999e-06, | |
| "loss": 0.0488, | |
| "step": 2488 | |
| }, | |
| { | |
| "epoch": 3.051470588235294, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 1.6208446911956344e-06, | |
| "loss": 0.0441, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 3.053921568627451, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.6129711057560215e-06, | |
| "loss": 0.0411, | |
| "step": 2492 | |
| }, | |
| { | |
| "epoch": 3.0563725490196076, | |
| "grad_norm": 0.3515625, | |
| "learning_rate": 1.6051130111536134e-06, | |
| "loss": 0.0514, | |
| "step": 2494 | |
| }, | |
| { | |
| "epoch": 3.0588235294117645, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 1.5972704433280063e-06, | |
| "loss": 0.0449, | |
| "step": 2496 | |
| }, | |
| { | |
| "epoch": 3.0612745098039214, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 1.589443438147789e-06, | |
| "loss": 0.0425, | |
| "step": 2498 | |
| }, | |
| { | |
| "epoch": 3.063725490196078, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 1.581632031410374e-06, | |
| "loss": 0.0487, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 3.0661764705882355, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 1.5738362588418265e-06, | |
| "loss": 0.0469, | |
| "step": 2502 | |
| }, | |
| { | |
| "epoch": 3.0686274509803924, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 1.5660561560967169e-06, | |
| "loss": 0.0475, | |
| "step": 2504 | |
| }, | |
| { | |
| "epoch": 3.071078431372549, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 1.5582917587579394e-06, | |
| "loss": 0.0463, | |
| "step": 2506 | |
| }, | |
| { | |
| "epoch": 3.073529411764706, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 1.5505431023365642e-06, | |
| "loss": 0.0488, | |
| "step": 2508 | |
| }, | |
| { | |
| "epoch": 3.075980392156863, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 1.5428102222716696e-06, | |
| "loss": 0.0499, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 3.0784313725490198, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 1.5350931539301734e-06, | |
| "loss": 0.0461, | |
| "step": 2512 | |
| }, | |
| { | |
| "epoch": 3.0808823529411766, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 1.527391932606684e-06, | |
| "loss": 0.0545, | |
| "step": 2514 | |
| }, | |
| { | |
| "epoch": 3.0833333333333335, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 1.5197065935233301e-06, | |
| "loss": 0.0405, | |
| "step": 2516 | |
| }, | |
| { | |
| "epoch": 3.0857843137254903, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 1.5120371718295984e-06, | |
| "loss": 0.0438, | |
| "step": 2518 | |
| }, | |
| { | |
| "epoch": 3.088235294117647, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 1.5043837026021823e-06, | |
| "loss": 0.0503, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 3.090686274509804, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 1.4967462208448092e-06, | |
| "loss": 0.0443, | |
| "step": 2522 | |
| }, | |
| { | |
| "epoch": 3.093137254901961, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 1.4891247614880904e-06, | |
| "loss": 0.0448, | |
| "step": 2524 | |
| }, | |
| { | |
| "epoch": 3.0955882352941178, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 1.4815193593893583e-06, | |
| "loss": 0.047, | |
| "step": 2526 | |
| }, | |
| { | |
| "epoch": 3.0980392156862746, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 1.473930049332502e-06, | |
| "loss": 0.0521, | |
| "step": 2528 | |
| }, | |
| { | |
| "epoch": 3.1004901960784315, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 1.4663568660278154e-06, | |
| "loss": 0.0451, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 3.1029411764705883, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 1.4587998441118356e-06, | |
| "loss": 0.0523, | |
| "step": 2532 | |
| }, | |
| { | |
| "epoch": 3.105392156862745, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 1.4512590181471853e-06, | |
| "loss": 0.0418, | |
| "step": 2534 | |
| }, | |
| { | |
| "epoch": 3.107843137254902, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 1.4437344226224083e-06, | |
| "loss": 0.0432, | |
| "step": 2536 | |
| }, | |
| { | |
| "epoch": 3.110294117647059, | |
| "grad_norm": 0.2138671875, | |
| "learning_rate": 1.4362260919518234e-06, | |
| "loss": 0.0491, | |
| "step": 2538 | |
| }, | |
| { | |
| "epoch": 3.1127450980392157, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 1.4287340604753596e-06, | |
| "loss": 0.0437, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 3.1151960784313726, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 1.4212583624583964e-06, | |
| "loss": 0.0536, | |
| "step": 2542 | |
| }, | |
| { | |
| "epoch": 3.1176470588235294, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 1.413799032091615e-06, | |
| "loss": 0.0486, | |
| "step": 2544 | |
| }, | |
| { | |
| "epoch": 3.1200980392156863, | |
| "grad_norm": 0.31640625, | |
| "learning_rate": 1.4063561034908385e-06, | |
| "loss": 0.045, | |
| "step": 2546 | |
| }, | |
| { | |
| "epoch": 3.122549019607843, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 1.3989296106968702e-06, | |
| "loss": 0.0441, | |
| "step": 2548 | |
| }, | |
| { | |
| "epoch": 3.125, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 1.3915195876753495e-06, | |
| "loss": 0.0458, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 3.127450980392157, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 1.3841260683165836e-06, | |
| "loss": 0.0453, | |
| "step": 2552 | |
| }, | |
| { | |
| "epoch": 3.1299019607843137, | |
| "grad_norm": 0.25, | |
| "learning_rate": 1.3767490864354034e-06, | |
| "loss": 0.0479, | |
| "step": 2554 | |
| }, | |
| { | |
| "epoch": 3.1323529411764706, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 1.369388675771004e-06, | |
| "loss": 0.0481, | |
| "step": 2556 | |
| }, | |
| { | |
| "epoch": 3.1348039215686274, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 1.3620448699867877e-06, | |
| "loss": 0.0444, | |
| "step": 2558 | |
| }, | |
| { | |
| "epoch": 3.1372549019607843, | |
| "grad_norm": 0.31640625, | |
| "learning_rate": 1.354717702670218e-06, | |
| "loss": 0.0519, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 3.139705882352941, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 1.347407207332655e-06, | |
| "loss": 0.0439, | |
| "step": 2562 | |
| }, | |
| { | |
| "epoch": 3.142156862745098, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 1.3401134174092128e-06, | |
| "loss": 0.0555, | |
| "step": 2564 | |
| }, | |
| { | |
| "epoch": 3.144607843137255, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 1.332836366258603e-06, | |
| "loss": 0.0413, | |
| "step": 2566 | |
| }, | |
| { | |
| "epoch": 3.1470588235294117, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 1.3255760871629753e-06, | |
| "loss": 0.0422, | |
| "step": 2568 | |
| }, | |
| { | |
| "epoch": 3.1495098039215685, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 1.3183326133277774e-06, | |
| "loss": 0.0453, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 3.1519607843137254, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 1.3111059778815915e-06, | |
| "loss": 0.0503, | |
| "step": 2572 | |
| }, | |
| { | |
| "epoch": 3.1544117647058822, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 1.3038962138759924e-06, | |
| "loss": 0.0518, | |
| "step": 2574 | |
| }, | |
| { | |
| "epoch": 3.156862745098039, | |
| "grad_norm": 0.25, | |
| "learning_rate": 1.2967033542853918e-06, | |
| "loss": 0.0409, | |
| "step": 2576 | |
| }, | |
| { | |
| "epoch": 3.159313725490196, | |
| "grad_norm": 0.2080078125, | |
| "learning_rate": 1.2895274320068829e-06, | |
| "loss": 0.0457, | |
| "step": 2578 | |
| }, | |
| { | |
| "epoch": 3.161764705882353, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 1.2823684798601021e-06, | |
| "loss": 0.0484, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 3.1642156862745097, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 1.2752265305870637e-06, | |
| "loss": 0.0442, | |
| "step": 2582 | |
| }, | |
| { | |
| "epoch": 3.1666666666666665, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 1.2681016168520244e-06, | |
| "loss": 0.0458, | |
| "step": 2584 | |
| }, | |
| { | |
| "epoch": 3.1691176470588234, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.2609937712413267e-06, | |
| "loss": 0.0453, | |
| "step": 2586 | |
| }, | |
| { | |
| "epoch": 3.1715686274509802, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.2539030262632461e-06, | |
| "loss": 0.0502, | |
| "step": 2588 | |
| }, | |
| { | |
| "epoch": 3.174019607843137, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 1.2468294143478515e-06, | |
| "loss": 0.046, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 3.176470588235294, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 1.2397729678468534e-06, | |
| "loss": 0.0443, | |
| "step": 2592 | |
| }, | |
| { | |
| "epoch": 3.178921568627451, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 1.2327337190334488e-06, | |
| "loss": 0.0484, | |
| "step": 2594 | |
| }, | |
| { | |
| "epoch": 3.1813725490196076, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 1.2257117001021874e-06, | |
| "loss": 0.0455, | |
| "step": 2596 | |
| }, | |
| { | |
| "epoch": 3.1838235294117645, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 1.2187069431688092e-06, | |
| "loss": 0.0499, | |
| "step": 2598 | |
| }, | |
| { | |
| "epoch": 3.186274509803922, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 1.211719480270111e-06, | |
| "loss": 0.0427, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 3.188725490196078, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 1.2047493433637935e-06, | |
| "loss": 0.0399, | |
| "step": 2602 | |
| }, | |
| { | |
| "epoch": 3.1911764705882355, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 1.1977965643283106e-06, | |
| "loss": 0.0448, | |
| "step": 2604 | |
| }, | |
| { | |
| "epoch": 3.1936274509803924, | |
| "grad_norm": 0.2294921875, | |
| "learning_rate": 1.1908611749627352e-06, | |
| "loss": 0.042, | |
| "step": 2606 | |
| }, | |
| { | |
| "epoch": 3.196078431372549, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 1.1839432069866014e-06, | |
| "loss": 0.0453, | |
| "step": 2608 | |
| }, | |
| { | |
| "epoch": 3.198529411764706, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 1.1770426920397688e-06, | |
| "loss": 0.0501, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 3.200980392156863, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 1.1701596616822752e-06, | |
| "loss": 0.0435, | |
| "step": 2612 | |
| }, | |
| { | |
| "epoch": 3.2034313725490198, | |
| "grad_norm": 0.205078125, | |
| "learning_rate": 1.1632941473941867e-06, | |
| "loss": 0.0416, | |
| "step": 2614 | |
| }, | |
| { | |
| "epoch": 3.2058823529411766, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 1.1564461805754635e-06, | |
| "loss": 0.049, | |
| "step": 2616 | |
| }, | |
| { | |
| "epoch": 3.2083333333333335, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 1.1496157925458057e-06, | |
| "loss": 0.0442, | |
| "step": 2618 | |
| }, | |
| { | |
| "epoch": 3.2107843137254903, | |
| "grad_norm": 0.203125, | |
| "learning_rate": 1.1428030145445207e-06, | |
| "loss": 0.0449, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 3.213235294117647, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 1.1360078777303729e-06, | |
| "loss": 0.0443, | |
| "step": 2622 | |
| }, | |
| { | |
| "epoch": 3.215686274509804, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 1.1292304131814419e-06, | |
| "loss": 0.0493, | |
| "step": 2624 | |
| }, | |
| { | |
| "epoch": 3.218137254901961, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 1.1224706518949845e-06, | |
| "loss": 0.048, | |
| "step": 2626 | |
| }, | |
| { | |
| "epoch": 3.2205882352941178, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 1.1157286247872873e-06, | |
| "loss": 0.046, | |
| "step": 2628 | |
| }, | |
| { | |
| "epoch": 3.2230392156862746, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 1.109004362693531e-06, | |
| "loss": 0.0498, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 3.2254901960784315, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 1.1022978963676462e-06, | |
| "loss": 0.0484, | |
| "step": 2632 | |
| }, | |
| { | |
| "epoch": 3.2279411764705883, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 1.0956092564821712e-06, | |
| "loss": 0.0458, | |
| "step": 2634 | |
| }, | |
| { | |
| "epoch": 3.230392156862745, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 1.088938473628115e-06, | |
| "loss": 0.0483, | |
| "step": 2636 | |
| }, | |
| { | |
| "epoch": 3.232843137254902, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 1.0822855783148184e-06, | |
| "loss": 0.0458, | |
| "step": 2638 | |
| }, | |
| { | |
| "epoch": 3.235294117647059, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 1.0756506009698075e-06, | |
| "loss": 0.0437, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 3.2377450980392157, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 1.0690335719386618e-06, | |
| "loss": 0.0436, | |
| "step": 2642 | |
| }, | |
| { | |
| "epoch": 3.2401960784313726, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 1.0624345214848746e-06, | |
| "loss": 0.0478, | |
| "step": 2644 | |
| }, | |
| { | |
| "epoch": 3.2426470588235294, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 1.0558534797897086e-06, | |
| "loss": 0.0409, | |
| "step": 2646 | |
| }, | |
| { | |
| "epoch": 3.2450980392156863, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 1.0492904769520646e-06, | |
| "loss": 0.0471, | |
| "step": 2648 | |
| }, | |
| { | |
| "epoch": 3.247549019607843, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 1.0427455429883426e-06, | |
| "loss": 0.0462, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 1.0362187078322994e-06, | |
| "loss": 0.0457, | |
| "step": 2652 | |
| }, | |
| { | |
| "epoch": 3.252450980392157, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 1.0297100013349181e-06, | |
| "loss": 0.0473, | |
| "step": 2654 | |
| }, | |
| { | |
| "epoch": 3.2549019607843137, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 1.0232194532642698e-06, | |
| "loss": 0.0472, | |
| "step": 2656 | |
| }, | |
| { | |
| "epoch": 3.2573529411764706, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 1.016747093305376e-06, | |
| "loss": 0.0477, | |
| "step": 2658 | |
| }, | |
| { | |
| "epoch": 3.2598039215686274, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 1.0102929510600701e-06, | |
| "loss": 0.0459, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 3.2622549019607843, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.0038570560468707e-06, | |
| "loss": 0.0471, | |
| "step": 2662 | |
| }, | |
| { | |
| "epoch": 3.264705882352941, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 9.974394377008367e-07, | |
| "loss": 0.0459, | |
| "step": 2664 | |
| }, | |
| { | |
| "epoch": 3.267156862745098, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 9.910401253734402e-07, | |
| "loss": 0.0431, | |
| "step": 2666 | |
| }, | |
| { | |
| "epoch": 3.269607843137255, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 9.846591483324296e-07, | |
| "loss": 0.0467, | |
| "step": 2668 | |
| }, | |
| { | |
| "epoch": 3.2720588235294117, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 9.782965357616925e-07, | |
| "loss": 0.0484, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 3.2745098039215685, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.719523167611283e-07, | |
| "loss": 0.0417, | |
| "step": 2672 | |
| }, | |
| { | |
| "epoch": 3.2769607843137254, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 9.656265203465093e-07, | |
| "loss": 0.0453, | |
| "step": 2674 | |
| }, | |
| { | |
| "epoch": 3.2794117647058822, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 9.593191754493519e-07, | |
| "loss": 0.0476, | |
| "step": 2676 | |
| }, | |
| { | |
| "epoch": 3.281862745098039, | |
| "grad_norm": 0.2177734375, | |
| "learning_rate": 9.53030310916786e-07, | |
| "loss": 0.0511, | |
| "step": 2678 | |
| }, | |
| { | |
| "epoch": 3.284313725490196, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 9.467599555114137e-07, | |
| "loss": 0.0461, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 3.286764705882353, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 9.405081379111907e-07, | |
| "loss": 0.0472, | |
| "step": 2682 | |
| }, | |
| { | |
| "epoch": 3.2892156862745097, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 9.342748867092832e-07, | |
| "loss": 0.0442, | |
| "step": 2684 | |
| }, | |
| { | |
| "epoch": 3.2916666666666665, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 9.280602304139457e-07, | |
| "loss": 0.0422, | |
| "step": 2686 | |
| }, | |
| { | |
| "epoch": 3.2941176470588234, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 9.218641974483894e-07, | |
| "loss": 0.0415, | |
| "step": 2688 | |
| }, | |
| { | |
| "epoch": 3.2965686274509802, | |
| "grad_norm": 0.2099609375, | |
| "learning_rate": 9.156868161506449e-07, | |
| "loss": 0.0442, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 3.299019607843137, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 9.095281147734425e-07, | |
| "loss": 0.049, | |
| "step": 2692 | |
| }, | |
| { | |
| "epoch": 3.301470588235294, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 9.033881214840745e-07, | |
| "loss": 0.044, | |
| "step": 2694 | |
| }, | |
| { | |
| "epoch": 3.303921568627451, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 8.97266864364274e-07, | |
| "loss": 0.0487, | |
| "step": 2696 | |
| }, | |
| { | |
| "epoch": 3.306372549019608, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 8.911643714100821e-07, | |
| "loss": 0.0456, | |
| "step": 2698 | |
| }, | |
| { | |
| "epoch": 3.3088235294117645, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 8.850806705317183e-07, | |
| "loss": 0.048, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 3.311274509803922, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 8.790157895534579e-07, | |
| "loss": 0.0451, | |
| "step": 2702 | |
| }, | |
| { | |
| "epoch": 3.313725490196078, | |
| "grad_norm": 0.2001953125, | |
| "learning_rate": 8.729697562134992e-07, | |
| "loss": 0.0428, | |
| "step": 2704 | |
| }, | |
| { | |
| "epoch": 3.3161764705882355, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 8.669425981638413e-07, | |
| "loss": 0.0459, | |
| "step": 2706 | |
| }, | |
| { | |
| "epoch": 3.318627450980392, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 8.609343429701561e-07, | |
| "loss": 0.0466, | |
| "step": 2708 | |
| }, | |
| { | |
| "epoch": 3.321078431372549, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 8.549450181116598e-07, | |
| "loss": 0.0411, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 3.323529411764706, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 8.489746509809915e-07, | |
| "loss": 0.0477, | |
| "step": 2712 | |
| }, | |
| { | |
| "epoch": 3.325980392156863, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 8.430232688840828e-07, | |
| "loss": 0.0442, | |
| "step": 2714 | |
| }, | |
| { | |
| "epoch": 3.3284313725490198, | |
| "grad_norm": 0.203125, | |
| "learning_rate": 8.370908990400384e-07, | |
| "loss": 0.0398, | |
| "step": 2716 | |
| }, | |
| { | |
| "epoch": 3.3308823529411766, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 8.311775685810097e-07, | |
| "loss": 0.0442, | |
| "step": 2718 | |
| }, | |
| { | |
| "epoch": 3.3333333333333335, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 8.252833045520659e-07, | |
| "loss": 0.0487, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 3.3357843137254903, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 8.194081339110776e-07, | |
| "loss": 0.0459, | |
| "step": 2722 | |
| }, | |
| { | |
| "epoch": 3.338235294117647, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 8.13552083528592e-07, | |
| "loss": 0.0464, | |
| "step": 2724 | |
| }, | |
| { | |
| "epoch": 3.340686274509804, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 8.077151801877032e-07, | |
| "loss": 0.0533, | |
| "step": 2726 | |
| }, | |
| { | |
| "epoch": 3.343137254901961, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 8.018974505839394e-07, | |
| "loss": 0.0431, | |
| "step": 2728 | |
| }, | |
| { | |
| "epoch": 3.3455882352941178, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 7.96098921325133e-07, | |
| "loss": 0.0467, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 3.3480392156862746, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 7.903196189313039e-07, | |
| "loss": 0.0426, | |
| "step": 2732 | |
| }, | |
| { | |
| "epoch": 3.3504901960784315, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 7.845595698345382e-07, | |
| "loss": 0.0467, | |
| "step": 2734 | |
| }, | |
| { | |
| "epoch": 3.3529411764705883, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 7.788188003788611e-07, | |
| "loss": 0.0432, | |
| "step": 2736 | |
| }, | |
| { | |
| "epoch": 3.355392156862745, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 7.73097336820125e-07, | |
| "loss": 0.0508, | |
| "step": 2738 | |
| }, | |
| { | |
| "epoch": 3.357843137254902, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 7.673952053258821e-07, | |
| "loss": 0.0433, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 3.360294117647059, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 7.6171243197527e-07, | |
| "loss": 0.0484, | |
| "step": 2742 | |
| }, | |
| { | |
| "epoch": 3.3627450980392157, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 7.560490427588907e-07, | |
| "loss": 0.05, | |
| "step": 2744 | |
| }, | |
| { | |
| "epoch": 3.3651960784313726, | |
| "grad_norm": 0.2177734375, | |
| "learning_rate": 7.504050635786891e-07, | |
| "loss": 0.0433, | |
| "step": 2746 | |
| }, | |
| { | |
| "epoch": 3.3676470588235294, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 7.447805202478392e-07, | |
| "loss": 0.044, | |
| "step": 2748 | |
| }, | |
| { | |
| "epoch": 3.3700980392156863, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 7.391754384906224e-07, | |
| "loss": 0.0505, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 3.372549019607843, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 7.335898439423106e-07, | |
| "loss": 0.0456, | |
| "step": 2752 | |
| }, | |
| { | |
| "epoch": 3.375, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 7.28023762149051e-07, | |
| "loss": 0.0418, | |
| "step": 2754 | |
| }, | |
| { | |
| "epoch": 3.377450980392157, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 7.22477218567747e-07, | |
| "loss": 0.0481, | |
| "step": 2756 | |
| }, | |
| { | |
| "epoch": 3.3799019607843137, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 7.16950238565941e-07, | |
| "loss": 0.0477, | |
| "step": 2758 | |
| }, | |
| { | |
| "epoch": 3.3823529411764706, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 7.114428474217022e-07, | |
| "loss": 0.0441, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 3.3848039215686274, | |
| "grad_norm": 0.2236328125, | |
| "learning_rate": 7.059550703235085e-07, | |
| "loss": 0.0466, | |
| "step": 2762 | |
| }, | |
| { | |
| "epoch": 3.3872549019607843, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 7.004869323701286e-07, | |
| "loss": 0.0455, | |
| "step": 2764 | |
| }, | |
| { | |
| "epoch": 3.389705882352941, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 6.950384585705133e-07, | |
| "loss": 0.0466, | |
| "step": 2766 | |
| }, | |
| { | |
| "epoch": 3.392156862745098, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 6.896096738436775e-07, | |
| "loss": 0.0436, | |
| "step": 2768 | |
| }, | |
| { | |
| "epoch": 3.394607843137255, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 6.842006030185833e-07, | |
| "loss": 0.0494, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 3.3970588235294117, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 6.788112708340345e-07, | |
| "loss": 0.0449, | |
| "step": 2772 | |
| }, | |
| { | |
| "epoch": 3.3995098039215685, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 6.734417019385564e-07, | |
| "loss": 0.0463, | |
| "step": 2774 | |
| }, | |
| { | |
| "epoch": 3.4019607843137254, | |
| "grad_norm": 0.2021484375, | |
| "learning_rate": 6.680919208902831e-07, | |
| "loss": 0.0414, | |
| "step": 2776 | |
| }, | |
| { | |
| "epoch": 3.4044117647058822, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 6.627619521568529e-07, | |
| "loss": 0.0451, | |
| "step": 2778 | |
| }, | |
| { | |
| "epoch": 3.406862745098039, | |
| "grad_norm": 0.25, | |
| "learning_rate": 6.574518201152852e-07, | |
| "loss": 0.0489, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 3.409313725490196, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 6.521615490518778e-07, | |
| "loss": 0.0464, | |
| "step": 2782 | |
| }, | |
| { | |
| "epoch": 3.411764705882353, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 6.46891163162095e-07, | |
| "loss": 0.0413, | |
| "step": 2784 | |
| }, | |
| { | |
| "epoch": 3.4142156862745097, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 6.416406865504482e-07, | |
| "loss": 0.0418, | |
| "step": 2786 | |
| }, | |
| { | |
| "epoch": 3.4166666666666665, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 6.364101432303987e-07, | |
| "loss": 0.0514, | |
| "step": 2788 | |
| }, | |
| { | |
| "epoch": 3.4191176470588234, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 6.311995571242385e-07, | |
| "loss": 0.0465, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 3.4215686274509802, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 6.26008952062983e-07, | |
| "loss": 0.0475, | |
| "step": 2792 | |
| }, | |
| { | |
| "epoch": 3.424019607843137, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 6.208383517862648e-07, | |
| "loss": 0.0454, | |
| "step": 2794 | |
| }, | |
| { | |
| "epoch": 3.426470588235294, | |
| "grad_norm": 0.1982421875, | |
| "learning_rate": 6.156877799422212e-07, | |
| "loss": 0.0469, | |
| "step": 2796 | |
| }, | |
| { | |
| "epoch": 3.428921568627451, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 6.105572600873883e-07, | |
| "loss": 0.048, | |
| "step": 2798 | |
| }, | |
| { | |
| "epoch": 3.431372549019608, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 6.054468156865955e-07, | |
| "loss": 0.0479, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 3.4338235294117645, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 6.003564701128506e-07, | |
| "loss": 0.0497, | |
| "step": 2802 | |
| }, | |
| { | |
| "epoch": 3.436274509803922, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 5.952862466472436e-07, | |
| "loss": 0.0459, | |
| "step": 2804 | |
| }, | |
| { | |
| "epoch": 3.438725490196078, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 5.902361684788288e-07, | |
| "loss": 0.0424, | |
| "step": 2806 | |
| }, | |
| { | |
| "epoch": 3.4411764705882355, | |
| "grad_norm": 0.3515625, | |
| "learning_rate": 5.852062587045282e-07, | |
| "loss": 0.0467, | |
| "step": 2808 | |
| }, | |
| { | |
| "epoch": 3.443627450980392, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 5.801965403290221e-07, | |
| "loss": 0.044, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 3.446078431372549, | |
| "grad_norm": 0.30859375, | |
| "learning_rate": 5.752070362646417e-07, | |
| "loss": 0.0405, | |
| "step": 2812 | |
| }, | |
| { | |
| "epoch": 3.448529411764706, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 5.702377693312683e-07, | |
| "loss": 0.0441, | |
| "step": 2814 | |
| }, | |
| { | |
| "epoch": 3.450980392156863, | |
| "grad_norm": 0.212890625, | |
| "learning_rate": 5.652887622562259e-07, | |
| "loss": 0.0481, | |
| "step": 2816 | |
| }, | |
| { | |
| "epoch": 3.4534313725490198, | |
| "grad_norm": 0.3828125, | |
| "learning_rate": 5.603600376741791e-07, | |
| "loss": 0.043, | |
| "step": 2818 | |
| }, | |
| { | |
| "epoch": 3.4558823529411766, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 5.554516181270303e-07, | |
| "loss": 0.049, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 3.4583333333333335, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 5.505635260638114e-07, | |
| "loss": 0.0463, | |
| "step": 2822 | |
| }, | |
| { | |
| "epoch": 3.4607843137254903, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 5.456957838405902e-07, | |
| "loss": 0.0449, | |
| "step": 2824 | |
| }, | |
| { | |
| "epoch": 3.463235294117647, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 5.408484137203579e-07, | |
| "loss": 0.0448, | |
| "step": 2826 | |
| }, | |
| { | |
| "epoch": 3.465686274509804, | |
| "grad_norm": 0.193359375, | |
| "learning_rate": 5.360214378729361e-07, | |
| "loss": 0.0445, | |
| "step": 2828 | |
| }, | |
| { | |
| "epoch": 3.468137254901961, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 5.3121487837487e-07, | |
| "loss": 0.049, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 3.4705882352941178, | |
| "grad_norm": 0.31640625, | |
| "learning_rate": 5.264287572093285e-07, | |
| "loss": 0.0529, | |
| "step": 2832 | |
| }, | |
| { | |
| "epoch": 3.4730392156862746, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 5.216630962660057e-07, | |
| "loss": 0.0471, | |
| "step": 2834 | |
| }, | |
| { | |
| "epoch": 3.4754901960784315, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 5.169179173410178e-07, | |
| "loss": 0.046, | |
| "step": 2836 | |
| }, | |
| { | |
| "epoch": 3.4779411764705883, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 5.121932421368059e-07, | |
| "loss": 0.0478, | |
| "step": 2838 | |
| }, | |
| { | |
| "epoch": 3.480392156862745, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 5.074890922620368e-07, | |
| "loss": 0.0465, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 3.482843137254902, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 5.028054892314993e-07, | |
| "loss": 0.0407, | |
| "step": 2842 | |
| }, | |
| { | |
| "epoch": 3.485294117647059, | |
| "grad_norm": 0.2275390625, | |
| "learning_rate": 4.981424544660147e-07, | |
| "loss": 0.042, | |
| "step": 2844 | |
| }, | |
| { | |
| "epoch": 3.4877450980392157, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 4.935000092923292e-07, | |
| "loss": 0.0363, | |
| "step": 2846 | |
| }, | |
| { | |
| "epoch": 3.4901960784313726, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 4.888781749430238e-07, | |
| "loss": 0.0441, | |
| "step": 2848 | |
| }, | |
| { | |
| "epoch": 3.4926470588235294, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 4.842769725564139e-07, | |
| "loss": 0.0431, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 3.4950980392156863, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 4.796964231764517e-07, | |
| "loss": 0.0437, | |
| "step": 2852 | |
| }, | |
| { | |
| "epoch": 3.497549019607843, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 4.7513654775263316e-07, | |
| "loss": 0.048, | |
| "step": 2854 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 4.705973671398978e-07, | |
| "loss": 0.0453, | |
| "step": 2856 | |
| }, | |
| { | |
| "epoch": 3.502450980392157, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 4.6607890209853846e-07, | |
| "loss": 0.0471, | |
| "step": 2858 | |
| }, | |
| { | |
| "epoch": 3.5049019607843137, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 4.6158117329410334e-07, | |
| "loss": 0.0469, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 3.5073529411764706, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 4.571042012972993e-07, | |
| "loss": 0.0469, | |
| "step": 2862 | |
| }, | |
| { | |
| "epoch": 3.5098039215686274, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 4.5264800658390284e-07, | |
| "loss": 0.0391, | |
| "step": 2864 | |
| }, | |
| { | |
| "epoch": 3.5122549019607843, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 4.482126095346645e-07, | |
| "loss": 0.0452, | |
| "step": 2866 | |
| }, | |
| { | |
| "epoch": 3.514705882352941, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 4.4379803043521187e-07, | |
| "loss": 0.0434, | |
| "step": 2868 | |
| }, | |
| { | |
| "epoch": 3.517156862745098, | |
| "grad_norm": 0.2236328125, | |
| "learning_rate": 4.394042894759626e-07, | |
| "loss": 0.049, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 3.519607843137255, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 4.3503140675203e-07, | |
| "loss": 0.0464, | |
| "step": 2872 | |
| }, | |
| { | |
| "epoch": 3.5220588235294117, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 4.3067940226313e-07, | |
| "loss": 0.0451, | |
| "step": 2874 | |
| }, | |
| { | |
| "epoch": 3.5245098039215685, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 4.2634829591348793e-07, | |
| "loss": 0.0423, | |
| "step": 2876 | |
| }, | |
| { | |
| "epoch": 3.5269607843137254, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 4.220381075117524e-07, | |
| "loss": 0.0436, | |
| "step": 2878 | |
| }, | |
| { | |
| "epoch": 3.5294117647058822, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 4.177488567709037e-07, | |
| "loss": 0.0509, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 3.531862745098039, | |
| "grad_norm": 0.2080078125, | |
| "learning_rate": 4.1348056330815736e-07, | |
| "loss": 0.0435, | |
| "step": 2882 | |
| }, | |
| { | |
| "epoch": 3.534313725490196, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 4.0923324664488286e-07, | |
| "loss": 0.0438, | |
| "step": 2884 | |
| }, | |
| { | |
| "epoch": 3.536764705882353, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 4.0500692620651094e-07, | |
| "loss": 0.0446, | |
| "step": 2886 | |
| }, | |
| { | |
| "epoch": 3.5392156862745097, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 4.008016213224408e-07, | |
| "loss": 0.0492, | |
| "step": 2888 | |
| }, | |
| { | |
| "epoch": 3.5416666666666665, | |
| "grad_norm": 0.32421875, | |
| "learning_rate": 3.966173512259602e-07, | |
| "loss": 0.0456, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 3.5441176470588234, | |
| "grad_norm": 0.2138671875, | |
| "learning_rate": 3.9245413505414887e-07, | |
| "loss": 0.0482, | |
| "step": 2892 | |
| }, | |
| { | |
| "epoch": 3.5465686274509802, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 3.883119918477962e-07, | |
| "loss": 0.0473, | |
| "step": 2894 | |
| }, | |
| { | |
| "epoch": 3.549019607843137, | |
| "grad_norm": 0.2275390625, | |
| "learning_rate": 3.841909405513139e-07, | |
| "loss": 0.0423, | |
| "step": 2896 | |
| }, | |
| { | |
| "epoch": 3.5514705882352944, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 3.800910000126462e-07, | |
| "loss": 0.0411, | |
| "step": 2898 | |
| }, | |
| { | |
| "epoch": 3.553921568627451, | |
| "grad_norm": 0.28515625, | |
| "learning_rate": 3.760121889831869e-07, | |
| "loss": 0.049, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 3.556372549019608, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 3.719545261176916e-07, | |
| "loss": 0.0478, | |
| "step": 2902 | |
| }, | |
| { | |
| "epoch": 3.5588235294117645, | |
| "grad_norm": 0.345703125, | |
| "learning_rate": 3.679180299741936e-07, | |
| "loss": 0.0467, | |
| "step": 2904 | |
| }, | |
| { | |
| "epoch": 3.561274509803922, | |
| "grad_norm": 0.322265625, | |
| "learning_rate": 3.639027190139194e-07, | |
| "loss": 0.0534, | |
| "step": 2906 | |
| }, | |
| { | |
| "epoch": 3.563725490196078, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 3.5990861160120126e-07, | |
| "loss": 0.048, | |
| "step": 2908 | |
| }, | |
| { | |
| "epoch": 3.5661764705882355, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 3.5593572600339856e-07, | |
| "loss": 0.046, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 3.568627450980392, | |
| "grad_norm": 0.201171875, | |
| "learning_rate": 3.519840803908064e-07, | |
| "loss": 0.0453, | |
| "step": 2912 | |
| }, | |
| { | |
| "epoch": 3.571078431372549, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 3.480536928365824e-07, | |
| "loss": 0.049, | |
| "step": 2914 | |
| }, | |
| { | |
| "epoch": 3.5735294117647056, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 3.4414458131665607e-07, | |
| "loss": 0.0457, | |
| "step": 2916 | |
| }, | |
| { | |
| "epoch": 3.575980392156863, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 3.4025676370964824e-07, | |
| "loss": 0.0569, | |
| "step": 2918 | |
| }, | |
| { | |
| "epoch": 3.5784313725490198, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 3.3639025779679367e-07, | |
| "loss": 0.0448, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 3.5808823529411766, | |
| "grad_norm": 0.2158203125, | |
| "learning_rate": 3.3254508126185303e-07, | |
| "loss": 0.0439, | |
| "step": 2922 | |
| }, | |
| { | |
| "epoch": 3.5833333333333335, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 3.287212516910371e-07, | |
| "loss": 0.0449, | |
| "step": 2924 | |
| }, | |
| { | |
| "epoch": 3.5857843137254903, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 3.2491878657292643e-07, | |
| "loss": 0.0428, | |
| "step": 2926 | |
| }, | |
| { | |
| "epoch": 3.588235294117647, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 3.2113770329838536e-07, | |
| "loss": 0.048, | |
| "step": 2928 | |
| }, | |
| { | |
| "epoch": 3.590686274509804, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.1737801916049026e-07, | |
| "loss": 0.0472, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 3.593137254901961, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 3.136397513544465e-07, | |
| "loss": 0.0476, | |
| "step": 2932 | |
| }, | |
| { | |
| "epoch": 3.5955882352941178, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 3.099229169775081e-07, | |
| "loss": 0.0396, | |
| "step": 2934 | |
| }, | |
| { | |
| "epoch": 3.5980392156862746, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 3.0622753302890384e-07, | |
| "loss": 0.0495, | |
| "step": 2936 | |
| }, | |
| { | |
| "epoch": 3.6004901960784315, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 3.025536164097559e-07, | |
| "loss": 0.0459, | |
| "step": 2938 | |
| }, | |
| { | |
| "epoch": 3.6029411764705883, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 2.9890118392300493e-07, | |
| "loss": 0.0484, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 3.605392156862745, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 2.9527025227333196e-07, | |
| "loss": 0.0445, | |
| "step": 2942 | |
| }, | |
| { | |
| "epoch": 3.607843137254902, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 2.9166083806708154e-07, | |
| "loss": 0.052, | |
| "step": 2944 | |
| }, | |
| { | |
| "epoch": 3.610294117647059, | |
| "grad_norm": 0.1875, | |
| "learning_rate": 2.880729578121888e-07, | |
| "loss": 0.0398, | |
| "step": 2946 | |
| }, | |
| { | |
| "epoch": 3.6127450980392157, | |
| "grad_norm": 0.263671875, | |
| "learning_rate": 2.84506627918098e-07, | |
| "loss": 0.0438, | |
| "step": 2948 | |
| }, | |
| { | |
| "epoch": 3.6151960784313726, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 2.8096186469569466e-07, | |
| "loss": 0.0472, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 3.6176470588235294, | |
| "grad_norm": 0.259765625, | |
| "learning_rate": 2.774386843572263e-07, | |
| "loss": 0.0434, | |
| "step": 2952 | |
| }, | |
| { | |
| "epoch": 3.6200980392156863, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 2.7393710301622746e-07, | |
| "loss": 0.0414, | |
| "step": 2954 | |
| }, | |
| { | |
| "epoch": 3.622549019607843, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 2.70457136687452e-07, | |
| "loss": 0.0467, | |
| "step": 2956 | |
| }, | |
| { | |
| "epoch": 3.625, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 2.669988012867919e-07, | |
| "loss": 0.0484, | |
| "step": 2958 | |
| }, | |
| { | |
| "epoch": 3.627450980392157, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 2.6356211263121043e-07, | |
| "loss": 0.0468, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 3.6299019607843137, | |
| "grad_norm": 0.21875, | |
| "learning_rate": 2.60147086438669e-07, | |
| "loss": 0.0488, | |
| "step": 2962 | |
| }, | |
| { | |
| "epoch": 3.6323529411764706, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 2.567537383280505e-07, | |
| "loss": 0.0423, | |
| "step": 2964 | |
| }, | |
| { | |
| "epoch": 3.6348039215686274, | |
| "grad_norm": 0.248046875, | |
| "learning_rate": 2.533820838190959e-07, | |
| "loss": 0.0446, | |
| "step": 2966 | |
| }, | |
| { | |
| "epoch": 3.6372549019607843, | |
| "grad_norm": 0.33984375, | |
| "learning_rate": 2.50032138332324e-07, | |
| "loss": 0.0455, | |
| "step": 2968 | |
| }, | |
| { | |
| "epoch": 3.639705882352941, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 2.46703917188969e-07, | |
| "loss": 0.0479, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 3.642156862745098, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 2.433974356109081e-07, | |
| "loss": 0.0433, | |
| "step": 2972 | |
| }, | |
| { | |
| "epoch": 3.644607843137255, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 2.4011270872058625e-07, | |
| "loss": 0.0509, | |
| "step": 2974 | |
| }, | |
| { | |
| "epoch": 3.6470588235294117, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 2.3684975154095625e-07, | |
| "loss": 0.0399, | |
| "step": 2976 | |
| }, | |
| { | |
| "epoch": 3.6495098039215685, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 2.3360857899540457e-07, | |
| "loss": 0.0507, | |
| "step": 2978 | |
| }, | |
| { | |
| "epoch": 3.6519607843137254, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 2.303892059076812e-07, | |
| "loss": 0.0466, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 3.6544117647058822, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 2.271916470018376e-07, | |
| "loss": 0.0471, | |
| "step": 2982 | |
| }, | |
| { | |
| "epoch": 3.656862745098039, | |
| "grad_norm": 0.2451171875, | |
| "learning_rate": 2.2401591690215663e-07, | |
| "loss": 0.0473, | |
| "step": 2984 | |
| }, | |
| { | |
| "epoch": 3.659313725490196, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 2.208620301330816e-07, | |
| "loss": 0.0417, | |
| "step": 2986 | |
| }, | |
| { | |
| "epoch": 3.661764705882353, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 2.1773000111915798e-07, | |
| "loss": 0.045, | |
| "step": 2988 | |
| }, | |
| { | |
| "epoch": 3.6642156862745097, | |
| "grad_norm": 0.296875, | |
| "learning_rate": 2.1461984418496062e-07, | |
| "loss": 0.0524, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 3.6666666666666665, | |
| "grad_norm": 0.283203125, | |
| "learning_rate": 2.1153157355503274e-07, | |
| "loss": 0.0443, | |
| "step": 2992 | |
| }, | |
| { | |
| "epoch": 3.6691176470588234, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 2.084652033538165e-07, | |
| "loss": 0.046, | |
| "step": 2994 | |
| }, | |
| { | |
| "epoch": 3.6715686274509802, | |
| "grad_norm": 0.28125, | |
| "learning_rate": 2.0542074760559193e-07, | |
| "loss": 0.0435, | |
| "step": 2996 | |
| }, | |
| { | |
| "epoch": 3.674019607843137, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 2.023982202344127e-07, | |
| "loss": 0.0422, | |
| "step": 2998 | |
| }, | |
| { | |
| "epoch": 3.6764705882352944, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.993976350640392e-07, | |
| "loss": 0.0452, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 3.678921568627451, | |
| "grad_norm": 0.2412109375, | |
| "learning_rate": 1.9641900581787887e-07, | |
| "loss": 0.047, | |
| "step": 3002 | |
| }, | |
| { | |
| "epoch": 3.681372549019608, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 1.9346234611892112e-07, | |
| "loss": 0.0523, | |
| "step": 3004 | |
| }, | |
| { | |
| "epoch": 3.6838235294117645, | |
| "grad_norm": 0.1982421875, | |
| "learning_rate": 1.9052766948967738e-07, | |
| "loss": 0.0414, | |
| "step": 3006 | |
| }, | |
| { | |
| "epoch": 3.686274509803922, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 1.8761498935211675e-07, | |
| "loss": 0.0447, | |
| "step": 3008 | |
| }, | |
| { | |
| "epoch": 3.688725490196078, | |
| "grad_norm": 0.1943359375, | |
| "learning_rate": 1.8472431902760546e-07, | |
| "loss": 0.0414, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 3.6911764705882355, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 1.818556717368475e-07, | |
| "loss": 0.0443, | |
| "step": 3012 | |
| }, | |
| { | |
| "epoch": 3.693627450980392, | |
| "grad_norm": 0.2431640625, | |
| "learning_rate": 1.790090605998207e-07, | |
| "loss": 0.0443, | |
| "step": 3014 | |
| }, | |
| { | |
| "epoch": 3.696078431372549, | |
| "grad_norm": 0.294921875, | |
| "learning_rate": 1.7618449863572085e-07, | |
| "loss": 0.0461, | |
| "step": 3016 | |
| }, | |
| { | |
| "epoch": 3.6985294117647056, | |
| "grad_norm": 0.23828125, | |
| "learning_rate": 1.7338199876289984e-07, | |
| "loss": 0.0461, | |
| "step": 3018 | |
| }, | |
| { | |
| "epoch": 3.700980392156863, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 1.7060157379880537e-07, | |
| "loss": 0.0517, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 3.7034313725490198, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 1.6784323645992585e-07, | |
| "loss": 0.046, | |
| "step": 3022 | |
| }, | |
| { | |
| "epoch": 3.7058823529411766, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 1.6510699936172825e-07, | |
| "loss": 0.0474, | |
| "step": 3024 | |
| }, | |
| { | |
| "epoch": 3.7083333333333335, | |
| "grad_norm": 0.333984375, | |
| "learning_rate": 1.623928750186038e-07, | |
| "loss": 0.0418, | |
| "step": 3026 | |
| }, | |
| { | |
| "epoch": 3.7107843137254903, | |
| "grad_norm": 0.33203125, | |
| "learning_rate": 1.597008758438101e-07, | |
| "loss": 0.0469, | |
| "step": 3028 | |
| }, | |
| { | |
| "epoch": 3.713235294117647, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 1.570310141494108e-07, | |
| "loss": 0.0436, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 3.715686274509804, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 1.5438330214622544e-07, | |
| "loss": 0.0484, | |
| "step": 3032 | |
| }, | |
| { | |
| "epoch": 3.718137254901961, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 1.5175775194376686e-07, | |
| "loss": 0.0485, | |
| "step": 3034 | |
| }, | |
| { | |
| "epoch": 3.7205882352941178, | |
| "grad_norm": 0.224609375, | |
| "learning_rate": 1.491543755501912e-07, | |
| "loss": 0.0445, | |
| "step": 3036 | |
| }, | |
| { | |
| "epoch": 3.7230392156862746, | |
| "grad_norm": 0.236328125, | |
| "learning_rate": 1.4657318487224182e-07, | |
| "loss": 0.0413, | |
| "step": 3038 | |
| }, | |
| { | |
| "epoch": 3.7254901960784315, | |
| "grad_norm": 0.27734375, | |
| "learning_rate": 1.4401419171519215e-07, | |
| "loss": 0.0457, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 3.7279411764705883, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 1.4147740778279406e-07, | |
| "loss": 0.0426, | |
| "step": 3042 | |
| }, | |
| { | |
| "epoch": 3.730392156862745, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 1.3896284467722398e-07, | |
| "loss": 0.046, | |
| "step": 3044 | |
| }, | |
| { | |
| "epoch": 3.732843137254902, | |
| "grad_norm": 0.2294921875, | |
| "learning_rate": 1.3647051389903022e-07, | |
| "loss": 0.0459, | |
| "step": 3046 | |
| }, | |
| { | |
| "epoch": 3.735294117647059, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 1.3400042684707902e-07, | |
| "loss": 0.0494, | |
| "step": 3048 | |
| }, | |
| { | |
| "epoch": 3.7377450980392157, | |
| "grad_norm": 0.326171875, | |
| "learning_rate": 1.3155259481850313e-07, | |
| "loss": 0.0492, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 3.7401960784313726, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 1.2912702900865158e-07, | |
| "loss": 0.0437, | |
| "step": 3052 | |
| }, | |
| { | |
| "epoch": 3.7426470588235294, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 1.2672374051103554e-07, | |
| "loss": 0.0456, | |
| "step": 3054 | |
| }, | |
| { | |
| "epoch": 3.7450980392156863, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 1.2434274031727922e-07, | |
| "loss": 0.0495, | |
| "step": 3056 | |
| }, | |
| { | |
| "epoch": 3.747549019607843, | |
| "grad_norm": 0.24609375, | |
| "learning_rate": 1.219840393170718e-07, | |
| "loss": 0.0474, | |
| "step": 3058 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 1.1964764829811238e-07, | |
| "loss": 0.0424, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 3.752450980392157, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 1.1733357794606548e-07, | |
| "loss": 0.0458, | |
| "step": 3062 | |
| }, | |
| { | |
| "epoch": 3.7549019607843137, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 1.1504183884451014e-07, | |
| "loss": 0.0474, | |
| "step": 3064 | |
| }, | |
| { | |
| "epoch": 3.7573529411764706, | |
| "grad_norm": 0.35546875, | |
| "learning_rate": 1.1277244147489096e-07, | |
| "loss": 0.054, | |
| "step": 3066 | |
| }, | |
| { | |
| "epoch": 3.7598039215686274, | |
| "grad_norm": 0.2119140625, | |
| "learning_rate": 1.1052539621647207e-07, | |
| "loss": 0.0417, | |
| "step": 3068 | |
| }, | |
| { | |
| "epoch": 3.7622549019607843, | |
| "grad_norm": 0.388671875, | |
| "learning_rate": 1.0830071334628655e-07, | |
| "loss": 0.0426, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 3.764705882352941, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 1.060984030390938e-07, | |
| "loss": 0.0463, | |
| "step": 3072 | |
| }, | |
| { | |
| "epoch": 3.767156862745098, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 1.0391847536732946e-07, | |
| "loss": 0.0468, | |
| "step": 3074 | |
| }, | |
| { | |
| "epoch": 3.769607843137255, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 1.0176094030105943e-07, | |
| "loss": 0.0425, | |
| "step": 3076 | |
| }, | |
| { | |
| "epoch": 3.7720588235294117, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 9.962580770793706e-08, | |
| "loss": 0.0387, | |
| "step": 3078 | |
| }, | |
| { | |
| "epoch": 3.7745098039215685, | |
| "grad_norm": 0.25, | |
| "learning_rate": 9.751308735315545e-08, | |
| "loss": 0.0466, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 3.7769607843137254, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.54227888994036e-08, | |
| "loss": 0.0508, | |
| "step": 3082 | |
| }, | |
| { | |
| "epoch": 3.7794117647058822, | |
| "grad_norm": 0.291015625, | |
| "learning_rate": 9.33549219068225e-08, | |
| "loss": 0.0519, | |
| "step": 3084 | |
| }, | |
| { | |
| "epoch": 3.781862745098039, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 9.130949583296023e-08, | |
| "loss": 0.0417, | |
| "step": 3086 | |
| }, | |
| { | |
| "epoch": 3.784313725490196, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 8.928652003273087e-08, | |
| "loss": 0.0451, | |
| "step": 3088 | |
| }, | |
| { | |
| "epoch": 3.786764705882353, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 8.728600375837004e-08, | |
| "loss": 0.046, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 3.7892156862745097, | |
| "grad_norm": 0.2216796875, | |
| "learning_rate": 8.530795615939225e-08, | |
| "loss": 0.0453, | |
| "step": 3092 | |
| }, | |
| { | |
| "epoch": 3.7916666666666665, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 8.335238628255027e-08, | |
| "loss": 0.0423, | |
| "step": 3094 | |
| }, | |
| { | |
| "epoch": 3.7941176470588234, | |
| "grad_norm": 0.2421875, | |
| "learning_rate": 8.141930307179468e-08, | |
| "loss": 0.045, | |
| "step": 3096 | |
| }, | |
| { | |
| "epoch": 3.7965686274509802, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 7.950871536822891e-08, | |
| "loss": 0.0395, | |
| "step": 3098 | |
| }, | |
| { | |
| "epoch": 3.799019607843137, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 7.762063191007252e-08, | |
| "loss": 0.0406, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 3.8014705882352944, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 7.57550613326219e-08, | |
| "loss": 0.0473, | |
| "step": 3102 | |
| }, | |
| { | |
| "epoch": 3.803921568627451, | |
| "grad_norm": 0.2255859375, | |
| "learning_rate": 7.391201216820687e-08, | |
| "loss": 0.0494, | |
| "step": 3104 | |
| }, | |
| { | |
| "epoch": 3.806372549019608, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 7.209149284615413e-08, | |
| "loss": 0.0496, | |
| "step": 3106 | |
| }, | |
| { | |
| "epoch": 3.8088235294117645, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 7.029351169274945e-08, | |
| "loss": 0.0451, | |
| "step": 3108 | |
| }, | |
| { | |
| "epoch": 3.811274509803922, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 6.851807693119772e-08, | |
| "loss": 0.0415, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 3.813725490196078, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 6.676519668158576e-08, | |
| "loss": 0.0485, | |
| "step": 3112 | |
| }, | |
| { | |
| "epoch": 3.8161764705882355, | |
| "grad_norm": 0.396484375, | |
| "learning_rate": 6.503487896084792e-08, | |
| "loss": 0.05, | |
| "step": 3114 | |
| }, | |
| { | |
| "epoch": 3.818627450980392, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 6.332713168272386e-08, | |
| "loss": 0.0435, | |
| "step": 3116 | |
| }, | |
| { | |
| "epoch": 3.821078431372549, | |
| "grad_norm": 0.216796875, | |
| "learning_rate": 6.164196265772804e-08, | |
| "loss": 0.0457, | |
| "step": 3118 | |
| }, | |
| { | |
| "epoch": 3.8235294117647056, | |
| "grad_norm": 0.2353515625, | |
| "learning_rate": 5.997937959310974e-08, | |
| "loss": 0.0492, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 3.825980392156863, | |
| "grad_norm": 0.22265625, | |
| "learning_rate": 5.833939009282086e-08, | |
| "loss": 0.0438, | |
| "step": 3122 | |
| }, | |
| { | |
| "epoch": 3.8284313725490198, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 5.672200165748043e-08, | |
| "loss": 0.0431, | |
| "step": 3124 | |
| }, | |
| { | |
| "epoch": 3.8308823529411766, | |
| "grad_norm": 0.3046875, | |
| "learning_rate": 5.5127221684337376e-08, | |
| "loss": 0.0474, | |
| "step": 3126 | |
| }, | |
| { | |
| "epoch": 3.8333333333333335, | |
| "grad_norm": 0.2265625, | |
| "learning_rate": 5.355505746724166e-08, | |
| "loss": 0.0427, | |
| "step": 3128 | |
| }, | |
| { | |
| "epoch": 3.8357843137254903, | |
| "grad_norm": 0.298828125, | |
| "learning_rate": 5.2005516196607094e-08, | |
| "loss": 0.0432, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 3.838235294117647, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 5.047860495937917e-08, | |
| "loss": 0.0435, | |
| "step": 3132 | |
| }, | |
| { | |
| "epoch": 3.840686274509804, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 4.897433073900504e-08, | |
| "loss": 0.0456, | |
| "step": 3134 | |
| }, | |
| { | |
| "epoch": 3.843137254901961, | |
| "grad_norm": 0.2314453125, | |
| "learning_rate": 4.7492700415398e-08, | |
| "loss": 0.0435, | |
| "step": 3136 | |
| }, | |
| { | |
| "epoch": 3.8455882352941178, | |
| "grad_norm": 0.1865234375, | |
| "learning_rate": 4.6033720764908084e-08, | |
| "loss": 0.0429, | |
| "step": 3138 | |
| }, | |
| { | |
| "epoch": 3.8480392156862746, | |
| "grad_norm": 0.314453125, | |
| "learning_rate": 4.4597398460290985e-08, | |
| "loss": 0.0475, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 3.8504901960784315, | |
| "grad_norm": 0.306640625, | |
| "learning_rate": 4.3183740070677496e-08, | |
| "loss": 0.0421, | |
| "step": 3142 | |
| }, | |
| { | |
| "epoch": 3.8529411764705883, | |
| "grad_norm": 0.2060546875, | |
| "learning_rate": 4.179275206154354e-08, | |
| "loss": 0.0397, | |
| "step": 3144 | |
| }, | |
| { | |
| "epoch": 3.855392156862745, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 4.0424440794678555e-08, | |
| "loss": 0.0476, | |
| "step": 3146 | |
| }, | |
| { | |
| "epoch": 3.857843137254902, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 3.907881252816048e-08, | |
| "loss": 0.0498, | |
| "step": 3148 | |
| }, | |
| { | |
| "epoch": 3.860294117647059, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 3.775587341632303e-08, | |
| "loss": 0.0447, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 3.8627450980392157, | |
| "grad_norm": 0.21484375, | |
| "learning_rate": 3.645562950973014e-08, | |
| "loss": 0.0439, | |
| "step": 3152 | |
| }, | |
| { | |
| "epoch": 3.8651960784313726, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 3.517808675514767e-08, | |
| "loss": 0.0425, | |
| "step": 3154 | |
| }, | |
| { | |
| "epoch": 3.8676470588235294, | |
| "grad_norm": 0.2197265625, | |
| "learning_rate": 3.3923250995515076e-08, | |
| "loss": 0.0436, | |
| "step": 3156 | |
| }, | |
| { | |
| "epoch": 3.8700980392156863, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 3.269112796992102e-08, | |
| "loss": 0.0442, | |
| "step": 3158 | |
| }, | |
| { | |
| "epoch": 3.872549019607843, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 3.148172331357447e-08, | |
| "loss": 0.044, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 3.875, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 3.029504255778026e-08, | |
| "loss": 0.0444, | |
| "step": 3162 | |
| }, | |
| { | |
| "epoch": 3.877450980392157, | |
| "grad_norm": 0.328125, | |
| "learning_rate": 2.91310911299153e-08, | |
| "loss": 0.0452, | |
| "step": 3164 | |
| }, | |
| { | |
| "epoch": 3.8799019607843137, | |
| "grad_norm": 0.240234375, | |
| "learning_rate": 2.7989874353399616e-08, | |
| "loss": 0.0513, | |
| "step": 3166 | |
| }, | |
| { | |
| "epoch": 3.8823529411764706, | |
| "grad_norm": 0.234375, | |
| "learning_rate": 2.6871397447676418e-08, | |
| "loss": 0.0445, | |
| "step": 3168 | |
| }, | |
| { | |
| "epoch": 3.8848039215686274, | |
| "grad_norm": 0.255859375, | |
| "learning_rate": 2.5775665528186556e-08, | |
| "loss": 0.0434, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 3.8872549019607843, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 2.4702683606343537e-08, | |
| "loss": 0.0472, | |
| "step": 3172 | |
| }, | |
| { | |
| "epoch": 3.889705882352941, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 2.3652456589512983e-08, | |
| "loss": 0.0452, | |
| "step": 3174 | |
| }, | |
| { | |
| "epoch": 3.892156862745098, | |
| "grad_norm": 0.2373046875, | |
| "learning_rate": 2.2624989280988773e-08, | |
| "loss": 0.044, | |
| "step": 3176 | |
| }, | |
| { | |
| "epoch": 3.894607843137255, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 2.1620286379971932e-08, | |
| "loss": 0.0491, | |
| "step": 3178 | |
| }, | |
| { | |
| "epoch": 3.8970588235294117, | |
| "grad_norm": 0.33984375, | |
| "learning_rate": 2.063835248154844e-08, | |
| "loss": 0.0424, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 3.8995098039215685, | |
| "grad_norm": 0.265625, | |
| "learning_rate": 1.967919207666813e-08, | |
| "loss": 0.0477, | |
| "step": 3182 | |
| }, | |
| { | |
| "epoch": 3.9019607843137254, | |
| "grad_norm": 0.228515625, | |
| "learning_rate": 1.8742809552125264e-08, | |
| "loss": 0.0501, | |
| "step": 3184 | |
| }, | |
| { | |
| "epoch": 3.9044117647058822, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 1.782920919053688e-08, | |
| "loss": 0.0454, | |
| "step": 3186 | |
| }, | |
| { | |
| "epoch": 3.906862745098039, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 1.6938395170325583e-08, | |
| "loss": 0.0468, | |
| "step": 3188 | |
| }, | |
| { | |
| "epoch": 3.909313725490196, | |
| "grad_norm": 0.23046875, | |
| "learning_rate": 1.6070371565696797e-08, | |
| "loss": 0.0472, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 3.911764705882353, | |
| "grad_norm": 0.287109375, | |
| "learning_rate": 1.52251423466232e-08, | |
| "loss": 0.0459, | |
| "step": 3192 | |
| }, | |
| { | |
| "epoch": 3.9142156862745097, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 1.4402711378826429e-08, | |
| "loss": 0.0496, | |
| "step": 3194 | |
| }, | |
| { | |
| "epoch": 3.9166666666666665, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 1.3603082423757629e-08, | |
| "loss": 0.0459, | |
| "step": 3196 | |
| }, | |
| { | |
| "epoch": 3.9191176470588234, | |
| "grad_norm": 0.275390625, | |
| "learning_rate": 1.2826259138580266e-08, | |
| "loss": 0.0461, | |
| "step": 3198 | |
| }, | |
| { | |
| "epoch": 3.9215686274509802, | |
| "grad_norm": 0.267578125, | |
| "learning_rate": 1.2072245076156786e-08, | |
| "loss": 0.0413, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 3.924019607843137, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 1.1341043685028087e-08, | |
| "loss": 0.0516, | |
| "step": 3202 | |
| }, | |
| { | |
| "epoch": 3.9264705882352944, | |
| "grad_norm": 0.318359375, | |
| "learning_rate": 1.0632658309399636e-08, | |
| "loss": 0.0474, | |
| "step": 3204 | |
| }, | |
| { | |
| "epoch": 3.928921568627451, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 9.947092189126483e-09, | |
| "loss": 0.0498, | |
| "step": 3206 | |
| }, | |
| { | |
| "epoch": 3.931372549019608, | |
| "grad_norm": 0.29296875, | |
| "learning_rate": 9.284348459698277e-09, | |
| "loss": 0.0495, | |
| "step": 3208 | |
| }, | |
| { | |
| "epoch": 3.9338235294117645, | |
| "grad_norm": 0.26171875, | |
| "learning_rate": 8.64443015222427e-09, | |
| "loss": 0.0506, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 3.936274509803922, | |
| "grad_norm": 0.2392578125, | |
| "learning_rate": 8.027340193420551e-09, | |
| "loss": 0.0418, | |
| "step": 3212 | |
| }, | |
| { | |
| "epoch": 3.938725490196078, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 7.433081405594511e-09, | |
| "loss": 0.0448, | |
| "step": 3214 | |
| }, | |
| { | |
| "epoch": 3.9411764705882355, | |
| "grad_norm": 0.2138671875, | |
| "learning_rate": 6.861656506635395e-09, | |
| "loss": 0.0452, | |
| "step": 3216 | |
| }, | |
| { | |
| "epoch": 3.943627450980392, | |
| "grad_norm": 0.244140625, | |
| "learning_rate": 6.313068109998766e-09, | |
| "loss": 0.0469, | |
| "step": 3218 | |
| }, | |
| { | |
| "epoch": 3.946078431372549, | |
| "grad_norm": 0.2490234375, | |
| "learning_rate": 5.787318724695401e-09, | |
| "loss": 0.048, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 3.9485294117647056, | |
| "grad_norm": 0.26953125, | |
| "learning_rate": 5.284410755280745e-09, | |
| "loss": 0.0449, | |
| "step": 3222 | |
| }, | |
| { | |
| "epoch": 3.950980392156863, | |
| "grad_norm": 0.2890625, | |
| "learning_rate": 4.804346501843249e-09, | |
| "loss": 0.0448, | |
| "step": 3224 | |
| }, | |
| { | |
| "epoch": 3.9534313725490198, | |
| "grad_norm": 0.25, | |
| "learning_rate": 4.347128159993829e-09, | |
| "loss": 0.0453, | |
| "step": 3226 | |
| }, | |
| { | |
| "epoch": 3.9558823529411766, | |
| "grad_norm": 0.3125, | |
| "learning_rate": 3.912757820854762e-09, | |
| "loss": 0.0547, | |
| "step": 3228 | |
| }, | |
| { | |
| "epoch": 3.9583333333333335, | |
| "grad_norm": 0.2578125, | |
| "learning_rate": 3.5012374710535757e-09, | |
| "loss": 0.0457, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 3.9607843137254903, | |
| "grad_norm": 0.25, | |
| "learning_rate": 3.1125689927097303e-09, | |
| "loss": 0.0473, | |
| "step": 3232 | |
| }, | |
| { | |
| "epoch": 3.963235294117647, | |
| "grad_norm": 0.2333984375, | |
| "learning_rate": 2.746754163428511e-09, | |
| "loss": 0.0485, | |
| "step": 3234 | |
| }, | |
| { | |
| "epoch": 3.965686274509804, | |
| "grad_norm": 0.271484375, | |
| "learning_rate": 2.4037946562915914e-09, | |
| "loss": 0.0453, | |
| "step": 3236 | |
| }, | |
| { | |
| "epoch": 3.968137254901961, | |
| "grad_norm": 0.33203125, | |
| "learning_rate": 2.083692039850926e-09, | |
| "loss": 0.0501, | |
| "step": 3238 | |
| }, | |
| { | |
| "epoch": 3.9705882352941178, | |
| "grad_norm": 0.2236328125, | |
| "learning_rate": 1.786447778120981e-09, | |
| "loss": 0.0467, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 3.9730392156862746, | |
| "grad_norm": 0.232421875, | |
| "learning_rate": 1.512063230570404e-09, | |
| "loss": 0.0401, | |
| "step": 3242 | |
| }, | |
| { | |
| "epoch": 3.9754901960784315, | |
| "grad_norm": 0.2734375, | |
| "learning_rate": 1.2605396521192526e-09, | |
| "loss": 0.0501, | |
| "step": 3244 | |
| }, | |
| { | |
| "epoch": 3.9779411764705883, | |
| "grad_norm": 0.279296875, | |
| "learning_rate": 1.0318781931284438e-09, | |
| "loss": 0.0463, | |
| "step": 3246 | |
| }, | |
| { | |
| "epoch": 3.980392156862745, | |
| "grad_norm": 0.220703125, | |
| "learning_rate": 8.260798993997566e-10, | |
| "loss": 0.0447, | |
| "step": 3248 | |
| }, | |
| { | |
| "epoch": 3.982843137254902, | |
| "grad_norm": 0.302734375, | |
| "learning_rate": 6.431457121675034e-10, | |
| "loss": 0.0447, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 3.985294117647059, | |
| "grad_norm": 0.2470703125, | |
| "learning_rate": 4.830764680946453e-10, | |
| "loss": 0.0399, | |
| "step": 3252 | |
| }, | |
| { | |
| "epoch": 3.9877450980392157, | |
| "grad_norm": 0.2109375, | |
| "learning_rate": 3.4587289927112687e-10, | |
| "loss": 0.0445, | |
| "step": 3254 | |
| }, | |
| { | |
| "epoch": 3.9901960784313726, | |
| "grad_norm": 0.310546875, | |
| "learning_rate": 2.3153563320721385e-10, | |
| "loss": 0.055, | |
| "step": 3256 | |
| }, | |
| { | |
| "epoch": 3.9926470588235294, | |
| "grad_norm": 0.25390625, | |
| "learning_rate": 1.4006519283349397e-10, | |
| "loss": 0.0424, | |
| "step": 3258 | |
| }, | |
| { | |
| "epoch": 3.9950980392156863, | |
| "grad_norm": 0.30078125, | |
| "learning_rate": 7.146199649643581e-11, | |
| "loss": 0.0524, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 3.997549019607843, | |
| "grad_norm": 0.251953125, | |
| "learning_rate": 2.572635795949907e-11, | |
| "loss": 0.0502, | |
| "step": 3262 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.40234375, | |
| "learning_rate": 2.858486396473126e-12, | |
| "loss": 0.0443, | |
| "step": 3264 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_loss": 0.05356348678469658, | |
| "eval_runtime": 37.3527, | |
| "eval_samples_per_second": 134.288, | |
| "eval_steps_per_second": 1.071, | |
| "step": 3264 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "step": 3264, | |
| "total_flos": 4.2537587819544576e+20, | |
| "train_loss": 0.0519512448569431, | |
| "train_runtime": 5570.9779, | |
| "train_samples_per_second": 74.994, | |
| "train_steps_per_second": 0.586 | |
| } | |
| ], | |
| "logging_steps": 2, | |
| "max_steps": 3264, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 4, | |
| "save_steps": 816, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 4.2537587819544576e+20, | |
| "train_batch_size": 128, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |