{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 2055,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00291970802919708,
"grad_norm": 2.4375,
"learning_rate": 9.67741935483871e-07,
"loss": 1.3209164142608643,
"step": 2
},
{
"epoch": 0.00583941605839416,
"grad_norm": 10.8125,
"learning_rate": 2.9032258064516128e-06,
"loss": 2.3312554359436035,
"step": 4
},
{
"epoch": 0.008759124087591242,
"grad_norm": 3.921875,
"learning_rate": 4.838709677419355e-06,
"loss": 1.9411218166351318,
"step": 6
},
{
"epoch": 0.01167883211678832,
"grad_norm": 1.1796875,
"learning_rate": 6.774193548387097e-06,
"loss": 1.6758718490600586,
"step": 8
},
{
"epoch": 0.014598540145985401,
"grad_norm": 1.8046875,
"learning_rate": 8.70967741935484e-06,
"loss": 1.7799521684646606,
"step": 10
},
{
"epoch": 0.017518248175182483,
"grad_norm": 3.171875,
"learning_rate": 1.0645161290322582e-05,
"loss": 1.885763168334961,
"step": 12
},
{
"epoch": 0.020437956204379562,
"grad_norm": 9.0,
"learning_rate": 1.2580645161290324e-05,
"loss": 1.5260975360870361,
"step": 14
},
{
"epoch": 0.02335766423357664,
"grad_norm": 2.890625,
"learning_rate": 1.4516129032258065e-05,
"loss": 1.6625151634216309,
"step": 16
},
{
"epoch": 0.026277372262773723,
"grad_norm": 1.828125,
"learning_rate": 1.6451612903225807e-05,
"loss": 1.7963693141937256,
"step": 18
},
{
"epoch": 0.029197080291970802,
"grad_norm": 2.15625,
"learning_rate": 1.838709677419355e-05,
"loss": 1.750580072402954,
"step": 20
},
{
"epoch": 0.032116788321167884,
"grad_norm": 1.90625,
"learning_rate": 2.032258064516129e-05,
"loss": 1.7857003211975098,
"step": 22
},
{
"epoch": 0.035036496350364967,
"grad_norm": 1.6328125,
"learning_rate": 2.2258064516129034e-05,
"loss": 1.6380910873413086,
"step": 24
},
{
"epoch": 0.03795620437956204,
"grad_norm": 6.375,
"learning_rate": 2.4193548387096773e-05,
"loss": 1.709096908569336,
"step": 26
},
{
"epoch": 0.040875912408759124,
"grad_norm": 3.734375,
"learning_rate": 2.6129032258064516e-05,
"loss": 1.9888923168182373,
"step": 28
},
{
"epoch": 0.043795620437956206,
"grad_norm": 1.53125,
"learning_rate": 2.806451612903226e-05,
"loss": 1.2525625228881836,
"step": 30
},
{
"epoch": 0.04671532846715328,
"grad_norm": 3.15625,
"learning_rate": 3e-05,
"loss": 1.5124564170837402,
"step": 32
},
{
"epoch": 0.049635036496350364,
"grad_norm": 1.515625,
"learning_rate": 3.193548387096774e-05,
"loss": 1.342097282409668,
"step": 34
},
{
"epoch": 0.052554744525547446,
"grad_norm": 5.34375,
"learning_rate": 3.387096774193549e-05,
"loss": 1.1437346935272217,
"step": 36
},
{
"epoch": 0.05547445255474453,
"grad_norm": 2.921875,
"learning_rate": 3.580645161290323e-05,
"loss": 1.6300029754638672,
"step": 38
},
{
"epoch": 0.058394160583941604,
"grad_norm": 0.9375,
"learning_rate": 3.774193548387097e-05,
"loss": 1.405299186706543,
"step": 40
},
{
"epoch": 0.061313868613138686,
"grad_norm": 4.40625,
"learning_rate": 3.967741935483871e-05,
"loss": 1.386954426765442,
"step": 42
},
{
"epoch": 0.06423357664233577,
"grad_norm": 1.5078125,
"learning_rate": 4.161290322580646e-05,
"loss": 1.3544518947601318,
"step": 44
},
{
"epoch": 0.06715328467153285,
"grad_norm": 3.40625,
"learning_rate": 4.3548387096774194e-05,
"loss": 1.6254360675811768,
"step": 46
},
{
"epoch": 0.07007299270072993,
"grad_norm": 3.875,
"learning_rate": 4.548387096774194e-05,
"loss": 1.3457388877868652,
"step": 48
},
{
"epoch": 0.072992700729927,
"grad_norm": 1.0625,
"learning_rate": 4.741935483870968e-05,
"loss": 1.232921838760376,
"step": 50
},
{
"epoch": 0.07591240875912408,
"grad_norm": 1.640625,
"learning_rate": 4.935483870967742e-05,
"loss": 1.1997480392456055,
"step": 52
},
{
"epoch": 0.07883211678832117,
"grad_norm": 1.203125,
"learning_rate": 5.1290322580645164e-05,
"loss": 1.5290566682815552,
"step": 54
},
{
"epoch": 0.08175182481751825,
"grad_norm": 1.234375,
"learning_rate": 5.3225806451612906e-05,
"loss": 1.553056240081787,
"step": 56
},
{
"epoch": 0.08467153284671533,
"grad_norm": 1.09375,
"learning_rate": 5.516129032258064e-05,
"loss": 1.3679280281066895,
"step": 58
},
{
"epoch": 0.08759124087591241,
"grad_norm": 1.5234375,
"learning_rate": 5.709677419354839e-05,
"loss": 1.4744219779968262,
"step": 60
},
{
"epoch": 0.0905109489051095,
"grad_norm": 4.53125,
"learning_rate": 5.9032258064516134e-05,
"loss": 1.6063017845153809,
"step": 62
},
{
"epoch": 0.09343065693430656,
"grad_norm": 1.828125,
"learning_rate": 5.9999968940456305e-05,
"loss": 1.4462177753448486,
"step": 64
},
{
"epoch": 0.09635036496350365,
"grad_norm": 5.90625,
"learning_rate": 5.999972046456981e-05,
"loss": 1.3050040006637573,
"step": 66
},
{
"epoch": 0.09927007299270073,
"grad_norm": 0.96875,
"learning_rate": 5.9999223515266435e-05,
"loss": 1.4365456104278564,
"step": 68
},
{
"epoch": 0.10218978102189781,
"grad_norm": 1.1953125,
"learning_rate": 5.999847809748539e-05,
"loss": 1.34994637966156,
"step": 70
},
{
"epoch": 0.10510948905109489,
"grad_norm": 6.28125,
"learning_rate": 5.99974842186354e-05,
"loss": 0.9953607320785522,
"step": 72
},
{
"epoch": 0.10802919708029197,
"grad_norm": 1.6953125,
"learning_rate": 5.999624188859468e-05,
"loss": 1.26670241355896,
"step": 74
},
{
"epoch": 0.11094890510948906,
"grad_norm": 1.3671875,
"learning_rate": 5.999475111971082e-05,
"loss": 1.3074488639831543,
"step": 76
},
{
"epoch": 0.11386861313868613,
"grad_norm": 2.015625,
"learning_rate": 5.999301192680064e-05,
"loss": 1.4332489967346191,
"step": 78
},
{
"epoch": 0.11678832116788321,
"grad_norm": 1.828125,
"learning_rate": 5.999102432715005e-05,
"loss": 1.0071167945861816,
"step": 80
},
{
"epoch": 0.11970802919708029,
"grad_norm": 1.5390625,
"learning_rate": 5.998878834051391e-05,
"loss": 1.5147018432617188,
"step": 82
},
{
"epoch": 0.12262773722627737,
"grad_norm": 1.9921875,
"learning_rate": 5.998630398911579e-05,
"loss": 1.3494911193847656,
"step": 84
},
{
"epoch": 0.12554744525547445,
"grad_norm": 1.5859375,
"learning_rate": 5.998357129764777e-05,
"loss": 1.5120820999145508,
"step": 86
},
{
"epoch": 0.12846715328467154,
"grad_norm": 1.25,
"learning_rate": 5.998059029327023e-05,
"loss": 1.4139865636825562,
"step": 88
},
{
"epoch": 0.13138686131386862,
"grad_norm": 2.3125,
"learning_rate": 5.9977361005611484e-05,
"loss": 1.223384141921997,
"step": 90
},
{
"epoch": 0.1343065693430657,
"grad_norm": 2.734375,
"learning_rate": 5.9973883466767594e-05,
"loss": 1.474729299545288,
"step": 92
},
{
"epoch": 0.13722627737226278,
"grad_norm": 0.796875,
"learning_rate": 5.997015771130197e-05,
"loss": 1.2821106910705566,
"step": 94
},
{
"epoch": 0.14014598540145987,
"grad_norm": 1.46875,
"learning_rate": 5.996618377624509e-05,
"loss": 1.6019928455352783,
"step": 96
},
{
"epoch": 0.14306569343065692,
"grad_norm": 1.171875,
"learning_rate": 5.996196170109409e-05,
"loss": 1.3877289295196533,
"step": 98
},
{
"epoch": 0.145985401459854,
"grad_norm": 0.6171875,
"learning_rate": 5.995749152781236e-05,
"loss": 1.3422930240631104,
"step": 100
},
{
"epoch": 0.14890510948905109,
"grad_norm": 1.5078125,
"learning_rate": 5.9952773300829165e-05,
"loss": 1.4275691509246826,
"step": 102
},
{
"epoch": 0.15182481751824817,
"grad_norm": 0.91796875,
"learning_rate": 5.9947807067039196e-05,
"loss": 1.3908946514129639,
"step": 104
},
{
"epoch": 0.15474452554744525,
"grad_norm": 3.265625,
"learning_rate": 5.9942592875802085e-05,
"loss": 1.590527057647705,
"step": 106
},
{
"epoch": 0.15766423357664233,
"grad_norm": 2.984375,
"learning_rate": 5.993713077894193e-05,
"loss": 1.6489572525024414,
"step": 108
},
{
"epoch": 0.16058394160583941,
"grad_norm": 2.015625,
"learning_rate": 5.993142083074676e-05,
"loss": 1.2806832790374756,
"step": 110
},
{
"epoch": 0.1635036496350365,
"grad_norm": 0.8046875,
"learning_rate": 5.992546308796806e-05,
"loss": 1.4471540451049805,
"step": 112
},
{
"epoch": 0.16642335766423358,
"grad_norm": 1.5859375,
"learning_rate": 5.991925760982008e-05,
"loss": 1.28468918800354,
"step": 114
},
{
"epoch": 0.16934306569343066,
"grad_norm": 0.84375,
"learning_rate": 5.9912804457979376e-05,
"loss": 1.4314072132110596,
"step": 116
},
{
"epoch": 0.17226277372262774,
"grad_norm": 2.390625,
"learning_rate": 5.990610369658414e-05,
"loss": 1.3306201696395874,
"step": 118
},
{
"epoch": 0.17518248175182483,
"grad_norm": 0.9296875,
"learning_rate": 5.989915539223357e-05,
"loss": 1.5853054523468018,
"step": 120
},
{
"epoch": 0.1781021897810219,
"grad_norm": 0.7890625,
"learning_rate": 5.989195961398716e-05,
"loss": 1.652395486831665,
"step": 122
},
{
"epoch": 0.181021897810219,
"grad_norm": 0.84765625,
"learning_rate": 5.988451643336413e-05,
"loss": 1.625920295715332,
"step": 124
},
{
"epoch": 0.18394160583941604,
"grad_norm": 1.21875,
"learning_rate": 5.9876825924342584e-05,
"loss": 1.5102442502975464,
"step": 126
},
{
"epoch": 0.18686131386861313,
"grad_norm": 0.734375,
"learning_rate": 5.9868888163358875e-05,
"loss": 1.6674467325210571,
"step": 128
},
{
"epoch": 0.1897810218978102,
"grad_norm": 0.6484375,
"learning_rate": 5.986070322930676e-05,
"loss": 1.6165754795074463,
"step": 130
},
{
"epoch": 0.1927007299270073,
"grad_norm": 2.65625,
"learning_rate": 5.985227120353674e-05,
"loss": 1.3306710720062256,
"step": 132
},
{
"epoch": 0.19562043795620437,
"grad_norm": 1.6328125,
"learning_rate": 5.9843592169855085e-05,
"loss": 1.5615007877349854,
"step": 134
},
{
"epoch": 0.19854014598540146,
"grad_norm": 0.8125,
"learning_rate": 5.983466621452313e-05,
"loss": 1.549973964691162,
"step": 136
},
{
"epoch": 0.20145985401459854,
"grad_norm": 0.365234375,
"learning_rate": 5.982549342625639e-05,
"loss": 1.3460943698883057,
"step": 138
},
{
"epoch": 0.20437956204379562,
"grad_norm": 1.7265625,
"learning_rate": 5.981607389622362e-05,
"loss": 1.4986615180969238,
"step": 140
},
{
"epoch": 0.2072992700729927,
"grad_norm": 0.83984375,
"learning_rate": 5.980640771804599e-05,
"loss": 1.6788625717163086,
"step": 142
},
{
"epoch": 0.21021897810218979,
"grad_norm": 0.82421875,
"learning_rate": 5.979649498779611e-05,
"loss": 1.4583231210708618,
"step": 144
},
{
"epoch": 0.21313868613138687,
"grad_norm": 1.875,
"learning_rate": 5.978633580399709e-05,
"loss": 1.6779685020446777,
"step": 146
},
{
"epoch": 0.21605839416058395,
"grad_norm": 0.7734375,
"learning_rate": 5.977593026762152e-05,
"loss": 1.3658714294433594,
"step": 148
},
{
"epoch": 0.21897810218978103,
"grad_norm": 0.68359375,
"learning_rate": 5.976527848209053e-05,
"loss": 1.4529236555099487,
"step": 150
},
{
"epoch": 0.22189781021897811,
"grad_norm": 0.84765625,
"learning_rate": 5.975438055327275e-05,
"loss": 0.9998324513435364,
"step": 152
},
{
"epoch": 0.22481751824817517,
"grad_norm": 0.6328125,
"learning_rate": 5.974323658948319e-05,
"loss": 1.1067932844161987,
"step": 154
},
{
"epoch": 0.22773722627737225,
"grad_norm": 3.34375,
"learning_rate": 5.973184670148224e-05,
"loss": 0.49964094161987305,
"step": 156
},
{
"epoch": 0.23065693430656933,
"grad_norm": 0.796875,
"learning_rate": 5.972021100247455e-05,
"loss": 1.6203181743621826,
"step": 158
},
{
"epoch": 0.23357664233576642,
"grad_norm": 0.73828125,
"learning_rate": 5.9708329608107895e-05,
"loss": 1.2763311862945557,
"step": 160
},
{
"epoch": 0.2364963503649635,
"grad_norm": 0.94140625,
"learning_rate": 5.9696202636472e-05,
"loss": 1.600427508354187,
"step": 162
},
{
"epoch": 0.23941605839416058,
"grad_norm": 0.8671875,
"learning_rate": 5.968383020809743e-05,
"loss": 1.2396719455718994,
"step": 164
},
{
"epoch": 0.24233576642335766,
"grad_norm": 2.265625,
"learning_rate": 5.9671212445954354e-05,
"loss": 1.539351224899292,
"step": 166
},
{
"epoch": 0.24525547445255474,
"grad_norm": 0.45703125,
"learning_rate": 5.965834947545128e-05,
"loss": 1.2752540111541748,
"step": 168
},
{
"epoch": 0.24817518248175183,
"grad_norm": 1.875,
"learning_rate": 5.964524142443392e-05,
"loss": 1.5187466144561768,
"step": 170
},
{
"epoch": 0.2510948905109489,
"grad_norm": 0.75,
"learning_rate": 5.96318884231838e-05,
"loss": 1.3301482200622559,
"step": 172
},
{
"epoch": 0.25401459854014596,
"grad_norm": 1.0390625,
"learning_rate": 5.961829060441704e-05,
"loss": 1.5802184343338013,
"step": 174
},
{
"epoch": 0.2569343065693431,
"grad_norm": 0.64453125,
"learning_rate": 5.960444810328303e-05,
"loss": 1.2438929080963135,
"step": 176
},
{
"epoch": 0.25985401459854013,
"grad_norm": 0.76953125,
"learning_rate": 5.959036105736303e-05,
"loss": 1.2162034511566162,
"step": 178
},
{
"epoch": 0.26277372262773724,
"grad_norm": 1.5,
"learning_rate": 5.9576029606668856e-05,
"loss": 1.469679832458496,
"step": 180
},
{
"epoch": 0.2656934306569343,
"grad_norm": 1.0546875,
"learning_rate": 5.9561453893641505e-05,
"loss": 1.5327503681182861,
"step": 182
},
{
"epoch": 0.2686131386861314,
"grad_norm": 0.5625,
"learning_rate": 5.954663406314968e-05,
"loss": 1.210360050201416,
"step": 184
},
{
"epoch": 0.27153284671532846,
"grad_norm": 0.77734375,
"learning_rate": 5.953157026248836e-05,
"loss": 1.4078571796417236,
"step": 186
},
{
"epoch": 0.27445255474452557,
"grad_norm": 0.9296875,
"learning_rate": 5.951626264137741e-05,
"loss": 1.2318487167358398,
"step": 188
},
{
"epoch": 0.2773722627737226,
"grad_norm": 0.6640625,
"learning_rate": 5.9500711351960006e-05,
"loss": 1.3978914022445679,
"step": 190
},
{
"epoch": 0.28029197080291973,
"grad_norm": 2.703125,
"learning_rate": 5.9484916548801124e-05,
"loss": 1.5884613990783691,
"step": 192
},
{
"epoch": 0.2832116788321168,
"grad_norm": 0.765625,
"learning_rate": 5.946887838888612e-05,
"loss": 1.6306736469268799,
"step": 194
},
{
"epoch": 0.28613138686131384,
"grad_norm": 0.88671875,
"learning_rate": 5.945259703161898e-05,
"loss": 1.4341470003128052,
"step": 196
},
{
"epoch": 0.28905109489051095,
"grad_norm": 0.8828125,
"learning_rate": 5.943607263882093e-05,
"loss": 1.4493606090545654,
"step": 198
},
{
"epoch": 0.291970802919708,
"grad_norm": 1.171875,
"learning_rate": 5.9419305374728686e-05,
"loss": 1.7190909385681152,
"step": 200
},
{
"epoch": 0.2948905109489051,
"grad_norm": 2.1875,
"learning_rate": 5.940229540599288e-05,
"loss": 1.4846527576446533,
"step": 202
},
{
"epoch": 0.29781021897810217,
"grad_norm": 0.9453125,
"learning_rate": 5.938504290167642e-05,
"loss": 1.6837434768676758,
"step": 204
},
{
"epoch": 0.3007299270072993,
"grad_norm": 0.73046875,
"learning_rate": 5.936754803325276e-05,
"loss": 1.4253846406936646,
"step": 206
},
{
"epoch": 0.30364963503649633,
"grad_norm": 0.84765625,
"learning_rate": 5.934981097460425e-05,
"loss": 1.7993555068969727,
"step": 208
},
{
"epoch": 0.30656934306569344,
"grad_norm": 0.71484375,
"learning_rate": 5.9331831902020325e-05,
"loss": 1.6041544675827026,
"step": 210
},
{
"epoch": 0.3094890510948905,
"grad_norm": 0.5546875,
"learning_rate": 5.9313610994195884e-05,
"loss": 1.1635921001434326,
"step": 212
},
{
"epoch": 0.3124087591240876,
"grad_norm": 0.74609375,
"learning_rate": 5.929514843222939e-05,
"loss": 1.0920753479003906,
"step": 214
},
{
"epoch": 0.31532846715328466,
"grad_norm": 1.9921875,
"learning_rate": 5.9276444399621147e-05,
"loss": 1.2639129161834717,
"step": 216
},
{
"epoch": 0.3182481751824818,
"grad_norm": 3.671875,
"learning_rate": 5.9257499082271386e-05,
"loss": 1.0223140716552734,
"step": 218
},
{
"epoch": 0.32116788321167883,
"grad_norm": 1.1171875,
"learning_rate": 5.9238312668478556e-05,
"loss": 1.6704933643341064,
"step": 220
},
{
"epoch": 0.32408759124087594,
"grad_norm": 1.7578125,
"learning_rate": 5.921888534893733e-05,
"loss": 1.6343793869018555,
"step": 222
},
{
"epoch": 0.327007299270073,
"grad_norm": 1.8984375,
"learning_rate": 5.919921731673677e-05,
"loss": 1.209860920906067,
"step": 224
},
{
"epoch": 0.32992700729927005,
"grad_norm": 1.03125,
"learning_rate": 5.917930876735839e-05,
"loss": 1.49936044216156,
"step": 226
},
{
"epoch": 0.33284671532846716,
"grad_norm": 10.4375,
"learning_rate": 5.9159159898674204e-05,
"loss": 1.279283046722412,
"step": 228
},
{
"epoch": 0.3357664233576642,
"grad_norm": 1.4609375,
"learning_rate": 5.913877091094479e-05,
"loss": 1.3200433254241943,
"step": 230
},
{
"epoch": 0.3386861313868613,
"grad_norm": 3.328125,
"learning_rate": 5.911814200681727e-05,
"loss": 0.9882813692092896,
"step": 232
},
{
"epoch": 0.3416058394160584,
"grad_norm": 1.09375,
"learning_rate": 5.909727339132331e-05,
"loss": 1.4972972869873047,
"step": 234
},
{
"epoch": 0.3445255474452555,
"grad_norm": 2.234375,
"learning_rate": 5.907616527187707e-05,
"loss": 1.658118724822998,
"step": 236
},
{
"epoch": 0.34744525547445254,
"grad_norm": 0.86328125,
"learning_rate": 5.905481785827316e-05,
"loss": 1.481203556060791,
"step": 238
},
{
"epoch": 0.35036496350364965,
"grad_norm": 1.765625,
"learning_rate": 5.9033231362684553e-05,
"loss": 1.8912521600723267,
"step": 240
},
{
"epoch": 0.3532846715328467,
"grad_norm": 1.1015625,
"learning_rate": 5.901140599966045e-05,
"loss": 1.414005994796753,
"step": 242
},
{
"epoch": 0.3562043795620438,
"grad_norm": 1.1328125,
"learning_rate": 5.8989341986124194e-05,
"loss": 1.3400169610977173,
"step": 244
},
{
"epoch": 0.35912408759124087,
"grad_norm": 0.78125,
"learning_rate": 5.896703954137103e-05,
"loss": 1.231393814086914,
"step": 246
},
{
"epoch": 0.362043795620438,
"grad_norm": 1.2578125,
"learning_rate": 5.894449888706608e-05,
"loss": 1.6056867837905884,
"step": 248
},
{
"epoch": 0.36496350364963503,
"grad_norm": 1.3359375,
"learning_rate": 5.892172024724195e-05,
"loss": 1.2110869884490967,
"step": 250
},
{
"epoch": 0.3678832116788321,
"grad_norm": 3.515625,
"learning_rate": 5.889870384829663e-05,
"loss": 1.5161144733428955,
"step": 252
},
{
"epoch": 0.3708029197080292,
"grad_norm": 2.015625,
"learning_rate": 5.8875449918991225e-05,
"loss": 1.719308853149414,
"step": 254
},
{
"epoch": 0.37372262773722625,
"grad_norm": 0.89453125,
"learning_rate": 5.885195869044763e-05,
"loss": 1.349236249923706,
"step": 256
},
{
"epoch": 0.37664233576642336,
"grad_norm": 1.046875,
"learning_rate": 5.8828230396146294e-05,
"loss": 1.4929360151290894,
"step": 258
},
{
"epoch": 0.3795620437956204,
"grad_norm": 1.765625,
"learning_rate": 5.8804265271923895e-05,
"loss": 1.4849441051483154,
"step": 260
},
{
"epoch": 0.38248175182481753,
"grad_norm": 1.15625,
"learning_rate": 5.87800635559709e-05,
"loss": 1.3683178424835205,
"step": 262
},
{
"epoch": 0.3854014598540146,
"grad_norm": 0.8046875,
"learning_rate": 5.875562548882938e-05,
"loss": 1.3490722179412842,
"step": 264
},
{
"epoch": 0.3883211678832117,
"grad_norm": 0.78125,
"learning_rate": 5.87309513133904e-05,
"loss": 1.5665984153747559,
"step": 266
},
{
"epoch": 0.39124087591240875,
"grad_norm": 0.6484375,
"learning_rate": 5.870604127489179e-05,
"loss": 1.312342643737793,
"step": 268
},
{
"epoch": 0.39416058394160586,
"grad_norm": 2.46875,
"learning_rate": 5.868089562091562e-05,
"loss": 1.5042738914489746,
"step": 270
},
{
"epoch": 0.3970802919708029,
"grad_norm": 0.74609375,
"learning_rate": 5.865551460138574e-05,
"loss": 1.2545225620269775,
"step": 272
},
{
"epoch": 0.4,
"grad_norm": 1.6484375,
"learning_rate": 5.862989846856532e-05,
"loss": 1.7891290187835693,
"step": 274
},
{
"epoch": 0.4029197080291971,
"grad_norm": 0.7890625,
"learning_rate": 5.860404747705432e-05,
"loss": 1.427787184715271,
"step": 276
},
{
"epoch": 0.4058394160583942,
"grad_norm": 1.1015625,
"learning_rate": 5.857796188378699e-05,
"loss": 1.2621533870697021,
"step": 278
},
{
"epoch": 0.40875912408759124,
"grad_norm": 0.5390625,
"learning_rate": 5.855164194802929e-05,
"loss": 1.6562426090240479,
"step": 280
},
{
"epoch": 0.4116788321167883,
"grad_norm": 0.87890625,
"learning_rate": 5.85250879313763e-05,
"loss": 1.4017126560211182,
"step": 282
},
{
"epoch": 0.4145985401459854,
"grad_norm": 1.0703125,
"learning_rate": 5.8498300097749684e-05,
"loss": 1.4500172138214111,
"step": 284
},
{
"epoch": 0.41751824817518246,
"grad_norm": 0.609375,
"learning_rate": 5.847127871339499e-05,
"loss": 1.394424319267273,
"step": 286
},
{
"epoch": 0.42043795620437957,
"grad_norm": 0.88671875,
"learning_rate": 5.844402404687906e-05,
"loss": 1.4016766548156738,
"step": 288
},
{
"epoch": 0.4233576642335766,
"grad_norm": 0.80078125,
"learning_rate": 5.8416536369087306e-05,
"loss": 1.4250332117080688,
"step": 290
},
{
"epoch": 0.42627737226277373,
"grad_norm": 0.828125,
"learning_rate": 5.8388815953221104e-05,
"loss": 1.2282607555389404,
"step": 292
},
{
"epoch": 0.4291970802919708,
"grad_norm": 0.9296875,
"learning_rate": 5.836086307479496e-05,
"loss": 1.278720736503601,
"step": 294
},
{
"epoch": 0.4321167883211679,
"grad_norm": 1.3671875,
"learning_rate": 5.833267801163389e-05,
"loss": 1.6063511371612549,
"step": 296
},
{
"epoch": 0.43503649635036495,
"grad_norm": 0.66796875,
"learning_rate": 5.8304261043870564e-05,
"loss": 1.606541633605957,
"step": 298
},
{
"epoch": 0.43795620437956206,
"grad_norm": 1.8359375,
"learning_rate": 5.8275612453942596e-05,
"loss": 1.1832222938537598,
"step": 300
},
{
"epoch": 0.4408759124087591,
"grad_norm": 1.5703125,
"learning_rate": 5.8246732526589704e-05,
"loss": 1.3833293914794922,
"step": 302
},
{
"epoch": 0.44379562043795623,
"grad_norm": 0.83203125,
"learning_rate": 5.821762154885085e-05,
"loss": 1.4438910484313965,
"step": 304
},
{
"epoch": 0.4467153284671533,
"grad_norm": 1.09375,
"learning_rate": 5.818827981006144e-05,
"loss": 1.263401985168457,
"step": 306
},
{
"epoch": 0.44963503649635034,
"grad_norm": 3.5625,
"learning_rate": 5.815870760185041e-05,
"loss": 1.0707682371139526,
"step": 308
},
{
"epoch": 0.45255474452554745,
"grad_norm": 2.28125,
"learning_rate": 5.812890521813736e-05,
"loss": 1.8427000045776367,
"step": 310
},
{
"epoch": 0.4554744525547445,
"grad_norm": 0.98046875,
"learning_rate": 5.8098872955129595e-05,
"loss": 1.423462152481079,
"step": 312
},
{
"epoch": 0.4583941605839416,
"grad_norm": 0.69921875,
"learning_rate": 5.806861111131923e-05,
"loss": 1.3223985433578491,
"step": 314
},
{
"epoch": 0.46131386861313867,
"grad_norm": 0.76953125,
"learning_rate": 5.803811998748017e-05,
"loss": 1.369985818862915,
"step": 316
},
{
"epoch": 0.4642335766423358,
"grad_norm": 0.91015625,
"learning_rate": 5.8007399886665145e-05,
"loss": 1.402444839477539,
"step": 318
},
{
"epoch": 0.46715328467153283,
"grad_norm": 0.76171875,
"learning_rate": 5.797645111420271e-05,
"loss": 1.665392518043518,
"step": 320
},
{
"epoch": 0.47007299270072994,
"grad_norm": 0.84375,
"learning_rate": 5.7945273977694205e-05,
"loss": 1.399198055267334,
"step": 322
},
{
"epoch": 0.472992700729927,
"grad_norm": 0.72265625,
"learning_rate": 5.791386878701066e-05,
"loss": 1.3573551177978516,
"step": 324
},
{
"epoch": 0.4759124087591241,
"grad_norm": 1.2265625,
"learning_rate": 5.7882235854289785e-05,
"loss": 1.3456391096115112,
"step": 326
},
{
"epoch": 0.47883211678832116,
"grad_norm": 1.078125,
"learning_rate": 5.78503754939328e-05,
"loss": 1.5439651012420654,
"step": 328
},
{
"epoch": 0.48175182481751827,
"grad_norm": 0.9453125,
"learning_rate": 5.7818288022601355e-05,
"loss": 1.1991386413574219,
"step": 330
},
{
"epoch": 0.4846715328467153,
"grad_norm": 1.0,
"learning_rate": 5.7785973759214365e-05,
"loss": 1.4791953563690186,
"step": 332
},
{
"epoch": 0.48759124087591244,
"grad_norm": 1.4609375,
"learning_rate": 5.7753433024944834e-05,
"loss": 1.6375491619110107,
"step": 334
},
{
"epoch": 0.4905109489051095,
"grad_norm": 0.419921875,
"learning_rate": 5.772066614321667e-05,
"loss": 1.0832901000976562,
"step": 336
},
{
"epoch": 0.49343065693430654,
"grad_norm": 2.546875,
"learning_rate": 5.7687673439701484e-05,
"loss": 1.662909746170044,
"step": 338
},
{
"epoch": 0.49635036496350365,
"grad_norm": 1.25,
"learning_rate": 5.7654455242315315e-05,
"loss": 1.498556137084961,
"step": 340
},
{
"epoch": 0.4992700729927007,
"grad_norm": 2.53125,
"learning_rate": 5.7621011881215456e-05,
"loss": 1.5630645751953125,
"step": 342
},
{
"epoch": 0.5021897810218978,
"grad_norm": 1.21875,
"learning_rate": 5.758734368879703e-05,
"loss": 1.4207791090011597,
"step": 344
},
{
"epoch": 0.5051094890510949,
"grad_norm": 0.80859375,
"learning_rate": 5.755345099968983e-05,
"loss": 1.4251861572265625,
"step": 346
},
{
"epoch": 0.5080291970802919,
"grad_norm": 0.6015625,
"learning_rate": 5.751933415075492e-05,
"loss": 1.4990272521972656,
"step": 348
},
{
"epoch": 0.5109489051094891,
"grad_norm": 0.78125,
"learning_rate": 5.74849934810813e-05,
"loss": 1.2336238622665405,
"step": 350
},
{
"epoch": 0.5138686131386861,
"grad_norm": 1.0859375,
"learning_rate": 5.7450429331982476e-05,
"loss": 1.613104224205017,
"step": 352
},
{
"epoch": 0.5167883211678832,
"grad_norm": 0.69140625,
"learning_rate": 5.741564204699324e-05,
"loss": 1.3290159702301025,
"step": 354
},
{
"epoch": 0.5197080291970803,
"grad_norm": 0.6796875,
"learning_rate": 5.738063197186605e-05,
"loss": 1.3364830017089844,
"step": 356
},
{
"epoch": 0.5226277372262774,
"grad_norm": 1.109375,
"learning_rate": 5.734539945456771e-05,
"loss": 1.415923833847046,
"step": 358
},
{
"epoch": 0.5255474452554745,
"grad_norm": 0.65625,
"learning_rate": 5.730994484527591e-05,
"loss": 1.5084344148635864,
"step": 360
},
{
"epoch": 0.5284671532846715,
"grad_norm": 1.3125,
"learning_rate": 5.7274268496375714e-05,
"loss": 1.54220449924469,
"step": 362
},
{
"epoch": 0.5313868613138686,
"grad_norm": 0.8203125,
"learning_rate": 5.723837076245603e-05,
"loss": 1.1953389644622803,
"step": 364
},
{
"epoch": 0.5343065693430656,
"grad_norm": 1.7578125,
"learning_rate": 5.7202252000306173e-05,
"loss": 1.7339110374450684,
"step": 366
},
{
"epoch": 0.5372262773722628,
"grad_norm": 0.77734375,
"learning_rate": 5.716591256891225e-05,
"loss": 1.3485782146453857,
"step": 368
},
{
"epoch": 0.5401459854014599,
"grad_norm": 0.7109375,
"learning_rate": 5.712935282945359e-05,
"loss": 1.2649073600769043,
"step": 370
},
{
"epoch": 0.5430656934306569,
"grad_norm": 1.7421875,
"learning_rate": 5.709257314529922e-05,
"loss": 1.0904498100280762,
"step": 372
},
{
"epoch": 0.545985401459854,
"grad_norm": 0.8046875,
"learning_rate": 5.705557388200414e-05,
"loss": 1.5211831331253052,
"step": 374
},
{
"epoch": 0.5489051094890511,
"grad_norm": 2.5,
"learning_rate": 5.7018355407305835e-05,
"loss": 1.6481215953826904,
"step": 376
},
{
"epoch": 0.5518248175182482,
"grad_norm": 3.21875,
"learning_rate": 5.6980918091120475e-05,
"loss": 1.6256749629974365,
"step": 378
},
{
"epoch": 0.5547445255474452,
"grad_norm": 0.859375,
"learning_rate": 5.694326230553934e-05,
"loss": 1.287406086921692,
"step": 380
},
{
"epoch": 0.5576642335766423,
"grad_norm": 0.76171875,
"learning_rate": 5.690538842482509e-05,
"loss": 1.3722602128982544,
"step": 382
},
{
"epoch": 0.5605839416058395,
"grad_norm": 0.69921875,
"learning_rate": 5.686729682540806e-05,
"loss": 1.3319132328033447,
"step": 384
},
{
"epoch": 0.5635036496350365,
"grad_norm": 0.61328125,
"learning_rate": 5.6828987885882424e-05,
"loss": 1.3779847621917725,
"step": 386
},
{
"epoch": 0.5664233576642336,
"grad_norm": 0.89453125,
"learning_rate": 5.679046198700261e-05,
"loss": 1.652022361755371,
"step": 388
},
{
"epoch": 0.5693430656934306,
"grad_norm": 0.57421875,
"learning_rate": 5.675171951167935e-05,
"loss": 1.425034999847412,
"step": 390
},
{
"epoch": 0.5722627737226277,
"grad_norm": 0.64453125,
"learning_rate": 5.6712760844975945e-05,
"loss": 1.275322437286377,
"step": 392
},
{
"epoch": 0.5751824817518248,
"grad_norm": 1.15625,
"learning_rate": 5.6673586374104475e-05,
"loss": 1.4515531063079834,
"step": 394
},
{
"epoch": 0.5781021897810219,
"grad_norm": 1.703125,
"learning_rate": 5.663419648842187e-05,
"loss": 1.622361421585083,
"step": 396
},
{
"epoch": 0.581021897810219,
"grad_norm": 0.7421875,
"learning_rate": 5.6594591579426065e-05,
"loss": 1.6139750480651855,
"step": 398
},
{
"epoch": 0.583941605839416,
"grad_norm": 1.015625,
"learning_rate": 5.655477204075218e-05,
"loss": 1.1461379528045654,
"step": 400
},
{
"epoch": 0.5868613138686132,
"grad_norm": 0.69140625,
"learning_rate": 5.651473826816849e-05,
"loss": 1.5190107822418213,
"step": 402
},
{
"epoch": 0.5897810218978102,
"grad_norm": 1.0859375,
"learning_rate": 5.6474490659572564e-05,
"loss": 1.3880577087402344,
"step": 404
},
{
"epoch": 0.5927007299270073,
"grad_norm": 0.5703125,
"learning_rate": 5.643402961498734e-05,
"loss": 1.2586677074432373,
"step": 406
},
{
"epoch": 0.5956204379562043,
"grad_norm": 1.2421875,
"learning_rate": 5.6393355536557044e-05,
"loss": 1.5116124153137207,
"step": 408
},
{
"epoch": 0.5985401459854015,
"grad_norm": 0.8515625,
"learning_rate": 5.635246882854331e-05,
"loss": 1.4478124380111694,
"step": 410
},
{
"epoch": 0.6014598540145986,
"grad_norm": 0.68359375,
"learning_rate": 5.631136989732107e-05,
"loss": 1.2513399124145508,
"step": 412
},
{
"epoch": 0.6043795620437956,
"grad_norm": 1.109375,
"learning_rate": 5.6270059151374555e-05,
"loss": 1.342003345489502,
"step": 414
},
{
"epoch": 0.6072992700729927,
"grad_norm": 0.6015625,
"learning_rate": 5.622853700129328e-05,
"loss": 1.3598436117172241,
"step": 416
},
{
"epoch": 0.6102189781021898,
"grad_norm": 5.09375,
"learning_rate": 5.6186803859767854e-05,
"loss": 1.286314845085144,
"step": 418
},
{
"epoch": 0.6131386861313869,
"grad_norm": 2.328125,
"learning_rate": 5.614486014158596e-05,
"loss": 1.7422081232070923,
"step": 420
},
{
"epoch": 0.6160583941605839,
"grad_norm": 1.015625,
"learning_rate": 5.610270626362824e-05,
"loss": 1.3796322345733643,
"step": 422
},
{
"epoch": 0.618978102189781,
"grad_norm": 1.125,
"learning_rate": 5.6060342644864076e-05,
"loss": 1.5898281335830688,
"step": 424
},
{
"epoch": 0.621897810218978,
"grad_norm": 0.703125,
"learning_rate": 5.6017769706347517e-05,
"loss": 1.2560129165649414,
"step": 426
},
{
"epoch": 0.6248175182481752,
"grad_norm": 0.89453125,
"learning_rate": 5.597498787121305e-05,
"loss": 1.4159992933273315,
"step": 428
},
{
"epoch": 0.6277372262773723,
"grad_norm": 0.9296875,
"learning_rate": 5.593199756467134e-05,
"loss": 1.3794710636138916,
"step": 430
},
{
"epoch": 0.6306569343065693,
"grad_norm": 2.265625,
"learning_rate": 5.588879921400514e-05,
"loss": 1.5122029781341553,
"step": 432
},
{
"epoch": 0.6335766423357664,
"grad_norm": 0.87890625,
"learning_rate": 5.584539324856488e-05,
"loss": 1.33951997756958,
"step": 434
},
{
"epoch": 0.6364963503649635,
"grad_norm": 0.62890625,
"learning_rate": 5.580178009976456e-05,
"loss": 1.353576421737671,
"step": 436
},
{
"epoch": 0.6394160583941606,
"grad_norm": 2.5,
"learning_rate": 5.575796020107735e-05,
"loss": 1.1946585178375244,
"step": 438
},
{
"epoch": 0.6423357664233577,
"grad_norm": 0.77734375,
"learning_rate": 5.571393398803129e-05,
"loss": 1.3554836511611938,
"step": 440
},
{
"epoch": 0.6452554744525547,
"grad_norm": 0.8671875,
"learning_rate": 5.566970189820504e-05,
"loss": 1.3375674486160278,
"step": 442
},
{
"epoch": 0.6481751824817519,
"grad_norm": 0.55859375,
"learning_rate": 5.562526437122347e-05,
"loss": 1.5423380136489868,
"step": 444
},
{
"epoch": 0.6510948905109489,
"grad_norm": 0.7109375,
"learning_rate": 5.558062184875324e-05,
"loss": 1.326158046722412,
"step": 446
},
{
"epoch": 0.654014598540146,
"grad_norm": 2.390625,
"learning_rate": 5.553577477449855e-05,
"loss": 1.5753459930419922,
"step": 448
},
{
"epoch": 0.656934306569343,
"grad_norm": 0.62109375,
"learning_rate": 5.5490723594196604e-05,
"loss": 1.6284210681915283,
"step": 450
},
{
"epoch": 0.6598540145985401,
"grad_norm": 0.63671875,
"learning_rate": 5.544546875561323e-05,
"loss": 1.5892188549041748,
"step": 452
},
{
"epoch": 0.6627737226277373,
"grad_norm": 0.373046875,
"learning_rate": 5.5400010708538445e-05,
"loss": 1.3458421230316162,
"step": 454
},
{
"epoch": 0.6656934306569343,
"grad_norm": 0.66796875,
"learning_rate": 5.535434990478194e-05,
"loss": 1.395503044128418,
"step": 456
},
{
"epoch": 0.6686131386861314,
"grad_norm": 3.078125,
"learning_rate": 5.530848679816864e-05,
"loss": 1.215213656425476,
"step": 458
},
{
"epoch": 0.6715328467153284,
"grad_norm": 0.609375,
"learning_rate": 5.526242184453416e-05,
"loss": 1.3465626239776611,
"step": 460
},
{
"epoch": 0.6744525547445256,
"grad_norm": 0.6171875,
"learning_rate": 5.521615550172024e-05,
"loss": 1.3042705059051514,
"step": 462
},
{
"epoch": 0.6773722627737226,
"grad_norm": 0.7890625,
"learning_rate": 5.516968822957031e-05,
"loss": 1.0415093898773193,
"step": 464
},
{
"epoch": 0.6802919708029197,
"grad_norm": 1.0,
"learning_rate": 5.5123020489924794e-05,
"loss": 1.292935848236084,
"step": 466
},
{
"epoch": 0.6832116788321168,
"grad_norm": 0.60546875,
"learning_rate": 5.507615274661659e-05,
"loss": 1.3761882781982422,
"step": 468
},
{
"epoch": 0.6861313868613139,
"grad_norm": 3.3125,
"learning_rate": 5.502908546546645e-05,
"loss": 1.4313558340072632,
"step": 470
},
{
"epoch": 0.689051094890511,
"grad_norm": 0.9765625,
"learning_rate": 5.498181911427831e-05,
"loss": 1.359558343887329,
"step": 472
},
{
"epoch": 0.691970802919708,
"grad_norm": 0.9921875,
"learning_rate": 5.493435416283471e-05,
"loss": 1.3049167394638062,
"step": 474
},
{
"epoch": 0.6948905109489051,
"grad_norm": 0.79296875,
"learning_rate": 5.488669108289207e-05,
"loss": 1.2260148525238037,
"step": 476
},
{
"epoch": 0.6978102189781021,
"grad_norm": 0.7890625,
"learning_rate": 5.4838830348176e-05,
"loss": 1.3627452850341797,
"step": 478
},
{
"epoch": 0.7007299270072993,
"grad_norm": 4.21875,
"learning_rate": 5.479077243437665e-05,
"loss": 1.3585658073425293,
"step": 480
},
{
"epoch": 0.7036496350364964,
"grad_norm": 1.1484375,
"learning_rate": 5.474251781914393e-05,
"loss": 1.4558300971984863,
"step": 482
},
{
"epoch": 0.7065693430656934,
"grad_norm": 1.0234375,
"learning_rate": 5.469406698208276e-05,
"loss": 1.4577538967132568,
"step": 484
},
{
"epoch": 0.7094890510948905,
"grad_norm": 0.6484375,
"learning_rate": 5.4645420404748316e-05,
"loss": 1.3028912544250488,
"step": 486
},
{
"epoch": 0.7124087591240876,
"grad_norm": 0.515625,
"learning_rate": 5.459657857064128e-05,
"loss": 1.2662179470062256,
"step": 488
},
{
"epoch": 0.7153284671532847,
"grad_norm": 0.578125,
"learning_rate": 5.454754196520294e-05,
"loss": 1.5223736763000488,
"step": 490
},
{
"epoch": 0.7182481751824817,
"grad_norm": 0.91015625,
"learning_rate": 5.449831107581049e-05,
"loss": 1.2283718585968018,
"step": 492
},
{
"epoch": 0.7211678832116788,
"grad_norm": 0.64453125,
"learning_rate": 5.444888639177207e-05,
"loss": 1.3435661792755127,
"step": 494
},
{
"epoch": 0.724087591240876,
"grad_norm": 1.5625,
"learning_rate": 5.439926840432201e-05,
"loss": 1.3870526552200317,
"step": 496
},
{
"epoch": 0.727007299270073,
"grad_norm": 0.640625,
"learning_rate": 5.434945760661583e-05,
"loss": 1.2267348766326904,
"step": 498
},
{
"epoch": 0.7299270072992701,
"grad_norm": 0.7265625,
"learning_rate": 5.429945449372547e-05,
"loss": 1.3859362602233887,
"step": 500
},
{
"epoch": 0.7328467153284671,
"grad_norm": 0.65234375,
"learning_rate": 5.42492595626342e-05,
"loss": 1.4018727540969849,
"step": 502
},
{
"epoch": 0.7357664233576642,
"grad_norm": 0.66796875,
"learning_rate": 5.4198873312231875e-05,
"loss": 1.6720361709594727,
"step": 504
},
{
"epoch": 0.7386861313868613,
"grad_norm": 0.5,
"learning_rate": 5.414829624330986e-05,
"loss": 1.219796895980835,
"step": 506
},
{
"epoch": 0.7416058394160584,
"grad_norm": 0.5546875,
"learning_rate": 5.4097528858556036e-05,
"loss": 1.0394165515899658,
"step": 508
},
{
"epoch": 0.7445255474452555,
"grad_norm": 0.625,
"learning_rate": 5.404657166254986e-05,
"loss": 0.7834327220916748,
"step": 510
},
{
"epoch": 0.7474452554744525,
"grad_norm": 0.77734375,
"learning_rate": 5.399542516175736e-05,
"loss": 1.3642323017120361,
"step": 512
},
{
"epoch": 0.7503649635036497,
"grad_norm": 0.6015625,
"learning_rate": 5.394408986452606e-05,
"loss": 1.352614164352417,
"step": 514
},
{
"epoch": 0.7532846715328467,
"grad_norm": 0.57421875,
"learning_rate": 5.3892566281079915e-05,
"loss": 1.3700366020202637,
"step": 516
},
{
"epoch": 0.7562043795620438,
"grad_norm": 1.375,
"learning_rate": 5.384085492351428e-05,
"loss": 1.157553791999817,
"step": 518
},
{
"epoch": 0.7591240875912408,
"grad_norm": 6.46875,
"learning_rate": 5.378895630579083e-05,
"loss": 0.9500113725662231,
"step": 520
},
{
"epoch": 0.762043795620438,
"grad_norm": 0.6953125,
"learning_rate": 5.3736870943732385e-05,
"loss": 1.3946928977966309,
"step": 522
},
{
"epoch": 0.7649635036496351,
"grad_norm": 1.3515625,
"learning_rate": 5.3684599355017876e-05,
"loss": 1.552666425704956,
"step": 524
},
{
"epoch": 0.7678832116788321,
"grad_norm": 2.625,
"learning_rate": 5.363214205917708e-05,
"loss": 1.5947957038879395,
"step": 526
},
{
"epoch": 0.7708029197080292,
"grad_norm": 1.03125,
"learning_rate": 5.3579499577585615e-05,
"loss": 1.2542707920074463,
"step": 528
},
{
"epoch": 0.7737226277372263,
"grad_norm": 1.0703125,
"learning_rate": 5.352667243345962e-05,
"loss": 1.2413777112960815,
"step": 530
},
{
"epoch": 0.7766423357664234,
"grad_norm": 2.375,
"learning_rate": 5.3473661151850554e-05,
"loss": 1.6848254203796387,
"step": 532
},
{
"epoch": 0.7795620437956204,
"grad_norm": 2.171875,
"learning_rate": 5.342046625964013e-05,
"loss": 1.7417545318603516,
"step": 534
},
{
"epoch": 0.7824817518248175,
"grad_norm": 0.82421875,
"learning_rate": 5.33670882855349e-05,
"loss": 1.4168747663497925,
"step": 536
},
{
"epoch": 0.7854014598540145,
"grad_norm": 1.15625,
"learning_rate": 5.331352776006112e-05,
"loss": 1.4896337985992432,
"step": 538
},
{
"epoch": 0.7883211678832117,
"grad_norm": 0.671875,
"learning_rate": 5.325978521555939e-05,
"loss": 1.3501195907592773,
"step": 540
},
{
"epoch": 0.7912408759124088,
"grad_norm": 1.7890625,
"learning_rate": 5.3205861186179446e-05,
"loss": 1.5467338562011719,
"step": 542
},
{
"epoch": 0.7941605839416058,
"grad_norm": 1.2890625,
"learning_rate": 5.31517562078748e-05,
"loss": 1.7124128341674805,
"step": 544
},
{
"epoch": 0.7970802919708029,
"grad_norm": 0.62890625,
"learning_rate": 5.3097470818397395e-05,
"loss": 1.580247163772583,
"step": 546
},
{
"epoch": 0.8,
"grad_norm": 1.328125,
"learning_rate": 5.304300555729233e-05,
"loss": 1.5659313201904297,
"step": 548
},
{
"epoch": 0.8029197080291971,
"grad_norm": 1.0,
"learning_rate": 5.298836096589243e-05,
"loss": 1.5881831645965576,
"step": 550
},
{
"epoch": 0.8058394160583942,
"grad_norm": 0.70703125,
"learning_rate": 5.2933537587312905e-05,
"loss": 1.5077919960021973,
"step": 552
},
{
"epoch": 0.8087591240875912,
"grad_norm": 1.53125,
"learning_rate": 5.2878535966445946e-05,
"loss": 1.55655837059021,
"step": 554
},
{
"epoch": 0.8116788321167884,
"grad_norm": 0.61328125,
"learning_rate": 5.282335664995526e-05,
"loss": 1.2932486534118652,
"step": 556
},
{
"epoch": 0.8145985401459854,
"grad_norm": 0.71484375,
"learning_rate": 5.2768000186270745e-05,
"loss": 1.3656586408615112,
"step": 558
},
{
"epoch": 0.8175182481751825,
"grad_norm": 1.3671875,
"learning_rate": 5.2712467125582924e-05,
"loss": 1.5890371799468994,
"step": 560
},
{
"epoch": 0.8204379562043795,
"grad_norm": 0.65625,
"learning_rate": 5.2656758019837565e-05,
"loss": 1.6321032047271729,
"step": 562
},
{
"epoch": 0.8233576642335766,
"grad_norm": 0.85546875,
"learning_rate": 5.2600873422730094e-05,
"loss": 1.3365864753723145,
"step": 564
},
{
"epoch": 0.8262773722627738,
"grad_norm": 0.55078125,
"learning_rate": 5.254481388970022e-05,
"loss": 1.2258787155151367,
"step": 566
},
{
"epoch": 0.8291970802919708,
"grad_norm": 1.4609375,
"learning_rate": 5.248857997792632e-05,
"loss": 1.5853493213653564,
"step": 568
},
{
"epoch": 0.8321167883211679,
"grad_norm": 0.73828125,
"learning_rate": 5.2432172246319925e-05,
"loss": 1.3046971559524536,
"step": 570
},
{
"epoch": 0.8350364963503649,
"grad_norm": 0.74609375,
"learning_rate": 5.2375591255520176e-05,
"loss": 1.4615299701690674,
"step": 572
},
{
"epoch": 0.8379562043795621,
"grad_norm": 0.5390625,
"learning_rate": 5.231883756788825e-05,
"loss": 1.32985520362854,
"step": 574
},
{
"epoch": 0.8408759124087591,
"grad_norm": 0.765625,
"learning_rate": 5.226191174750177e-05,
"loss": 1.5649585723876953,
"step": 576
},
{
"epoch": 0.8437956204379562,
"grad_norm": 1.3125,
"learning_rate": 5.220481436014916e-05,
"loss": 1.497056007385254,
"step": 578
},
{
"epoch": 0.8467153284671532,
"grad_norm": 0.7734375,
"learning_rate": 5.2147545973324114e-05,
"loss": 1.3932712078094482,
"step": 580
},
{
"epoch": 0.8496350364963504,
"grad_norm": 0.640625,
"learning_rate": 5.209010715621985e-05,
"loss": 1.2655208110809326,
"step": 582
},
{
"epoch": 0.8525547445255475,
"grad_norm": 0.75390625,
"learning_rate": 5.2032498479723515e-05,
"loss": 1.1907432079315186,
"step": 584
},
{
"epoch": 0.8554744525547445,
"grad_norm": 1.1171875,
"learning_rate": 5.19747205164105e-05,
"loss": 1.4104052782058716,
"step": 586
},
{
"epoch": 0.8583941605839416,
"grad_norm": 0.5,
"learning_rate": 5.1916773840538756e-05,
"loss": 1.3563584089279175,
"step": 588
},
{
"epoch": 0.8613138686131386,
"grad_norm": 0.96875,
"learning_rate": 5.185865902804304e-05,
"loss": 1.255893349647522,
"step": 590
},
{
"epoch": 0.8642335766423358,
"grad_norm": 0.62109375,
"learning_rate": 5.180037665652928e-05,
"loss": 1.231784701347351,
"step": 592
},
{
"epoch": 0.8671532846715329,
"grad_norm": 0.640625,
"learning_rate": 5.1741927305268744e-05,
"loss": 1.2044652700424194,
"step": 594
},
{
"epoch": 0.8700729927007299,
"grad_norm": 0.515625,
"learning_rate": 5.1683311555192316e-05,
"loss": 1.2496631145477295,
"step": 596
},
{
"epoch": 0.872992700729927,
"grad_norm": 0.53515625,
"learning_rate": 5.162452998888474e-05,
"loss": 1.2133772373199463,
"step": 598
},
{
"epoch": 0.8759124087591241,
"grad_norm": 0.67578125,
"learning_rate": 5.1565583190578835e-05,
"loss": 1.2104904651641846,
"step": 600
},
{
"epoch": 0.8788321167883212,
"grad_norm": 1.953125,
"learning_rate": 5.150647174614963e-05,
"loss": 1.2359637022018433,
"step": 602
},
{
"epoch": 0.8817518248175182,
"grad_norm": 0.67578125,
"learning_rate": 5.144719624310861e-05,
"loss": 1.3523967266082764,
"step": 604
},
{
"epoch": 0.8846715328467153,
"grad_norm": 0.859375,
"learning_rate": 5.138775727059781e-05,
"loss": 1.2998895645141602,
"step": 606
},
{
"epoch": 0.8875912408759125,
"grad_norm": 2.78125,
"learning_rate": 5.132815541938404e-05,
"loss": 1.2893991470336914,
"step": 608
},
{
"epoch": 0.8905109489051095,
"grad_norm": 1.21875,
"learning_rate": 5.126839128185294e-05,
"loss": 1.493467092514038,
"step": 610
},
{
"epoch": 0.8934306569343066,
"grad_norm": 0.49609375,
"learning_rate": 5.120846545200312e-05,
"loss": 1.2961106300354004,
"step": 612
},
{
"epoch": 0.8963503649635036,
"grad_norm": 2.40625,
"learning_rate": 5.11483785254403e-05,
"loss": 1.7353638410568237,
"step": 614
},
{
"epoch": 0.8992700729927007,
"grad_norm": 1.125,
"learning_rate": 5.108813109937129e-05,
"loss": 1.246555209159851,
"step": 616
},
{
"epoch": 0.9021897810218978,
"grad_norm": 2.140625,
"learning_rate": 5.102772377259815e-05,
"loss": 0.9241611957550049,
"step": 618
},
{
"epoch": 0.9051094890510949,
"grad_norm": 0.96875,
"learning_rate": 5.096715714551218e-05,
"loss": 1.3003945350646973,
"step": 620
},
{
"epoch": 0.908029197080292,
"grad_norm": 1.40625,
"learning_rate": 5.0906431820087985e-05,
"loss": 1.478858232498169,
"step": 622
},
{
"epoch": 0.910948905109489,
"grad_norm": 0.78515625,
"learning_rate": 5.0845548399877476e-05,
"loss": 1.355273723602295,
"step": 624
},
{
"epoch": 0.9138686131386862,
"grad_norm": 0.6484375,
"learning_rate": 5.078450749000388e-05,
"loss": 1.3407752513885498,
"step": 626
},
{
"epoch": 0.9167883211678832,
"grad_norm": 2.4375,
"learning_rate": 5.072330969715571e-05,
"loss": 1.6535050868988037,
"step": 628
},
{
"epoch": 0.9197080291970803,
"grad_norm": 0.88671875,
"learning_rate": 5.066195562958076e-05,
"loss": 1.3024041652679443,
"step": 630
},
{
"epoch": 0.9226277372262773,
"grad_norm": 0.7421875,
"learning_rate": 5.0600445897080045e-05,
"loss": 1.3594764471054077,
"step": 632
},
{
"epoch": 0.9255474452554745,
"grad_norm": 0.609375,
"learning_rate": 5.053878111100175e-05,
"loss": 1.189161777496338,
"step": 634
},
{
"epoch": 0.9284671532846716,
"grad_norm": 0.462890625,
"learning_rate": 5.04769618842351e-05,
"loss": 0.995853841304779,
"step": 636
},
{
"epoch": 0.9313868613138686,
"grad_norm": 0.796875,
"learning_rate": 5.041498883120437e-05,
"loss": 1.3354415893554688,
"step": 638
},
{
"epoch": 0.9343065693430657,
"grad_norm": 0.58984375,
"learning_rate": 5.035286256786269e-05,
"loss": 1.3200321197509766,
"step": 640
},
{
"epoch": 0.9372262773722628,
"grad_norm": 0.98828125,
"learning_rate": 5.029058371168596e-05,
"loss": 1.3042809963226318,
"step": 642
},
{
"epoch": 0.9401459854014599,
"grad_norm": 0.5546875,
"learning_rate": 5.022815288166671e-05,
"loss": 1.3093239068984985,
"step": 644
},
{
"epoch": 0.9430656934306569,
"grad_norm": 0.7109375,
"learning_rate": 5.0165570698307945e-05,
"loss": 1.2666034698486328,
"step": 646
},
{
"epoch": 0.945985401459854,
"grad_norm": 3.671875,
"learning_rate": 5.010283778361697e-05,
"loss": 0.7473506927490234,
"step": 648
},
{
"epoch": 0.948905109489051,
"grad_norm": 1.09375,
"learning_rate": 5.003995476109922e-05,
"loss": 1.4341310262680054,
"step": 650
},
{
"epoch": 0.9518248175182482,
"grad_norm": 0.75,
"learning_rate": 4.9976922255752084e-05,
"loss": 1.3149964809417725,
"step": 652
},
{
"epoch": 0.9547445255474453,
"grad_norm": 2.25,
"learning_rate": 4.9913740894058637e-05,
"loss": 1.3723666667938232,
"step": 654
},
{
"epoch": 0.9576642335766423,
"grad_norm": 1.3984375,
"learning_rate": 4.9850411303981446e-05,
"loss": 1.3720585107803345,
"step": 656
},
{
"epoch": 0.9605839416058394,
"grad_norm": 0.6875,
"learning_rate": 4.978693411495634e-05,
"loss": 1.2820851802825928,
"step": 658
},
{
"epoch": 0.9635036496350365,
"grad_norm": 0.90625,
"learning_rate": 4.9723309957886124e-05,
"loss": 1.4141497611999512,
"step": 660
},
{
"epoch": 0.9664233576642336,
"grad_norm": 0.57421875,
"learning_rate": 4.965953946513437e-05,
"loss": 1.1847033500671387,
"step": 662
},
{
"epoch": 0.9693430656934306,
"grad_norm": 0.66015625,
"learning_rate": 4.9595623270519006e-05,
"loss": 1.2596821784973145,
"step": 664
},
{
"epoch": 0.9722627737226277,
"grad_norm": 2.421875,
"learning_rate": 4.9531562009306176e-05,
"loss": 1.5929439067840576,
"step": 666
},
{
"epoch": 0.9751824817518249,
"grad_norm": 3.5,
"learning_rate": 4.946735631820382e-05,
"loss": 1.7425875663757324,
"step": 668
},
{
"epoch": 0.9781021897810219,
"grad_norm": 2.390625,
"learning_rate": 4.940300683535535e-05,
"loss": 1.5199015140533447,
"step": 670
},
{
"epoch": 0.981021897810219,
"grad_norm": 0.55078125,
"learning_rate": 4.933851420033338e-05,
"loss": 1.261643409729004,
"step": 672
},
{
"epoch": 0.983941605839416,
"grad_norm": 1.9453125,
"learning_rate": 4.927387905413328e-05,
"loss": 1.4657907485961914,
"step": 674
},
{
"epoch": 0.9868613138686131,
"grad_norm": 1.921875,
"learning_rate": 4.9209102039166846e-05,
"loss": 0.47202789783477783,
"step": 676
},
{
"epoch": 0.9897810218978103,
"grad_norm": 0.98828125,
"learning_rate": 4.914418379925595e-05,
"loss": 1.2479331493377686,
"step": 678
},
{
"epoch": 0.9927007299270073,
"grad_norm": 1.2109375,
"learning_rate": 4.907912497962606e-05,
"loss": 1.3170100450515747,
"step": 680
},
{
"epoch": 0.9956204379562044,
"grad_norm": 1.0,
"learning_rate": 4.901392622689992e-05,
"loss": 1.3046462535858154,
"step": 682
},
{
"epoch": 0.9985401459854014,
"grad_norm": 2.203125,
"learning_rate": 4.894858818909101e-05,
"loss": 1.5552364587783813,
"step": 684
},
{
"epoch": 1.0014598540145985,
"grad_norm": 1.0625,
"learning_rate": 4.888311151559726e-05,
"loss": 1.3213224411010742,
"step": 686
},
{
"epoch": 1.0043795620437956,
"grad_norm": 0.9375,
"learning_rate": 4.8817496857194416e-05,
"loss": 1.2282832860946655,
"step": 688
},
{
"epoch": 1.0072992700729928,
"grad_norm": 2.53125,
"learning_rate": 4.875174486602973e-05,
"loss": 1.4474010467529297,
"step": 690
},
{
"epoch": 1.0102189781021897,
"grad_norm": 2.578125,
"learning_rate": 4.868585619561538e-05,
"loss": 1.3594729900360107,
"step": 692
},
{
"epoch": 1.013138686131387,
"grad_norm": 53.5,
"learning_rate": 4.8619831500822e-05,
"loss": 1.2219158411026,
"step": 694
},
{
"epoch": 1.0160583941605839,
"grad_norm": 0.640625,
"learning_rate": 4.855367143787222e-05,
"loss": 1.1806716918945312,
"step": 696
},
{
"epoch": 1.018978102189781,
"grad_norm": 0.76171875,
"learning_rate": 4.848737666433404e-05,
"loss": 1.3240623474121094,
"step": 698
},
{
"epoch": 1.0218978102189782,
"grad_norm": 1.046875,
"learning_rate": 4.842094783911438e-05,
"loss": 1.0889549255371094,
"step": 700
},
{
"epoch": 1.0248175182481751,
"grad_norm": 1.1640625,
"learning_rate": 4.8354385622452534e-05,
"loss": 1.1705904006958008,
"step": 702
},
{
"epoch": 1.0277372262773723,
"grad_norm": 1.3515625,
"learning_rate": 4.8287690675913536e-05,
"loss": 1.2288353443145752,
"step": 704
},
{
"epoch": 1.0306569343065692,
"grad_norm": 0.6328125,
"learning_rate": 4.822086366238166e-05,
"loss": 1.258807897567749,
"step": 706
},
{
"epoch": 1.0335766423357664,
"grad_norm": 0.86328125,
"learning_rate": 4.815390524605377e-05,
"loss": 1.1945178508758545,
"step": 708
},
{
"epoch": 1.0364963503649636,
"grad_norm": 1.6171875,
"learning_rate": 4.808681609243279e-05,
"loss": 1.3580873012542725,
"step": 710
},
{
"epoch": 1.0394160583941605,
"grad_norm": 4.65625,
"learning_rate": 4.8019596868321005e-05,
"loss": 1.1779024600982666,
"step": 712
},
{
"epoch": 1.0423357664233577,
"grad_norm": 2.046875,
"learning_rate": 4.79522482418135e-05,
"loss": 0.9665200710296631,
"step": 714
},
{
"epoch": 1.0452554744525548,
"grad_norm": 1.0625,
"learning_rate": 4.788477088229151e-05,
"loss": 1.090451955795288,
"step": 716
},
{
"epoch": 1.0481751824817518,
"grad_norm": 2.40625,
"learning_rate": 4.7817165460415734e-05,
"loss": 0.9157304167747498,
"step": 718
},
{
"epoch": 1.051094890510949,
"grad_norm": 1.296875,
"learning_rate": 4.7749432648119675e-05,
"loss": 1.003751516342163,
"step": 720
},
{
"epoch": 1.054014598540146,
"grad_norm": 4.65625,
"learning_rate": 4.7681573118603024e-05,
"loss": 0.7297412157058716,
"step": 722
},
{
"epoch": 1.056934306569343,
"grad_norm": 1.46875,
"learning_rate": 4.761358754632483e-05,
"loss": 1.176282525062561,
"step": 724
},
{
"epoch": 1.0598540145985402,
"grad_norm": 0.60546875,
"learning_rate": 4.7545476606997025e-05,
"loss": 0.9568055868148804,
"step": 726
},
{
"epoch": 1.0627737226277372,
"grad_norm": 0.8515625,
"learning_rate": 4.747724097757743e-05,
"loss": 0.849090576171875,
"step": 728
},
{
"epoch": 1.0656934306569343,
"grad_norm": 3.0625,
"learning_rate": 4.740888133626324e-05,
"loss": 1.0601378679275513,
"step": 730
},
{
"epoch": 1.0686131386861315,
"grad_norm": 0.8125,
"learning_rate": 4.7340398362484226e-05,
"loss": 0.9943457245826721,
"step": 732
},
{
"epoch": 1.0715328467153284,
"grad_norm": 2.40625,
"learning_rate": 4.727179273689591e-05,
"loss": 0.6839646100997925,
"step": 734
},
{
"epoch": 1.0744525547445256,
"grad_norm": 1.0703125,
"learning_rate": 4.72030651413729e-05,
"loss": 1.0361661911010742,
"step": 736
},
{
"epoch": 1.0773722627737226,
"grad_norm": 2.0625,
"learning_rate": 4.713421625900203e-05,
"loss": 1.017957329750061,
"step": 738
},
{
"epoch": 1.0802919708029197,
"grad_norm": 1.0625,
"learning_rate": 4.706524677407567e-05,
"loss": 0.6705314517021179,
"step": 740
},
{
"epoch": 1.0832116788321169,
"grad_norm": 0.6640625,
"learning_rate": 4.69961573720848e-05,
"loss": 0.906855583190918,
"step": 742
},
{
"epoch": 1.0861313868613138,
"grad_norm": 1.9921875,
"learning_rate": 4.69269487397123e-05,
"loss": 0.8631948232650757,
"step": 744
},
{
"epoch": 1.089051094890511,
"grad_norm": 0.85546875,
"learning_rate": 4.6857621564826066e-05,
"loss": 0.9076054096221924,
"step": 746
},
{
"epoch": 1.091970802919708,
"grad_norm": 0.98046875,
"learning_rate": 4.678817653647223e-05,
"loss": 1.0434634685516357,
"step": 748
},
{
"epoch": 1.094890510948905,
"grad_norm": 0.6796875,
"learning_rate": 4.671861434486821e-05,
"loss": 0.9549797773361206,
"step": 750
},
{
"epoch": 1.0978102189781023,
"grad_norm": 0.578125,
"learning_rate": 4.6648935681395996e-05,
"loss": 0.9440876245498657,
"step": 752
},
{
"epoch": 1.1007299270072992,
"grad_norm": 0.62890625,
"learning_rate": 4.657914123859512e-05,
"loss": 0.8400392532348633,
"step": 754
},
{
"epoch": 1.1036496350364964,
"grad_norm": 1.484375,
"learning_rate": 4.65092317101559e-05,
"loss": 0.8279464244842529,
"step": 756
},
{
"epoch": 1.1065693430656935,
"grad_norm": 1.0234375,
"learning_rate": 4.643920779091249e-05,
"loss": 0.6409823894500732,
"step": 758
},
{
"epoch": 1.1094890510948905,
"grad_norm": 0.984375,
"learning_rate": 4.6369070176835943e-05,
"loss": 0.7931218147277832,
"step": 760
},
{
"epoch": 1.1124087591240877,
"grad_norm": 0.84375,
"learning_rate": 4.629881956502739e-05,
"loss": 0.9238249063491821,
"step": 762
},
{
"epoch": 1.1153284671532846,
"grad_norm": 0.6328125,
"learning_rate": 4.6228456653711005e-05,
"loss": 0.9794689416885376,
"step": 764
},
{
"epoch": 1.1182481751824818,
"grad_norm": 1.65625,
"learning_rate": 4.615798214222713e-05,
"loss": 0.7727400660514832,
"step": 766
},
{
"epoch": 1.121167883211679,
"grad_norm": 0.73828125,
"learning_rate": 4.6087396731025305e-05,
"loss": 0.7949696779251099,
"step": 768
},
{
"epoch": 1.1240875912408759,
"grad_norm": 0.6875,
"learning_rate": 4.6016701121657315e-05,
"loss": 0.9301152229309082,
"step": 770
},
{
"epoch": 1.127007299270073,
"grad_norm": 0.56640625,
"learning_rate": 4.5945896016770174e-05,
"loss": 0.8954685926437378,
"step": 772
},
{
"epoch": 1.12992700729927,
"grad_norm": 0.65234375,
"learning_rate": 4.5874982120099244e-05,
"loss": 0.790676474571228,
"step": 774
},
{
"epoch": 1.1328467153284671,
"grad_norm": 2.3125,
"learning_rate": 4.5803960136461095e-05,
"loss": 0.8866307735443115,
"step": 776
},
{
"epoch": 1.1357664233576643,
"grad_norm": 0.5546875,
"learning_rate": 4.573283077174664e-05,
"loss": 1.0086119174957275,
"step": 778
},
{
"epoch": 1.1386861313868613,
"grad_norm": 0.625,
"learning_rate": 4.566159473291403e-05,
"loss": 0.9111287593841553,
"step": 780
},
{
"epoch": 1.1416058394160584,
"grad_norm": 0.60546875,
"learning_rate": 4.5590252727981665e-05,
"loss": 0.7828556299209595,
"step": 782
},
{
"epoch": 1.1445255474452556,
"grad_norm": 1.28125,
"learning_rate": 4.551880546602113e-05,
"loss": 0.8504880666732788,
"step": 784
},
{
"epoch": 1.1474452554744525,
"grad_norm": 0.875,
"learning_rate": 4.54472536571502e-05,
"loss": 0.6608180999755859,
"step": 786
},
{
"epoch": 1.1503649635036497,
"grad_norm": 0.50390625,
"learning_rate": 4.537559801252571e-05,
"loss": 0.8004018068313599,
"step": 788
},
{
"epoch": 1.1532846715328466,
"grad_norm": 0.49609375,
"learning_rate": 4.530383924433653e-05,
"loss": 0.8180815577507019,
"step": 790
},
{
"epoch": 1.1562043795620438,
"grad_norm": 0.376953125,
"learning_rate": 4.523197806579649e-05,
"loss": 0.8199501037597656,
"step": 792
},
{
"epoch": 1.159124087591241,
"grad_norm": 0.60546875,
"learning_rate": 4.51600151911373e-05,
"loss": 0.8051800727844238,
"step": 794
},
{
"epoch": 1.162043795620438,
"grad_norm": 0.5625,
"learning_rate": 4.50879513356014e-05,
"loss": 0.838517427444458,
"step": 796
},
{
"epoch": 1.164963503649635,
"grad_norm": 0.84765625,
"learning_rate": 4.501578721543488e-05,
"loss": 0.7846630215644836,
"step": 798
},
{
"epoch": 1.167883211678832,
"grad_norm": 0.515625,
"learning_rate": 4.4943523547880396e-05,
"loss": 0.8626841902732849,
"step": 800
},
{
"epoch": 1.1708029197080292,
"grad_norm": 0.578125,
"learning_rate": 4.487116105117e-05,
"loss": 0.834052324295044,
"step": 802
},
{
"epoch": 1.1737226277372264,
"grad_norm": 1.1171875,
"learning_rate": 4.479870044451797e-05,
"loss": 0.7536060810089111,
"step": 804
},
{
"epoch": 1.1766423357664233,
"grad_norm": 2.0,
"learning_rate": 4.472614244811379e-05,
"loss": 0.5739097595214844,
"step": 806
},
{
"epoch": 1.1795620437956205,
"grad_norm": 1.1875,
"learning_rate": 4.4653487783114824e-05,
"loss": 0.8073641657829285,
"step": 808
},
{
"epoch": 1.1824817518248176,
"grad_norm": 1.0859375,
"learning_rate": 4.458073717163927e-05,
"loss": 0.7964679002761841,
"step": 810
},
{
"epoch": 1.1854014598540146,
"grad_norm": 1.1953125,
"learning_rate": 4.4507891336758944e-05,
"loss": 0.727637529373169,
"step": 812
},
{
"epoch": 1.1883211678832117,
"grad_norm": 1.234375,
"learning_rate": 4.443495100249206e-05,
"loss": 0.759423017501831,
"step": 814
},
{
"epoch": 1.1912408759124087,
"grad_norm": 0.59765625,
"learning_rate": 4.43619168937961e-05,
"loss": 0.9602820873260498,
"step": 816
},
{
"epoch": 1.1941605839416058,
"grad_norm": 0.44140625,
"learning_rate": 4.4288789736560544e-05,
"loss": 0.5548771619796753,
"step": 818
},
{
"epoch": 1.197080291970803,
"grad_norm": 1.0625,
"learning_rate": 4.421557025759974e-05,
"loss": 0.8706461191177368,
"step": 820
},
{
"epoch": 1.2,
"grad_norm": 0.59765625,
"learning_rate": 4.414225918464556e-05,
"loss": 0.7758599519729614,
"step": 822
},
{
"epoch": 1.2029197080291971,
"grad_norm": 0.44140625,
"learning_rate": 4.406885724634027e-05,
"loss": 0.8401455879211426,
"step": 824
},
{
"epoch": 1.205839416058394,
"grad_norm": 1.5546875,
"learning_rate": 4.399536517222924e-05,
"loss": 0.4931638836860657,
"step": 826
},
{
"epoch": 1.2087591240875912,
"grad_norm": 0.60546875,
"learning_rate": 4.3921783692753726e-05,
"loss": 0.8398951888084412,
"step": 828
},
{
"epoch": 1.2116788321167884,
"grad_norm": 1.21875,
"learning_rate": 4.384811353924356e-05,
"loss": 0.5683646202087402,
"step": 830
},
{
"epoch": 1.2145985401459853,
"grad_norm": 0.5078125,
"learning_rate": 4.3774355443909906e-05,
"loss": 0.7751730680465698,
"step": 832
},
{
"epoch": 1.2175182481751825,
"grad_norm": 0.4609375,
"learning_rate": 4.3700510139838025e-05,
"loss": 0.7935394048690796,
"step": 834
},
{
"epoch": 1.2204379562043797,
"grad_norm": 1.265625,
"learning_rate": 4.36265783609799e-05,
"loss": 0.45605871081352234,
"step": 836
},
{
"epoch": 1.2233576642335766,
"grad_norm": 1.4921875,
"learning_rate": 4.355256084214703e-05,
"loss": 0.39349085092544556,
"step": 838
},
{
"epoch": 1.2262773722627738,
"grad_norm": 0.72265625,
"learning_rate": 4.3478458319003076e-05,
"loss": 0.5289195775985718,
"step": 840
},
{
"epoch": 1.2291970802919707,
"grad_norm": 1.3046875,
"learning_rate": 4.340427152805655e-05,
"loss": 0.3823907971382141,
"step": 842
},
{
"epoch": 1.2321167883211679,
"grad_norm": 0.7734375,
"learning_rate": 4.333000120665351e-05,
"loss": 0.7731255292892456,
"step": 844
},
{
"epoch": 1.235036496350365,
"grad_norm": 0.98046875,
"learning_rate": 4.325564809297025e-05,
"loss": 0.7135635614395142,
"step": 846
},
{
"epoch": 1.237956204379562,
"grad_norm": 0.625,
"learning_rate": 4.31812129260059e-05,
"loss": 0.7766275405883789,
"step": 848
},
{
"epoch": 1.2408759124087592,
"grad_norm": 0.4140625,
"learning_rate": 4.310669644557515e-05,
"loss": 0.7377574443817139,
"step": 850
},
{
"epoch": 1.243795620437956,
"grad_norm": 0.50390625,
"learning_rate": 4.303209939230086e-05,
"loss": 0.6263979077339172,
"step": 852
},
{
"epoch": 1.2467153284671533,
"grad_norm": 0.5390625,
"learning_rate": 4.295742250760674e-05,
"loss": 0.8428114652633667,
"step": 854
},
{
"epoch": 1.2496350364963504,
"grad_norm": 0.609375,
"learning_rate": 4.288266653370986e-05,
"loss": 0.6811407804489136,
"step": 856
},
{
"epoch": 1.2525547445255474,
"grad_norm": 1.265625,
"learning_rate": 4.280783221361346e-05,
"loss": 0.6165977120399475,
"step": 858
},
{
"epoch": 1.2554744525547445,
"grad_norm": 1.03125,
"learning_rate": 4.2732920291099407e-05,
"loss": 0.4194489121437073,
"step": 860
},
{
"epoch": 1.2583941605839417,
"grad_norm": 0.67578125,
"learning_rate": 4.265793151072089e-05,
"loss": 0.7688656449317932,
"step": 862
},
{
"epoch": 1.2613138686131387,
"grad_norm": 0.498046875,
"learning_rate": 4.258286661779498e-05,
"loss": 0.7792798280715942,
"step": 864
},
{
"epoch": 1.2642335766423358,
"grad_norm": 1.0703125,
"learning_rate": 4.2507726358395236e-05,
"loss": 0.529695451259613,
"step": 866
},
{
"epoch": 1.2671532846715328,
"grad_norm": 0.53125,
"learning_rate": 4.243251147934428e-05,
"loss": 0.7187914848327637,
"step": 868
},
{
"epoch": 1.27007299270073,
"grad_norm": 0.6015625,
"learning_rate": 4.2357222728206404e-05,
"loss": 0.8103936314582825,
"step": 870
},
{
"epoch": 1.2729927007299269,
"grad_norm": 0.4375,
"learning_rate": 4.22818608532801e-05,
"loss": 0.7720659971237183,
"step": 872
},
{
"epoch": 1.275912408759124,
"grad_norm": 0.494140625,
"learning_rate": 4.2206426603590644e-05,
"loss": 0.7679579257965088,
"step": 874
},
{
"epoch": 1.2788321167883212,
"grad_norm": 0.455078125,
"learning_rate": 4.213092072888264e-05,
"loss": 0.7206258177757263,
"step": 876
},
{
"epoch": 1.2817518248175181,
"grad_norm": 1.984375,
"learning_rate": 4.205534397961262e-05,
"loss": 0.5508802533149719,
"step": 878
},
{
"epoch": 1.2846715328467153,
"grad_norm": 0.703125,
"learning_rate": 4.197969710694146e-05,
"loss": 0.717147707939148,
"step": 880
},
{
"epoch": 1.2875912408759125,
"grad_norm": 0.53125,
"learning_rate": 4.190398086272708e-05,
"loss": 0.7146405577659607,
"step": 882
},
{
"epoch": 1.2905109489051094,
"grad_norm": 1.15625,
"learning_rate": 4.182819599951685e-05,
"loss": 0.8259648084640503,
"step": 884
},
{
"epoch": 1.2934306569343066,
"grad_norm": 0.55078125,
"learning_rate": 4.1752343270540136e-05,
"loss": 0.6179315447807312,
"step": 886
},
{
"epoch": 1.2963503649635038,
"grad_norm": 0.8203125,
"learning_rate": 4.167642342970084e-05,
"loss": 0.7542316913604736,
"step": 888
},
{
"epoch": 1.2992700729927007,
"grad_norm": 0.5078125,
"learning_rate": 4.160043723156989e-05,
"loss": 0.7624303102493286,
"step": 890
},
{
"epoch": 1.3021897810218979,
"grad_norm": 1.5,
"learning_rate": 4.152438543137773e-05,
"loss": 0.9253525733947754,
"step": 892
},
{
"epoch": 1.305109489051095,
"grad_norm": 1.1796875,
"learning_rate": 4.144826878500687e-05,
"loss": 0.6559224128723145,
"step": 894
},
{
"epoch": 1.308029197080292,
"grad_norm": 0.40625,
"learning_rate": 4.137208804898428e-05,
"loss": 0.8019828796386719,
"step": 896
},
{
"epoch": 1.310948905109489,
"grad_norm": 0.6015625,
"learning_rate": 4.1295843980473924e-05,
"loss": 0.9176638126373291,
"step": 898
},
{
"epoch": 1.313868613138686,
"grad_norm": 0.453125,
"learning_rate": 4.121953733726925e-05,
"loss": 0.8651784658432007,
"step": 900
},
{
"epoch": 1.3167883211678832,
"grad_norm": 0.59375,
"learning_rate": 4.114316887778564e-05,
"loss": 0.8026199340820312,
"step": 902
},
{
"epoch": 1.3197080291970802,
"grad_norm": 1.546875,
"learning_rate": 4.106673936105287e-05,
"loss": 0.5445064306259155,
"step": 904
},
{
"epoch": 1.3226277372262774,
"grad_norm": 1.140625,
"learning_rate": 4.0990249546707556e-05,
"loss": 0.5602635145187378,
"step": 906
},
{
"epoch": 1.3255474452554745,
"grad_norm": 3.15625,
"learning_rate": 4.091370019498561e-05,
"loss": 0.8074881434440613,
"step": 908
},
{
"epoch": 1.3284671532846715,
"grad_norm": 0.48046875,
"learning_rate": 4.0837092066714694e-05,
"loss": 0.8778038024902344,
"step": 910
},
{
"epoch": 1.3313868613138686,
"grad_norm": 1.296875,
"learning_rate": 4.076042592330668e-05,
"loss": 0.8802911639213562,
"step": 912
},
{
"epoch": 1.3343065693430658,
"grad_norm": 0.65625,
"learning_rate": 4.068370252675002e-05,
"loss": 1.0339293479919434,
"step": 914
},
{
"epoch": 1.3372262773722627,
"grad_norm": 3.171875,
"learning_rate": 4.0606922639602215e-05,
"loss": 0.7693579196929932,
"step": 916
},
{
"epoch": 1.34014598540146,
"grad_norm": 0.66015625,
"learning_rate": 4.0530087024982245e-05,
"loss": 0.545236349105835,
"step": 918
},
{
"epoch": 1.343065693430657,
"grad_norm": 1.0,
"learning_rate": 4.045319644656295e-05,
"loss": 0.7232102155685425,
"step": 920
},
{
"epoch": 1.345985401459854,
"grad_norm": 0.83984375,
"learning_rate": 4.037625166856347e-05,
"loss": 0.5631003975868225,
"step": 922
},
{
"epoch": 1.348905109489051,
"grad_norm": 0.84765625,
"learning_rate": 4.0299253455741626e-05,
"loss": 0.75032639503479,
"step": 924
},
{
"epoch": 1.3518248175182481,
"grad_norm": 0.546875,
"learning_rate": 4.022220257338635e-05,
"loss": 0.7473883628845215,
"step": 926
},
{
"epoch": 1.3547445255474453,
"grad_norm": 0.66015625,
"learning_rate": 4.014509978731002e-05,
"loss": 0.6389172077178955,
"step": 928
},
{
"epoch": 1.3576642335766422,
"grad_norm": 0.52734375,
"learning_rate": 4.006794586384093e-05,
"loss": 0.8051577210426331,
"step": 930
},
{
"epoch": 1.3605839416058394,
"grad_norm": 1.640625,
"learning_rate": 3.999074156981561e-05,
"loss": 0.7719905376434326,
"step": 932
},
{
"epoch": 1.3635036496350366,
"grad_norm": 1.3125,
"learning_rate": 3.9913487672571246e-05,
"loss": 0.7920917272567749,
"step": 934
},
{
"epoch": 1.3664233576642335,
"grad_norm": 0.380859375,
"learning_rate": 3.983618493993799e-05,
"loss": 0.6346763372421265,
"step": 936
},
{
"epoch": 1.3693430656934307,
"grad_norm": 0.90625,
"learning_rate": 3.975883414023141e-05,
"loss": 0.7190485000610352,
"step": 938
},
{
"epoch": 1.3722627737226278,
"grad_norm": 0.474609375,
"learning_rate": 3.96814360422448e-05,
"loss": 0.6966416835784912,
"step": 940
},
{
"epoch": 1.3751824817518248,
"grad_norm": 0.427734375,
"learning_rate": 3.9603991415241556e-05,
"loss": 0.8331156373023987,
"step": 942
},
{
"epoch": 1.378102189781022,
"grad_norm": 0.44921875,
"learning_rate": 3.9526501028947525e-05,
"loss": 0.7017558813095093,
"step": 944
},
{
"epoch": 1.3810218978102191,
"grad_norm": 0.55859375,
"learning_rate": 3.9448965653543374e-05,
"loss": 0.7089091539382935,
"step": 946
},
{
"epoch": 1.383941605839416,
"grad_norm": 0.5859375,
"learning_rate": 3.937138605965689e-05,
"loss": 0.8313618898391724,
"step": 948
},
{
"epoch": 1.3868613138686132,
"grad_norm": 1.265625,
"learning_rate": 3.929376301835538e-05,
"loss": 0.5355940461158752,
"step": 950
},
{
"epoch": 1.3897810218978102,
"grad_norm": 0.546875,
"learning_rate": 3.9216097301137943e-05,
"loss": 0.8322726488113403,
"step": 952
},
{
"epoch": 1.3927007299270073,
"grad_norm": 0.5234375,
"learning_rate": 3.9138389679927876e-05,
"loss": 0.7144240736961365,
"step": 954
},
{
"epoch": 1.3956204379562043,
"grad_norm": 0.71484375,
"learning_rate": 3.906064092706493e-05,
"loss": 0.877200722694397,
"step": 956
},
{
"epoch": 1.3985401459854014,
"grad_norm": 1.4453125,
"learning_rate": 3.898285181529768e-05,
"loss": 0.5701913833618164,
"step": 958
},
{
"epoch": 1.4014598540145986,
"grad_norm": 0.58203125,
"learning_rate": 3.890502311777583e-05,
"loss": 0.6877053380012512,
"step": 960
},
{
"epoch": 1.4043795620437955,
"grad_norm": 1.234375,
"learning_rate": 3.882715560804251e-05,
"loss": 0.7841130495071411,
"step": 962
},
{
"epoch": 1.4072992700729927,
"grad_norm": 0.91796875,
"learning_rate": 3.8749250060026645e-05,
"loss": 0.6575303077697754,
"step": 964
},
{
"epoch": 1.4102189781021899,
"grad_norm": 0.482421875,
"learning_rate": 3.8671307248035183e-05,
"loss": 0.7847940921783447,
"step": 966
},
{
"epoch": 1.4131386861313868,
"grad_norm": 0.56640625,
"learning_rate": 3.8593327946745465e-05,
"loss": 0.6031632423400879,
"step": 968
},
{
"epoch": 1.416058394160584,
"grad_norm": 0.33984375,
"learning_rate": 3.85153129311975e-05,
"loss": 0.7221322655677795,
"step": 970
},
{
"epoch": 1.4189781021897812,
"grad_norm": 0.494140625,
"learning_rate": 3.843726297678625e-05,
"loss": 0.6405712366104126,
"step": 972
},
{
"epoch": 1.421897810218978,
"grad_norm": 0.55859375,
"learning_rate": 3.835917885925394e-05,
"loss": 0.8854905962944031,
"step": 974
},
{
"epoch": 1.4248175182481753,
"grad_norm": 0.33984375,
"learning_rate": 3.828106135468236e-05,
"loss": 0.8395616412162781,
"step": 976
},
{
"epoch": 1.4277372262773722,
"grad_norm": 0.953125,
"learning_rate": 3.820291123948512e-05,
"loss": 0.7625291347503662,
"step": 978
},
{
"epoch": 1.4306569343065694,
"grad_norm": 0.5390625,
"learning_rate": 3.812472929039994e-05,
"loss": 0.6698781251907349,
"step": 980
},
{
"epoch": 1.4335766423357663,
"grad_norm": 1.71875,
"learning_rate": 3.804651628448095e-05,
"loss": 0.46671491861343384,
"step": 982
},
{
"epoch": 1.4364963503649635,
"grad_norm": 3.59375,
"learning_rate": 3.796827299909097e-05,
"loss": 0.7954891920089722,
"step": 984
},
{
"epoch": 1.4394160583941606,
"grad_norm": 0.5390625,
"learning_rate": 3.789000021189374e-05,
"loss": 0.7973778247833252,
"step": 986
},
{
"epoch": 1.4423357664233576,
"grad_norm": 0.6171875,
"learning_rate": 3.7811698700846245e-05,
"loss": 0.77935791015625,
"step": 988
},
{
"epoch": 1.4452554744525548,
"grad_norm": 0.2890625,
"learning_rate": 3.773336924419094e-05,
"loss": 0.4812260866165161,
"step": 990
},
{
"epoch": 1.448175182481752,
"grad_norm": 0.58984375,
"learning_rate": 3.765501262044804e-05,
"loss": 0.6503823399543762,
"step": 992
},
{
"epoch": 1.4510948905109489,
"grad_norm": 1.1875,
"learning_rate": 3.757662960840776e-05,
"loss": 0.7457800507545471,
"step": 994
},
{
"epoch": 1.454014598540146,
"grad_norm": 0.78515625,
"learning_rate": 3.749822098712264e-05,
"loss": 0.729491114616394,
"step": 996
},
{
"epoch": 1.4569343065693432,
"grad_norm": 1.0,
"learning_rate": 3.741978753589968e-05,
"loss": 0.7310203313827515,
"step": 998
},
{
"epoch": 1.4598540145985401,
"grad_norm": 0.78515625,
"learning_rate": 3.7341330034292735e-05,
"loss": 0.8127636909484863,
"step": 1000
},
{
"epoch": 1.4627737226277373,
"grad_norm": 0.96875,
"learning_rate": 3.7262849262094626e-05,
"loss": 0.744979202747345,
"step": 1002
},
{
"epoch": 1.4656934306569342,
"grad_norm": 2.8125,
"learning_rate": 3.718434599932953e-05,
"loss": 0.6435647010803223,
"step": 1004
},
{
"epoch": 1.4686131386861314,
"grad_norm": 0.53125,
"learning_rate": 3.710582102624511e-05,
"loss": 0.7206861972808838,
"step": 1006
},
{
"epoch": 1.4715328467153284,
"grad_norm": 0.494140625,
"learning_rate": 3.702727512330484e-05,
"loss": 0.6581206321716309,
"step": 1008
},
{
"epoch": 1.4744525547445255,
"grad_norm": 0.6015625,
"learning_rate": 3.69487090711802e-05,
"loss": 0.6592334508895874,
"step": 1010
},
{
"epoch": 1.4773722627737227,
"grad_norm": 1.5625,
"learning_rate": 3.687012365074293e-05,
"loss": 0.8629540205001831,
"step": 1012
},
{
"epoch": 1.4802919708029196,
"grad_norm": 0.80078125,
"learning_rate": 3.67915196430573e-05,
"loss": 0.786446213722229,
"step": 1014
},
{
"epoch": 1.4832116788321168,
"grad_norm": 1.640625,
"learning_rate": 3.671289782937228e-05,
"loss": 0.7205163240432739,
"step": 1016
},
{
"epoch": 1.486131386861314,
"grad_norm": 0.78515625,
"learning_rate": 3.663425899111385e-05,
"loss": 0.6132459044456482,
"step": 1018
},
{
"epoch": 1.489051094890511,
"grad_norm": 0.478515625,
"learning_rate": 3.6555603909877175e-05,
"loss": 0.7237584590911865,
"step": 1020
},
{
"epoch": 1.491970802919708,
"grad_norm": 0.5859375,
"learning_rate": 3.647693336741887e-05,
"loss": 0.7926473617553711,
"step": 1022
},
{
"epoch": 1.4948905109489052,
"grad_norm": 0.6328125,
"learning_rate": 3.639824814564921e-05,
"loss": 0.8149737119674683,
"step": 1024
},
{
"epoch": 1.4978102189781022,
"grad_norm": 0.65234375,
"learning_rate": 3.63195490266244e-05,
"loss": 0.590778112411499,
"step": 1026
},
{
"epoch": 1.5007299270072991,
"grad_norm": 0.640625,
"learning_rate": 3.6240836792538715e-05,
"loss": 0.560297966003418,
"step": 1028
},
{
"epoch": 1.5036496350364965,
"grad_norm": 0.5625,
"learning_rate": 3.616211222571683e-05,
"loss": 0.73332679271698,
"step": 1030
},
{
"epoch": 1.5065693430656935,
"grad_norm": 1.2421875,
"learning_rate": 3.608337610860598e-05,
"loss": 0.5995861291885376,
"step": 1032
},
{
"epoch": 1.5094890510948904,
"grad_norm": 0.6015625,
"learning_rate": 3.600462922376819e-05,
"loss": 0.6911063194274902,
"step": 1034
},
{
"epoch": 1.5124087591240876,
"grad_norm": 0.984375,
"learning_rate": 3.592587235387251e-05,
"loss": 0.7384026050567627,
"step": 1036
},
{
"epoch": 1.5153284671532847,
"grad_norm": 1.5390625,
"learning_rate": 3.5847106281687245e-05,
"loss": 0.44528472423553467,
"step": 1038
},
{
"epoch": 1.5182481751824817,
"grad_norm": 0.46875,
"learning_rate": 3.576833179007214e-05,
"loss": 0.7749471664428711,
"step": 1040
},
{
"epoch": 1.5211678832116788,
"grad_norm": 0.7109375,
"learning_rate": 3.5689549661970667e-05,
"loss": 0.8797944188117981,
"step": 1042
},
{
"epoch": 1.524087591240876,
"grad_norm": 4.71875,
"learning_rate": 3.561076068040211e-05,
"loss": 0.6449779272079468,
"step": 1044
},
{
"epoch": 1.527007299270073,
"grad_norm": 1.1796875,
"learning_rate": 3.553196562845396e-05,
"loss": 0.7924333810806274,
"step": 1046
},
{
"epoch": 1.5299270072992701,
"grad_norm": 0.443359375,
"learning_rate": 3.545316528927401e-05,
"loss": 0.727583646774292,
"step": 1048
},
{
"epoch": 1.5328467153284673,
"grad_norm": 1.25,
"learning_rate": 3.537436044606257e-05,
"loss": 0.6008315682411194,
"step": 1050
},
{
"epoch": 1.5357664233576642,
"grad_norm": 0.46875,
"learning_rate": 3.529555188206476e-05,
"loss": 0.5438330769538879,
"step": 1052
},
{
"epoch": 1.5386861313868612,
"grad_norm": 1.453125,
"learning_rate": 3.521674038056267e-05,
"loss": 0.8460222482681274,
"step": 1054
},
{
"epoch": 1.5416058394160586,
"grad_norm": 0.53515625,
"learning_rate": 3.5137926724867566e-05,
"loss": 0.7475937604904175,
"step": 1056
},
{
"epoch": 1.5445255474452555,
"grad_norm": 1.125,
"learning_rate": 3.505911169831216e-05,
"loss": 0.31257715821266174,
"step": 1058
},
{
"epoch": 1.5474452554744524,
"grad_norm": 1.8671875,
"learning_rate": 3.498029608424276e-05,
"loss": 0.5920336246490479,
"step": 1060
},
{
"epoch": 1.5503649635036496,
"grad_norm": 0.5546875,
"learning_rate": 3.490148066601151e-05,
"loss": 0.8748451471328735,
"step": 1062
},
{
"epoch": 1.5532846715328468,
"grad_norm": 0.486328125,
"learning_rate": 3.482266622696863e-05,
"loss": 0.7054937481880188,
"step": 1064
},
{
"epoch": 1.5562043795620437,
"grad_norm": 0.75,
"learning_rate": 3.474385355045462e-05,
"loss": 0.839794397354126,
"step": 1066
},
{
"epoch": 1.5591240875912409,
"grad_norm": 1.3515625,
"learning_rate": 3.46650434197924e-05,
"loss": 0.6114320755004883,
"step": 1068
},
{
"epoch": 1.562043795620438,
"grad_norm": 0.53515625,
"learning_rate": 3.4586236618279654e-05,
"loss": 0.6584864854812622,
"step": 1070
},
{
"epoch": 1.564963503649635,
"grad_norm": 1.046875,
"learning_rate": 3.4507433929180924e-05,
"loss": 0.6659724712371826,
"step": 1072
},
{
"epoch": 1.5678832116788322,
"grad_norm": 0.75390625,
"learning_rate": 3.4428636135719936e-05,
"loss": 0.6218416690826416,
"step": 1074
},
{
"epoch": 1.5708029197080293,
"grad_norm": 1.1328125,
"learning_rate": 3.4349844021071694e-05,
"loss": 0.41415178775787354,
"step": 1076
},
{
"epoch": 1.5737226277372263,
"grad_norm": 0.5234375,
"learning_rate": 3.427105836835482e-05,
"loss": 0.7918195724487305,
"step": 1078
},
{
"epoch": 1.5766423357664232,
"grad_norm": 0.62109375,
"learning_rate": 3.4192279960623674e-05,
"loss": 0.5616426467895508,
"step": 1080
},
{
"epoch": 1.5795620437956206,
"grad_norm": 1.390625,
"learning_rate": 3.411350958086061e-05,
"loss": 0.48918431997299194,
"step": 1082
},
{
"epoch": 1.5824817518248175,
"grad_norm": 0.447265625,
"learning_rate": 3.403474801196821e-05,
"loss": 0.8120974898338318,
"step": 1084
},
{
"epoch": 1.5854014598540145,
"grad_norm": 0.84375,
"learning_rate": 3.3955996036761454e-05,
"loss": 0.6056236624717712,
"step": 1086
},
{
"epoch": 1.5883211678832116,
"grad_norm": 0.578125,
"learning_rate": 3.3877254437960014e-05,
"loss": 0.7579033374786377,
"step": 1088
},
{
"epoch": 1.5912408759124088,
"grad_norm": 0.7890625,
"learning_rate": 3.3798523998180374e-05,
"loss": 0.6755779981613159,
"step": 1090
},
{
"epoch": 1.5941605839416058,
"grad_norm": 0.328125,
"learning_rate": 3.371980549992817e-05,
"loss": 0.6938905119895935,
"step": 1092
},
{
"epoch": 1.597080291970803,
"grad_norm": 0.412109375,
"learning_rate": 3.36410997255903e-05,
"loss": 0.5442206859588623,
"step": 1094
},
{
"epoch": 1.6,
"grad_norm": 0.443359375,
"learning_rate": 3.3562407457427236e-05,
"loss": 0.6741411685943604,
"step": 1096
},
{
"epoch": 1.602919708029197,
"grad_norm": 0.435546875,
"learning_rate": 3.3483729477565163e-05,
"loss": 0.8024434447288513,
"step": 1098
},
{
"epoch": 1.6058394160583942,
"grad_norm": 0.55078125,
"learning_rate": 3.340506656798831e-05,
"loss": 0.887005090713501,
"step": 1100
},
{
"epoch": 1.6087591240875914,
"grad_norm": 1.4296875,
"learning_rate": 3.332641951053108e-05,
"loss": 0.5712992548942566,
"step": 1102
},
{
"epoch": 1.6116788321167883,
"grad_norm": 0.380859375,
"learning_rate": 3.3247789086870364e-05,
"loss": 0.27882304787635803,
"step": 1104
},
{
"epoch": 1.6145985401459853,
"grad_norm": 0.416015625,
"learning_rate": 3.3169176078517666e-05,
"loss": 0.7044307589530945,
"step": 1106
},
{
"epoch": 1.6175182481751826,
"grad_norm": 1.171875,
"learning_rate": 3.3090581266811454e-05,
"loss": 0.4385440945625305,
"step": 1108
},
{
"epoch": 1.6204379562043796,
"grad_norm": 3.828125,
"learning_rate": 3.301200543290932e-05,
"loss": 0.5933332443237305,
"step": 1110
},
{
"epoch": 1.6233576642335765,
"grad_norm": 0.48828125,
"learning_rate": 3.293344935778023e-05,
"loss": 0.7244850397109985,
"step": 1112
},
{
"epoch": 1.6262773722627737,
"grad_norm": 0.48046875,
"learning_rate": 3.2854913822196774e-05,
"loss": 0.725429117679596,
"step": 1114
},
{
"epoch": 1.6291970802919709,
"grad_norm": 0.83203125,
"learning_rate": 3.277639960672739e-05,
"loss": 0.6351516246795654,
"step": 1116
},
{
"epoch": 1.6321167883211678,
"grad_norm": 0.50390625,
"learning_rate": 3.269790749172861e-05,
"loss": 0.5855380296707153,
"step": 1118
},
{
"epoch": 1.635036496350365,
"grad_norm": 0.33203125,
"learning_rate": 3.261943825733734e-05,
"loss": 0.5529741644859314,
"step": 1120
},
{
"epoch": 1.6379562043795621,
"grad_norm": 0.62890625,
"learning_rate": 3.254099268346303e-05,
"loss": 0.7271662354469299,
"step": 1122
},
{
"epoch": 1.640875912408759,
"grad_norm": 0.4140625,
"learning_rate": 3.2462571549780006e-05,
"loss": 0.3429512083530426,
"step": 1124
},
{
"epoch": 1.6437956204379562,
"grad_norm": 0.5703125,
"learning_rate": 3.238417563571965e-05,
"loss": 0.8319324254989624,
"step": 1126
},
{
"epoch": 1.6467153284671534,
"grad_norm": 1.2890625,
"learning_rate": 3.230580572046272e-05,
"loss": 0.5692154169082642,
"step": 1128
},
{
"epoch": 1.6496350364963503,
"grad_norm": 0.55859375,
"learning_rate": 3.222746258293153e-05,
"loss": 0.789124608039856,
"step": 1130
},
{
"epoch": 1.6525547445255473,
"grad_norm": 0.7578125,
"learning_rate": 3.2149147001782275e-05,
"loss": 0.5931746959686279,
"step": 1132
},
{
"epoch": 1.6554744525547447,
"grad_norm": 1.1015625,
"learning_rate": 3.207085975539727e-05,
"loss": 0.5728804469108582,
"step": 1134
},
{
"epoch": 1.6583941605839416,
"grad_norm": 1.046875,
"learning_rate": 3.199260162187719e-05,
"loss": 0.5378882884979248,
"step": 1136
},
{
"epoch": 1.6613138686131386,
"grad_norm": 0.8046875,
"learning_rate": 3.191437337903336e-05,
"loss": 0.7326488494873047,
"step": 1138
},
{
"epoch": 1.6642335766423357,
"grad_norm": 0.474609375,
"learning_rate": 3.183617580438004e-05,
"loss": 0.5117689371109009,
"step": 1140
},
{
"epoch": 1.667153284671533,
"grad_norm": 2.484375,
"learning_rate": 3.1758009675126656e-05,
"loss": 0.7147212028503418,
"step": 1142
},
{
"epoch": 1.6700729927007298,
"grad_norm": 0.65625,
"learning_rate": 3.16798757681701e-05,
"loss": 0.4904450476169586,
"step": 1144
},
{
"epoch": 1.672992700729927,
"grad_norm": 0.5390625,
"learning_rate": 3.160177486008702e-05,
"loss": 0.6540476083755493,
"step": 1146
},
{
"epoch": 1.6759124087591242,
"grad_norm": 4.15625,
"learning_rate": 3.152370772712605e-05,
"loss": 0.43966585397720337,
"step": 1148
},
{
"epoch": 1.6788321167883211,
"grad_norm": 0.5,
"learning_rate": 3.144567514520019e-05,
"loss": 0.6375809907913208,
"step": 1150
},
{
"epoch": 1.6817518248175183,
"grad_norm": 0.5546875,
"learning_rate": 3.136767788987898e-05,
"loss": 0.5703507661819458,
"step": 1152
},
{
"epoch": 1.6846715328467154,
"grad_norm": 0.5625,
"learning_rate": 3.128971673638088e-05,
"loss": 0.8321278691291809,
"step": 1154
},
{
"epoch": 1.6875912408759124,
"grad_norm": 0.68359375,
"learning_rate": 3.121179245956552e-05,
"loss": 0.6486014127731323,
"step": 1156
},
{
"epoch": 1.6905109489051093,
"grad_norm": 2.125,
"learning_rate": 3.1133905833926004e-05,
"loss": 0.8116127252578735,
"step": 1158
},
{
"epoch": 1.6934306569343067,
"grad_norm": 0.546875,
"learning_rate": 3.105605763358124e-05,
"loss": 0.9435343742370605,
"step": 1160
},
{
"epoch": 1.6963503649635037,
"grad_norm": 0.71484375,
"learning_rate": 3.097824863226821e-05,
"loss": 0.7895150184631348,
"step": 1162
},
{
"epoch": 1.6992700729927006,
"grad_norm": 1.2265625,
"learning_rate": 3.090047960333428e-05,
"loss": 0.6546390056610107,
"step": 1164
},
{
"epoch": 1.7021897810218978,
"grad_norm": 0.89453125,
"learning_rate": 3.082275131972958e-05,
"loss": 0.4520113468170166,
"step": 1166
},
{
"epoch": 1.705109489051095,
"grad_norm": 0.625,
"learning_rate": 3.07450645539992e-05,
"loss": 0.6498976945877075,
"step": 1168
},
{
"epoch": 1.7080291970802919,
"grad_norm": 0.5,
"learning_rate": 3.066742007827566e-05,
"loss": 0.625820517539978,
"step": 1170
},
{
"epoch": 1.710948905109489,
"grad_norm": 0.431640625,
"learning_rate": 3.058981866427107e-05,
"loss": 0.6509993076324463,
"step": 1172
},
{
"epoch": 1.7138686131386862,
"grad_norm": 3.6875,
"learning_rate": 3.0512261083269633e-05,
"loss": 0.6243072152137756,
"step": 1174
},
{
"epoch": 1.7167883211678832,
"grad_norm": 0.5078125,
"learning_rate": 3.043474810611983e-05,
"loss": 0.5908772349357605,
"step": 1176
},
{
"epoch": 1.7197080291970803,
"grad_norm": 0.59765625,
"learning_rate": 3.035728050322687e-05,
"loss": 0.7354363203048706,
"step": 1178
},
{
"epoch": 1.7226277372262775,
"grad_norm": 0.6015625,
"learning_rate": 3.027985904454493e-05,
"loss": 0.5529353618621826,
"step": 1180
},
{
"epoch": 1.7255474452554744,
"grad_norm": 0.87109375,
"learning_rate": 3.0202484499569614e-05,
"loss": 0.6982197761535645,
"step": 1182
},
{
"epoch": 1.7284671532846714,
"grad_norm": 0.6875,
"learning_rate": 3.012515763733021e-05,
"loss": 0.7173529863357544,
"step": 1184
},
{
"epoch": 1.7313868613138688,
"grad_norm": 0.88671875,
"learning_rate": 3.0047879226382115e-05,
"loss": 0.5552504062652588,
"step": 1186
},
{
"epoch": 1.7343065693430657,
"grad_norm": 1.359375,
"learning_rate": 2.9970650034799123e-05,
"loss": 0.7216619253158569,
"step": 1188
},
{
"epoch": 1.7372262773722627,
"grad_norm": 1.0546875,
"learning_rate": 2.9893470830165898e-05,
"loss": 0.679259717464447,
"step": 1190
},
{
"epoch": 1.7401459854014598,
"grad_norm": 1.609375,
"learning_rate": 2.9816342379570198e-05,
"loss": 0.8096989393234253,
"step": 1192
},
{
"epoch": 1.743065693430657,
"grad_norm": 0.5703125,
"learning_rate": 2.9739265449595435e-05,
"loss": 0.9562541246414185,
"step": 1194
},
{
"epoch": 1.745985401459854,
"grad_norm": 1.4765625,
"learning_rate": 2.9662240806312853e-05,
"loss": 0.23433877527713776,
"step": 1196
},
{
"epoch": 1.748905109489051,
"grad_norm": 0.31640625,
"learning_rate": 2.9585269215274096e-05,
"loss": 0.6434130668640137,
"step": 1198
},
{
"epoch": 1.7518248175182483,
"grad_norm": 0.98828125,
"learning_rate": 2.9508351441503465e-05,
"loss": 0.4967500567436218,
"step": 1200
},
{
"epoch": 1.7547445255474452,
"grad_norm": 0.6875,
"learning_rate": 2.9431488249490415e-05,
"loss": 0.6541959047317505,
"step": 1202
},
{
"epoch": 1.7576642335766424,
"grad_norm": 0.59375,
"learning_rate": 2.9354680403181856e-05,
"loss": 0.39902958273887634,
"step": 1204
},
{
"epoch": 1.7605839416058395,
"grad_norm": 0.671875,
"learning_rate": 2.9277928665974674e-05,
"loss": 0.45974647998809814,
"step": 1206
},
{
"epoch": 1.7635036496350365,
"grad_norm": 0.5546875,
"learning_rate": 2.9201233800708025e-05,
"loss": 0.6035457849502563,
"step": 1208
},
{
"epoch": 1.7664233576642334,
"grad_norm": 0.58203125,
"learning_rate": 2.9124596569655876e-05,
"loss": 0.6182115077972412,
"step": 1210
},
{
"epoch": 1.7693430656934308,
"grad_norm": 0.60546875,
"learning_rate": 2.9048017734519298e-05,
"loss": 0.7187447547912598,
"step": 1212
},
{
"epoch": 1.7722627737226277,
"grad_norm": 0.412109375,
"learning_rate": 2.8971498056419018e-05,
"loss": 0.928739070892334,
"step": 1214
},
{
"epoch": 1.7751824817518247,
"grad_norm": 1.6484375,
"learning_rate": 2.8895038295887756e-05,
"loss": 0.551633894443512,
"step": 1216
},
{
"epoch": 1.7781021897810219,
"grad_norm": 0.96875,
"learning_rate": 2.8818639212862733e-05,
"loss": 0.49139732122421265,
"step": 1218
},
{
"epoch": 1.781021897810219,
"grad_norm": 0.62109375,
"learning_rate": 2.8742301566678107e-05,
"loss": 0.5061070919036865,
"step": 1220
},
{
"epoch": 1.783941605839416,
"grad_norm": 0.765625,
"learning_rate": 2.8666026116057362e-05,
"loss": 0.5571912527084351,
"step": 1222
},
{
"epoch": 1.7868613138686131,
"grad_norm": 0.62890625,
"learning_rate": 2.8589813619105876e-05,
"loss": 0.7460435032844543,
"step": 1224
},
{
"epoch": 1.7897810218978103,
"grad_norm": 0.67578125,
"learning_rate": 2.851366483330327e-05,
"loss": 0.7260211706161499,
"step": 1226
},
{
"epoch": 1.7927007299270072,
"grad_norm": 1.5625,
"learning_rate": 2.8437580515496004e-05,
"loss": 0.523064374923706,
"step": 1228
},
{
"epoch": 1.7956204379562044,
"grad_norm": 1.703125,
"learning_rate": 2.8361561421889713e-05,
"loss": 0.5164039134979248,
"step": 1230
},
{
"epoch": 1.7985401459854016,
"grad_norm": 1.4609375,
"learning_rate": 2.828560830804183e-05,
"loss": 0.6638224124908447,
"step": 1232
},
{
"epoch": 1.8014598540145985,
"grad_norm": 0.515625,
"learning_rate": 2.8209721928853964e-05,
"loss": 0.717475950717926,
"step": 1234
},
{
"epoch": 1.8043795620437955,
"grad_norm": 1.171875,
"learning_rate": 2.8133903038564492e-05,
"loss": 0.5235683917999268,
"step": 1236
},
{
"epoch": 1.8072992700729928,
"grad_norm": 0.404296875,
"learning_rate": 2.8058152390740954e-05,
"loss": 0.5262608528137207,
"step": 1238
},
{
"epoch": 1.8102189781021898,
"grad_norm": 0.69921875,
"learning_rate": 2.7982470738272697e-05,
"loss": 0.7407115697860718,
"step": 1240
},
{
"epoch": 1.8131386861313867,
"grad_norm": 0.87109375,
"learning_rate": 2.7906858833363256e-05,
"loss": 0.6663039922714233,
"step": 1242
},
{
"epoch": 1.816058394160584,
"grad_norm": 0.474609375,
"learning_rate": 2.783131742752298e-05,
"loss": 0.6756942868232727,
"step": 1244
},
{
"epoch": 1.818978102189781,
"grad_norm": 2.0625,
"learning_rate": 2.775584727156151e-05,
"loss": 0.39022934436798096,
"step": 1246
},
{
"epoch": 1.821897810218978,
"grad_norm": 0.5859375,
"learning_rate": 2.768044911558034e-05,
"loss": 0.7429840564727783,
"step": 1248
},
{
"epoch": 1.8248175182481752,
"grad_norm": 0.5,
"learning_rate": 2.7605123708965334e-05,
"loss": 0.7059404850006104,
"step": 1250
},
{
"epoch": 1.8277372262773723,
"grad_norm": 0.609375,
"learning_rate": 2.752987180037932e-05,
"loss": 0.7387444972991943,
"step": 1252
},
{
"epoch": 1.8306569343065693,
"grad_norm": 0.76953125,
"learning_rate": 2.7454694137754615e-05,
"loss": 0.5452640652656555,
"step": 1254
},
{
"epoch": 1.8335766423357664,
"grad_norm": 0.98046875,
"learning_rate": 2.73795914682856e-05,
"loss": 0.6368634700775146,
"step": 1256
},
{
"epoch": 1.8364963503649636,
"grad_norm": 0.69140625,
"learning_rate": 2.7304564538421298e-05,
"loss": 0.528918981552124,
"step": 1258
},
{
"epoch": 1.8394160583941606,
"grad_norm": 2.328125,
"learning_rate": 2.7229614093857955e-05,
"loss": 0.4514443278312683,
"step": 1260
},
{
"epoch": 1.8423357664233575,
"grad_norm": 1.125,
"learning_rate": 2.7154740879531608e-05,
"loss": 0.5287116765975952,
"step": 1262
},
{
"epoch": 1.845255474452555,
"grad_norm": 1.3671875,
"learning_rate": 2.7079945639610734e-05,
"loss": 0.7141860723495483,
"step": 1264
},
{
"epoch": 1.8481751824817518,
"grad_norm": 0.498046875,
"learning_rate": 2.700522911748878e-05,
"loss": 0.7375243306159973,
"step": 1266
},
{
"epoch": 1.8510948905109488,
"grad_norm": 0.90625,
"learning_rate": 2.693059205577685e-05,
"loss": 0.5961591601371765,
"step": 1268
},
{
"epoch": 1.854014598540146,
"grad_norm": 0.57421875,
"learning_rate": 2.6856035196296247e-05,
"loss": 0.8086559772491455,
"step": 1270
},
{
"epoch": 1.856934306569343,
"grad_norm": 0.4921875,
"learning_rate": 2.6781559280071186e-05,
"loss": 0.5709831714630127,
"step": 1272
},
{
"epoch": 1.85985401459854,
"grad_norm": 0.69921875,
"learning_rate": 2.6707165047321337e-05,
"loss": 0.6018452644348145,
"step": 1274
},
{
"epoch": 1.8627737226277372,
"grad_norm": 1.6015625,
"learning_rate": 2.6632853237454555e-05,
"loss": 0.9354125261306763,
"step": 1276
},
{
"epoch": 1.8656934306569344,
"grad_norm": 0.90625,
"learning_rate": 2.6558624589059454e-05,
"loss": 0.7831665873527527,
"step": 1278
},
{
"epoch": 1.8686131386861313,
"grad_norm": 0.380859375,
"learning_rate": 2.6484479839898127e-05,
"loss": 0.7829075455665588,
"step": 1280
},
{
"epoch": 1.8715328467153285,
"grad_norm": 0.6640625,
"learning_rate": 2.6410419726898782e-05,
"loss": 0.6603153944015503,
"step": 1282
},
{
"epoch": 1.8744525547445257,
"grad_norm": 0.68359375,
"learning_rate": 2.633644498614842e-05,
"loss": 0.4937764108181,
"step": 1284
},
{
"epoch": 1.8773722627737226,
"grad_norm": 0.59375,
"learning_rate": 2.6262556352885528e-05,
"loss": 0.7518973350524902,
"step": 1286
},
{
"epoch": 1.8802919708029195,
"grad_norm": 0.56640625,
"learning_rate": 2.618875456149276e-05,
"loss": 0.8098867535591125,
"step": 1288
},
{
"epoch": 1.883211678832117,
"grad_norm": 0.7890625,
"learning_rate": 2.6115040345489654e-05,
"loss": 0.8065282702445984,
"step": 1290
},
{
"epoch": 1.8861313868613139,
"grad_norm": 1.9296875,
"learning_rate": 2.604141443752532e-05,
"loss": 0.45944589376449585,
"step": 1292
},
{
"epoch": 1.8890510948905108,
"grad_norm": 0.6328125,
"learning_rate": 2.5967877569371193e-05,
"loss": 0.41555699706077576,
"step": 1294
},
{
"epoch": 1.891970802919708,
"grad_norm": 0.5859375,
"learning_rate": 2.5894430471913706e-05,
"loss": 0.5259090662002563,
"step": 1296
},
{
"epoch": 1.8948905109489051,
"grad_norm": 1.7109375,
"learning_rate": 2.5821073875147074e-05,
"loss": 0.5657607316970825,
"step": 1298
},
{
"epoch": 1.897810218978102,
"grad_norm": 0.9921875,
"learning_rate": 2.5747808508166022e-05,
"loss": 0.5103273391723633,
"step": 1300
},
{
"epoch": 1.9007299270072993,
"grad_norm": 0.5078125,
"learning_rate": 2.567463509915854e-05,
"loss": 0.7555137872695923,
"step": 1302
},
{
"epoch": 1.9036496350364964,
"grad_norm": 0.3125,
"learning_rate": 2.5601554375398626e-05,
"loss": 0.22839674353599548,
"step": 1304
},
{
"epoch": 1.9065693430656934,
"grad_norm": 1.3984375,
"learning_rate": 2.55285670632391e-05,
"loss": 0.5510879158973694,
"step": 1306
},
{
"epoch": 1.9094890510948905,
"grad_norm": 2.75,
"learning_rate": 2.545567388810434e-05,
"loss": 0.9097703099250793,
"step": 1308
},
{
"epoch": 1.9124087591240877,
"grad_norm": 0.671875,
"learning_rate": 2.5382875574483104e-05,
"loss": 0.5909059047698975,
"step": 1310
},
{
"epoch": 1.9153284671532846,
"grad_norm": 1.3046875,
"learning_rate": 2.53101728459213e-05,
"loss": 0.6244196891784668,
"step": 1312
},
{
"epoch": 1.9182481751824818,
"grad_norm": 0.51953125,
"learning_rate": 2.523756642501483e-05,
"loss": 0.5189331769943237,
"step": 1314
},
{
"epoch": 1.921167883211679,
"grad_norm": 0.875,
"learning_rate": 2.5165057033402388e-05,
"loss": 0.6180688738822937,
"step": 1316
},
{
"epoch": 1.924087591240876,
"grad_norm": 0.345703125,
"learning_rate": 2.5092645391758282e-05,
"loss": 0.7852336764335632,
"step": 1318
},
{
"epoch": 1.9270072992700729,
"grad_norm": 0.44921875,
"learning_rate": 2.5020332219785266e-05,
"loss": 0.6935960054397583,
"step": 1320
},
{
"epoch": 1.92992700729927,
"grad_norm": 0.5390625,
"learning_rate": 2.4948118236207428e-05,
"loss": 0.7328112721443176,
"step": 1322
},
{
"epoch": 1.9328467153284672,
"grad_norm": 0.25,
"learning_rate": 2.4876004158763e-05,
"loss": 0.4806957244873047,
"step": 1324
},
{
"epoch": 1.9357664233576641,
"grad_norm": 0.59375,
"learning_rate": 2.4803990704197254e-05,
"loss": 0.6416811943054199,
"step": 1326
},
{
"epoch": 1.9386861313868613,
"grad_norm": 0.59375,
"learning_rate": 2.4732078588255335e-05,
"loss": 0.5719661712646484,
"step": 1328
},
{
"epoch": 1.9416058394160585,
"grad_norm": 0.52734375,
"learning_rate": 2.4660268525675224e-05,
"loss": 0.7179700136184692,
"step": 1330
},
{
"epoch": 1.9445255474452554,
"grad_norm": 0.6015625,
"learning_rate": 2.458856123018055e-05,
"loss": 0.698421835899353,
"step": 1332
},
{
"epoch": 1.9474452554744526,
"grad_norm": 1.640625,
"learning_rate": 2.4516957414473565e-05,
"loss": 0.2941528260707855,
"step": 1334
},
{
"epoch": 1.9503649635036497,
"grad_norm": 0.72265625,
"learning_rate": 2.4445457790228005e-05,
"loss": 0.7893608808517456,
"step": 1336
},
{
"epoch": 1.9532846715328467,
"grad_norm": 0.69921875,
"learning_rate": 2.4374063068082062e-05,
"loss": 0.749394953250885,
"step": 1338
},
{
"epoch": 1.9562043795620438,
"grad_norm": 0.396484375,
"learning_rate": 2.430277395763128e-05,
"loss": 0.588677704334259,
"step": 1340
},
{
"epoch": 1.959124087591241,
"grad_norm": 0.6171875,
"learning_rate": 2.423159116742155e-05,
"loss": 0.7306614518165588,
"step": 1342
},
{
"epoch": 1.962043795620438,
"grad_norm": 2.1875,
"learning_rate": 2.416051540494202e-05,
"loss": 0.43609586358070374,
"step": 1344
},
{
"epoch": 1.964963503649635,
"grad_norm": 0.74609375,
"learning_rate": 2.40895473766181e-05,
"loss": 0.6756691336631775,
"step": 1346
},
{
"epoch": 1.967883211678832,
"grad_norm": 0.49609375,
"learning_rate": 2.4018687787804397e-05,
"loss": 0.7406471967697144,
"step": 1348
},
{
"epoch": 1.9708029197080292,
"grad_norm": 0.76953125,
"learning_rate": 2.394793734277777e-05,
"loss": 0.6689704656600952,
"step": 1350
},
{
"epoch": 1.9737226277372262,
"grad_norm": 0.4921875,
"learning_rate": 2.387729674473027e-05,
"loss": 0.49914392828941345,
"step": 1352
},
{
"epoch": 1.9766423357664233,
"grad_norm": 0.5625,
"learning_rate": 2.380676669576218e-05,
"loss": 0.5775256156921387,
"step": 1354
},
{
"epoch": 1.9795620437956205,
"grad_norm": 0.65234375,
"learning_rate": 2.373634789687502e-05,
"loss": 0.8281344175338745,
"step": 1356
},
{
"epoch": 1.9824817518248175,
"grad_norm": 0.384765625,
"learning_rate": 2.3666041047964605e-05,
"loss": 0.9128406047821045,
"step": 1358
},
{
"epoch": 1.9854014598540146,
"grad_norm": 1.65625,
"learning_rate": 2.359584684781407e-05,
"loss": 0.4032854437828064,
"step": 1360
},
{
"epoch": 1.9883211678832118,
"grad_norm": 1.203125,
"learning_rate": 2.3525765994086912e-05,
"loss": 0.507628858089447,
"step": 1362
},
{
"epoch": 1.9912408759124087,
"grad_norm": 0.470703125,
"learning_rate": 2.3455799183320102e-05,
"loss": 0.6595430374145508,
"step": 1364
},
{
"epoch": 1.994160583941606,
"grad_norm": 0.55859375,
"learning_rate": 2.3385947110917122e-05,
"loss": 0.7209213972091675,
"step": 1366
},
{
"epoch": 1.997080291970803,
"grad_norm": 0.9296875,
"learning_rate": 2.331621047114105e-05,
"loss": 0.6316984295845032,
"step": 1368
},
{
"epoch": 2.0,
"grad_norm": 1.265625,
"learning_rate": 2.3246589957107666e-05,
"loss": 0.5848791599273682,
"step": 1370
},
{
"epoch": 2.002919708029197,
"grad_norm": 0.451171875,
"learning_rate": 2.3177086260778612e-05,
"loss": 1.0494563579559326,
"step": 1372
},
{
"epoch": 2.0058394160583943,
"grad_norm": 1.140625,
"learning_rate": 2.310770007295445e-05,
"loss": 0.41478756070137024,
"step": 1374
},
{
"epoch": 2.0087591240875913,
"grad_norm": 0.62109375,
"learning_rate": 2.3038432083267808e-05,
"loss": 0.547417163848877,
"step": 1376
},
{
"epoch": 2.011678832116788,
"grad_norm": 0.56640625,
"learning_rate": 2.2969282980176526e-05,
"loss": 0.6029900312423706,
"step": 1378
},
{
"epoch": 2.0145985401459856,
"grad_norm": 0.392578125,
"learning_rate": 2.2900253450956885e-05,
"loss": 0.8512624502182007,
"step": 1380
},
{
"epoch": 2.0175182481751825,
"grad_norm": 0.498046875,
"learning_rate": 2.2831344181696654e-05,
"loss": 0.5808537006378174,
"step": 1382
},
{
"epoch": 2.0204379562043795,
"grad_norm": 0.65625,
"learning_rate": 2.2762555857288363e-05,
"loss": 0.7059779167175293,
"step": 1384
},
{
"epoch": 2.0233576642335764,
"grad_norm": 1.5859375,
"learning_rate": 2.269388916142244e-05,
"loss": 0.698493480682373,
"step": 1386
},
{
"epoch": 2.026277372262774,
"grad_norm": 0.57421875,
"learning_rate": 2.2625344776580486e-05,
"loss": 0.6546064019203186,
"step": 1388
},
{
"epoch": 2.0291970802919708,
"grad_norm": 0.87890625,
"learning_rate": 2.2556923384028394e-05,
"loss": 0.6731595993041992,
"step": 1390
},
{
"epoch": 2.0321167883211677,
"grad_norm": 0.703125,
"learning_rate": 2.248862566380967e-05,
"loss": 0.566330075263977,
"step": 1392
},
{
"epoch": 2.035036496350365,
"grad_norm": 0.455078125,
"learning_rate": 2.2420452294738604e-05,
"loss": 0.5171869993209839,
"step": 1394
},
{
"epoch": 2.037956204379562,
"grad_norm": 1.046875,
"learning_rate": 2.2352403954393592e-05,
"loss": 0.3384949564933777,
"step": 1396
},
{
"epoch": 2.040875912408759,
"grad_norm": 1.078125,
"learning_rate": 2.2284481319110333e-05,
"loss": 0.41768980026245117,
"step": 1398
},
{
"epoch": 2.0437956204379564,
"grad_norm": 0.5859375,
"learning_rate": 2.2216685063975147e-05,
"loss": 0.6205852627754211,
"step": 1400
},
{
"epoch": 2.0467153284671533,
"grad_norm": 0.59765625,
"learning_rate": 2.214901586281824e-05,
"loss": 0.42609018087387085,
"step": 1402
},
{
"epoch": 2.0496350364963503,
"grad_norm": 0.69921875,
"learning_rate": 2.2081474388207062e-05,
"loss": 0.5273014307022095,
"step": 1404
},
{
"epoch": 2.0525547445255476,
"grad_norm": 0.95703125,
"learning_rate": 2.2014061311439547e-05,
"loss": 0.2152610421180725,
"step": 1406
},
{
"epoch": 2.0554744525547446,
"grad_norm": 0.546875,
"learning_rate": 2.1946777302537487e-05,
"loss": 0.42848068475723267,
"step": 1408
},
{
"epoch": 2.0583941605839415,
"grad_norm": 1.46875,
"learning_rate": 2.1879623030239855e-05,
"loss": 0.5728009343147278,
"step": 1410
},
{
"epoch": 2.0613138686131385,
"grad_norm": 0.85546875,
"learning_rate": 2.1812599161996192e-05,
"loss": 0.2542463541030884,
"step": 1412
},
{
"epoch": 2.064233576642336,
"grad_norm": 0.7109375,
"learning_rate": 2.1745706363959914e-05,
"loss": 0.3851102590560913,
"step": 1414
},
{
"epoch": 2.067153284671533,
"grad_norm": 0.85546875,
"learning_rate": 2.1678945300981753e-05,
"loss": 0.23180317878723145,
"step": 1416
},
{
"epoch": 2.0700729927007298,
"grad_norm": 0.95703125,
"learning_rate": 2.1612316636603082e-05,
"loss": 0.31533747911453247,
"step": 1418
},
{
"epoch": 2.072992700729927,
"grad_norm": 0.486328125,
"learning_rate": 2.1545821033049407e-05,
"loss": 0.22496944665908813,
"step": 1420
},
{
"epoch": 2.075912408759124,
"grad_norm": 0.455078125,
"learning_rate": 2.14794591512237e-05,
"loss": 0.8437415361404419,
"step": 1422
},
{
"epoch": 2.078832116788321,
"grad_norm": 0.671875,
"learning_rate": 2.1413231650699877e-05,
"loss": 0.28613120317459106,
"step": 1424
},
{
"epoch": 2.0817518248175184,
"grad_norm": 0.51953125,
"learning_rate": 2.1347139189716225e-05,
"loss": 0.2427263855934143,
"step": 1426
},
{
"epoch": 2.0846715328467154,
"grad_norm": 0.53125,
"learning_rate": 2.1281182425168885e-05,
"loss": 0.35121026635169983,
"step": 1428
},
{
"epoch": 2.0875912408759123,
"grad_norm": 0.6328125,
"learning_rate": 2.1215362012605297e-05,
"loss": 0.2667168974876404,
"step": 1430
},
{
"epoch": 2.0905109489051097,
"grad_norm": 1.1953125,
"learning_rate": 2.114967860621766e-05,
"loss": 0.30120381712913513,
"step": 1432
},
{
"epoch": 2.0934306569343066,
"grad_norm": 0.55859375,
"learning_rate": 2.1084132858836522e-05,
"loss": 0.27142685651779175,
"step": 1434
},
{
"epoch": 2.0963503649635036,
"grad_norm": 0.4375,
"learning_rate": 2.1018725421924195e-05,
"loss": 0.34424224495887756,
"step": 1436
},
{
"epoch": 2.0992700729927005,
"grad_norm": 0.6171875,
"learning_rate": 2.095345694556831e-05,
"loss": 0.3176770508289337,
"step": 1438
},
{
"epoch": 2.102189781021898,
"grad_norm": 0.51953125,
"learning_rate": 2.0888328078475364e-05,
"loss": 0.2459079474210739,
"step": 1440
},
{
"epoch": 2.105109489051095,
"grad_norm": 0.94921875,
"learning_rate": 2.0823339467964312e-05,
"loss": 0.4240154027938843,
"step": 1442
},
{
"epoch": 2.108029197080292,
"grad_norm": 0.5625,
"learning_rate": 2.0758491759960037e-05,
"loss": 0.508914053440094,
"step": 1444
},
{
"epoch": 2.110948905109489,
"grad_norm": 0.474609375,
"learning_rate": 2.0693785598987023e-05,
"loss": 0.4890514314174652,
"step": 1446
},
{
"epoch": 2.113868613138686,
"grad_norm": 0.5078125,
"learning_rate": 2.0629221628162876e-05,
"loss": 0.28382956981658936,
"step": 1448
},
{
"epoch": 2.116788321167883,
"grad_norm": 0.38671875,
"learning_rate": 2.0564800489192013e-05,
"loss": 0.6701063513755798,
"step": 1450
},
{
"epoch": 2.1197080291970805,
"grad_norm": 0.484375,
"learning_rate": 2.0500522822359208e-05,
"loss": 0.2041202187538147,
"step": 1452
},
{
"epoch": 2.1226277372262774,
"grad_norm": 0.54296875,
"learning_rate": 2.0436389266523257e-05,
"loss": 0.3254830241203308,
"step": 1454
},
{
"epoch": 2.1255474452554743,
"grad_norm": 0.62109375,
"learning_rate": 2.0372400459110633e-05,
"loss": 0.2847502827644348,
"step": 1456
},
{
"epoch": 2.1284671532846717,
"grad_norm": 0.46484375,
"learning_rate": 2.0308557036109167e-05,
"loss": 0.250871479511261,
"step": 1458
},
{
"epoch": 2.1313868613138687,
"grad_norm": 0.498046875,
"learning_rate": 2.024485963206169e-05,
"loss": 0.37414756417274475,
"step": 1460
},
{
"epoch": 2.1343065693430656,
"grad_norm": 0.5234375,
"learning_rate": 2.018130888005974e-05,
"loss": 0.5036002397537231,
"step": 1462
},
{
"epoch": 2.137226277372263,
"grad_norm": 0.4609375,
"learning_rate": 2.0117905411737267e-05,
"loss": 0.510346531867981,
"step": 1464
},
{
"epoch": 2.14014598540146,
"grad_norm": 3.953125,
"learning_rate": 2.005464985726441e-05,
"loss": 0.1956787109375,
"step": 1466
},
{
"epoch": 2.143065693430657,
"grad_norm": 0.4921875,
"learning_rate": 1.999154284534111e-05,
"loss": 0.24436712265014648,
"step": 1468
},
{
"epoch": 2.145985401459854,
"grad_norm": 0.2578125,
"learning_rate": 1.9928585003191e-05,
"loss": 0.20835143327713013,
"step": 1470
},
{
"epoch": 2.1489051094890512,
"grad_norm": 0.4609375,
"learning_rate": 1.9865776956555066e-05,
"loss": 0.2522450089454651,
"step": 1472
},
{
"epoch": 2.151824817518248,
"grad_norm": 0.427734375,
"learning_rate": 1.9803119329685513e-05,
"loss": 0.23127983510494232,
"step": 1474
},
{
"epoch": 2.154744525547445,
"grad_norm": 0.94921875,
"learning_rate": 1.974061274533946e-05,
"loss": 0.19277967512607574,
"step": 1476
},
{
"epoch": 2.1576642335766425,
"grad_norm": 1.1328125,
"learning_rate": 1.9678257824772857e-05,
"loss": 0.4629068374633789,
"step": 1478
},
{
"epoch": 2.1605839416058394,
"grad_norm": 0.41796875,
"learning_rate": 1.961605518773422e-05,
"loss": 0.49624115228652954,
"step": 1480
},
{
"epoch": 2.1635036496350364,
"grad_norm": 0.453125,
"learning_rate": 1.9554005452458563e-05,
"loss": 0.20151491463184357,
"step": 1482
},
{
"epoch": 2.1664233576642338,
"grad_norm": 0.8125,
"learning_rate": 1.949210923566114e-05,
"loss": 0.491416871547699,
"step": 1484
},
{
"epoch": 2.1693430656934307,
"grad_norm": 0.46875,
"learning_rate": 1.9430367152531432e-05,
"loss": 0.21659183502197266,
"step": 1486
},
{
"epoch": 2.1722627737226277,
"grad_norm": 1.484375,
"learning_rate": 1.9368779816726938e-05,
"loss": 0.41106492280960083,
"step": 1488
},
{
"epoch": 2.1751824817518246,
"grad_norm": 0.875,
"learning_rate": 1.9307347840367156e-05,
"loss": 0.1200709193944931,
"step": 1490
},
{
"epoch": 2.178102189781022,
"grad_norm": 0.80859375,
"learning_rate": 1.9246071834027406e-05,
"loss": 0.16358846426010132,
"step": 1492
},
{
"epoch": 2.181021897810219,
"grad_norm": 0.5234375,
"learning_rate": 1.9184952406732864e-05,
"loss": 0.1703709363937378,
"step": 1494
},
{
"epoch": 2.183941605839416,
"grad_norm": 0.64453125,
"learning_rate": 1.9123990165952438e-05,
"loss": 0.17072588205337524,
"step": 1496
},
{
"epoch": 2.1868613138686133,
"grad_norm": 0.44921875,
"learning_rate": 1.906318571759274e-05,
"loss": 0.16185638308525085,
"step": 1498
},
{
"epoch": 2.18978102189781,
"grad_norm": 0.609375,
"learning_rate": 1.900253966599211e-05,
"loss": 0.4327083230018616,
"step": 1500
},
{
"epoch": 2.192700729927007,
"grad_norm": 1.1328125,
"learning_rate": 1.8942052613914533e-05,
"loss": 0.5814958810806274,
"step": 1502
},
{
"epoch": 2.1956204379562045,
"grad_norm": 0.86328125,
"learning_rate": 1.8881725162543743e-05,
"loss": 0.13417096436023712,
"step": 1504
},
{
"epoch": 2.1985401459854015,
"grad_norm": 0.42578125,
"learning_rate": 1.8821557911477123e-05,
"loss": 0.2804584503173828,
"step": 1506
},
{
"epoch": 2.2014598540145984,
"grad_norm": 0.35546875,
"learning_rate": 1.8761551458719887e-05,
"loss": 0.4399762749671936,
"step": 1508
},
{
"epoch": 2.204379562043796,
"grad_norm": 0.578125,
"learning_rate": 1.8701706400679023e-05,
"loss": 0.1459416151046753,
"step": 1510
},
{
"epoch": 2.2072992700729928,
"grad_norm": 0.482421875,
"learning_rate": 1.8642023332157446e-05,
"loss": 0.19403180480003357,
"step": 1512
},
{
"epoch": 2.2102189781021897,
"grad_norm": 0.439453125,
"learning_rate": 1.8582502846347988e-05,
"loss": 0.22103887796401978,
"step": 1514
},
{
"epoch": 2.213138686131387,
"grad_norm": 0.9921875,
"learning_rate": 1.8523145534827637e-05,
"loss": 0.12535154819488525,
"step": 1516
},
{
"epoch": 2.216058394160584,
"grad_norm": 0.458984375,
"learning_rate": 1.8463951987551536e-05,
"loss": 0.20944124460220337,
"step": 1518
},
{
"epoch": 2.218978102189781,
"grad_norm": 0.44921875,
"learning_rate": 1.8404922792847193e-05,
"loss": 0.23369359970092773,
"step": 1520
},
{
"epoch": 2.221897810218978,
"grad_norm": 0.451171875,
"learning_rate": 1.8346058537408573e-05,
"loss": 0.04991687834262848,
"step": 1522
},
{
"epoch": 2.2248175182481753,
"grad_norm": 0.9609375,
"learning_rate": 1.8287359806290346e-05,
"loss": 0.39661896228790283,
"step": 1524
},
{
"epoch": 2.2277372262773723,
"grad_norm": 0.32421875,
"learning_rate": 1.8228827182902e-05,
"loss": 0.027169307693839073,
"step": 1526
},
{
"epoch": 2.230656934306569,
"grad_norm": 0.546875,
"learning_rate": 1.8170461249002077e-05,
"loss": 0.15711988508701324,
"step": 1528
},
{
"epoch": 2.2335766423357666,
"grad_norm": 0.8203125,
"learning_rate": 1.811226258469238e-05,
"loss": 0.46470558643341064,
"step": 1530
},
{
"epoch": 2.2364963503649635,
"grad_norm": 0.49609375,
"learning_rate": 1.8054231768412225e-05,
"loss": 0.14680901169776917,
"step": 1532
},
{
"epoch": 2.2394160583941605,
"grad_norm": 0.376953125,
"learning_rate": 1.7996369376932668e-05,
"loss": 0.44296467304229736,
"step": 1534
},
{
"epoch": 2.242335766423358,
"grad_norm": 0.8515625,
"learning_rate": 1.7938675985350778e-05,
"loss": 0.11045099794864655,
"step": 1536
},
{
"epoch": 2.245255474452555,
"grad_norm": 0.484375,
"learning_rate": 1.788115216708393e-05,
"loss": 0.46650534868240356,
"step": 1538
},
{
"epoch": 2.2481751824817517,
"grad_norm": 0.85546875,
"learning_rate": 1.7823798493864113e-05,
"loss": 0.16499537229537964,
"step": 1540
},
{
"epoch": 2.2510948905109487,
"grad_norm": 0.41015625,
"learning_rate": 1.776661553573222e-05,
"loss": 0.4719018340110779,
"step": 1542
},
{
"epoch": 2.254014598540146,
"grad_norm": 0.62890625,
"learning_rate": 1.7709603861032415e-05,
"loss": 0.10700561106204987,
"step": 1544
},
{
"epoch": 2.256934306569343,
"grad_norm": 0.447265625,
"learning_rate": 1.7652764036406445e-05,
"loss": 0.14236462116241455,
"step": 1546
},
{
"epoch": 2.25985401459854,
"grad_norm": 0.96875,
"learning_rate": 1.7596096626788055e-05,
"loss": 0.4474194049835205,
"step": 1548
},
{
"epoch": 2.2627737226277373,
"grad_norm": 0.70703125,
"learning_rate": 1.7539602195397337e-05,
"loss": 0.13715533912181854,
"step": 1550
},
{
"epoch": 2.2656934306569343,
"grad_norm": 0.470703125,
"learning_rate": 1.7483281303735143e-05,
"loss": 0.12302975356578827,
"step": 1552
},
{
"epoch": 2.2686131386861312,
"grad_norm": 0.44140625,
"learning_rate": 1.7427134511577503e-05,
"loss": 0.3969992995262146,
"step": 1554
},
{
"epoch": 2.2715328467153286,
"grad_norm": 0.47265625,
"learning_rate": 1.737116237697008e-05,
"loss": 0.18677493929862976,
"step": 1556
},
{
"epoch": 2.2744525547445256,
"grad_norm": 0.58984375,
"learning_rate": 1.7315365456222585e-05,
"loss": 0.4171912670135498,
"step": 1558
},
{
"epoch": 2.2773722627737225,
"grad_norm": 0.50390625,
"learning_rate": 1.7259744303903292e-05,
"loss": 0.2462674379348755,
"step": 1560
},
{
"epoch": 2.28029197080292,
"grad_norm": 0.57421875,
"learning_rate": 1.7204299472833474e-05,
"loss": 0.13167493045330048,
"step": 1562
},
{
"epoch": 2.283211678832117,
"grad_norm": 0.56640625,
"learning_rate": 1.7149031514081978e-05,
"loss": 0.10060621798038483,
"step": 1564
},
{
"epoch": 2.286131386861314,
"grad_norm": 0.48828125,
"learning_rate": 1.7093940976959682e-05,
"loss": 0.20011527836322784,
"step": 1566
},
{
"epoch": 2.289051094890511,
"grad_norm": 0.455078125,
"learning_rate": 1.7039028409014057e-05,
"loss": 0.2114190012216568,
"step": 1568
},
{
"epoch": 2.291970802919708,
"grad_norm": 0.412109375,
"learning_rate": 1.6984294356023756e-05,
"loss": 0.11579601466655731,
"step": 1570
},
{
"epoch": 2.294890510948905,
"grad_norm": 0.484375,
"learning_rate": 1.6929739361993152e-05,
"loss": 0.14887449145317078,
"step": 1572
},
{
"epoch": 2.297810218978102,
"grad_norm": 0.443359375,
"learning_rate": 1.6875363969146922e-05,
"loss": 0.14829905331134796,
"step": 1574
},
{
"epoch": 2.3007299270072994,
"grad_norm": 0.48046875,
"learning_rate": 1.6821168717924696e-05,
"loss": 0.1990872621536255,
"step": 1576
},
{
"epoch": 2.3036496350364963,
"grad_norm": 0.408203125,
"learning_rate": 1.6767154146975678e-05,
"loss": 0.17175400257110596,
"step": 1578
},
{
"epoch": 2.3065693430656933,
"grad_norm": 0.419921875,
"learning_rate": 1.6713320793153272e-05,
"loss": 0.11114311963319778,
"step": 1580
},
{
"epoch": 2.3094890510948907,
"grad_norm": 0.37109375,
"learning_rate": 1.665966919150975e-05,
"loss": 0.7707895040512085,
"step": 1582
},
{
"epoch": 2.3124087591240876,
"grad_norm": 1.21875,
"learning_rate": 1.6606199875290936e-05,
"loss": 0.6511590480804443,
"step": 1584
},
{
"epoch": 2.3153284671532846,
"grad_norm": 0.53515625,
"learning_rate": 1.655291337593094e-05,
"loss": 0.48480451107025146,
"step": 1586
},
{
"epoch": 2.318248175182482,
"grad_norm": 1.2578125,
"learning_rate": 1.6499810223046812e-05,
"loss": 0.33573096990585327,
"step": 1588
},
{
"epoch": 2.321167883211679,
"grad_norm": 0.609375,
"learning_rate": 1.6446890944433333e-05,
"loss": 0.2068139910697937,
"step": 1590
},
{
"epoch": 2.324087591240876,
"grad_norm": 0.73046875,
"learning_rate": 1.6394156066057737e-05,
"loss": 0.1301928609609604,
"step": 1592
},
{
"epoch": 2.3270072992700728,
"grad_norm": 0.53515625,
"learning_rate": 1.6341606112054513e-05,
"loss": 0.8669120073318481,
"step": 1594
},
{
"epoch": 2.32992700729927,
"grad_norm": 0.546875,
"learning_rate": 1.6289241604720157e-05,
"loss": 0.18708989024162292,
"step": 1596
},
{
"epoch": 2.332846715328467,
"grad_norm": 0.283203125,
"learning_rate": 1.6237063064508013e-05,
"loss": 0.5843273401260376,
"step": 1598
},
{
"epoch": 2.335766423357664,
"grad_norm": 0.6640625,
"learning_rate": 1.6185071010023086e-05,
"loss": 0.4750453233718872,
"step": 1600
},
{
"epoch": 2.3386861313868614,
"grad_norm": 0.435546875,
"learning_rate": 1.613326595801689e-05,
"loss": 0.09889721870422363,
"step": 1602
},
{
"epoch": 2.3416058394160584,
"grad_norm": 0.59765625,
"learning_rate": 1.6081648423382324e-05,
"loss": 0.1712934672832489,
"step": 1604
},
{
"epoch": 2.3445255474452553,
"grad_norm": 1.28125,
"learning_rate": 1.6030218919148516e-05,
"loss": 0.16689908504486084,
"step": 1606
},
{
"epoch": 2.3474452554744527,
"grad_norm": 0.43359375,
"learning_rate": 1.597897795647577e-05,
"loss": 0.19200465083122253,
"step": 1608
},
{
"epoch": 2.3503649635036497,
"grad_norm": 0.72265625,
"learning_rate": 1.592792604465048e-05,
"loss": 0.10364709794521332,
"step": 1610
},
{
"epoch": 2.3532846715328466,
"grad_norm": 0.984375,
"learning_rate": 1.587706369108003e-05,
"loss": 0.31456390023231506,
"step": 1612
},
{
"epoch": 2.356204379562044,
"grad_norm": 0.3984375,
"learning_rate": 1.5826391401287778e-05,
"loss": 0.1509348750114441,
"step": 1614
},
{
"epoch": 2.359124087591241,
"grad_norm": 0.85546875,
"learning_rate": 1.5775909678908028e-05,
"loss": 0.5223057270050049,
"step": 1616
},
{
"epoch": 2.362043795620438,
"grad_norm": 1.2265625,
"learning_rate": 1.572561902568105e-05,
"loss": 0.4159151017665863,
"step": 1618
},
{
"epoch": 2.3649635036496353,
"grad_norm": 0.498046875,
"learning_rate": 1.5675519941448042e-05,
"loss": 0.4469154179096222,
"step": 1620
},
{
"epoch": 2.367883211678832,
"grad_norm": 0.61328125,
"learning_rate": 1.56256129241462e-05,
"loss": 0.12339417636394501,
"step": 1622
},
{
"epoch": 2.370802919708029,
"grad_norm": 0.69140625,
"learning_rate": 1.5575898469803747e-05,
"loss": 0.0839366763830185,
"step": 1624
},
{
"epoch": 2.373722627737226,
"grad_norm": 2.3125,
"learning_rate": 1.552637707253503e-05,
"loss": 0.5269365310668945,
"step": 1626
},
{
"epoch": 2.3766423357664235,
"grad_norm": 0.54296875,
"learning_rate": 1.5477049224535585e-05,
"loss": 0.12736204266548157,
"step": 1628
},
{
"epoch": 2.3795620437956204,
"grad_norm": 1.203125,
"learning_rate": 1.5427915416077247e-05,
"loss": 0.46538758277893066,
"step": 1630
},
{
"epoch": 2.3824817518248174,
"grad_norm": 0.57421875,
"learning_rate": 1.5378976135503283e-05,
"loss": 0.4754159450531006,
"step": 1632
},
{
"epoch": 2.3854014598540147,
"grad_norm": 0.365234375,
"learning_rate": 1.533023186922356e-05,
"loss": 0.23388564586639404,
"step": 1634
},
{
"epoch": 2.3883211678832117,
"grad_norm": 0.6484375,
"learning_rate": 1.5281683101709646e-05,
"loss": 0.13159261643886566,
"step": 1636
},
{
"epoch": 2.3912408759124086,
"grad_norm": 0.423828125,
"learning_rate": 1.5233330315490072e-05,
"loss": 0.4673286974430084,
"step": 1638
},
{
"epoch": 2.394160583941606,
"grad_norm": 1.40625,
"learning_rate": 1.5185173991145504e-05,
"loss": 0.14024242758750916,
"step": 1640
},
{
"epoch": 2.397080291970803,
"grad_norm": 0.44921875,
"learning_rate": 1.5137214607303939e-05,
"loss": 0.4857579171657562,
"step": 1642
},
{
"epoch": 2.4,
"grad_norm": 0.5078125,
"learning_rate": 1.5089452640635994e-05,
"loss": 0.06180737540125847,
"step": 1644
},
{
"epoch": 2.402919708029197,
"grad_norm": 0.33984375,
"learning_rate": 1.5041888565850121e-05,
"loss": 0.1659780740737915,
"step": 1646
},
{
"epoch": 2.4058394160583942,
"grad_norm": 0.51953125,
"learning_rate": 1.499452285568796e-05,
"loss": 0.5283091068267822,
"step": 1648
},
{
"epoch": 2.408759124087591,
"grad_norm": 0.484375,
"learning_rate": 1.4947355980919554e-05,
"loss": 0.1580996811389923,
"step": 1650
},
{
"epoch": 2.411678832116788,
"grad_norm": 0.6171875,
"learning_rate": 1.4900388410338727e-05,
"loss": 0.18507535755634308,
"step": 1652
},
{
"epoch": 2.4145985401459855,
"grad_norm": 0.4453125,
"learning_rate": 1.4853620610758403e-05,
"loss": 0.13384950160980225,
"step": 1654
},
{
"epoch": 2.4175182481751825,
"grad_norm": 0.380859375,
"learning_rate": 1.4807053047005992e-05,
"loss": 0.15351298451423645,
"step": 1656
},
{
"epoch": 2.4204379562043794,
"grad_norm": 0.396484375,
"learning_rate": 1.4760686181918726e-05,
"loss": 0.43027734756469727,
"step": 1658
},
{
"epoch": 2.423357664233577,
"grad_norm": 0.451171875,
"learning_rate": 1.4714520476339096e-05,
"loss": 0.21505692601203918,
"step": 1660
},
{
"epoch": 2.4262773722627737,
"grad_norm": 0.48046875,
"learning_rate": 1.4668556389110251e-05,
"loss": 0.4807857871055603,
"step": 1662
},
{
"epoch": 2.4291970802919707,
"grad_norm": 0.455078125,
"learning_rate": 1.4622794377071474e-05,
"loss": 0.3995106816291809,
"step": 1664
},
{
"epoch": 2.432116788321168,
"grad_norm": 0.482421875,
"learning_rate": 1.4577234895053557e-05,
"loss": 0.0785241350531578,
"step": 1666
},
{
"epoch": 2.435036496350365,
"grad_norm": 0.478515625,
"learning_rate": 1.4531878395874396e-05,
"loss": 0.13907024264335632,
"step": 1668
},
{
"epoch": 2.437956204379562,
"grad_norm": 0.51171875,
"learning_rate": 1.4486725330334389e-05,
"loss": 0.5620059967041016,
"step": 1670
},
{
"epoch": 2.4408759124087593,
"grad_norm": 0.48828125,
"learning_rate": 1.444177614721203e-05,
"loss": 0.21153351664543152,
"step": 1672
},
{
"epoch": 2.4437956204379563,
"grad_norm": 0.400390625,
"learning_rate": 1.4397031293259377e-05,
"loss": 0.15002240240573883,
"step": 1674
},
{
"epoch": 2.4467153284671532,
"grad_norm": 0.423828125,
"learning_rate": 1.435249121319769e-05,
"loss": 0.06266305595636368,
"step": 1676
},
{
"epoch": 2.44963503649635,
"grad_norm": 0.50390625,
"learning_rate": 1.4308156349712946e-05,
"loss": 0.6670635938644409,
"step": 1678
},
{
"epoch": 2.4525547445255476,
"grad_norm": 0.8828125,
"learning_rate": 1.4264027143451482e-05,
"loss": 0.0652037113904953,
"step": 1680
},
{
"epoch": 2.4554744525547445,
"grad_norm": 0.515625,
"learning_rate": 1.4220104033015571e-05,
"loss": 0.21574798226356506,
"step": 1682
},
{
"epoch": 2.4583941605839414,
"grad_norm": 1.671875,
"learning_rate": 1.4176387454959128e-05,
"loss": 0.42671555280685425,
"step": 1684
},
{
"epoch": 2.461313868613139,
"grad_norm": 0.427734375,
"learning_rate": 1.4132877843783294e-05,
"loss": 0.19699843227863312,
"step": 1686
},
{
"epoch": 2.4642335766423358,
"grad_norm": 0.435546875,
"learning_rate": 1.4089575631932194e-05,
"loss": 0.20942422747612,
"step": 1688
},
{
"epoch": 2.4671532846715327,
"grad_norm": 0.453125,
"learning_rate": 1.4046481249788559e-05,
"loss": 0.11901196837425232,
"step": 1690
},
{
"epoch": 2.47007299270073,
"grad_norm": 0.703125,
"learning_rate": 1.4003595125669534e-05,
"loss": 0.20953388512134552,
"step": 1692
},
{
"epoch": 2.472992700729927,
"grad_norm": 0.423828125,
"learning_rate": 1.3960917685822347e-05,
"loss": 0.18220821022987366,
"step": 1694
},
{
"epoch": 2.475912408759124,
"grad_norm": 0.41796875,
"learning_rate": 1.3918449354420113e-05,
"loss": 0.3743027448654175,
"step": 1696
},
{
"epoch": 2.478832116788321,
"grad_norm": 0.56640625,
"learning_rate": 1.3876190553557605e-05,
"loss": 0.13356122374534607,
"step": 1698
},
{
"epoch": 2.4817518248175183,
"grad_norm": 0.4453125,
"learning_rate": 1.3834141703247076e-05,
"loss": 0.433431476354599,
"step": 1700
},
{
"epoch": 2.4846715328467153,
"grad_norm": 0.3984375,
"learning_rate": 1.3792303221414052e-05,
"loss": 0.18251864612102509,
"step": 1702
},
{
"epoch": 2.487591240875912,
"grad_norm": 0.83984375,
"learning_rate": 1.3750675523893204e-05,
"loss": 0.10885253548622131,
"step": 1704
},
{
"epoch": 2.4905109489051096,
"grad_norm": 0.326171875,
"learning_rate": 1.3709259024424204e-05,
"loss": 0.7609469294548035,
"step": 1706
},
{
"epoch": 2.4934306569343065,
"grad_norm": 0.96484375,
"learning_rate": 1.3668054134647623e-05,
"loss": 0.14499390125274658,
"step": 1708
},
{
"epoch": 2.4963503649635035,
"grad_norm": 0.6640625,
"learning_rate": 1.362706126410085e-05,
"loss": 0.1034422516822815,
"step": 1710
},
{
"epoch": 2.499270072992701,
"grad_norm": 0.51953125,
"learning_rate": 1.358628082021395e-05,
"loss": 0.13897135853767395,
"step": 1712
},
{
"epoch": 2.502189781021898,
"grad_norm": 0.46875,
"learning_rate": 1.3545713208305729e-05,
"loss": 0.1568652242422104,
"step": 1714
},
{
"epoch": 2.5051094890510948,
"grad_norm": 0.412109375,
"learning_rate": 1.3505358831579612e-05,
"loss": 0.15259164571762085,
"step": 1716
},
{
"epoch": 2.508029197080292,
"grad_norm": 0.423828125,
"learning_rate": 1.3465218091119688e-05,
"loss": 0.12042444944381714,
"step": 1718
},
{
"epoch": 2.510948905109489,
"grad_norm": 0.6953125,
"learning_rate": 1.342529138588668e-05,
"loss": 0.4051997661590576,
"step": 1720
},
{
"epoch": 2.513868613138686,
"grad_norm": 0.90625,
"learning_rate": 1.3385579112714033e-05,
"loss": 0.0928339958190918,
"step": 1722
},
{
"epoch": 2.5167883211678834,
"grad_norm": 0.58203125,
"learning_rate": 1.3346081666303925e-05,
"loss": 0.3658754527568817,
"step": 1724
},
{
"epoch": 2.5197080291970804,
"grad_norm": 0.515625,
"learning_rate": 1.3306799439223361e-05,
"loss": 0.3925940990447998,
"step": 1726
},
{
"epoch": 2.5226277372262773,
"grad_norm": 0.5390625,
"learning_rate": 1.3267732821900273e-05,
"loss": 0.41492193937301636,
"step": 1728
},
{
"epoch": 2.5255474452554747,
"grad_norm": 0.4375,
"learning_rate": 1.3228882202619645e-05,
"loss": 0.1794070452451706,
"step": 1730
},
{
"epoch": 2.5284671532846716,
"grad_norm": 0.49609375,
"learning_rate": 1.3190247967519638e-05,
"loss": 0.164272278547287,
"step": 1732
},
{
"epoch": 2.5313868613138686,
"grad_norm": 0.82421875,
"learning_rate": 1.3151830500587755e-05,
"loss": 0.4470779299736023,
"step": 1734
},
{
"epoch": 2.5343065693430655,
"grad_norm": 0.7578125,
"learning_rate": 1.3113630183657041e-05,
"loss": 0.056883249431848526,
"step": 1736
},
{
"epoch": 2.537226277372263,
"grad_norm": 0.546875,
"learning_rate": 1.3075647396402274e-05,
"loss": 0.1761680245399475,
"step": 1738
},
{
"epoch": 2.54014598540146,
"grad_norm": 0.478515625,
"learning_rate": 1.3037882516336194e-05,
"loss": 0.5256772041320801,
"step": 1740
},
{
"epoch": 2.543065693430657,
"grad_norm": 0.1953125,
"learning_rate": 1.3000335918805743e-05,
"loss": 0.12634223699569702,
"step": 1742
},
{
"epoch": 2.5459854014598537,
"grad_norm": 0.60546875,
"learning_rate": 1.2963007976988347e-05,
"loss": 0.14479045569896698,
"step": 1744
},
{
"epoch": 2.548905109489051,
"grad_norm": 1.046875,
"learning_rate": 1.2925899061888217e-05,
"loss": 0.4536617398262024,
"step": 1746
},
{
"epoch": 2.551824817518248,
"grad_norm": 1.03125,
"learning_rate": 1.2889009542332625e-05,
"loss": 0.10153871029615402,
"step": 1748
},
{
"epoch": 2.554744525547445,
"grad_norm": 0.3828125,
"learning_rate": 1.285233978496827e-05,
"loss": 0.5315350294113159,
"step": 1750
},
{
"epoch": 2.5576642335766424,
"grad_norm": 0.515625,
"learning_rate": 1.281589015425762e-05,
"loss": 0.20661664009094238,
"step": 1752
},
{
"epoch": 2.5605839416058394,
"grad_norm": 0.484375,
"learning_rate": 1.2779661012475309e-05,
"loss": 0.18551045656204224,
"step": 1754
},
{
"epoch": 2.5635036496350363,
"grad_norm": 0.62890625,
"learning_rate": 1.2743652719704498e-05,
"loss": 0.2083350121974945,
"step": 1756
},
{
"epoch": 2.5664233576642337,
"grad_norm": 0.98046875,
"learning_rate": 1.2707865633833338e-05,
"loss": 0.14626437425613403,
"step": 1758
},
{
"epoch": 2.5693430656934306,
"grad_norm": 0.396484375,
"learning_rate": 1.2672300110551376e-05,
"loss": 0.16228264570236206,
"step": 1760
},
{
"epoch": 2.5722627737226276,
"grad_norm": 0.578125,
"learning_rate": 1.2636956503346062e-05,
"loss": 0.20038333535194397,
"step": 1762
},
{
"epoch": 2.575182481751825,
"grad_norm": 0.50390625,
"learning_rate": 1.2601835163499188e-05,
"loss": 0.36375898122787476,
"step": 1764
},
{
"epoch": 2.578102189781022,
"grad_norm": 0.59375,
"learning_rate": 1.2566936440083432e-05,
"loss": 0.10441319644451141,
"step": 1766
},
{
"epoch": 2.581021897810219,
"grad_norm": 0.515625,
"learning_rate": 1.253226067995887e-05,
"loss": 0.15817835927009583,
"step": 1768
},
{
"epoch": 2.5839416058394162,
"grad_norm": 0.515625,
"learning_rate": 1.2497808227769554e-05,
"loss": 0.7072803974151611,
"step": 1770
},
{
"epoch": 2.586861313868613,
"grad_norm": 0.44140625,
"learning_rate": 1.2463579425940038e-05,
"loss": 0.12276852875947952,
"step": 1772
},
{
"epoch": 2.58978102189781,
"grad_norm": 1.4765625,
"learning_rate": 1.242957461467203e-05,
"loss": 0.22830504179000854,
"step": 1774
},
{
"epoch": 2.5927007299270075,
"grad_norm": 0.380859375,
"learning_rate": 1.2395794131940965e-05,
"loss": 0.42617326974868774,
"step": 1776
},
{
"epoch": 2.5956204379562045,
"grad_norm": 0.404296875,
"learning_rate": 1.2362238313492683e-05,
"loss": 0.0888446643948555,
"step": 1778
},
{
"epoch": 2.5985401459854014,
"grad_norm": 0.625,
"learning_rate": 1.232890749284007e-05,
"loss": 0.11126016080379486,
"step": 1780
},
{
"epoch": 2.601459854014599,
"grad_norm": 0.46484375,
"learning_rate": 1.2295802001259735e-05,
"loss": 0.5219573974609375,
"step": 1782
},
{
"epoch": 2.6043795620437957,
"grad_norm": 1.9765625,
"learning_rate": 1.2262922167788754e-05,
"loss": 0.5340749621391296,
"step": 1784
},
{
"epoch": 2.6072992700729927,
"grad_norm": 0.421875,
"learning_rate": 1.223026831922136e-05,
"loss": 0.19568416476249695,
"step": 1786
},
{
"epoch": 2.61021897810219,
"grad_norm": 0.05859375,
"learning_rate": 1.2197840780105713e-05,
"loss": 0.04643552750349045,
"step": 1788
},
{
"epoch": 2.613138686131387,
"grad_norm": 0.80859375,
"learning_rate": 1.216563987274067e-05,
"loss": 0.10973326861858368,
"step": 1790
},
{
"epoch": 2.616058394160584,
"grad_norm": 0.296875,
"learning_rate": 1.21336659171726e-05,
"loss": 0.13602161407470703,
"step": 1792
},
{
"epoch": 2.618978102189781,
"grad_norm": 0.35546875,
"learning_rate": 1.2101919231192166e-05,
"loss": 0.034216154366731644,
"step": 1794
},
{
"epoch": 2.621897810218978,
"grad_norm": 0.486328125,
"learning_rate": 1.2070400130331205e-05,
"loss": 0.5200577974319458,
"step": 1796
},
{
"epoch": 2.624817518248175,
"grad_norm": 0.890625,
"learning_rate": 1.2039108927859557e-05,
"loss": 0.19371935725212097,
"step": 1798
},
{
"epoch": 2.627737226277372,
"grad_norm": 0.380859375,
"learning_rate": 1.2008045934781995e-05,
"loss": 0.21105724573135376,
"step": 1800
},
{
"epoch": 2.630656934306569,
"grad_norm": 1.1015625,
"learning_rate": 1.1977211459835078e-05,
"loss": 0.14942879974842072,
"step": 1802
},
{
"epoch": 2.6335766423357665,
"grad_norm": 0.419921875,
"learning_rate": 1.1946605809484135e-05,
"loss": 0.20162081718444824,
"step": 1804
},
{
"epoch": 2.6364963503649634,
"grad_norm": 0.46875,
"learning_rate": 1.191622928792018e-05,
"loss": 0.16653478145599365,
"step": 1806
},
{
"epoch": 2.6394160583941604,
"grad_norm": 0.98046875,
"learning_rate": 1.1886082197056922e-05,
"loss": 0.14720329642295837,
"step": 1808
},
{
"epoch": 2.6423357664233578,
"grad_norm": 0.435546875,
"learning_rate": 1.1856164836527736e-05,
"loss": 0.4846619963645935,
"step": 1810
},
{
"epoch": 2.6452554744525547,
"grad_norm": 0.455078125,
"learning_rate": 1.1826477503682697e-05,
"loss": 0.18337565660476685,
"step": 1812
},
{
"epoch": 2.6481751824817517,
"grad_norm": 1.03125,
"learning_rate": 1.179702049358562e-05,
"loss": 0.3766210377216339,
"step": 1814
},
{
"epoch": 2.651094890510949,
"grad_norm": 0.76171875,
"learning_rate": 1.1767794099011148e-05,
"loss": 0.33012354373931885,
"step": 1816
},
{
"epoch": 2.654014598540146,
"grad_norm": 0.8515625,
"learning_rate": 1.1738798610441806e-05,
"loss": 0.13045457005500793,
"step": 1818
},
{
"epoch": 2.656934306569343,
"grad_norm": 0.34375,
"learning_rate": 1.1710034316065145e-05,
"loss": 0.09453101456165314,
"step": 1820
},
{
"epoch": 2.6598540145985403,
"grad_norm": 0.443359375,
"learning_rate": 1.1681501501770855e-05,
"loss": 0.16232267022132874,
"step": 1822
},
{
"epoch": 2.6627737226277373,
"grad_norm": 0.1533203125,
"learning_rate": 1.1653200451147953e-05,
"loss": 0.1418476402759552,
"step": 1824
},
{
"epoch": 2.665693430656934,
"grad_norm": 0.62890625,
"learning_rate": 1.1625131445481925e-05,
"loss": 0.188279390335083,
"step": 1826
},
{
"epoch": 2.6686131386861316,
"grad_norm": 1.9140625,
"learning_rate": 1.1597294763751962e-05,
"loss": 0.14047500491142273,
"step": 1828
},
{
"epoch": 2.6715328467153285,
"grad_norm": 0.443359375,
"learning_rate": 1.1569690682628173e-05,
"loss": 0.2133643925189972,
"step": 1830
},
{
"epoch": 2.6744525547445255,
"grad_norm": 0.431640625,
"learning_rate": 1.1542319476468853e-05,
"loss": 0.18978801369667053,
"step": 1832
},
{
"epoch": 2.677372262773723,
"grad_norm": 0.5859375,
"learning_rate": 1.1515181417317706e-05,
"loss": 0.095926433801651,
"step": 1834
},
{
"epoch": 2.68029197080292,
"grad_norm": 0.65625,
"learning_rate": 1.1488276774901216e-05,
"loss": 0.1508825123310089,
"step": 1836
},
{
"epoch": 2.6832116788321168,
"grad_norm": 0.39453125,
"learning_rate": 1.1461605816625904e-05,
"loss": 0.14228323101997375,
"step": 1838
},
{
"epoch": 2.686131386861314,
"grad_norm": 0.53125,
"learning_rate": 1.1435168807575707e-05,
"loss": 0.42970946431159973,
"step": 1840
},
{
"epoch": 2.689051094890511,
"grad_norm": 0.3984375,
"learning_rate": 1.140896601050931e-05,
"loss": 0.3935721814632416,
"step": 1842
},
{
"epoch": 2.691970802919708,
"grad_norm": 0.66796875,
"learning_rate": 1.1382997685857571e-05,
"loss": 0.8472050428390503,
"step": 1844
},
{
"epoch": 2.694890510948905,
"grad_norm": 0.400390625,
"learning_rate": 1.135726409172091e-05,
"loss": 0.4143994450569153,
"step": 1846
},
{
"epoch": 2.697810218978102,
"grad_norm": 0.416015625,
"learning_rate": 1.1331765483866754e-05,
"loss": 0.44374924898147583,
"step": 1848
},
{
"epoch": 2.7007299270072993,
"grad_norm": 1.4140625,
"learning_rate": 1.130650211572698e-05,
"loss": 0.16854047775268555,
"step": 1850
},
{
"epoch": 2.7036496350364962,
"grad_norm": 0.53515625,
"learning_rate": 1.1281474238395413e-05,
"loss": 0.12173114717006683,
"step": 1852
},
{
"epoch": 2.706569343065693,
"grad_norm": 0.37109375,
"learning_rate": 1.1256682100625323e-05,
"loss": 0.12239767611026764,
"step": 1854
},
{
"epoch": 2.7094890510948906,
"grad_norm": 0.4375,
"learning_rate": 1.1232125948826955e-05,
"loss": 0.2048071324825287,
"step": 1856
},
{
"epoch": 2.7124087591240875,
"grad_norm": 0.380859375,
"learning_rate": 1.120780602706507e-05,
"loss": 0.45423823595046997,
"step": 1858
},
{
"epoch": 2.7153284671532845,
"grad_norm": 0.462890625,
"learning_rate": 1.1183722577056533e-05,
"loss": 0.10558777302503586,
"step": 1860
},
{
"epoch": 2.718248175182482,
"grad_norm": 1.375,
"learning_rate": 1.1159875838167915e-05,
"loss": 0.41063955426216125,
"step": 1862
},
{
"epoch": 2.721167883211679,
"grad_norm": 0.42578125,
"learning_rate": 1.1136266047413083e-05,
"loss": 0.15179383754730225,
"step": 1864
},
{
"epoch": 2.7240875912408757,
"grad_norm": 0.369140625,
"learning_rate": 1.111289343945088e-05,
"loss": 0.11223858594894409,
"step": 1866
},
{
"epoch": 2.727007299270073,
"grad_norm": 0.451171875,
"learning_rate": 1.1089758246582772e-05,
"loss": 0.44403815269470215,
"step": 1868
},
{
"epoch": 2.72992700729927,
"grad_norm": 0.50390625,
"learning_rate": 1.1066860698750556e-05,
"loss": 0.1581033617258072,
"step": 1870
},
{
"epoch": 2.732846715328467,
"grad_norm": 1.265625,
"learning_rate": 1.104420102353404e-05,
"loss": 0.36841726303100586,
"step": 1872
},
{
"epoch": 2.7357664233576644,
"grad_norm": 0.6796875,
"learning_rate": 1.1021779446148827e-05,
"loss": 0.1277724653482437,
"step": 1874
},
{
"epoch": 2.7386861313868613,
"grad_norm": 1.578125,
"learning_rate": 1.0999596189444048e-05,
"loss": 0.4650922417640686,
"step": 1876
},
{
"epoch": 2.7416058394160583,
"grad_norm": 0.796875,
"learning_rate": 1.0977651473900154e-05,
"loss": 0.7138586640357971,
"step": 1878
},
{
"epoch": 2.7445255474452557,
"grad_norm": 0.1162109375,
"learning_rate": 1.0955945517626712e-05,
"loss": 0.4540976881980896,
"step": 1880
},
{
"epoch": 2.7474452554744526,
"grad_norm": 0.66015625,
"learning_rate": 1.0934478536360268e-05,
"loss": 0.3492112159729004,
"step": 1882
},
{
"epoch": 2.7503649635036496,
"grad_norm": 0.52734375,
"learning_rate": 1.0913250743462172e-05,
"loss": 0.17992794513702393,
"step": 1884
},
{
"epoch": 2.753284671532847,
"grad_norm": 0.498046875,
"learning_rate": 1.0892262349916478e-05,
"loss": 0.11098943650722504,
"step": 1886
},
{
"epoch": 2.756204379562044,
"grad_norm": 0.89453125,
"learning_rate": 1.0871513564327818e-05,
"loss": 0.07393358647823334,
"step": 1888
},
{
"epoch": 2.759124087591241,
"grad_norm": 0.296875,
"learning_rate": 1.0851004592919381e-05,
"loss": 0.13731540739536285,
"step": 1890
},
{
"epoch": 2.7620437956204382,
"grad_norm": 0.439453125,
"learning_rate": 1.0830735639530804e-05,
"loss": 0.20814824104309082,
"step": 1892
},
{
"epoch": 2.764963503649635,
"grad_norm": 0.5234375,
"learning_rate": 1.0810706905616185e-05,
"loss": 0.1031702533364296,
"step": 1894
},
{
"epoch": 2.767883211678832,
"grad_norm": 3.25,
"learning_rate": 1.0790918590242073e-05,
"loss": 0.1332436501979828,
"step": 1896
},
{
"epoch": 2.770802919708029,
"grad_norm": 0.55859375,
"learning_rate": 1.0771370890085475e-05,
"loss": 0.43151581287384033,
"step": 1898
},
{
"epoch": 2.7737226277372264,
"grad_norm": 2.53125,
"learning_rate": 1.0752063999431919e-05,
"loss": 0.5219103097915649,
"step": 1900
},
{
"epoch": 2.7766423357664234,
"grad_norm": 0.26171875,
"learning_rate": 1.0732998110173518e-05,
"loss": 0.04137423634529114,
"step": 1902
},
{
"epoch": 2.7795620437956203,
"grad_norm": 0.87890625,
"learning_rate": 1.071417341180705e-05,
"loss": 0.1267162263393402,
"step": 1904
},
{
"epoch": 2.7824817518248173,
"grad_norm": 0.60546875,
"learning_rate": 1.0695590091432096e-05,
"loss": 0.2184256613254547,
"step": 1906
},
{
"epoch": 2.7854014598540147,
"grad_norm": 0.61328125,
"learning_rate": 1.0677248333749168e-05,
"loss": 0.1326974630355835,
"step": 1908
},
{
"epoch": 2.7883211678832116,
"grad_norm": 0.73046875,
"learning_rate": 1.065914832105787e-05,
"loss": 0.25122809410095215,
"step": 1910
},
{
"epoch": 2.7912408759124085,
"grad_norm": 0.6484375,
"learning_rate": 1.0641290233255086e-05,
"loss": 0.14174973964691162,
"step": 1912
},
{
"epoch": 2.794160583941606,
"grad_norm": 0.52734375,
"learning_rate": 1.0623674247833218e-05,
"loss": 0.05495617538690567,
"step": 1914
},
{
"epoch": 2.797080291970803,
"grad_norm": 0.65234375,
"learning_rate": 1.060630053987838e-05,
"loss": 0.15820512175559998,
"step": 1916
},
{
"epoch": 2.8,
"grad_norm": 0.90625,
"learning_rate": 1.0589169282068684e-05,
"loss": 0.15590894222259521,
"step": 1918
},
{
"epoch": 2.802919708029197,
"grad_norm": 1.2734375,
"learning_rate": 1.0572280644672525e-05,
"loss": 0.1760368049144745,
"step": 1920
},
{
"epoch": 2.805839416058394,
"grad_norm": 0.68359375,
"learning_rate": 1.055563479554688e-05,
"loss": 0.11797623336315155,
"step": 1922
},
{
"epoch": 2.808759124087591,
"grad_norm": 1.2109375,
"learning_rate": 1.053923190013563e-05,
"loss": 0.10800056159496307,
"step": 1924
},
{
"epoch": 2.8116788321167885,
"grad_norm": 0.5234375,
"learning_rate": 1.0523072121467941e-05,
"loss": 0.5103263854980469,
"step": 1926
},
{
"epoch": 2.8145985401459854,
"grad_norm": 0.59375,
"learning_rate": 1.0507155620156628e-05,
"loss": 0.233617901802063,
"step": 1928
},
{
"epoch": 2.8175182481751824,
"grad_norm": 0.373046875,
"learning_rate": 1.0491482554396559e-05,
"loss": 0.12097039818763733,
"step": 1930
},
{
"epoch": 2.8204379562043798,
"grad_norm": 0.50390625,
"learning_rate": 1.047605307996308e-05,
"loss": 0.16398653388023376,
"step": 1932
},
{
"epoch": 2.8233576642335767,
"grad_norm": 0.55859375,
"learning_rate": 1.0460867350210475e-05,
"loss": 0.29245248436927795,
"step": 1934
},
{
"epoch": 2.8262773722627736,
"grad_norm": 0.5703125,
"learning_rate": 1.0445925516070444e-05,
"loss": 0.4952959716320038,
"step": 1936
},
{
"epoch": 2.829197080291971,
"grad_norm": 0.474609375,
"learning_rate": 1.0431227726050582e-05,
"loss": 0.10879136621952057,
"step": 1938
},
{
"epoch": 2.832116788321168,
"grad_norm": 0.53515625,
"learning_rate": 1.0416774126232934e-05,
"loss": 0.20576564967632294,
"step": 1940
},
{
"epoch": 2.835036496350365,
"grad_norm": 0.376953125,
"learning_rate": 1.0402564860272515e-05,
"loss": 0.16286109387874603,
"step": 1942
},
{
"epoch": 2.8379562043795623,
"grad_norm": 0.388671875,
"learning_rate": 1.0388600069395904e-05,
"loss": 0.1414416879415512,
"step": 1944
},
{
"epoch": 2.8408759124087593,
"grad_norm": 0.6875,
"learning_rate": 1.0374879892399819e-05,
"loss": 0.05496169626712799,
"step": 1946
},
{
"epoch": 2.843795620437956,
"grad_norm": 0.671875,
"learning_rate": 1.036140446564976e-05,
"loss": 0.17887184023857117,
"step": 1948
},
{
"epoch": 2.846715328467153,
"grad_norm": 0.390625,
"learning_rate": 1.0348173923078633e-05,
"loss": 0.39219170808792114,
"step": 1950
},
{
"epoch": 2.8496350364963505,
"grad_norm": 0.482421875,
"learning_rate": 1.0335188396185437e-05,
"loss": 0.18130755424499512,
"step": 1952
},
{
"epoch": 2.8525547445255475,
"grad_norm": 1.09375,
"learning_rate": 1.0322448014033946e-05,
"loss": 0.4997798204421997,
"step": 1954
},
{
"epoch": 2.8554744525547444,
"grad_norm": 0.58984375,
"learning_rate": 1.030995290325142e-05,
"loss": 0.15554431080818176,
"step": 1956
},
{
"epoch": 2.8583941605839414,
"grad_norm": 0.494140625,
"learning_rate": 1.0297703188027368e-05,
"loss": 0.23963004350662231,
"step": 1958
},
{
"epoch": 2.8613138686131387,
"grad_norm": 0.259765625,
"learning_rate": 1.0285698990112294e-05,
"loss": 0.4507202208042145,
"step": 1960
},
{
"epoch": 2.8642335766423357,
"grad_norm": 0.54296875,
"learning_rate": 1.0273940428816499e-05,
"loss": 0.563029944896698,
"step": 1962
},
{
"epoch": 2.8671532846715326,
"grad_norm": 0.453125,
"learning_rate": 1.026242762100888e-05,
"loss": 0.48327958583831787,
"step": 1964
},
{
"epoch": 2.87007299270073,
"grad_norm": 0.453125,
"learning_rate": 1.0251160681115785e-05,
"loss": 0.5378194451332092,
"step": 1966
},
{
"epoch": 2.872992700729927,
"grad_norm": 0.25390625,
"learning_rate": 1.024013972111987e-05,
"loss": 0.18405146896839142,
"step": 1968
},
{
"epoch": 2.875912408759124,
"grad_norm": 1.28125,
"learning_rate": 1.0229364850558985e-05,
"loss": 0.4143378734588623,
"step": 1970
},
{
"epoch": 2.8788321167883213,
"grad_norm": 0.66796875,
"learning_rate": 1.021883617652508e-05,
"loss": 0.4933127760887146,
"step": 1972
},
{
"epoch": 2.8817518248175182,
"grad_norm": 0.55078125,
"learning_rate": 1.0208553803663148e-05,
"loss": 0.4699753522872925,
"step": 1974
},
{
"epoch": 2.884671532846715,
"grad_norm": 0.5625,
"learning_rate": 1.019851783417019e-05,
"loss": 0.21085891127586365,
"step": 1976
},
{
"epoch": 2.8875912408759126,
"grad_norm": 0.14453125,
"learning_rate": 1.0188728367794187e-05,
"loss": 0.027944084256887436,
"step": 1978
},
{
"epoch": 2.8905109489051095,
"grad_norm": 0.310546875,
"learning_rate": 1.0179185501833107e-05,
"loss": 0.1497136652469635,
"step": 1980
},
{
"epoch": 2.8934306569343065,
"grad_norm": 0.53515625,
"learning_rate": 1.0169889331133957e-05,
"loss": 0.2530011534690857,
"step": 1982
},
{
"epoch": 2.896350364963504,
"grad_norm": 0.8671875,
"learning_rate": 1.0160839948091817e-05,
"loss": 0.07604291290044785,
"step": 1984
},
{
"epoch": 2.899270072992701,
"grad_norm": 0.53515625,
"learning_rate": 1.015203744264894e-05,
"loss": 0.38429468870162964,
"step": 1986
},
{
"epoch": 2.9021897810218977,
"grad_norm": 0.0230712890625,
"learning_rate": 1.014348190229385e-05,
"loss": 0.12912079691886902,
"step": 1988
},
{
"epoch": 2.905109489051095,
"grad_norm": 0.546875,
"learning_rate": 1.013517341206047e-05,
"loss": 0.16505561769008636,
"step": 1990
},
{
"epoch": 2.908029197080292,
"grad_norm": 0.60546875,
"learning_rate": 1.012711205452728e-05,
"loss": 0.1626954823732376,
"step": 1992
},
{
"epoch": 2.910948905109489,
"grad_norm": 1.09375,
"learning_rate": 1.0119297909816503e-05,
"loss": 0.38482239842414856,
"step": 1994
},
{
"epoch": 2.9138686131386864,
"grad_norm": 0.9375,
"learning_rate": 1.0111731055593291e-05,
"loss": 0.2643647789955139,
"step": 1996
},
{
"epoch": 2.9167883211678833,
"grad_norm": 0.59765625,
"learning_rate": 1.0104411567064973e-05,
"loss": 0.05413287878036499,
"step": 1998
},
{
"epoch": 2.9197080291970803,
"grad_norm": 0.54296875,
"learning_rate": 1.0097339516980295e-05,
"loss": 0.21008609235286713,
"step": 2000
},
{
"epoch": 2.9226277372262772,
"grad_norm": 0.52734375,
"learning_rate": 1.0090514975628703e-05,
"loss": 0.20540976524353027,
"step": 2002
},
{
"epoch": 2.9255474452554746,
"grad_norm": 0.87890625,
"learning_rate": 1.0083938010839634e-05,
"loss": 0.5189849734306335,
"step": 2004
},
{
"epoch": 2.9284671532846716,
"grad_norm": 0.61328125,
"learning_rate": 1.007760868798186e-05,
"loss": 0.7012931108474731,
"step": 2006
},
{
"epoch": 2.9313868613138685,
"grad_norm": 0.515625,
"learning_rate": 1.0071527069962825e-05,
"loss": 0.24337317049503326,
"step": 2008
},
{
"epoch": 2.9343065693430654,
"grad_norm": 0.443359375,
"learning_rate": 1.0065693217228018e-05,
"loss": 0.15759816765785217,
"step": 2010
},
{
"epoch": 2.937226277372263,
"grad_norm": 0.455078125,
"learning_rate": 1.0060107187760375e-05,
"loss": 0.1873372197151184,
"step": 2012
},
{
"epoch": 2.9401459854014598,
"grad_norm": 0.49609375,
"learning_rate": 1.0054769037079724e-05,
"loss": 0.4337967038154602,
"step": 2014
},
{
"epoch": 2.9430656934306567,
"grad_norm": 0.470703125,
"learning_rate": 1.0049678818242187e-05,
"loss": 0.19185805320739746,
"step": 2016
},
{
"epoch": 2.945985401459854,
"grad_norm": 0.0986328125,
"learning_rate": 1.0044836581839697e-05,
"loss": 0.3550591468811035,
"step": 2018
},
{
"epoch": 2.948905109489051,
"grad_norm": 0.5234375,
"learning_rate": 1.0040242375999476e-05,
"loss": 0.4003986120223999,
"step": 2020
},
{
"epoch": 2.951824817518248,
"grad_norm": 0.5390625,
"learning_rate": 1.0035896246383552e-05,
"loss": 0.21828565001487732,
"step": 2022
},
{
"epoch": 2.9547445255474454,
"grad_norm": 1.1015625,
"learning_rate": 1.0031798236188314e-05,
"loss": 0.39733898639678955,
"step": 2024
},
{
"epoch": 2.9576642335766423,
"grad_norm": 2.25,
"learning_rate": 1.0027948386144085e-05,
"loss": 0.3748105764389038,
"step": 2026
},
{
"epoch": 2.9605839416058393,
"grad_norm": 0.5625,
"learning_rate": 1.0024346734514706e-05,
"loss": 0.42706161737442017,
"step": 2028
},
{
"epoch": 2.9635036496350367,
"grad_norm": 0.53125,
"learning_rate": 1.0020993317097167e-05,
"loss": 0.13745678961277008,
"step": 2030
},
{
"epoch": 2.9664233576642336,
"grad_norm": 0.54296875,
"learning_rate": 1.0017888167221238e-05,
"loss": 0.5219398736953735,
"step": 2032
},
{
"epoch": 2.9693430656934305,
"grad_norm": 0.49609375,
"learning_rate": 1.0015031315749157e-05,
"loss": 0.21247032284736633,
"step": 2034
},
{
"epoch": 2.972262773722628,
"grad_norm": 0.5546875,
"learning_rate": 1.0012422791075309e-05,
"loss": 0.13911724090576172,
"step": 2036
},
{
"epoch": 2.975182481751825,
"grad_norm": 1.2890625,
"learning_rate": 1.0010062619125943e-05,
"loss": 0.1428239345550537,
"step": 2038
},
{
"epoch": 2.978102189781022,
"grad_norm": 0.99609375,
"learning_rate": 1.0007950823358923e-05,
"loss": 0.20074939727783203,
"step": 2040
},
{
"epoch": 2.981021897810219,
"grad_norm": 1.03125,
"learning_rate": 1.0006087424763482e-05,
"loss": 0.5967737436294556,
"step": 2042
},
{
"epoch": 2.983941605839416,
"grad_norm": 0.875,
"learning_rate": 1.0004472441860033e-05,
"loss": 0.43237411975860596,
"step": 2044
},
{
"epoch": 2.986861313868613,
"grad_norm": 0.267578125,
"learning_rate": 1.0003105890699969e-05,
"loss": 0.01062680408358574,
"step": 2046
},
{
"epoch": 2.9897810218978105,
"grad_norm": 0.55859375,
"learning_rate": 1.0001987784865506e-05,
"loss": 0.538578450679779,
"step": 2048
},
{
"epoch": 2.9927007299270074,
"grad_norm": 0.5703125,
"learning_rate": 1.0001118135469548e-05,
"loss": 0.28526389598846436,
"step": 2050
},
{
"epoch": 2.9956204379562044,
"grad_norm": 6.21875,
"learning_rate": 1.0000496951155586e-05,
"loss": 0.3971728980541229,
"step": 2052
},
{
"epoch": 2.9985401459854013,
"grad_norm": 0.6484375,
"learning_rate": 1.0000124238097599e-05,
"loss": 0.07702690362930298,
"step": 2054
},
{
"epoch": 3.0,
"step": 2055,
"total_flos": 2.976816812190073e+18,
"train_loss": 0.8063934714925405,
"train_runtime": 19281.3988,
"train_samples_per_second": 1.705,
"train_steps_per_second": 0.107
}
],
"logging_steps": 2,
"max_steps": 2055,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 9999999,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.976816812190073e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}