{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 1680,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0035714285714285713,
"grad_norm": 1.015763759613037,
"learning_rate": 2.3809523809523811e-07,
"loss": 1.6102378368377686,
"step": 2
},
{
"epoch": 0.007142857142857143,
"grad_norm": 2.673253297805786,
"learning_rate": 7.142857142857143e-07,
"loss": 2.1662302017211914,
"step": 4
},
{
"epoch": 0.010714285714285714,
"grad_norm": 1.0624771118164062,
"learning_rate": 1.1904761904761906e-06,
"loss": 1.9342565536499023,
"step": 6
},
{
"epoch": 0.014285714285714285,
"grad_norm": 0.48025915026664734,
"learning_rate": 1.6666666666666667e-06,
"loss": 1.6917049884796143,
"step": 8
},
{
"epoch": 0.017857142857142856,
"grad_norm": 0.8735950589179993,
"learning_rate": 2.1428571428571427e-06,
"loss": 2.5385754108428955,
"step": 10
},
{
"epoch": 0.02142857142857143,
"grad_norm": 0.611692488193512,
"learning_rate": 2.6190476190476192e-06,
"loss": 2.1326732635498047,
"step": 12
},
{
"epoch": 0.025,
"grad_norm": 1.3469291925430298,
"learning_rate": 3.0952380952380957e-06,
"loss": 2.2317585945129395,
"step": 14
},
{
"epoch": 0.02857142857142857,
"grad_norm": 0.6198003888130188,
"learning_rate": 3.5714285714285718e-06,
"loss": 1.9082640409469604,
"step": 16
},
{
"epoch": 0.03214285714285714,
"grad_norm": 0.7136287689208984,
"learning_rate": 4.047619047619048e-06,
"loss": 1.882780909538269,
"step": 18
},
{
"epoch": 0.03571428571428571,
"grad_norm": 1.2058864831924438,
"learning_rate": 4.523809523809524e-06,
"loss": 2.1093122959136963,
"step": 20
},
{
"epoch": 0.039285714285714285,
"grad_norm": 0.44349122047424316,
"learning_rate": 5e-06,
"loss": 1.7804787158966064,
"step": 22
},
{
"epoch": 0.04285714285714286,
"grad_norm": 1.3120089769363403,
"learning_rate": 5.476190476190477e-06,
"loss": 2.0871825218200684,
"step": 24
},
{
"epoch": 0.04642857142857143,
"grad_norm": 0.6059308052062988,
"learning_rate": 5.9523809523809525e-06,
"loss": 2.06781005859375,
"step": 26
},
{
"epoch": 0.05,
"grad_norm": 0.42657509446144104,
"learning_rate": 6.4285714285714295e-06,
"loss": 1.5501041412353516,
"step": 28
},
{
"epoch": 0.05357142857142857,
"grad_norm": 1.5947790145874023,
"learning_rate": 6.9047619047619055e-06,
"loss": 2.1851985454559326,
"step": 30
},
{
"epoch": 0.05714285714285714,
"grad_norm": 1.2271709442138672,
"learning_rate": 7.380952380952382e-06,
"loss": 2.1415982246398926,
"step": 32
},
{
"epoch": 0.060714285714285714,
"grad_norm": 0.4892466068267822,
"learning_rate": 7.857142857142858e-06,
"loss": 1.8040955066680908,
"step": 34
},
{
"epoch": 0.06428571428571428,
"grad_norm": 1.1410105228424072,
"learning_rate": 8.333333333333334e-06,
"loss": 1.7697616815567017,
"step": 36
},
{
"epoch": 0.06785714285714285,
"grad_norm": 0.6576052308082581,
"learning_rate": 8.80952380952381e-06,
"loss": 1.7375702857971191,
"step": 38
},
{
"epoch": 0.07142857142857142,
"grad_norm": 0.5573370456695557,
"learning_rate": 9.285714285714288e-06,
"loss": 1.9562886953353882,
"step": 40
},
{
"epoch": 0.075,
"grad_norm": 0.4374576508998871,
"learning_rate": 9.761904761904762e-06,
"loss": 2.0028505325317383,
"step": 42
},
{
"epoch": 0.07857142857142857,
"grad_norm": 1.975783348083496,
"learning_rate": 1.0238095238095238e-05,
"loss": 2.158412456512451,
"step": 44
},
{
"epoch": 0.08214285714285714,
"grad_norm": 0.34883615374565125,
"learning_rate": 1.0714285714285714e-05,
"loss": 1.8357443809509277,
"step": 46
},
{
"epoch": 0.08571428571428572,
"grad_norm": 0.507429838180542,
"learning_rate": 1.1190476190476192e-05,
"loss": 1.6925463676452637,
"step": 48
},
{
"epoch": 0.08928571428571429,
"grad_norm": 0.5762679576873779,
"learning_rate": 1.1666666666666668e-05,
"loss": 1.9667983055114746,
"step": 50
},
{
"epoch": 0.09285714285714286,
"grad_norm": 0.5237306952476501,
"learning_rate": 1.2142857142857142e-05,
"loss": 1.739395022392273,
"step": 52
},
{
"epoch": 0.09642857142857143,
"grad_norm": 1.05526602268219,
"learning_rate": 1.261904761904762e-05,
"loss": 1.9076595306396484,
"step": 54
},
{
"epoch": 0.1,
"grad_norm": 0.5703083872795105,
"learning_rate": 1.3095238095238096e-05,
"loss": 1.6693158149719238,
"step": 56
},
{
"epoch": 0.10357142857142858,
"grad_norm": 0.36678144335746765,
"learning_rate": 1.3571428571428574e-05,
"loss": 1.9416877031326294,
"step": 58
},
{
"epoch": 0.10714285714285714,
"grad_norm": 0.5519128441810608,
"learning_rate": 1.4047619047619048e-05,
"loss": 1.5721981525421143,
"step": 60
},
{
"epoch": 0.11071428571428571,
"grad_norm": 1.7103147506713867,
"learning_rate": 1.4523809523809524e-05,
"loss": 1.9566872119903564,
"step": 62
},
{
"epoch": 0.11428571428571428,
"grad_norm": 0.46710196137428284,
"learning_rate": 1.5000000000000002e-05,
"loss": 1.7038791179656982,
"step": 64
},
{
"epoch": 0.11785714285714285,
"grad_norm": 0.5537970066070557,
"learning_rate": 1.5476190476190476e-05,
"loss": 1.7286560535430908,
"step": 66
},
{
"epoch": 0.12142857142857143,
"grad_norm": 0.4240347445011139,
"learning_rate": 1.5952380952380954e-05,
"loss": 1.4021289348602295,
"step": 68
},
{
"epoch": 0.125,
"grad_norm": 0.5624739527702332,
"learning_rate": 1.642857142857143e-05,
"loss": 1.7000582218170166,
"step": 70
},
{
"epoch": 0.12857142857142856,
"grad_norm": 0.929057776927948,
"learning_rate": 1.6904761904761906e-05,
"loss": 1.57151198387146,
"step": 72
},
{
"epoch": 0.13214285714285715,
"grad_norm": 0.45938676595687866,
"learning_rate": 1.7380952380952384e-05,
"loss": 1.7509145736694336,
"step": 74
},
{
"epoch": 0.1357142857142857,
"grad_norm": 0.7960886359214783,
"learning_rate": 1.785714285714286e-05,
"loss": 1.627264142036438,
"step": 76
},
{
"epoch": 0.1392857142857143,
"grad_norm": 0.6913440227508545,
"learning_rate": 1.8333333333333333e-05,
"loss": 1.5597522258758545,
"step": 78
},
{
"epoch": 0.14285714285714285,
"grad_norm": 0.3351443111896515,
"learning_rate": 1.880952380952381e-05,
"loss": 1.5047800540924072,
"step": 80
},
{
"epoch": 0.14642857142857144,
"grad_norm": 4.245250701904297,
"learning_rate": 1.928571428571429e-05,
"loss": 1.6616361141204834,
"step": 82
},
{
"epoch": 0.15,
"grad_norm": 0.8213376402854919,
"learning_rate": 1.9761904761904763e-05,
"loss": 1.633697509765625,
"step": 84
},
{
"epoch": 0.15357142857142858,
"grad_norm": 0.5915255546569824,
"learning_rate": 1.9999982564020695e-05,
"loss": 1.3151729106903076,
"step": 86
},
{
"epoch": 0.15714285714285714,
"grad_norm": 0.5589431524276733,
"learning_rate": 1.9999843076591598e-05,
"loss": 1.5256870985031128,
"step": 88
},
{
"epoch": 0.16071428571428573,
"grad_norm": 0.5508841872215271,
"learning_rate": 1.9999564103895265e-05,
"loss": 1.4112975597381592,
"step": 90
},
{
"epoch": 0.16428571428571428,
"grad_norm": 0.6648878455162048,
"learning_rate": 1.9999145650255392e-05,
"loss": 1.4721081256866455,
"step": 92
},
{
"epoch": 0.16785714285714284,
"grad_norm": 0.7325057983398438,
"learning_rate": 1.999858772215744e-05,
"loss": 1.45548415184021,
"step": 94
},
{
"epoch": 0.17142857142857143,
"grad_norm": 4.314115524291992,
"learning_rate": 1.9997890328248536e-05,
"loss": 1.6468373537063599,
"step": 96
},
{
"epoch": 0.175,
"grad_norm": 0.4537746012210846,
"learning_rate": 1.9997053479337327e-05,
"loss": 1.4209429025650024,
"step": 98
},
{
"epoch": 0.17857142857142858,
"grad_norm": 0.35222765803337097,
"learning_rate": 1.9996077188393826e-05,
"loss": 1.390677809715271,
"step": 100
},
{
"epoch": 0.18214285714285713,
"grad_norm": 1.7469472885131836,
"learning_rate": 1.9994961470549216e-05,
"loss": 1.7277623414993286,
"step": 102
},
{
"epoch": 0.18571428571428572,
"grad_norm": 0.1474483162164688,
"learning_rate": 1.999370634309559e-05,
"loss": 1.1081668138504028,
"step": 104
},
{
"epoch": 0.18928571428571428,
"grad_norm": 0.46639060974121094,
"learning_rate": 1.999231182548571e-05,
"loss": 1.410254955291748,
"step": 106
},
{
"epoch": 0.19285714285714287,
"grad_norm": 0.43072256445884705,
"learning_rate": 1.9990777939332697e-05,
"loss": 1.404090404510498,
"step": 108
},
{
"epoch": 0.19642857142857142,
"grad_norm": 0.5892950892448425,
"learning_rate": 1.998910470840969e-05,
"loss": 1.591571569442749,
"step": 110
},
{
"epoch": 0.2,
"grad_norm": 0.7321836352348328,
"learning_rate": 1.9987292158649477e-05,
"loss": 1.7062772512435913,
"step": 112
},
{
"epoch": 0.20357142857142857,
"grad_norm": 0.5677797794342041,
"learning_rate": 1.9985340318144104e-05,
"loss": 1.6029880046844482,
"step": 114
},
{
"epoch": 0.20714285714285716,
"grad_norm": 0.5012320280075073,
"learning_rate": 1.998324921714443e-05,
"loss": 1.6100475788116455,
"step": 116
},
{
"epoch": 0.21071428571428572,
"grad_norm": 0.3957848846912384,
"learning_rate": 1.9981018888059666e-05,
"loss": 1.4384129047393799,
"step": 118
},
{
"epoch": 0.21428571428571427,
"grad_norm": 0.5229449272155762,
"learning_rate": 1.997864936545686e-05,
"loss": 1.3900649547576904,
"step": 120
},
{
"epoch": 0.21785714285714286,
"grad_norm": 1.3295955657958984,
"learning_rate": 1.997614068606038e-05,
"loss": 1.7176506519317627,
"step": 122
},
{
"epoch": 0.22142857142857142,
"grad_norm": 0.44528570771217346,
"learning_rate": 1.997349288875132e-05,
"loss": 1.4366806745529175,
"step": 124
},
{
"epoch": 0.225,
"grad_norm": 0.39040693640708923,
"learning_rate": 1.997070601456693e-05,
"loss": 1.3515652418136597,
"step": 126
},
{
"epoch": 0.22857142857142856,
"grad_norm": 0.5782378315925598,
"learning_rate": 1.9967780106699938e-05,
"loss": 1.352698564529419,
"step": 128
},
{
"epoch": 0.23214285714285715,
"grad_norm": 0.4463367760181427,
"learning_rate": 1.9964715210497926e-05,
"loss": 1.2373923063278198,
"step": 130
},
{
"epoch": 0.2357142857142857,
"grad_norm": 0.4424625635147095,
"learning_rate": 1.996151137346259e-05,
"loss": 1.4218852519989014,
"step": 132
},
{
"epoch": 0.2392857142857143,
"grad_norm": 0.39771443605422974,
"learning_rate": 1.9958168645249036e-05,
"loss": 1.3959071636199951,
"step": 134
},
{
"epoch": 0.24285714285714285,
"grad_norm": 0.995601236820221,
"learning_rate": 1.995468707766497e-05,
"loss": 1.5231602191925049,
"step": 136
},
{
"epoch": 0.24642857142857144,
"grad_norm": 0.8687110543251038,
"learning_rate": 1.995106672466994e-05,
"loss": 1.5624220371246338,
"step": 138
},
{
"epoch": 0.25,
"grad_norm": 2.4341928958892822,
"learning_rate": 1.9947307642374466e-05,
"loss": 1.1912024021148682,
"step": 140
},
{
"epoch": 0.25357142857142856,
"grad_norm": 0.3811090886592865,
"learning_rate": 1.9943409889039186e-05,
"loss": 1.145674467086792,
"step": 142
},
{
"epoch": 0.2571428571428571,
"grad_norm": 0.3321725130081177,
"learning_rate": 1.9939373525073946e-05,
"loss": 1.5421075820922852,
"step": 144
},
{
"epoch": 0.26071428571428573,
"grad_norm": 0.9177084565162659,
"learning_rate": 1.9935198613036877e-05,
"loss": 1.5951645374298096,
"step": 146
},
{
"epoch": 0.2642857142857143,
"grad_norm": 0.4631420969963074,
"learning_rate": 1.9930885217633408e-05,
"loss": 1.4165486097335815,
"step": 148
},
{
"epoch": 0.26785714285714285,
"grad_norm": 0.31056374311447144,
"learning_rate": 1.992643340571527e-05,
"loss": 1.337796688079834,
"step": 150
},
{
"epoch": 0.2714285714285714,
"grad_norm": 0.7542070746421814,
"learning_rate": 1.992184324627946e-05,
"loss": 1.378330111503601,
"step": 152
},
{
"epoch": 0.275,
"grad_norm": 0.32567477226257324,
"learning_rate": 1.9917114810467187e-05,
"loss": 1.3354138135910034,
"step": 154
},
{
"epoch": 0.2785714285714286,
"grad_norm": 0.39338260889053345,
"learning_rate": 1.9912248171562732e-05,
"loss": 1.454702615737915,
"step": 156
},
{
"epoch": 0.28214285714285714,
"grad_norm": 1.2659757137298584,
"learning_rate": 1.9907243404992357e-05,
"loss": 1.430185317993164,
"step": 158
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.7528992891311646,
"learning_rate": 1.9902100588323095e-05,
"loss": 1.4888527393341064,
"step": 160
},
{
"epoch": 0.2892857142857143,
"grad_norm": 0.3230721652507782,
"learning_rate": 1.9896819801261575e-05,
"loss": 1.372198462486267,
"step": 162
},
{
"epoch": 0.29285714285714287,
"grad_norm": 0.5240757465362549,
"learning_rate": 1.9891401125652788e-05,
"loss": 1.3045603036880493,
"step": 164
},
{
"epoch": 0.29642857142857143,
"grad_norm": 0.41186925768852234,
"learning_rate": 1.988584464547879e-05,
"loss": 1.3587464094161987,
"step": 166
},
{
"epoch": 0.3,
"grad_norm": 0.5823759436607361,
"learning_rate": 1.9880150446857435e-05,
"loss": 1.2455697059631348,
"step": 168
},
{
"epoch": 0.30357142857142855,
"grad_norm": 0.6033863425254822,
"learning_rate": 1.987431861804102e-05,
"loss": 1.5122787952423096,
"step": 170
},
{
"epoch": 0.30714285714285716,
"grad_norm": 0.5195621848106384,
"learning_rate": 1.9868349249414918e-05,
"loss": 1.5619277954101562,
"step": 172
},
{
"epoch": 0.3107142857142857,
"grad_norm": 0.3413119614124298,
"learning_rate": 1.9862242433496185e-05,
"loss": 1.2759490013122559,
"step": 174
},
{
"epoch": 0.3142857142857143,
"grad_norm": 0.43644359707832336,
"learning_rate": 1.9855998264932118e-05,
"loss": 1.525347352027893,
"step": 176
},
{
"epoch": 0.31785714285714284,
"grad_norm": 2.2331385612487793,
"learning_rate": 1.9849616840498807e-05,
"loss": 1.4077792167663574,
"step": 178
},
{
"epoch": 0.32142857142857145,
"grad_norm": 1.2636653184890747,
"learning_rate": 1.9843098259099597e-05,
"loss": 1.494966745376587,
"step": 180
},
{
"epoch": 0.325,
"grad_norm": 1.3332237005233765,
"learning_rate": 1.9836442621763593e-05,
"loss": 1.2962632179260254,
"step": 182
},
{
"epoch": 0.32857142857142857,
"grad_norm": 0.9052379131317139,
"learning_rate": 1.982965003164408e-05,
"loss": 1.4094679355621338,
"step": 184
},
{
"epoch": 0.33214285714285713,
"grad_norm": 0.3790226876735687,
"learning_rate": 1.982272059401692e-05,
"loss": 1.308838129043579,
"step": 186
},
{
"epoch": 0.3357142857142857,
"grad_norm": 0.5489211678504944,
"learning_rate": 1.9815654416278924e-05,
"loss": 1.2358474731445312,
"step": 188
},
{
"epoch": 0.3392857142857143,
"grad_norm": 0.32167771458625793,
"learning_rate": 1.980845160794619e-05,
"loss": 1.24757719039917,
"step": 190
},
{
"epoch": 0.34285714285714286,
"grad_norm": 0.5797743201255798,
"learning_rate": 1.9801112280652406e-05,
"loss": 0.9474111795425415,
"step": 192
},
{
"epoch": 0.3464285714285714,
"grad_norm": 0.5825701951980591,
"learning_rate": 1.979363654814711e-05,
"loss": 1.6204115152359009,
"step": 194
},
{
"epoch": 0.35,
"grad_norm": 0.2390112429857254,
"learning_rate": 1.9786024526293943e-05,
"loss": 1.2390871047973633,
"step": 196
},
{
"epoch": 0.3535714285714286,
"grad_norm": 0.49600404500961304,
"learning_rate": 1.9778276333068833e-05,
"loss": 1.4131810665130615,
"step": 198
},
{
"epoch": 0.35714285714285715,
"grad_norm": 0.678787350654602,
"learning_rate": 1.977039208855819e-05,
"loss": 1.23861563205719,
"step": 200
},
{
"epoch": 0.3607142857142857,
"grad_norm": 0.8061485290527344,
"learning_rate": 1.9762371914957027e-05,
"loss": 1.4984278678894043,
"step": 202
},
{
"epoch": 0.36428571428571427,
"grad_norm": 1.1672908067703247,
"learning_rate": 1.9754215936567077e-05,
"loss": 1.346609354019165,
"step": 204
},
{
"epoch": 0.3678571428571429,
"grad_norm": 0.42324623465538025,
"learning_rate": 1.9745924279794853e-05,
"loss": 1.4815120697021484,
"step": 206
},
{
"epoch": 0.37142857142857144,
"grad_norm": 1.4115076065063477,
"learning_rate": 1.97374970731497e-05,
"loss": 1.2184674739837646,
"step": 208
},
{
"epoch": 0.375,
"grad_norm": 0.3797534704208374,
"learning_rate": 1.9728934447241815e-05,
"loss": 1.419222116470337,
"step": 210
},
{
"epoch": 0.37857142857142856,
"grad_norm": 0.49382659792900085,
"learning_rate": 1.9720236534780184e-05,
"loss": 1.3580292463302612,
"step": 212
},
{
"epoch": 0.3821428571428571,
"grad_norm": 0.838750958442688,
"learning_rate": 1.971140347057057e-05,
"loss": 1.3154736757278442,
"step": 214
},
{
"epoch": 0.38571428571428573,
"grad_norm": 0.43846485018730164,
"learning_rate": 1.970243539151339e-05,
"loss": 1.4407868385314941,
"step": 216
},
{
"epoch": 0.3892857142857143,
"grad_norm": 0.4752633571624756,
"learning_rate": 1.9693332436601616e-05,
"loss": 1.6369309425354004,
"step": 218
},
{
"epoch": 0.39285714285714285,
"grad_norm": 0.7840119004249573,
"learning_rate": 1.968409474691861e-05,
"loss": 1.2841938734054565,
"step": 220
},
{
"epoch": 0.3964285714285714,
"grad_norm": 0.3010779917240143,
"learning_rate": 1.967472246563593e-05,
"loss": 1.5273520946502686,
"step": 222
},
{
"epoch": 0.4,
"grad_norm": 0.37263405323028564,
"learning_rate": 1.966521573801113e-05,
"loss": 1.2460472583770752,
"step": 224
},
{
"epoch": 0.4035714285714286,
"grad_norm": 0.2715803384780884,
"learning_rate": 1.9655574711385497e-05,
"loss": 1.2670462131500244,
"step": 226
},
{
"epoch": 0.40714285714285714,
"grad_norm": 0.7070139646530151,
"learning_rate": 1.9645799535181767e-05,
"loss": 1.363389253616333,
"step": 228
},
{
"epoch": 0.4107142857142857,
"grad_norm": 0.6753621101379395,
"learning_rate": 1.9635890360901805e-05,
"loss": 1.5315601825714111,
"step": 230
},
{
"epoch": 0.4142857142857143,
"grad_norm": 0.5578319430351257,
"learning_rate": 1.9625847342124278e-05,
"loss": 1.2386059761047363,
"step": 232
},
{
"epoch": 0.41785714285714287,
"grad_norm": 0.5665930509567261,
"learning_rate": 1.961567063450224e-05,
"loss": 1.4771205186843872,
"step": 234
},
{
"epoch": 0.42142857142857143,
"grad_norm": 0.3672884404659271,
"learning_rate": 1.960536039576076e-05,
"loss": 1.2611351013183594,
"step": 236
},
{
"epoch": 0.425,
"grad_norm": 0.37258124351501465,
"learning_rate": 1.959491678569444e-05,
"loss": 1.3256354331970215,
"step": 238
},
{
"epoch": 0.42857142857142855,
"grad_norm": 0.5384905934333801,
"learning_rate": 1.958433996616497e-05,
"loss": 1.2469019889831543,
"step": 240
},
{
"epoch": 0.43214285714285716,
"grad_norm": 0.7136536240577698,
"learning_rate": 1.957363010109859e-05,
"loss": 1.1012486219406128,
"step": 242
},
{
"epoch": 0.4357142857142857,
"grad_norm": 0.6285988092422485,
"learning_rate": 1.9562787356483573e-05,
"loss": 1.3433374166488647,
"step": 244
},
{
"epoch": 0.4392857142857143,
"grad_norm": 1.8126535415649414,
"learning_rate": 1.9551811900367642e-05,
"loss": 1.2772047519683838,
"step": 246
},
{
"epoch": 0.44285714285714284,
"grad_norm": 0.7285305857658386,
"learning_rate": 1.954070390285537e-05,
"loss": 1.4760041236877441,
"step": 248
},
{
"epoch": 0.44642857142857145,
"grad_norm": 0.685204803943634,
"learning_rate": 1.9529463536105525e-05,
"loss": 1.436897873878479,
"step": 250
},
{
"epoch": 0.45,
"grad_norm": 0.7495101094245911,
"learning_rate": 1.951809097432844e-05,
"loss": 1.3357012271881104,
"step": 252
},
{
"epoch": 0.45357142857142857,
"grad_norm": 0.39518865942955017,
"learning_rate": 1.9506586393783278e-05,
"loss": 1.3316457271575928,
"step": 254
},
{
"epoch": 0.45714285714285713,
"grad_norm": 0.3139326870441437,
"learning_rate": 1.949494997277531e-05,
"loss": 1.2338916063308716,
"step": 256
},
{
"epoch": 0.4607142857142857,
"grad_norm": 0.7414034008979797,
"learning_rate": 1.948318189165316e-05,
"loss": 0.9024492502212524,
"step": 258
},
{
"epoch": 0.4642857142857143,
"grad_norm": 0.31645119190216064,
"learning_rate": 1.9471282332805996e-05,
"loss": 1.4956551790237427,
"step": 260
},
{
"epoch": 0.46785714285714286,
"grad_norm": 0.40859320759773254,
"learning_rate": 1.9459251480660726e-05,
"loss": 1.5328614711761475,
"step": 262
},
{
"epoch": 0.4714285714285714,
"grad_norm": 0.5480664372444153,
"learning_rate": 1.944708952167911e-05,
"loss": 1.263709306716919,
"step": 264
},
{
"epoch": 0.475,
"grad_norm": 0.4685518741607666,
"learning_rate": 1.9434796644354885e-05,
"loss": 1.2509859800338745,
"step": 266
},
{
"epoch": 0.4785714285714286,
"grad_norm": 0.4723505973815918,
"learning_rate": 1.942237303921086e-05,
"loss": 1.1997942924499512,
"step": 268
},
{
"epoch": 0.48214285714285715,
"grad_norm": 0.49579185247421265,
"learning_rate": 1.9409818898795924e-05,
"loss": 1.4426811933517456,
"step": 270
},
{
"epoch": 0.4857142857142857,
"grad_norm": 0.7510737776756287,
"learning_rate": 1.93971344176821e-05,
"loss": 1.2704529762268066,
"step": 272
},
{
"epoch": 0.48928571428571427,
"grad_norm": 0.4258019030094147,
"learning_rate": 1.9384319792461513e-05,
"loss": 1.2604095935821533,
"step": 274
},
{
"epoch": 0.4928571428571429,
"grad_norm": 0.7846337556838989,
"learning_rate": 1.9371375221743333e-05,
"loss": 1.2763457298278809,
"step": 276
},
{
"epoch": 0.49642857142857144,
"grad_norm": 0.6551533341407776,
"learning_rate": 1.9358300906150715e-05,
"loss": 1.4955462217330933,
"step": 278
},
{
"epoch": 0.5,
"grad_norm": 0.39320385456085205,
"learning_rate": 1.934509704831768e-05,
"loss": 1.305250644683838,
"step": 280
},
{
"epoch": 0.5035714285714286,
"grad_norm": 0.4256862998008728,
"learning_rate": 1.9331763852885988e-05,
"loss": 1.2777750492095947,
"step": 282
},
{
"epoch": 0.5071428571428571,
"grad_norm": 1.6539260149002075,
"learning_rate": 1.931830152650193e-05,
"loss": 1.3221278190612793,
"step": 284
},
{
"epoch": 0.5107142857142857,
"grad_norm": 0.7155967950820923,
"learning_rate": 1.9304710277813182e-05,
"loss": 1.5129222869873047,
"step": 286
},
{
"epoch": 0.5142857142857142,
"grad_norm": 0.4225437343120575,
"learning_rate": 1.929099031746551e-05,
"loss": 1.4677280187606812,
"step": 288
},
{
"epoch": 0.5178571428571429,
"grad_norm": 1.5834101438522339,
"learning_rate": 1.9277141858099552e-05,
"loss": 1.324265480041504,
"step": 290
},
{
"epoch": 0.5214285714285715,
"grad_norm": 0.42641639709472656,
"learning_rate": 1.9263165114347503e-05,
"loss": 1.5003204345703125,
"step": 292
},
{
"epoch": 0.525,
"grad_norm": 0.32519879937171936,
"learning_rate": 1.924906030282979e-05,
"loss": 1.2406567335128784,
"step": 294
},
{
"epoch": 0.5285714285714286,
"grad_norm": 0.8007866144180298,
"learning_rate": 1.9234827642151705e-05,
"loss": 1.5307186841964722,
"step": 296
},
{
"epoch": 0.5321428571428571,
"grad_norm": 0.2881964147090912,
"learning_rate": 1.922046735290004e-05,
"loss": 1.3302806615829468,
"step": 298
},
{
"epoch": 0.5357142857142857,
"grad_norm": 0.39077696204185486,
"learning_rate": 1.9205979657639658e-05,
"loss": 1.3356809616088867,
"step": 300
},
{
"epoch": 0.5392857142857143,
"grad_norm": 2.1859304904937744,
"learning_rate": 1.919136478091003e-05,
"loss": 1.58237886428833,
"step": 302
},
{
"epoch": 0.5428571428571428,
"grad_norm": 0.5423356890678406,
"learning_rate": 1.9176622949221776e-05,
"loss": 1.2764296531677246,
"step": 304
},
{
"epoch": 0.5464285714285714,
"grad_norm": 0.6123110055923462,
"learning_rate": 1.9161754391053127e-05,
"loss": 1.3649842739105225,
"step": 306
},
{
"epoch": 0.55,
"grad_norm": 0.40666741132736206,
"learning_rate": 1.9146759336846418e-05,
"loss": 1.3004515171051025,
"step": 308
},
{
"epoch": 0.5535714285714286,
"grad_norm": 0.4282005727291107,
"learning_rate": 1.91316380190045e-05,
"loss": 1.4622749090194702,
"step": 310
},
{
"epoch": 0.5571428571428572,
"grad_norm": 0.7790765166282654,
"learning_rate": 1.911639067188713e-05,
"loss": 1.2346100807189941,
"step": 312
},
{
"epoch": 0.5607142857142857,
"grad_norm": 0.8703919649124146,
"learning_rate": 1.9101017531807344e-05,
"loss": 1.2121374607086182,
"step": 314
},
{
"epoch": 0.5642857142857143,
"grad_norm": 0.6184125542640686,
"learning_rate": 1.9085518837027812e-05,
"loss": 1.2770280838012695,
"step": 316
},
{
"epoch": 0.5678571428571428,
"grad_norm": 1.1969032287597656,
"learning_rate": 1.9069894827757112e-05,
"loss": 1.2650474309921265,
"step": 318
},
{
"epoch": 0.5714285714285714,
"grad_norm": 0.6322106719017029,
"learning_rate": 1.905414574614604e-05,
"loss": 1.2580130100250244,
"step": 320
},
{
"epoch": 0.575,
"grad_norm": 0.24475856125354767,
"learning_rate": 1.9038271836283826e-05,
"loss": 1.1550788879394531,
"step": 322
},
{
"epoch": 0.5785714285714286,
"grad_norm": 0.3760751783847809,
"learning_rate": 1.9022273344194388e-05,
"loss": 1.4843223094940186,
"step": 324
},
{
"epoch": 0.5821428571428572,
"grad_norm": 0.46275073289871216,
"learning_rate": 1.9006150517832482e-05,
"loss": 1.448726773262024,
"step": 326
},
{
"epoch": 0.5857142857142857,
"grad_norm": 0.4800632894039154,
"learning_rate": 1.8989903607079885e-05,
"loss": 1.2668516635894775,
"step": 328
},
{
"epoch": 0.5892857142857143,
"grad_norm": 0.3428266644477844,
"learning_rate": 1.8973532863741504e-05,
"loss": 1.222381353378296,
"step": 330
},
{
"epoch": 0.5928571428571429,
"grad_norm": 0.4723515510559082,
"learning_rate": 1.895703854154149e-05,
"loss": 1.3355088233947754,
"step": 332
},
{
"epoch": 0.5964285714285714,
"grad_norm": 0.3576123118400574,
"learning_rate": 1.894042089611929e-05,
"loss": 1.3003771305084229,
"step": 334
},
{
"epoch": 0.6,
"grad_norm": 0.3541733920574188,
"learning_rate": 1.89236801850257e-05,
"loss": 1.207617998123169,
"step": 336
},
{
"epoch": 0.6035714285714285,
"grad_norm": 0.6847231984138489,
"learning_rate": 1.8906816667718854e-05,
"loss": 1.2353670597076416,
"step": 338
},
{
"epoch": 0.6071428571428571,
"grad_norm": 0.28425782918930054,
"learning_rate": 1.8889830605560234e-05,
"loss": 1.3947781324386597,
"step": 340
},
{
"epoch": 0.6107142857142858,
"grad_norm": 0.6358458995819092,
"learning_rate": 1.8872722261810576e-05,
"loss": 1.209977626800537,
"step": 342
},
{
"epoch": 0.6142857142857143,
"grad_norm": 0.5836315155029297,
"learning_rate": 1.8855491901625835e-05,
"loss": 1.2867995500564575,
"step": 344
},
{
"epoch": 0.6178571428571429,
"grad_norm": 0.5231335163116455,
"learning_rate": 1.883813979205304e-05,
"loss": 1.2911901473999023,
"step": 346
},
{
"epoch": 0.6214285714285714,
"grad_norm": 0.43874913454055786,
"learning_rate": 1.8820666202026172e-05,
"loss": 1.2187429666519165,
"step": 348
},
{
"epoch": 0.625,
"grad_norm": 0.7828032970428467,
"learning_rate": 1.8803071402361995e-05,
"loss": 1.492908000946045,
"step": 350
},
{
"epoch": 0.6285714285714286,
"grad_norm": 0.5225529074668884,
"learning_rate": 1.8785355665755864e-05,
"loss": 1.5098028182983398,
"step": 352
},
{
"epoch": 0.6321428571428571,
"grad_norm": 0.47736281156539917,
"learning_rate": 1.8767519266777473e-05,
"loss": 1.176377296447754,
"step": 354
},
{
"epoch": 0.6357142857142857,
"grad_norm": 0.27933818101882935,
"learning_rate": 1.8749562481866632e-05,
"loss": 1.3661816120147705,
"step": 356
},
{
"epoch": 0.6392857142857142,
"grad_norm": 0.9762703776359558,
"learning_rate": 1.8731485589328968e-05,
"loss": 1.6452014446258545,
"step": 358
},
{
"epoch": 0.6428571428571429,
"grad_norm": 0.41242146492004395,
"learning_rate": 1.8713288869331608e-05,
"loss": 1.4132412672042847,
"step": 360
},
{
"epoch": 0.6464285714285715,
"grad_norm": 0.4835861623287201,
"learning_rate": 1.8694972603898834e-05,
"loss": 1.2498993873596191,
"step": 362
},
{
"epoch": 0.65,
"grad_norm": 0.26400306820869446,
"learning_rate": 1.867653707690774e-05,
"loss": 1.52461576461792,
"step": 364
},
{
"epoch": 0.6535714285714286,
"grad_norm": 0.38874751329421997,
"learning_rate": 1.8657982574083784e-05,
"loss": 1.5055546760559082,
"step": 366
},
{
"epoch": 0.6571428571428571,
"grad_norm": 0.48226460814476013,
"learning_rate": 1.863930938299641e-05,
"loss": 1.2804465293884277,
"step": 368
},
{
"epoch": 0.6607142857142857,
"grad_norm": 0.5018228888511658,
"learning_rate": 1.862051779305456e-05,
"loss": 1.2952604293823242,
"step": 370
},
{
"epoch": 0.6642857142857143,
"grad_norm": 0.7374165058135986,
"learning_rate": 1.8601608095502186e-05,
"loss": 1.203336238861084,
"step": 372
},
{
"epoch": 0.6678571428571428,
"grad_norm": 0.48408135771751404,
"learning_rate": 1.8582580583413762e-05,
"loss": 1.3007687330245972,
"step": 374
},
{
"epoch": 0.6714285714285714,
"grad_norm": 0.4113704562187195,
"learning_rate": 1.8563435551689714e-05,
"loss": 1.354430913925171,
"step": 376
},
{
"epoch": 0.675,
"grad_norm": 1.2474658489227295,
"learning_rate": 1.8544173297051873e-05,
"loss": 0.8642697334289551,
"step": 378
},
{
"epoch": 0.6785714285714286,
"grad_norm": 0.4104273021221161,
"learning_rate": 1.852479411803886e-05,
"loss": 1.2896859645843506,
"step": 380
},
{
"epoch": 0.6821428571428572,
"grad_norm": 0.5409207940101624,
"learning_rate": 1.850529831500146e-05,
"loss": 1.4960516691207886,
"step": 382
},
{
"epoch": 0.6857142857142857,
"grad_norm": 0.48803240060806274,
"learning_rate": 1.8485686190097975e-05,
"loss": 1.7281725406646729,
"step": 384
},
{
"epoch": 0.6892857142857143,
"grad_norm": 0.9719665050506592,
"learning_rate": 1.8465958047289535e-05,
"loss": 1.2362210750579834,
"step": 386
},
{
"epoch": 0.6928571428571428,
"grad_norm": 0.3114999234676361,
"learning_rate": 1.8446114192335393e-05,
"loss": 1.2179937362670898,
"step": 388
},
{
"epoch": 0.6964285714285714,
"grad_norm": 0.3131682872772217,
"learning_rate": 1.8426154932788176e-05,
"loss": 1.2936997413635254,
"step": 390
},
{
"epoch": 0.7,
"grad_norm": 0.32079121470451355,
"learning_rate": 1.8406080577989132e-05,
"loss": 1.2510591745376587,
"step": 392
},
{
"epoch": 0.7035714285714286,
"grad_norm": 0.9477353692054749,
"learning_rate": 1.8385891439063325e-05,
"loss": 1.2309963703155518,
"step": 394
},
{
"epoch": 0.7071428571428572,
"grad_norm": 0.4169057309627533,
"learning_rate": 1.8365587828914804e-05,
"loss": 1.4043102264404297,
"step": 396
},
{
"epoch": 0.7107142857142857,
"grad_norm": 1.3407492637634277,
"learning_rate": 1.834517006222179e-05,
"loss": 1.486006259918213,
"step": 398
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.4785768389701843,
"learning_rate": 1.8324638455431755e-05,
"loss": 1.158205270767212,
"step": 400
},
{
"epoch": 0.7178571428571429,
"grad_norm": 3.806015729904175,
"learning_rate": 1.8303993326756543e-05,
"loss": 1.4163193702697754,
"step": 402
},
{
"epoch": 0.7214285714285714,
"grad_norm": 0.3417515158653259,
"learning_rate": 1.8283234996167434e-05,
"loss": 1.2975351810455322,
"step": 404
},
{
"epoch": 0.725,
"grad_norm": 1.0901787281036377,
"learning_rate": 1.8262363785390177e-05,
"loss": 1.4190306663513184,
"step": 406
},
{
"epoch": 0.7285714285714285,
"grad_norm": 0.4423401355743408,
"learning_rate": 1.8241380017900015e-05,
"loss": 1.5579520463943481,
"step": 408
},
{
"epoch": 0.7321428571428571,
"grad_norm": 0.5694108605384827,
"learning_rate": 1.8220284018916667e-05,
"loss": 1.2755184173583984,
"step": 410
},
{
"epoch": 0.7357142857142858,
"grad_norm": 0.292386919260025,
"learning_rate": 1.8199076115399285e-05,
"loss": 1.4448673725128174,
"step": 412
},
{
"epoch": 0.7392857142857143,
"grad_norm": 0.3983137309551239,
"learning_rate": 1.817775663604138e-05,
"loss": 0.8253436088562012,
"step": 414
},
{
"epoch": 0.7428571428571429,
"grad_norm": 0.39176154136657715,
"learning_rate": 1.8156325911265756e-05,
"loss": 1.2878429889678955,
"step": 416
},
{
"epoch": 0.7464285714285714,
"grad_norm": 0.38600635528564453,
"learning_rate": 1.8134784273219345e-05,
"loss": 1.1995046138763428,
"step": 418
},
{
"epoch": 0.75,
"grad_norm": 0.3774026036262512,
"learning_rate": 1.8113132055768102e-05,
"loss": 1.284184217453003,
"step": 420
},
{
"epoch": 0.7535714285714286,
"grad_norm": 0.3247928321361542,
"learning_rate": 1.8091369594491805e-05,
"loss": 1.392996072769165,
"step": 422
},
{
"epoch": 0.7571428571428571,
"grad_norm": 0.41736844182014465,
"learning_rate": 1.8069497226678853e-05,
"loss": 1.5332679748535156,
"step": 424
},
{
"epoch": 0.7607142857142857,
"grad_norm": 0.6577937006950378,
"learning_rate": 1.8047515291321062e-05,
"loss": 1.2635902166366577,
"step": 426
},
{
"epoch": 0.7642857142857142,
"grad_norm": 0.5530592799186707,
"learning_rate": 1.802542412910838e-05,
"loss": 1.4461865425109863,
"step": 428
},
{
"epoch": 0.7678571428571429,
"grad_norm": 0.2581753432750702,
"learning_rate": 1.8003224082423634e-05,
"loss": 1.0822758674621582,
"step": 430
},
{
"epoch": 0.7714285714285715,
"grad_norm": 1.1362017393112183,
"learning_rate": 1.79809154953372e-05,
"loss": 1.272403359413147,
"step": 432
},
{
"epoch": 0.775,
"grad_norm": 0.2005966454744339,
"learning_rate": 1.7958498713601692e-05,
"loss": 1.1220377683639526,
"step": 434
},
{
"epoch": 0.7785714285714286,
"grad_norm": 0.5320938229560852,
"learning_rate": 1.7935974084646585e-05,
"loss": 0.9426363706588745,
"step": 436
},
{
"epoch": 0.7821428571428571,
"grad_norm": 0.5724827647209167,
"learning_rate": 1.7913341957572846e-05,
"loss": 1.35481858253479,
"step": 438
},
{
"epoch": 0.7857142857142857,
"grad_norm": 0.7783690690994263,
"learning_rate": 1.7890602683147515e-05,
"loss": 1.2968411445617676,
"step": 440
},
{
"epoch": 0.7892857142857143,
"grad_norm": 1.0548959970474243,
"learning_rate": 1.786775661379826e-05,
"loss": 1.4600225687026978,
"step": 442
},
{
"epoch": 0.7928571428571428,
"grad_norm": 0.41852042078971863,
"learning_rate": 1.7844804103607935e-05,
"loss": 1.274878978729248,
"step": 444
},
{
"epoch": 0.7964285714285714,
"grad_norm": 1.4088575839996338,
"learning_rate": 1.782174550830908e-05,
"loss": 1.4733103513717651,
"step": 446
},
{
"epoch": 0.8,
"grad_norm": 0.2099597156047821,
"learning_rate": 1.77985811852784e-05,
"loss": 1.112687349319458,
"step": 448
},
{
"epoch": 0.8035714285714286,
"grad_norm": 0.44812679290771484,
"learning_rate": 1.777531149353125e-05,
"loss": 1.2501091957092285,
"step": 450
},
{
"epoch": 0.8071428571428572,
"grad_norm": 1.0644121170043945,
"learning_rate": 1.7751936793716045e-05,
"loss": 1.2617871761322021,
"step": 452
},
{
"epoch": 0.8107142857142857,
"grad_norm": 0.6780351996421814,
"learning_rate": 1.7728457448108683e-05,
"loss": 1.270803689956665,
"step": 454
},
{
"epoch": 0.8142857142857143,
"grad_norm": 0.5940962433815002,
"learning_rate": 1.7704873820606932e-05,
"loss": 1.253537893295288,
"step": 456
},
{
"epoch": 0.8178571428571428,
"grad_norm": 0.3232758045196533,
"learning_rate": 1.768118627672479e-05,
"loss": 1.240506887435913,
"step": 458
},
{
"epoch": 0.8214285714285714,
"grad_norm": 0.7501121163368225,
"learning_rate": 1.765739518358681e-05,
"loss": 1.2568142414093018,
"step": 460
},
{
"epoch": 0.825,
"grad_norm": 1.307832956314087,
"learning_rate": 1.7633500909922413e-05,
"loss": 1.516510009765625,
"step": 462
},
{
"epoch": 0.8285714285714286,
"grad_norm": 0.7196856141090393,
"learning_rate": 1.760950382606019e-05,
"loss": 1.267121434211731,
"step": 464
},
{
"epoch": 0.8321428571428572,
"grad_norm": 0.27105146646499634,
"learning_rate": 1.7585404303922147e-05,
"loss": 1.211654782295227,
"step": 466
},
{
"epoch": 0.8357142857142857,
"grad_norm": 3.3737258911132812,
"learning_rate": 1.7561202717017933e-05,
"loss": 1.4474252462387085,
"step": 468
},
{
"epoch": 0.8392857142857143,
"grad_norm": 0.6952375769615173,
"learning_rate": 1.7536899440439066e-05,
"loss": 1.3239004611968994,
"step": 470
},
{
"epoch": 0.8428571428571429,
"grad_norm": 0.34531912207603455,
"learning_rate": 1.751249485085312e-05,
"loss": 1.492138147354126,
"step": 472
},
{
"epoch": 0.8464285714285714,
"grad_norm": 0.23176339268684387,
"learning_rate": 1.7487989326497878e-05,
"loss": 1.1729906797409058,
"step": 474
},
{
"epoch": 0.85,
"grad_norm": 0.6648816466331482,
"learning_rate": 1.746338324717548e-05,
"loss": 1.2582666873931885,
"step": 476
},
{
"epoch": 0.8535714285714285,
"grad_norm": 0.6908137202262878,
"learning_rate": 1.7438676994246515e-05,
"loss": 1.216418743133545,
"step": 478
},
{
"epoch": 0.8571428571428571,
"grad_norm": 0.8889988660812378,
"learning_rate": 1.7413870950624146e-05,
"loss": 1.2808120250701904,
"step": 480
},
{
"epoch": 0.8607142857142858,
"grad_norm": 0.5396649241447449,
"learning_rate": 1.7388965500768138e-05,
"loss": 1.3011693954467773,
"step": 482
},
{
"epoch": 0.8642857142857143,
"grad_norm": 0.4081900119781494,
"learning_rate": 1.736396103067893e-05,
"loss": 1.322187066078186,
"step": 484
},
{
"epoch": 0.8678571428571429,
"grad_norm": 3.7584619522094727,
"learning_rate": 1.733885792789163e-05,
"loss": 1.2870206832885742,
"step": 486
},
{
"epoch": 0.8714285714285714,
"grad_norm": 1.3534671068191528,
"learning_rate": 1.7313656581470025e-05,
"loss": 1.52611243724823,
"step": 488
},
{
"epoch": 0.875,
"grad_norm": 0.27716994285583496,
"learning_rate": 1.7288357382000544e-05,
"loss": 1.1492018699645996,
"step": 490
},
{
"epoch": 0.8785714285714286,
"grad_norm": 0.3399287760257721,
"learning_rate": 1.726296072158619e-05,
"loss": 1.170601725578308,
"step": 492
},
{
"epoch": 0.8821428571428571,
"grad_norm": 0.49777647852897644,
"learning_rate": 1.72374669938405e-05,
"loss": 1.689265251159668,
"step": 494
},
{
"epoch": 0.8857142857142857,
"grad_norm": 0.4312261641025543,
"learning_rate": 1.7211876593881404e-05,
"loss": 1.3613009452819824,
"step": 496
},
{
"epoch": 0.8892857142857142,
"grad_norm": 0.4204741418361664,
"learning_rate": 1.718618991832513e-05,
"loss": 1.2536442279815674,
"step": 498
},
{
"epoch": 0.8928571428571429,
"grad_norm": 1.1467082500457764,
"learning_rate": 1.716040736528004e-05,
"loss": 1.47482168674469,
"step": 500
},
{
"epoch": 0.8964285714285715,
"grad_norm": 0.3066917359828949,
"learning_rate": 1.7134529334340465e-05,
"loss": 1.1949682235717773,
"step": 502
},
{
"epoch": 0.9,
"grad_norm": 0.28838351368904114,
"learning_rate": 1.7108556226580524e-05,
"loss": 0.8847708702087402,
"step": 504
},
{
"epoch": 0.9035714285714286,
"grad_norm": 0.6763066053390503,
"learning_rate": 1.7082488444547883e-05,
"loss": 1.4644018411636353,
"step": 506
},
{
"epoch": 0.9071428571428571,
"grad_norm": 0.4099554121494293,
"learning_rate": 1.7056326392257535e-05,
"loss": 1.2191145420074463,
"step": 508
},
{
"epoch": 0.9107142857142857,
"grad_norm": 0.42622798681259155,
"learning_rate": 1.703007047518554e-05,
"loss": 1.4777641296386719,
"step": 510
},
{
"epoch": 0.9142857142857143,
"grad_norm": 0.29898732900619507,
"learning_rate": 1.7003721100262723e-05,
"loss": 1.1827527284622192,
"step": 512
},
{
"epoch": 0.9178571428571428,
"grad_norm": 0.1540851891040802,
"learning_rate": 1.6977278675868376e-05,
"loss": 1.115896463394165,
"step": 514
},
{
"epoch": 0.9214285714285714,
"grad_norm": 0.31116917729377747,
"learning_rate": 1.695074361182395e-05,
"loss": 1.376265287399292,
"step": 516
},
{
"epoch": 0.925,
"grad_norm": 0.3563463091850281,
"learning_rate": 1.6924116319386665e-05,
"loss": 1.10237455368042,
"step": 518
},
{
"epoch": 0.9285714285714286,
"grad_norm": 0.8681960701942444,
"learning_rate": 1.689739721124316e-05,
"loss": 1.2694408893585205,
"step": 520
},
{
"epoch": 0.9321428571428572,
"grad_norm": 0.3502778708934784,
"learning_rate": 1.687058670150309e-05,
"loss": 1.1879040002822876,
"step": 522
},
{
"epoch": 0.9357142857142857,
"grad_norm": 0.6757563352584839,
"learning_rate": 1.6843685205692724e-05,
"loss": 1.612572431564331,
"step": 524
},
{
"epoch": 0.9392857142857143,
"grad_norm": 0.5540065765380859,
"learning_rate": 1.681669314074847e-05,
"loss": 1.2043344974517822,
"step": 526
},
{
"epoch": 0.9428571428571428,
"grad_norm": 0.5378439426422119,
"learning_rate": 1.6789610925010448e-05,
"loss": 1.2300772666931152,
"step": 528
},
{
"epoch": 0.9464285714285714,
"grad_norm": 0.3303406536579132,
"learning_rate": 1.6762438978215984e-05,
"loss": 0.998758852481842,
"step": 530
},
{
"epoch": 0.95,
"grad_norm": 0.5480644106864929,
"learning_rate": 1.673517772149312e-05,
"loss": 1.2909802198410034,
"step": 532
},
{
"epoch": 0.9535714285714286,
"grad_norm": 0.2534736096858978,
"learning_rate": 1.6707827577354072e-05,
"loss": 1.2177340984344482,
"step": 534
},
{
"epoch": 0.9571428571428572,
"grad_norm": 0.28689199686050415,
"learning_rate": 1.66803889696887e-05,
"loss": 1.5511302947998047,
"step": 536
},
{
"epoch": 0.9607142857142857,
"grad_norm": 0.3565874993801117,
"learning_rate": 1.6652862323757914e-05,
"loss": 1.2498445510864258,
"step": 538
},
{
"epoch": 0.9642857142857143,
"grad_norm": 0.6810623407363892,
"learning_rate": 1.662524806618711e-05,
"loss": 1.2677786350250244,
"step": 540
},
{
"epoch": 0.9678571428571429,
"grad_norm": 0.9204868078231812,
"learning_rate": 1.6597546624959534e-05,
"loss": 1.2525708675384521,
"step": 542
},
{
"epoch": 0.9714285714285714,
"grad_norm": 0.39553532004356384,
"learning_rate": 1.656975842940967e-05,
"loss": 1.5089502334594727,
"step": 544
},
{
"epoch": 0.975,
"grad_norm": 2.675163984298706,
"learning_rate": 1.6541883910216562e-05,
"loss": 1.4569265842437744,
"step": 546
},
{
"epoch": 0.9785714285714285,
"grad_norm": 0.6487977504730225,
"learning_rate": 1.6513923499397165e-05,
"loss": 1.218340516090393,
"step": 548
},
{
"epoch": 0.9821428571428571,
"grad_norm": 0.7652715444564819,
"learning_rate": 1.6485877630299633e-05,
"loss": 0.6461201310157776,
"step": 550
},
{
"epoch": 0.9857142857142858,
"grad_norm": 0.3120361566543579,
"learning_rate": 1.6457746737596608e-05,
"loss": 0.6979402303695679,
"step": 552
},
{
"epoch": 0.9892857142857143,
"grad_norm": 0.3177029490470886,
"learning_rate": 1.642953125727847e-05,
"loss": 1.2062575817108154,
"step": 554
},
{
"epoch": 0.9928571428571429,
"grad_norm": 0.3227110803127289,
"learning_rate": 1.6401231626646612e-05,
"loss": 1.3390659093856812,
"step": 556
},
{
"epoch": 0.9964285714285714,
"grad_norm": 7.338903903961182,
"learning_rate": 1.637284828430662e-05,
"loss": 0.8427339792251587,
"step": 558
},
{
"epoch": 1.0,
"grad_norm": 0.5971918106079102,
"learning_rate": 1.6344381670161514e-05,
"loss": 1.6298896074295044,
"step": 560
},
{
"epoch": 1.0035714285714286,
"grad_norm": 0.4188767075538635,
"learning_rate": 1.6315832225404905e-05,
"loss": 1.122739315032959,
"step": 562
},
{
"epoch": 1.0071428571428571,
"grad_norm": 0.6393467783927917,
"learning_rate": 1.6287200392514172e-05,
"loss": 1.4107418060302734,
"step": 564
},
{
"epoch": 1.0107142857142857,
"grad_norm": 0.7803854942321777,
"learning_rate": 1.6258486615243583e-05,
"loss": 1.3382797241210938,
"step": 566
},
{
"epoch": 1.0142857142857142,
"grad_norm": 0.8164628744125366,
"learning_rate": 1.6229691338617447e-05,
"loss": 1.170792579650879,
"step": 568
},
{
"epoch": 1.0178571428571428,
"grad_norm": 0.5098171234130859,
"learning_rate": 1.620081500892319e-05,
"loss": 1.1756045818328857,
"step": 570
},
{
"epoch": 1.0214285714285714,
"grad_norm": 0.35069453716278076,
"learning_rate": 1.6171858073704472e-05,
"loss": 1.2409474849700928,
"step": 572
},
{
"epoch": 1.025,
"grad_norm": 0.5415301322937012,
"learning_rate": 1.6142820981754194e-05,
"loss": 1.2160595655441284,
"step": 574
},
{
"epoch": 1.0285714285714285,
"grad_norm": 0.3839040994644165,
"learning_rate": 1.61137041831076e-05,
"loss": 1.2669594287872314,
"step": 576
},
{
"epoch": 1.032142857142857,
"grad_norm": 0.3261508047580719,
"learning_rate": 1.6084508129035285e-05,
"loss": 1.2169692516326904,
"step": 578
},
{
"epoch": 1.0357142857142858,
"grad_norm": 2.2598116397857666,
"learning_rate": 1.605523327203617e-05,
"loss": 1.412977933883667,
"step": 580
},
{
"epoch": 1.0392857142857144,
"grad_norm": 0.43175196647644043,
"learning_rate": 1.6025880065830527e-05,
"loss": 1.138507604598999,
"step": 582
},
{
"epoch": 1.042857142857143,
"grad_norm": 1.0159385204315186,
"learning_rate": 1.5996448965352946e-05,
"loss": 1.2821019887924194,
"step": 584
},
{
"epoch": 1.0464285714285715,
"grad_norm": 0.5554090738296509,
"learning_rate": 1.596694042674525e-05,
"loss": 1.3544402122497559,
"step": 586
},
{
"epoch": 1.05,
"grad_norm": 0.4888118505477905,
"learning_rate": 1.593735490734946e-05,
"loss": 1.1748173236846924,
"step": 588
},
{
"epoch": 1.0535714285714286,
"grad_norm": 0.8884871602058411,
"learning_rate": 1.590769286570069e-05,
"loss": 1.4871482849121094,
"step": 590
},
{
"epoch": 1.0571428571428572,
"grad_norm": 1.121586799621582,
"learning_rate": 1.587795476152005e-05,
"loss": 1.3661131858825684,
"step": 592
},
{
"epoch": 1.0607142857142857,
"grad_norm": 0.33232274651527405,
"learning_rate": 1.58481410557075e-05,
"loss": 1.1384379863739014,
"step": 594
},
{
"epoch": 1.0642857142857143,
"grad_norm": 0.5710160136222839,
"learning_rate": 1.5818252210334746e-05,
"loss": 1.2044832706451416,
"step": 596
},
{
"epoch": 1.0678571428571428,
"grad_norm": 0.4304794669151306,
"learning_rate": 1.578828868863803e-05,
"loss": 1.1721159219741821,
"step": 598
},
{
"epoch": 1.0714285714285714,
"grad_norm": 0.29414913058280945,
"learning_rate": 1.575825095501099e-05,
"loss": 1.3607425689697266,
"step": 600
},
{
"epoch": 1.075,
"grad_norm": 0.3909018933773041,
"learning_rate": 1.5728139474997445e-05,
"loss": 1.3812944889068604,
"step": 602
},
{
"epoch": 1.0785714285714285,
"grad_norm": 0.7157077789306641,
"learning_rate": 1.5697954715284177e-05,
"loss": 1.3088464736938477,
"step": 604
},
{
"epoch": 1.082142857142857,
"grad_norm": 0.4474976062774658,
"learning_rate": 1.566769714369371e-05,
"loss": 1.2116174697875977,
"step": 606
},
{
"epoch": 1.0857142857142856,
"grad_norm": 0.3516829013824463,
"learning_rate": 1.5637367229177046e-05,
"loss": 1.199174404144287,
"step": 608
},
{
"epoch": 1.0892857142857142,
"grad_norm": 0.4679516851902008,
"learning_rate": 1.560696544180641e-05,
"loss": 1.4164745807647705,
"step": 610
},
{
"epoch": 1.092857142857143,
"grad_norm": 0.47901520133018494,
"learning_rate": 1.5576492252767954e-05,
"loss": 1.189131259918213,
"step": 612
},
{
"epoch": 1.0964285714285715,
"grad_norm": 2.753019094467163,
"learning_rate": 1.554594813435446e-05,
"loss": 1.3877692222595215,
"step": 614
},
{
"epoch": 1.1,
"grad_norm": 0.3647393584251404,
"learning_rate": 1.5515333559958015e-05,
"loss": 1.1784098148345947,
"step": 616
},
{
"epoch": 1.1035714285714286,
"grad_norm": 0.517415463924408,
"learning_rate": 1.548464900406268e-05,
"loss": 1.1289467811584473,
"step": 618
},
{
"epoch": 1.1071428571428572,
"grad_norm": 1.6620982885360718,
"learning_rate": 1.545389494223714e-05,
"loss": 1.1143990755081177,
"step": 620
},
{
"epoch": 1.1107142857142858,
"grad_norm": 0.9535795450210571,
"learning_rate": 1.542307185112731e-05,
"loss": 1.2819159030914307,
"step": 622
},
{
"epoch": 1.1142857142857143,
"grad_norm": 0.34654900431632996,
"learning_rate": 1.5392180208448984e-05,
"loss": 1.1823941469192505,
"step": 624
},
{
"epoch": 1.1178571428571429,
"grad_norm": 0.46996039152145386,
"learning_rate": 1.5361220492980398e-05,
"loss": 1.1919305324554443,
"step": 626
},
{
"epoch": 1.1214285714285714,
"grad_norm": 0.2986307144165039,
"learning_rate": 1.533019318455483e-05,
"loss": 1.0924913883209229,
"step": 628
},
{
"epoch": 1.125,
"grad_norm": 0.25168025493621826,
"learning_rate": 1.529909876405315e-05,
"loss": 0.9394223690032959,
"step": 630
},
{
"epoch": 1.1285714285714286,
"grad_norm": 0.6047491431236267,
"learning_rate": 1.5267937713396384e-05,
"loss": 1.1416627168655396,
"step": 632
},
{
"epoch": 1.1321428571428571,
"grad_norm": 0.2771312892436981,
"learning_rate": 1.5236710515538223e-05,
"loss": 1.3538787364959717,
"step": 634
},
{
"epoch": 1.1357142857142857,
"grad_norm": 0.9521869421005249,
"learning_rate": 1.5205417654457559e-05,
"loss": 1.3267796039581299,
"step": 636
},
{
"epoch": 1.1392857142857142,
"grad_norm": 0.33584362268447876,
"learning_rate": 1.5174059615150965e-05,
"loss": 1.0853066444396973,
"step": 638
},
{
"epoch": 1.1428571428571428,
"grad_norm": 0.3648921251296997,
"learning_rate": 1.5142636883625197e-05,
"loss": 1.1428154706954956,
"step": 640
},
{
"epoch": 1.1464285714285714,
"grad_norm": 1.2066301107406616,
"learning_rate": 1.511114994688964e-05,
"loss": 0.8075799345970154,
"step": 642
},
{
"epoch": 1.15,
"grad_norm": 3.704089879989624,
"learning_rate": 1.5079599292948785e-05,
"loss": 1.2435736656188965,
"step": 644
},
{
"epoch": 1.1535714285714285,
"grad_norm": 0.711872935295105,
"learning_rate": 1.5047985410794641e-05,
"loss": 1.0868031978607178,
"step": 646
},
{
"epoch": 1.157142857142857,
"grad_norm": 0.5934475064277649,
"learning_rate": 1.5016308790399183e-05,
"loss": 1.1679191589355469,
"step": 648
},
{
"epoch": 1.1607142857142858,
"grad_norm": 0.2927475571632385,
"learning_rate": 1.4984569922706722e-05,
"loss": 0.7266023755073547,
"step": 650
},
{
"epoch": 1.1642857142857144,
"grad_norm": 0.4356221556663513,
"learning_rate": 1.4952769299626335e-05,
"loss": 1.1431584358215332,
"step": 652
},
{
"epoch": 1.167857142857143,
"grad_norm": 0.5513753294944763,
"learning_rate": 1.4920907414024215e-05,
"loss": 1.1460659503936768,
"step": 654
},
{
"epoch": 1.1714285714285715,
"grad_norm": 0.5610360503196716,
"learning_rate": 1.4888984759716041e-05,
"loss": 1.1650118827819824,
"step": 656
},
{
"epoch": 1.175,
"grad_norm": 0.405282586812973,
"learning_rate": 1.4857001831459326e-05,
"loss": 1.1098073720932007,
"step": 658
},
{
"epoch": 1.1785714285714286,
"grad_norm": 0.3328110873699188,
"learning_rate": 1.4824959124945746e-05,
"loss": 1.1764960289001465,
"step": 660
},
{
"epoch": 1.1821428571428572,
"grad_norm": 1.3929952383041382,
"learning_rate": 1.4792857136793457e-05,
"loss": 1.2145479917526245,
"step": 662
},
{
"epoch": 1.1857142857142857,
"grad_norm": 0.8867573738098145,
"learning_rate": 1.4760696364539402e-05,
"loss": 0.9081100225448608,
"step": 664
},
{
"epoch": 1.1892857142857143,
"grad_norm": 0.3415173590183258,
"learning_rate": 1.472847730663159e-05,
"loss": 1.1348457336425781,
"step": 666
},
{
"epoch": 1.1928571428571428,
"grad_norm": 0.4172976016998291,
"learning_rate": 1.4696200462421393e-05,
"loss": 1.1158576011657715,
"step": 668
},
{
"epoch": 1.1964285714285714,
"grad_norm": 0.447843998670578,
"learning_rate": 1.4663866332155772e-05,
"loss": 1.2849993705749512,
"step": 670
},
{
"epoch": 1.2,
"grad_norm": 0.9494065046310425,
"learning_rate": 1.463147541696956e-05,
"loss": 1.3352279663085938,
"step": 672
},
{
"epoch": 1.2035714285714285,
"grad_norm": 0.621944785118103,
"learning_rate": 1.459902821887767e-05,
"loss": 1.2335541248321533,
"step": 674
},
{
"epoch": 1.207142857142857,
"grad_norm": 0.48863765597343445,
"learning_rate": 1.4566525240767328e-05,
"loss": 1.3280656337738037,
"step": 676
},
{
"epoch": 1.2107142857142856,
"grad_norm": 0.5822184681892395,
"learning_rate": 1.4533966986390266e-05,
"loss": 1.265816569328308,
"step": 678
},
{
"epoch": 1.2142857142857142,
"grad_norm": 0.5110518932342529,
"learning_rate": 1.4501353960354935e-05,
"loss": 1.225487232208252,
"step": 680
},
{
"epoch": 1.217857142857143,
"grad_norm": 0.8750176429748535,
"learning_rate": 1.4468686668118663e-05,
"loss": 1.2301876544952393,
"step": 682
},
{
"epoch": 1.2214285714285715,
"grad_norm": 0.8533264398574829,
"learning_rate": 1.443596561597983e-05,
"loss": 1.161440134048462,
"step": 684
},
{
"epoch": 1.225,
"grad_norm": 0.44423893094062805,
"learning_rate": 1.4403191311070022e-05,
"loss": 1.0979464054107666,
"step": 686
},
{
"epoch": 1.2285714285714286,
"grad_norm": 0.43870681524276733,
"learning_rate": 1.4370364261346175e-05,
"loss": 1.1086313724517822,
"step": 688
},
{
"epoch": 1.2321428571428572,
"grad_norm": 0.49559494853019714,
"learning_rate": 1.433748497558269e-05,
"loss": 1.0904359817504883,
"step": 690
},
{
"epoch": 1.2357142857142858,
"grad_norm": 0.37396514415740967,
"learning_rate": 1.4304553963363563e-05,
"loss": 1.1759669780731201,
"step": 692
},
{
"epoch": 1.2392857142857143,
"grad_norm": 0.4118036925792694,
"learning_rate": 1.427157173507447e-05,
"loss": 1.157477855682373,
"step": 694
},
{
"epoch": 1.2428571428571429,
"grad_norm": 0.7096890211105347,
"learning_rate": 1.4238538801894875e-05,
"loss": 1.1737724542617798,
"step": 696
},
{
"epoch": 1.2464285714285714,
"grad_norm": 0.6166890263557434,
"learning_rate": 1.4205455675790097e-05,
"loss": 1.2456121444702148,
"step": 698
},
{
"epoch": 1.25,
"grad_norm": 1.1848427057266235,
"learning_rate": 1.4172322869503368e-05,
"loss": 0.8353923559188843,
"step": 700
},
{
"epoch": 1.2535714285714286,
"grad_norm": 0.38646364212036133,
"learning_rate": 1.4139140896547902e-05,
"loss": 0.7373632192611694,
"step": 702
},
{
"epoch": 1.2571428571428571,
"grad_norm": 0.6813467144966125,
"learning_rate": 1.4105910271198937e-05,
"loss": 1.3092610836029053,
"step": 704
},
{
"epoch": 1.2607142857142857,
"grad_norm": 0.3635100722312927,
"learning_rate": 1.407263150848574e-05,
"loss": 1.2204773426055908,
"step": 706
},
{
"epoch": 1.2642857142857142,
"grad_norm": 0.507089376449585,
"learning_rate": 1.4039305124183653e-05,
"loss": 1.182018756866455,
"step": 708
},
{
"epoch": 1.2678571428571428,
"grad_norm": 0.27979063987731934,
"learning_rate": 1.4005931634806085e-05,
"loss": 1.095738410949707,
"step": 710
},
{
"epoch": 1.2714285714285714,
"grad_norm": 0.4857190251350403,
"learning_rate": 1.3972511557596506e-05,
"loss": 1.1410452127456665,
"step": 712
},
{
"epoch": 1.275,
"grad_norm": 0.3612891435623169,
"learning_rate": 1.3939045410520435e-05,
"loss": 1.1210098266601562,
"step": 714
},
{
"epoch": 1.2785714285714285,
"grad_norm": 0.3975180685520172,
"learning_rate": 1.3905533712257418e-05,
"loss": 1.2363688945770264,
"step": 716
},
{
"epoch": 1.282142857142857,
"grad_norm": 0.9986791610717773,
"learning_rate": 1.3871976982192971e-05,
"loss": 1.1617156267166138,
"step": 718
},
{
"epoch": 1.2857142857142856,
"grad_norm": 1.0810168981552124,
"learning_rate": 1.383837574041055e-05,
"loss": 1.2290289402008057,
"step": 720
},
{
"epoch": 1.2892857142857144,
"grad_norm": 0.4217285215854645,
"learning_rate": 1.3804730507683473e-05,
"loss": 1.1525651216506958,
"step": 722
},
{
"epoch": 1.292857142857143,
"grad_norm": 0.40972352027893066,
"learning_rate": 1.377104180546687e-05,
"loss": 1.1072614192962646,
"step": 724
},
{
"epoch": 1.2964285714285715,
"grad_norm": 0.3110302686691284,
"learning_rate": 1.3737310155889575e-05,
"loss": 1.1427106857299805,
"step": 726
},
{
"epoch": 1.3,
"grad_norm": 0.4348861873149872,
"learning_rate": 1.370353608174606e-05,
"loss": 0.9627160429954529,
"step": 728
},
{
"epoch": 1.3035714285714286,
"grad_norm": 0.7683548331260681,
"learning_rate": 1.3669720106488308e-05,
"loss": 1.2495923042297363,
"step": 730
},
{
"epoch": 1.3071428571428572,
"grad_norm": 1.5341452360153198,
"learning_rate": 1.3635862754217725e-05,
"loss": 1.1818249225616455,
"step": 732
},
{
"epoch": 1.3107142857142857,
"grad_norm": 0.359321653842926,
"learning_rate": 1.360196454967699e-05,
"loss": 1.0857856273651123,
"step": 734
},
{
"epoch": 1.3142857142857143,
"grad_norm": 0.5810532569885254,
"learning_rate": 1.356802601824195e-05,
"loss": 1.1771756410598755,
"step": 736
},
{
"epoch": 1.3178571428571428,
"grad_norm": 0.7885570526123047,
"learning_rate": 1.353404768591345e-05,
"loss": 1.1475200653076172,
"step": 738
},
{
"epoch": 1.3214285714285714,
"grad_norm": 1.2528671026229858,
"learning_rate": 1.3500030079309206e-05,
"loss": 1.1383891105651855,
"step": 740
},
{
"epoch": 1.325,
"grad_norm": 4.210450649261475,
"learning_rate": 1.3465973725655625e-05,
"loss": 1.0303577184677124,
"step": 742
},
{
"epoch": 1.3285714285714285,
"grad_norm": 1.3920871019363403,
"learning_rate": 1.3431879152779643e-05,
"loss": 1.1899462938308716,
"step": 744
},
{
"epoch": 1.332142857142857,
"grad_norm": 0.3303053081035614,
"learning_rate": 1.3397746889100542e-05,
"loss": 1.118945598602295,
"step": 746
},
{
"epoch": 1.3357142857142856,
"grad_norm": 0.4941954016685486,
"learning_rate": 1.336357746362176e-05,
"loss": 1.1191518306732178,
"step": 748
},
{
"epoch": 1.3392857142857144,
"grad_norm": 0.4031634032726288,
"learning_rate": 1.3329371405922688e-05,
"loss": 0.8386397361755371,
"step": 750
},
{
"epoch": 1.342857142857143,
"grad_norm": 0.44179248809814453,
"learning_rate": 1.3295129246150472e-05,
"loss": 0.6230043172836304,
"step": 752
},
{
"epoch": 1.3464285714285715,
"grad_norm": 0.4847067594528198,
"learning_rate": 1.3260851515011788e-05,
"loss": 1.3626277446746826,
"step": 754
},
{
"epoch": 1.35,
"grad_norm": 0.2796545624732971,
"learning_rate": 1.3226538743764617e-05,
"loss": 1.1133283376693726,
"step": 756
},
{
"epoch": 1.3535714285714286,
"grad_norm": 1.6174930334091187,
"learning_rate": 1.3192191464210023e-05,
"loss": 1.1741091012954712,
"step": 758
},
{
"epoch": 1.3571428571428572,
"grad_norm": 0.7611403465270996,
"learning_rate": 1.3157810208683887e-05,
"loss": 0.9258888959884644,
"step": 760
},
{
"epoch": 1.3607142857142858,
"grad_norm": 0.38805851340293884,
"learning_rate": 1.3123395510048687e-05,
"loss": 1.196305274963379,
"step": 762
},
{
"epoch": 1.3642857142857143,
"grad_norm": 1.080810785293579,
"learning_rate": 1.3088947901685212e-05,
"loss": 0.9452080726623535,
"step": 764
},
{
"epoch": 1.3678571428571429,
"grad_norm": 0.8032243847846985,
"learning_rate": 1.3054467917484308e-05,
"loss": 1.3384945392608643,
"step": 766
},
{
"epoch": 1.3714285714285714,
"grad_norm": 0.7725457549095154,
"learning_rate": 1.3019956091838614e-05,
"loss": 1.093704104423523,
"step": 768
},
{
"epoch": 1.375,
"grad_norm": 0.5379224419593811,
"learning_rate": 1.298541295963425e-05,
"loss": 0.9955064058303833,
"step": 770
},
{
"epoch": 1.3785714285714286,
"grad_norm": 0.6937380433082581,
"learning_rate": 1.2950839056242557e-05,
"loss": 1.1630027294158936,
"step": 772
},
{
"epoch": 1.3821428571428571,
"grad_norm": 0.6541162729263306,
"learning_rate": 1.291623491751178e-05,
"loss": 1.1202328205108643,
"step": 774
},
{
"epoch": 1.3857142857142857,
"grad_norm": 0.4741293787956238,
"learning_rate": 1.2881601079758784e-05,
"loss": 1.1508142948150635,
"step": 776
},
{
"epoch": 1.3892857142857142,
"grad_norm": 0.631596565246582,
"learning_rate": 1.284693807976071e-05,
"loss": 1.3536005020141602,
"step": 778
},
{
"epoch": 1.3928571428571428,
"grad_norm": 0.30716219544410706,
"learning_rate": 1.2812246454746687e-05,
"loss": 1.1645984649658203,
"step": 780
},
{
"epoch": 1.3964285714285714,
"grad_norm": 0.4866527318954468,
"learning_rate": 1.2777526742389483e-05,
"loss": 1.1908864974975586,
"step": 782
},
{
"epoch": 1.4,
"grad_norm": 0.5402504801750183,
"learning_rate": 1.2742779480797194e-05,
"loss": 1.0720609426498413,
"step": 784
},
{
"epoch": 1.4035714285714285,
"grad_norm": 0.5585693120956421,
"learning_rate": 1.270800520850488e-05,
"loss": 1.1451340913772583,
"step": 786
},
{
"epoch": 1.407142857142857,
"grad_norm": 0.6004486680030823,
"learning_rate": 1.2673204464466233e-05,
"loss": 1.1766554117202759,
"step": 788
},
{
"epoch": 1.4107142857142856,
"grad_norm": 0.8663070797920227,
"learning_rate": 1.2638377788045223e-05,
"loss": 1.087449312210083,
"step": 790
},
{
"epoch": 1.4142857142857144,
"grad_norm": 0.8936390280723572,
"learning_rate": 1.2603525719007738e-05,
"loss": 1.0832384824752808,
"step": 792
},
{
"epoch": 1.417857142857143,
"grad_norm": 0.62984699010849,
"learning_rate": 1.2568648797513212e-05,
"loss": 1.1712042093276978,
"step": 794
},
{
"epoch": 1.4214285714285715,
"grad_norm": 0.6213272213935852,
"learning_rate": 1.2533747564106262e-05,
"loss": 1.0903995037078857,
"step": 796
},
{
"epoch": 1.425,
"grad_norm": 0.5741475224494934,
"learning_rate": 1.2498822559708304e-05,
"loss": 1.1080608367919922,
"step": 798
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.6108075380325317,
"learning_rate": 1.2463874325609168e-05,
"loss": 1.1283464431762695,
"step": 800
},
{
"epoch": 1.4321428571428572,
"grad_norm": 1.3531321287155151,
"learning_rate": 1.2428903403458725e-05,
"loss": 0.9932132959365845,
"step": 802
},
{
"epoch": 1.4357142857142857,
"grad_norm": 0.31752732396125793,
"learning_rate": 1.2393910335258472e-05,
"loss": 1.1548457145690918,
"step": 804
},
{
"epoch": 1.4392857142857143,
"grad_norm": 0.3582463562488556,
"learning_rate": 1.2358895663353132e-05,
"loss": 1.1124224662780762,
"step": 806
},
{
"epoch": 1.4428571428571428,
"grad_norm": 0.7238558530807495,
"learning_rate": 1.232385993042227e-05,
"loss": 1.2339575290679932,
"step": 808
},
{
"epoch": 1.4464285714285714,
"grad_norm": 0.6423646211624146,
"learning_rate": 1.2288803679471861e-05,
"loss": 0.7599235773086548,
"step": 810
},
{
"epoch": 1.45,
"grad_norm": 0.41928139328956604,
"learning_rate": 1.225372745382588e-05,
"loss": 1.160172939300537,
"step": 812
},
{
"epoch": 1.4535714285714285,
"grad_norm": 0.5137944221496582,
"learning_rate": 1.2218631797117885e-05,
"loss": 1.1503641605377197,
"step": 814
},
{
"epoch": 1.457142857142857,
"grad_norm": 0.3738017678260803,
"learning_rate": 1.2183517253282591e-05,
"loss": 1.0606850385665894,
"step": 816
},
{
"epoch": 1.4607142857142856,
"grad_norm": 0.9808754920959473,
"learning_rate": 1.2148384366547428e-05,
"loss": 0.6364516019821167,
"step": 818
},
{
"epoch": 1.4642857142857144,
"grad_norm": 0.4231683313846588,
"learning_rate": 1.211323368142413e-05,
"loss": 1.2867590188980103,
"step": 820
},
{
"epoch": 1.467857142857143,
"grad_norm": 0.40807411074638367,
"learning_rate": 1.2078065742700272e-05,
"loss": 1.2712998390197754,
"step": 822
},
{
"epoch": 1.4714285714285715,
"grad_norm": 0.4421752393245697,
"learning_rate": 1.2042881095430836e-05,
"loss": 1.084946632385254,
"step": 824
},
{
"epoch": 1.475,
"grad_norm": 0.43936818838119507,
"learning_rate": 1.2007680284929773e-05,
"loss": 1.1451640129089355,
"step": 826
},
{
"epoch": 1.4785714285714286,
"grad_norm": 0.4863755702972412,
"learning_rate": 1.1972463856761529e-05,
"loss": 0.8599765300750732,
"step": 828
},
{
"epoch": 1.4821428571428572,
"grad_norm": 0.5459559559822083,
"learning_rate": 1.1937232356732609e-05,
"loss": 1.2189276218414307,
"step": 830
},
{
"epoch": 1.4857142857142858,
"grad_norm": 0.7105582356452942,
"learning_rate": 1.190198633088312e-05,
"loss": 1.143498420715332,
"step": 832
},
{
"epoch": 1.4892857142857143,
"grad_norm": 0.5701984763145447,
"learning_rate": 1.1866726325478277e-05,
"loss": 1.1005926132202148,
"step": 834
},
{
"epoch": 1.4928571428571429,
"grad_norm": 0.609856903553009,
"learning_rate": 1.1831452886999984e-05,
"loss": 1.1103310585021973,
"step": 836
},
{
"epoch": 1.4964285714285714,
"grad_norm": 0.7349211573600769,
"learning_rate": 1.179616656213832e-05,
"loss": 0.8592168092727661,
"step": 838
},
{
"epoch": 1.5,
"grad_norm": 0.4150165617465973,
"learning_rate": 1.1760867897783097e-05,
"loss": 1.1124815940856934,
"step": 840
},
{
"epoch": 1.5035714285714286,
"grad_norm": 0.36004072427749634,
"learning_rate": 1.1725557441015369e-05,
"loss": 1.1074330806732178,
"step": 842
},
{
"epoch": 1.5071428571428571,
"grad_norm": 0.5040198564529419,
"learning_rate": 1.1690235739098953e-05,
"loss": 1.1491334438323975,
"step": 844
},
{
"epoch": 1.5107142857142857,
"grad_norm": 1.0856701135635376,
"learning_rate": 1.1654903339471954e-05,
"loss": 1.2160296440124512,
"step": 846
},
{
"epoch": 1.5142857142857142,
"grad_norm": 0.4830666780471802,
"learning_rate": 1.161956078973828e-05,
"loss": 1.051498293876648,
"step": 848
},
{
"epoch": 1.5178571428571428,
"grad_norm": 1.711748480796814,
"learning_rate": 1.158420863765914e-05,
"loss": 0.8320801258087158,
"step": 850
},
{
"epoch": 1.5214285714285714,
"grad_norm": 0.5154921412467957,
"learning_rate": 1.1548847431144578e-05,
"loss": 1.3413938283920288,
"step": 852
},
{
"epoch": 1.525,
"grad_norm": 0.41332578659057617,
"learning_rate": 1.1513477718244967e-05,
"loss": 1.0733758211135864,
"step": 854
},
{
"epoch": 1.5285714285714285,
"grad_norm": 0.9633269309997559,
"learning_rate": 1.1478100047142516e-05,
"loss": 1.2646903991699219,
"step": 856
},
{
"epoch": 1.532142857142857,
"grad_norm": 0.38085678219795227,
"learning_rate": 1.1442714966142773e-05,
"loss": 1.1131600141525269,
"step": 858
},
{
"epoch": 1.5357142857142856,
"grad_norm": 0.6264724731445312,
"learning_rate": 1.1407323023666127e-05,
"loss": 1.1462935209274292,
"step": 860
},
{
"epoch": 1.5392857142857141,
"grad_norm": 3.2465555667877197,
"learning_rate": 1.137192476823932e-05,
"loss": 1.2240748405456543,
"step": 862
},
{
"epoch": 1.5428571428571427,
"grad_norm": 0.5931512117385864,
"learning_rate": 1.1336520748486934e-05,
"loss": 1.1074802875518799,
"step": 864
},
{
"epoch": 1.5464285714285713,
"grad_norm": 0.6654783487319946,
"learning_rate": 1.1301111513122877e-05,
"loss": 1.1554946899414062,
"step": 866
},
{
"epoch": 1.55,
"grad_norm": 0.8768860101699829,
"learning_rate": 1.1265697610941915e-05,
"loss": 1.1281225681304932,
"step": 868
},
{
"epoch": 1.5535714285714286,
"grad_norm": 0.4611034691333771,
"learning_rate": 1.1230279590811118e-05,
"loss": 1.2249135971069336,
"step": 870
},
{
"epoch": 1.5571428571428572,
"grad_norm": 0.5411264896392822,
"learning_rate": 1.11948580016614e-05,
"loss": 1.0645391941070557,
"step": 872
},
{
"epoch": 1.5607142857142857,
"grad_norm": 0.31846562027931213,
"learning_rate": 1.1159433392478973e-05,
"loss": 1.0551997423171997,
"step": 874
},
{
"epoch": 1.5642857142857143,
"grad_norm": 0.3621332347393036,
"learning_rate": 1.1124006312296869e-05,
"loss": 1.0810638666152954,
"step": 876
},
{
"epoch": 1.5678571428571428,
"grad_norm": 0.5022686123847961,
"learning_rate": 1.1088577310186406e-05,
"loss": 1.1075555086135864,
"step": 878
},
{
"epoch": 1.5714285714285714,
"grad_norm": 1.8955113887786865,
"learning_rate": 1.1053146935248701e-05,
"loss": 1.0974161624908447,
"step": 880
},
{
"epoch": 1.575,
"grad_norm": 0.26390373706817627,
"learning_rate": 1.1017715736606137e-05,
"loss": 0.9959229230880737,
"step": 882
},
{
"epoch": 1.5785714285714287,
"grad_norm": 0.32717210054397583,
"learning_rate": 1.0982284263393868e-05,
"loss": 1.2565701007843018,
"step": 884
},
{
"epoch": 1.5821428571428573,
"grad_norm": 0.36438724398612976,
"learning_rate": 1.0946853064751301e-05,
"loss": 1.256880760192871,
"step": 886
},
{
"epoch": 1.5857142857142859,
"grad_norm": 0.41332828998565674,
"learning_rate": 1.0911422689813594e-05,
"loss": 1.090340495109558,
"step": 888
},
{
"epoch": 1.5892857142857144,
"grad_norm": 0.6816041469573975,
"learning_rate": 1.0875993687703134e-05,
"loss": 1.1211128234863281,
"step": 890
},
{
"epoch": 1.592857142857143,
"grad_norm": 0.4995070993900299,
"learning_rate": 1.084056660752103e-05,
"loss": 1.1537326574325562,
"step": 892
},
{
"epoch": 1.5964285714285715,
"grad_norm": 0.5382480621337891,
"learning_rate": 1.0805141998338607e-05,
"loss": 1.1503942012786865,
"step": 894
},
{
"epoch": 1.6,
"grad_norm": 0.22450421750545502,
"learning_rate": 1.0769720409188883e-05,
"loss": 1.068176031112671,
"step": 896
},
{
"epoch": 1.6035714285714286,
"grad_norm": 0.7616103291511536,
"learning_rate": 1.073430238905809e-05,
"loss": 1.0579860210418701,
"step": 898
},
{
"epoch": 1.6071428571428572,
"grad_norm": 0.4256003499031067,
"learning_rate": 1.0698888486877126e-05,
"loss": 1.016850471496582,
"step": 900
},
{
"epoch": 1.6107142857142858,
"grad_norm": 0.43691617250442505,
"learning_rate": 1.066347925151307e-05,
"loss": 1.052515983581543,
"step": 902
},
{
"epoch": 1.6142857142857143,
"grad_norm": 1.1696960926055908,
"learning_rate": 1.0628075231760682e-05,
"loss": 1.1436378955841064,
"step": 904
},
{
"epoch": 1.6178571428571429,
"grad_norm": 0.3926475942134857,
"learning_rate": 1.0592676976333877e-05,
"loss": 1.0450983047485352,
"step": 906
},
{
"epoch": 1.6214285714285714,
"grad_norm": 0.3293057978153229,
"learning_rate": 1.0557285033857234e-05,
"loss": 1.0548239946365356,
"step": 908
},
{
"epoch": 1.625,
"grad_norm": 2.7279651165008545,
"learning_rate": 1.052189995285749e-05,
"loss": 0.8727450966835022,
"step": 910
},
{
"epoch": 1.6285714285714286,
"grad_norm": 0.513361930847168,
"learning_rate": 1.0486522281755034e-05,
"loss": 1.1863266229629517,
"step": 912
},
{
"epoch": 1.6321428571428571,
"grad_norm": 0.7765159606933594,
"learning_rate": 1.0451152568855424e-05,
"loss": 1.0367705821990967,
"step": 914
},
{
"epoch": 1.6357142857142857,
"grad_norm": 0.6320871114730835,
"learning_rate": 1.0415791362340864e-05,
"loss": 1.191881775856018,
"step": 916
},
{
"epoch": 1.6392857142857142,
"grad_norm": 2.4710655212402344,
"learning_rate": 1.0380439210261726e-05,
"loss": 1.0561764240264893,
"step": 918
},
{
"epoch": 1.6428571428571428,
"grad_norm": 0.6536027193069458,
"learning_rate": 1.0345096660528047e-05,
"loss": 0.9318988919258118,
"step": 920
},
{
"epoch": 1.6464285714285714,
"grad_norm": 1.375860333442688,
"learning_rate": 1.030976426090105e-05,
"loss": 1.101020336151123,
"step": 922
},
{
"epoch": 1.65,
"grad_norm": 0.2552533745765686,
"learning_rate": 1.0274442558984634e-05,
"loss": 1.2564234733581543,
"step": 924
},
{
"epoch": 1.6535714285714285,
"grad_norm": 0.5600205659866333,
"learning_rate": 1.0239132102216906e-05,
"loss": 1.2616956233978271,
"step": 926
},
{
"epoch": 1.657142857142857,
"grad_norm": 0.5227077007293701,
"learning_rate": 1.020383343786168e-05,
"loss": 1.1424009799957275,
"step": 928
},
{
"epoch": 1.6607142857142856,
"grad_norm": 1.377939224243164,
"learning_rate": 1.016854711300002e-05,
"loss": 1.0064213275909424,
"step": 930
},
{
"epoch": 1.6642857142857141,
"grad_norm": 0.4621258080005646,
"learning_rate": 1.0133273674521726e-05,
"loss": 1.0637047290802002,
"step": 932
},
{
"epoch": 1.6678571428571427,
"grad_norm": 0.4151983857154846,
"learning_rate": 1.0098013669116886e-05,
"loss": 1.1586838960647583,
"step": 934
},
{
"epoch": 1.6714285714285713,
"grad_norm": 0.45863673090934753,
"learning_rate": 1.006276764326739e-05,
"loss": 1.1697707176208496,
"step": 936
},
{
"epoch": 1.675,
"grad_norm": 0.6595650315284729,
"learning_rate": 1.0027536143238474e-05,
"loss": 0.6215699911117554,
"step": 938
},
{
"epoch": 1.6785714285714286,
"grad_norm": 0.5221896171569824,
"learning_rate": 9.992319715070231e-06,
"loss": 1.132964849472046,
"step": 940
},
{
"epoch": 1.6821428571428572,
"grad_norm": 0.499324768781662,
"learning_rate": 9.957118904569167e-06,
"loss": 1.2663060426712036,
"step": 942
},
{
"epoch": 1.6857142857142857,
"grad_norm": 0.766228437423706,
"learning_rate": 9.921934257299731e-06,
"loss": 1.3311316967010498,
"step": 944
},
{
"epoch": 1.6892857142857143,
"grad_norm": 0.7923848628997803,
"learning_rate": 9.886766318575871e-06,
"loss": 0.8178808689117432,
"step": 946
},
{
"epoch": 1.6928571428571428,
"grad_norm": 1.8682267665863037,
"learning_rate": 9.851615633452577e-06,
"loss": 1.1221369504928589,
"step": 948
},
{
"epoch": 1.6964285714285714,
"grad_norm": 0.32414621114730835,
"learning_rate": 9.816482746717415e-06,
"loss": 1.1450505256652832,
"step": 950
},
{
"epoch": 1.7,
"grad_norm": 0.48412182927131653,
"learning_rate": 9.781368202882118e-06,
"loss": 1.1050865650177002,
"step": 952
},
{
"epoch": 1.7035714285714287,
"grad_norm": 0.7146490812301636,
"learning_rate": 9.746272546174122e-06,
"loss": 0.9220115542411804,
"step": 954
},
{
"epoch": 1.7071428571428573,
"grad_norm": 0.4684576392173767,
"learning_rate": 9.711196320528142e-06,
"loss": 1.2169872522354126,
"step": 956
},
{
"epoch": 1.7107142857142859,
"grad_norm": 0.9166097640991211,
"learning_rate": 9.67614006957773e-06,
"loss": 1.2747939825057983,
"step": 958
},
{
"epoch": 1.7142857142857144,
"grad_norm": 0.3798523247241974,
"learning_rate": 9.641104336646868e-06,
"loss": 1.029056429862976,
"step": 960
},
{
"epoch": 1.717857142857143,
"grad_norm": 1.7487893104553223,
"learning_rate": 9.60608966474153e-06,
"loss": 1.1041202545166016,
"step": 962
},
{
"epoch": 1.7214285714285715,
"grad_norm": 0.5260602235794067,
"learning_rate": 9.571096596541279e-06,
"loss": 1.1489906311035156,
"step": 964
},
{
"epoch": 1.725,
"grad_norm": 1.3120121955871582,
"learning_rate": 9.536125674390834e-06,
"loss": 1.1129286289215088,
"step": 966
},
{
"epoch": 1.7285714285714286,
"grad_norm": 0.6453574299812317,
"learning_rate": 9.501177440291697e-06,
"loss": 1.0809402465820312,
"step": 968
},
{
"epoch": 1.7321428571428572,
"grad_norm": 0.4150318205356598,
"learning_rate": 9.46625243589374e-06,
"loss": 1.142215609550476,
"step": 970
},
{
"epoch": 1.7357142857142858,
"grad_norm": 0.30773821473121643,
"learning_rate": 9.431351202486792e-06,
"loss": 1.0678926706314087,
"step": 972
},
{
"epoch": 1.7392857142857143,
"grad_norm": 0.5969177484512329,
"learning_rate": 9.396474280992265e-06,
"loss": 0.61910080909729,
"step": 974
},
{
"epoch": 1.7428571428571429,
"grad_norm": 0.3318014442920685,
"learning_rate": 9.36162221195478e-06,
"loss": 1.1399273872375488,
"step": 976
},
{
"epoch": 1.7464285714285714,
"grad_norm": 0.5802143216133118,
"learning_rate": 9.32679553553377e-06,
"loss": 1.0623743534088135,
"step": 978
},
{
"epoch": 1.75,
"grad_norm": 0.6982766389846802,
"learning_rate": 9.291994791495125e-06,
"loss": 1.130384922027588,
"step": 980
},
{
"epoch": 1.7535714285714286,
"grad_norm": 0.9474343061447144,
"learning_rate": 9.257220519202812e-06,
"loss": 1.1633626222610474,
"step": 982
},
{
"epoch": 1.7571428571428571,
"grad_norm": 0.823501706123352,
"learning_rate": 9.222473257610519e-06,
"loss": 1.3282148838043213,
"step": 984
},
{
"epoch": 1.7607142857142857,
"grad_norm": 0.8721451163291931,
"learning_rate": 9.187753545253318e-06,
"loss": 1.1099696159362793,
"step": 986
},
{
"epoch": 1.7642857142857142,
"grad_norm": 1.4551762342453003,
"learning_rate": 9.153061920239291e-06,
"loss": 1.2010962963104248,
"step": 988
},
{
"epoch": 1.7678571428571428,
"grad_norm": 0.22830167412757874,
"learning_rate": 9.11839892024122e-06,
"loss": 0.9588916301727295,
"step": 990
},
{
"epoch": 1.7714285714285714,
"grad_norm": 0.7184726595878601,
"learning_rate": 9.08376508248822e-06,
"loss": 0.9959466457366943,
"step": 992
},
{
"epoch": 1.775,
"grad_norm": 0.8741506934165955,
"learning_rate": 9.049160943757447e-06,
"loss": 1.015051007270813,
"step": 994
},
{
"epoch": 1.7785714285714285,
"grad_norm": 0.341207891702652,
"learning_rate": 9.014587040365754e-06,
"loss": 0.6152805685997009,
"step": 996
},
{
"epoch": 1.782142857142857,
"grad_norm": 0.5346217155456543,
"learning_rate": 8.98004390816139e-06,
"loss": 1.0692589282989502,
"step": 998
},
{
"epoch": 1.7857142857142856,
"grad_norm": 1.2236759662628174,
"learning_rate": 8.945532082515692e-06,
"loss": 1.1441328525543213,
"step": 1000
},
{
"epoch": 1.7892857142857141,
"grad_norm": 0.7906798124313354,
"learning_rate": 8.911052098314791e-06,
"loss": 0.9982239007949829,
"step": 1002
},
{
"epoch": 1.7928571428571427,
"grad_norm": 0.48270416259765625,
"learning_rate": 8.876604489951317e-06,
"loss": 1.1276839971542358,
"step": 1004
},
{
"epoch": 1.7964285714285713,
"grad_norm": 1.3317867517471313,
"learning_rate": 8.842189791316116e-06,
"loss": 1.1620763540267944,
"step": 1006
},
{
"epoch": 1.8,
"grad_norm": 0.13755838572978973,
"learning_rate": 8.807808535789982e-06,
"loss": 0.9927979707717896,
"step": 1008
},
{
"epoch": 1.8035714285714286,
"grad_norm": 0.6385722160339355,
"learning_rate": 8.773461256235385e-06,
"loss": 1.0914297103881836,
"step": 1010
},
{
"epoch": 1.8071428571428572,
"grad_norm": 0.46439942717552185,
"learning_rate": 8.739148484988216e-06,
"loss": 1.1059390306472778,
"step": 1012
},
{
"epoch": 1.8107142857142857,
"grad_norm": 0.5348365306854248,
"learning_rate": 8.704870753849533e-06,
"loss": 1.0481466054916382,
"step": 1014
},
{
"epoch": 1.8142857142857143,
"grad_norm": 0.503119707107544,
"learning_rate": 8.670628594077313e-06,
"loss": 1.0685755014419556,
"step": 1016
},
{
"epoch": 1.8178571428571428,
"grad_norm": 0.45715904235839844,
"learning_rate": 8.636422536378241e-06,
"loss": 1.1131443977355957,
"step": 1018
},
{
"epoch": 1.8214285714285714,
"grad_norm": 0.4219473600387573,
"learning_rate": 8.602253110899461e-06,
"loss": 1.1202515363693237,
"step": 1020
},
{
"epoch": 1.825,
"grad_norm": 1.506514549255371,
"learning_rate": 8.56812084722036e-06,
"loss": 1.0852363109588623,
"step": 1022
},
{
"epoch": 1.8285714285714287,
"grad_norm": 1.1264326572418213,
"learning_rate": 8.534026274344378e-06,
"loss": 1.1459194421768188,
"step": 1024
},
{
"epoch": 1.8321428571428573,
"grad_norm": 0.19118832051753998,
"learning_rate": 8.499969920690799e-06,
"loss": 1.0966734886169434,
"step": 1026
},
{
"epoch": 1.8357142857142859,
"grad_norm": 5.950002670288086,
"learning_rate": 8.465952314086554e-06,
"loss": 0.9605913162231445,
"step": 1028
},
{
"epoch": 1.8392857142857144,
"grad_norm": 0.9554546475410461,
"learning_rate": 8.431973981758055e-06,
"loss": 0.8658078908920288,
"step": 1030
},
{
"epoch": 1.842857142857143,
"grad_norm": 0.35784661769866943,
"learning_rate": 8.39803545032301e-06,
"loss": 1.21623957157135,
"step": 1032
},
{
"epoch": 1.8464285714285715,
"grad_norm": 0.3995438516139984,
"learning_rate": 8.364137245782278e-06,
"loss": 1.0300588607788086,
"step": 1034
},
{
"epoch": 1.85,
"grad_norm": 0.4243587255477905,
"learning_rate": 8.330279893511695e-06,
"loss": 1.1283214092254639,
"step": 1036
},
{
"epoch": 1.8535714285714286,
"grad_norm": 1.56551992893219,
"learning_rate": 8.296463918253945e-06,
"loss": 1.0735278129577637,
"step": 1038
},
{
"epoch": 1.8571428571428572,
"grad_norm": 0.5571054220199585,
"learning_rate": 8.262689844110426e-06,
"loss": 1.1387474536895752,
"step": 1040
},
{
"epoch": 1.8607142857142858,
"grad_norm": 0.8355203866958618,
"learning_rate": 8.228958194533134e-06,
"loss": 1.1476329565048218,
"step": 1042
},
{
"epoch": 1.8642857142857143,
"grad_norm": 1.1273201704025269,
"learning_rate": 8.195269492316531e-06,
"loss": 1.1415425539016724,
"step": 1044
},
{
"epoch": 1.8678571428571429,
"grad_norm": 0.48498058319091797,
"learning_rate": 8.161624259589456e-06,
"loss": 1.2414381504058838,
"step": 1046
},
{
"epoch": 1.8714285714285714,
"grad_norm": 2.3717753887176514,
"learning_rate": 8.128023017807032e-06,
"loss": 1.0288283824920654,
"step": 1048
},
{
"epoch": 1.875,
"grad_norm": 0.4442752003669739,
"learning_rate": 8.094466287742583e-06,
"loss": 1.0729954242706299,
"step": 1050
},
{
"epoch": 1.8785714285714286,
"grad_norm": 1.309718370437622,
"learning_rate": 8.060954589479566e-06,
"loss": 1.0899989604949951,
"step": 1052
},
{
"epoch": 1.8821428571428571,
"grad_norm": 0.6105849146842957,
"learning_rate": 8.027488442403499e-06,
"loss": 1.247680425643921,
"step": 1054
},
{
"epoch": 1.8857142857142857,
"grad_norm": 0.5027915835380554,
"learning_rate": 7.994068365193916e-06,
"loss": 1.1438732147216797,
"step": 1056
},
{
"epoch": 1.8892857142857142,
"grad_norm": 0.8934045433998108,
"learning_rate": 7.96069487581635e-06,
"loss": 1.1180412769317627,
"step": 1058
},
{
"epoch": 1.8928571428571428,
"grad_norm": 1.2043625116348267,
"learning_rate": 7.927368491514263e-06,
"loss": 1.289278507232666,
"step": 1060
},
{
"epoch": 1.8964285714285714,
"grad_norm": 0.3672593832015991,
"learning_rate": 7.894089728801069e-06,
"loss": 1.0988218784332275,
"step": 1062
},
{
"epoch": 1.9,
"grad_norm": 0.4244534373283386,
"learning_rate": 7.860859103452099e-06,
"loss": 0.6889010667800903,
"step": 1064
},
{
"epoch": 1.9035714285714285,
"grad_norm": 4.469996452331543,
"learning_rate": 7.827677130496635e-06,
"loss": 0.995285153388977,
"step": 1066
},
{
"epoch": 1.907142857142857,
"grad_norm": 0.39180439710617065,
"learning_rate": 7.794544324209909e-06,
"loss": 1.084833025932312,
"step": 1068
},
{
"epoch": 1.9107142857142856,
"grad_norm": 2.244345188140869,
"learning_rate": 7.76146119810513e-06,
"loss": 1.1369450092315674,
"step": 1070
},
{
"epoch": 1.9142857142857141,
"grad_norm": 0.685583233833313,
"learning_rate": 7.728428264925531e-06,
"loss": 0.9934486150741577,
"step": 1072
},
{
"epoch": 1.9178571428571427,
"grad_norm": 0.3774887025356293,
"learning_rate": 7.69544603663644e-06,
"loss": 0.9882639646530151,
"step": 1074
},
{
"epoch": 1.9214285714285713,
"grad_norm": 0.53889000415802,
"learning_rate": 7.662515024417315e-06,
"loss": 1.1985116004943848,
"step": 1076
},
{
"epoch": 1.925,
"grad_norm": 1.1523969173431396,
"learning_rate": 7.62963573865383e-06,
"loss": 0.9715709686279297,
"step": 1078
},
{
"epoch": 1.9285714285714286,
"grad_norm": 0.540876030921936,
"learning_rate": 7.5968086889299795e-06,
"loss": 1.1391596794128418,
"step": 1080
},
{
"epoch": 1.9321428571428572,
"grad_norm": 0.4845031797885895,
"learning_rate": 7.564034384020174e-06,
"loss": 1.0560252666473389,
"step": 1082
},
{
"epoch": 1.9357142857142857,
"grad_norm": 1.0255088806152344,
"learning_rate": 7.53131333188134e-06,
"loss": 1.1791609525680542,
"step": 1084
},
{
"epoch": 1.9392857142857143,
"grad_norm": 0.47408270835876465,
"learning_rate": 7.498646039645068e-06,
"loss": 1.0764998197555542,
"step": 1086
},
{
"epoch": 1.9428571428571428,
"grad_norm": 0.5158260464668274,
"learning_rate": 7.4660330136097345e-06,
"loss": 1.1066300868988037,
"step": 1088
},
{
"epoch": 1.9464285714285714,
"grad_norm": 0.636821985244751,
"learning_rate": 7.433474759232675e-06,
"loss": 0.7171211242675781,
"step": 1090
},
{
"epoch": 1.95,
"grad_norm": 0.4269849956035614,
"learning_rate": 7.400971781122334e-06,
"loss": 1.1467523574829102,
"step": 1092
},
{
"epoch": 1.9535714285714287,
"grad_norm": 0.4313041865825653,
"learning_rate": 7.3685245830304455e-06,
"loss": 1.088484287261963,
"step": 1094
},
{
"epoch": 1.9571428571428573,
"grad_norm": 0.607107937335968,
"learning_rate": 7.336133667844232e-06,
"loss": 1.1424229145050049,
"step": 1096
},
{
"epoch": 1.9607142857142859,
"grad_norm": 0.5271368622779846,
"learning_rate": 7.3037995375786105e-06,
"loss": 1.1263647079467773,
"step": 1098
},
{
"epoch": 1.9642857142857144,
"grad_norm": 0.5421050190925598,
"learning_rate": 7.271522693368412e-06,
"loss": 1.1368845701217651,
"step": 1100
},
{
"epoch": 1.967857142857143,
"grad_norm": 0.45558372139930725,
"learning_rate": 7.239303635460604e-06,
"loss": 1.0905189514160156,
"step": 1102
},
{
"epoch": 1.9714285714285715,
"grad_norm": 0.6813379526138306,
"learning_rate": 7.207142863206544e-06,
"loss": 1.300194501876831,
"step": 1104
},
{
"epoch": 1.975,
"grad_norm": 3.9907617568969727,
"learning_rate": 7.175040875054256e-06,
"loss": 1.2036380767822266,
"step": 1106
},
{
"epoch": 1.9785714285714286,
"grad_norm": 0.3055593967437744,
"learning_rate": 7.142998168540676e-06,
"loss": 1.092109203338623,
"step": 1108
},
{
"epoch": 1.9821428571428572,
"grad_norm": 0.5111721158027649,
"learning_rate": 7.111015240283963e-06,
"loss": 0.5738495588302612,
"step": 1110
},
{
"epoch": 1.9857142857142858,
"grad_norm": 0.3026096522808075,
"learning_rate": 7.079092585975789e-06,
"loss": 0.5167784094810486,
"step": 1112
},
{
"epoch": 1.9892857142857143,
"grad_norm": 0.37242579460144043,
"learning_rate": 7.047230700373669e-06,
"loss": 1.086807131767273,
"step": 1114
},
{
"epoch": 1.9928571428571429,
"grad_norm": 0.5165676474571228,
"learning_rate": 7.015430077293281e-06,
"loss": 1.1821894645690918,
"step": 1116
},
{
"epoch": 1.9964285714285714,
"grad_norm": 2.6484367847442627,
"learning_rate": 6.983691209600821e-06,
"loss": 0.7227582931518555,
"step": 1118
},
{
"epoch": 2.0,
"grad_norm": 0.8464747071266174,
"learning_rate": 6.952014589205357e-06,
"loss": 1.2078351974487305,
"step": 1120
},
{
"epoch": 2.0035714285714286,
"grad_norm": 0.5015007853507996,
"learning_rate": 6.92040070705122e-06,
"loss": 1.0274121761322021,
"step": 1122
},
{
"epoch": 2.007142857142857,
"grad_norm": 0.7175948023796082,
"learning_rate": 6.888850053110364e-06,
"loss": 1.2103437185287476,
"step": 1124
},
{
"epoch": 2.0107142857142857,
"grad_norm": 0.7134751081466675,
"learning_rate": 6.857363116374809e-06,
"loss": 1.1368117332458496,
"step": 1126
},
{
"epoch": 2.0142857142857142,
"grad_norm": 0.363921582698822,
"learning_rate": 6.825940384849035e-06,
"loss": 1.0515990257263184,
"step": 1128
},
{
"epoch": 2.017857142857143,
"grad_norm": 1.2383047342300415,
"learning_rate": 6.794582345542442e-06,
"loss": 0.8165490627288818,
"step": 1130
},
{
"epoch": 2.0214285714285714,
"grad_norm": 0.3281400799751282,
"learning_rate": 6.763289484461777e-06,
"loss": 0.9551868438720703,
"step": 1132
},
{
"epoch": 2.025,
"grad_norm": 2.324735641479492,
"learning_rate": 6.732062286603622e-06,
"loss": 0.8535771369934082,
"step": 1134
},
{
"epoch": 2.0285714285714285,
"grad_norm": 0.450276643037796,
"learning_rate": 6.700901235946851e-06,
"loss": 1.1220966577529907,
"step": 1136
},
{
"epoch": 2.032142857142857,
"grad_norm": 0.4659401476383209,
"learning_rate": 6.669806815445174e-06,
"loss": 1.1011137962341309,
"step": 1138
},
{
"epoch": 2.0357142857142856,
"grad_norm": 0.7880271077156067,
"learning_rate": 6.638779507019606e-06,
"loss": 1.2896149158477783,
"step": 1140
},
{
"epoch": 2.039285714285714,
"grad_norm": 0.27034834027290344,
"learning_rate": 6.60781979155102e-06,
"loss": 1.01216459274292,
"step": 1142
},
{
"epoch": 2.0428571428571427,
"grad_norm": 1.3774323463439941,
"learning_rate": 6.576928148872692e-06,
"loss": 1.0622344017028809,
"step": 1144
},
{
"epoch": 2.0464285714285713,
"grad_norm": 0.41495487093925476,
"learning_rate": 6.546105057762866e-06,
"loss": 1.0661730766296387,
"step": 1146
},
{
"epoch": 2.05,
"grad_norm": 0.49956122040748596,
"learning_rate": 6.515350995937322e-06,
"loss": 1.1045374870300293,
"step": 1148
},
{
"epoch": 2.0535714285714284,
"grad_norm": 1.3499337434768677,
"learning_rate": 6.484666440041989e-06,
"loss": 1.1958565711975098,
"step": 1150
},
{
"epoch": 2.057142857142857,
"grad_norm": 0.8459187746047974,
"learning_rate": 6.454051865645541e-06,
"loss": 1.0263601541519165,
"step": 1152
},
{
"epoch": 2.0607142857142855,
"grad_norm": 0.8016642928123474,
"learning_rate": 6.423507747232047e-06,
"loss": 1.0019943714141846,
"step": 1154
},
{
"epoch": 2.064285714285714,
"grad_norm": 0.3757397532463074,
"learning_rate": 6.3930345581935934e-06,
"loss": 1.091320514678955,
"step": 1156
},
{
"epoch": 2.067857142857143,
"grad_norm": 0.6635928153991699,
"learning_rate": 6.3626327708229585e-06,
"loss": 1.0494961738586426,
"step": 1158
},
{
"epoch": 2.0714285714285716,
"grad_norm": 0.3476162254810333,
"learning_rate": 6.332302856306293e-06,
"loss": 1.1452181339263916,
"step": 1160
},
{
"epoch": 2.075,
"grad_norm": 0.5809051990509033,
"learning_rate": 6.302045284715825e-06,
"loss": 1.1838127374649048,
"step": 1162
},
{
"epoch": 2.0785714285714287,
"grad_norm": 0.7286864519119263,
"learning_rate": 6.271860525002558e-06,
"loss": 0.9906060695648193,
"step": 1164
},
{
"epoch": 2.0821428571428573,
"grad_norm": 0.7779916524887085,
"learning_rate": 6.241749044989012e-06,
"loss": 1.0825626850128174,
"step": 1166
},
{
"epoch": 2.085714285714286,
"grad_norm": 0.624515175819397,
"learning_rate": 6.211711311361972e-06,
"loss": 1.0792040824890137,
"step": 1168
},
{
"epoch": 2.0892857142857144,
"grad_norm": 0.5379583239555359,
"learning_rate": 6.181747789665256e-06,
"loss": 1.233987808227539,
"step": 1170
},
{
"epoch": 2.092857142857143,
"grad_norm": 0.5427027940750122,
"learning_rate": 6.1518589442925e-06,
"loss": 1.0380905866622925,
"step": 1172
},
{
"epoch": 2.0964285714285715,
"grad_norm": 0.8613371253013611,
"learning_rate": 6.122045238479953e-06,
"loss": 1.1964986324310303,
"step": 1174
},
{
"epoch": 2.1,
"grad_norm": 0.749973714351654,
"learning_rate": 6.0923071342993075e-06,
"loss": 1.0324103832244873,
"step": 1176
},
{
"epoch": 2.1035714285714286,
"grad_norm": 0.41390734910964966,
"learning_rate": 6.062645092650543e-06,
"loss": 0.7683989405632019,
"step": 1178
},
{
"epoch": 2.107142857142857,
"grad_norm": 0.4426082968711853,
"learning_rate": 6.033059573254753e-06,
"loss": 0.9940497875213623,
"step": 1180
},
{
"epoch": 2.1107142857142858,
"grad_norm": 1.0037376880645752,
"learning_rate": 6.003551034647059e-06,
"loss": 0.975098192691803,
"step": 1182
},
{
"epoch": 2.1142857142857143,
"grad_norm": 1.797518014907837,
"learning_rate": 5.974119934169473e-06,
"loss": 1.0679240226745605,
"step": 1184
},
{
"epoch": 2.117857142857143,
"grad_norm": 0.9084307551383972,
"learning_rate": 5.944766727963834e-06,
"loss": 0.9722939729690552,
"step": 1186
},
{
"epoch": 2.1214285714285714,
"grad_norm": 0.4284418523311615,
"learning_rate": 5.9154918709647204e-06,
"loss": 0.9965044260025024,
"step": 1188
},
{
"epoch": 2.125,
"grad_norm": 0.9147275686264038,
"learning_rate": 5.8862958168924025e-06,
"loss": 0.6787570714950562,
"step": 1190
},
{
"epoch": 2.1285714285714286,
"grad_norm": 0.6834171414375305,
"learning_rate": 5.8571790182458085e-06,
"loss": 1.0144072771072388,
"step": 1192
},
{
"epoch": 2.132142857142857,
"grad_norm": 0.5649306178092957,
"learning_rate": 5.828141926295533e-06,
"loss": 1.1785552501678467,
"step": 1194
},
{
"epoch": 2.1357142857142857,
"grad_norm": 0.9754287004470825,
"learning_rate": 5.7991849910768096e-06,
"loss": 1.1542279720306396,
"step": 1196
},
{
"epoch": 2.1392857142857142,
"grad_norm": 0.540823757648468,
"learning_rate": 5.770308661382556e-06,
"loss": 0.8885504007339478,
"step": 1198
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.378530889749527,
"learning_rate": 5.741513384756421e-06,
"loss": 1.0100677013397217,
"step": 1200
},
{
"epoch": 2.1464285714285714,
"grad_norm": 1.1241729259490967,
"learning_rate": 5.712799607485832e-06,
"loss": 0.654653787612915,
"step": 1202
},
{
"epoch": 2.15,
"grad_norm": 1.2759966850280762,
"learning_rate": 5.6841677745950965e-06,
"loss": 1.1008085012435913,
"step": 1204
},
{
"epoch": 2.1535714285714285,
"grad_norm": 0.34266769886016846,
"learning_rate": 5.6556183298384885e-06,
"loss": 1.011092185974121,
"step": 1206
},
{
"epoch": 2.157142857142857,
"grad_norm": 0.4193215072154999,
"learning_rate": 5.62715171569338e-06,
"loss": 1.0412278175354004,
"step": 1208
},
{
"epoch": 2.1607142857142856,
"grad_norm": 0.32657843828201294,
"learning_rate": 5.598768373353392e-06,
"loss": 0.5627393126487732,
"step": 1210
},
{
"epoch": 2.164285714285714,
"grad_norm": 0.5068192481994629,
"learning_rate": 5.570468742721532e-06,
"loss": 1.0111093521118164,
"step": 1212
},
{
"epoch": 2.1678571428571427,
"grad_norm": 0.8486136794090271,
"learning_rate": 5.542253262403397e-06,
"loss": 1.0036768913269043,
"step": 1214
},
{
"epoch": 2.1714285714285713,
"grad_norm": 0.4339812695980072,
"learning_rate": 5.514122369700366e-06,
"loss": 0.858208179473877,
"step": 1216
},
{
"epoch": 2.175,
"grad_norm": 0.3655393421649933,
"learning_rate": 5.486076500602836e-06,
"loss": 0.9979751110076904,
"step": 1218
},
{
"epoch": 2.1785714285714284,
"grad_norm": 0.7398764491081238,
"learning_rate": 5.458116089783441e-06,
"loss": 1.089611530303955,
"step": 1220
},
{
"epoch": 2.182142857142857,
"grad_norm": 2.556060314178467,
"learning_rate": 5.430241570590335e-06,
"loss": 0.9004594087600708,
"step": 1222
},
{
"epoch": 2.185714285714286,
"grad_norm": 0.24670109152793884,
"learning_rate": 5.40245337504047e-06,
"loss": 0.8174819946289062,
"step": 1224
},
{
"epoch": 2.189285714285714,
"grad_norm": 0.414289265871048,
"learning_rate": 5.374751933812895e-06,
"loss": 1.019258737564087,
"step": 1226
},
{
"epoch": 2.192857142857143,
"grad_norm": 0.4929758310317993,
"learning_rate": 5.34713767624209e-06,
"loss": 0.9936923384666443,
"step": 1228
},
{
"epoch": 2.1964285714285716,
"grad_norm": 0.42652517557144165,
"learning_rate": 5.319611030311306e-06,
"loss": 1.1034317016601562,
"step": 1230
},
{
"epoch": 2.2,
"grad_norm": 1.5116170644760132,
"learning_rate": 5.292172422645931e-06,
"loss": 1.107032299041748,
"step": 1232
},
{
"epoch": 2.2035714285714287,
"grad_norm": 1.4664469957351685,
"learning_rate": 5.2648222785068845e-06,
"loss": 1.0061404705047607,
"step": 1234
},
{
"epoch": 2.2071428571428573,
"grad_norm": 0.6140238642692566,
"learning_rate": 5.237561021784021e-06,
"loss": 1.1637604236602783,
"step": 1236
},
{
"epoch": 2.210714285714286,
"grad_norm": 0.9620886445045471,
"learning_rate": 5.210389074989558e-06,
"loss": 1.1427053213119507,
"step": 1238
},
{
"epoch": 2.2142857142857144,
"grad_norm": 0.43619751930236816,
"learning_rate": 5.183306859251531e-06,
"loss": 1.1551034450531006,
"step": 1240
},
{
"epoch": 2.217857142857143,
"grad_norm": 1.3034915924072266,
"learning_rate": 5.1563147943072775e-06,
"loss": 0.9217376708984375,
"step": 1242
},
{
"epoch": 2.2214285714285715,
"grad_norm": 0.7653930187225342,
"learning_rate": 5.129413298496913e-06,
"loss": 1.0179123878479004,
"step": 1244
},
{
"epoch": 2.225,
"grad_norm": 0.39587950706481934,
"learning_rate": 5.102602788756847e-06,
"loss": 0.9925167560577393,
"step": 1246
},
{
"epoch": 2.2285714285714286,
"grad_norm": 0.6801050305366516,
"learning_rate": 5.075883680613338e-06,
"loss": 0.9864404797554016,
"step": 1248
},
{
"epoch": 2.232142857142857,
"grad_norm": 0.5718074440956116,
"learning_rate": 5.049256388176054e-06,
"loss": 1.0245519876480103,
"step": 1250
},
{
"epoch": 2.2357142857142858,
"grad_norm": 0.4339280426502228,
"learning_rate": 5.022721324131626e-06,
"loss": 1.0415318012237549,
"step": 1252
},
{
"epoch": 2.2392857142857143,
"grad_norm": 1.2172452211380005,
"learning_rate": 4.996278899737283e-06,
"loss": 1.045757532119751,
"step": 1254
},
{
"epoch": 2.242857142857143,
"grad_norm": 1.0536972284317017,
"learning_rate": 4.969929524814464e-06,
"loss": 0.9379353523254395,
"step": 1256
},
{
"epoch": 2.2464285714285714,
"grad_norm": 0.9581257104873657,
"learning_rate": 4.943673607742466e-06,
"loss": 1.0260355472564697,
"step": 1258
},
{
"epoch": 2.25,
"grad_norm": 1.8986560106277466,
"learning_rate": 4.91751155545212e-06,
"loss": 0.6572850942611694,
"step": 1260
},
{
"epoch": 2.2535714285714286,
"grad_norm": 0.6189560890197754,
"learning_rate": 4.891443773419479e-06,
"loss": 0.5532552599906921,
"step": 1262
},
{
"epoch": 2.257142857142857,
"grad_norm": 0.4603727459907532,
"learning_rate": 4.865470665659535e-06,
"loss": 1.104417324066162,
"step": 1264
},
{
"epoch": 2.2607142857142857,
"grad_norm": 0.7528354525566101,
"learning_rate": 4.839592634719966e-06,
"loss": 0.906780481338501,
"step": 1266
},
{
"epoch": 2.2642857142857142,
"grad_norm": 0.8098517060279846,
"learning_rate": 4.813810081674875e-06,
"loss": 1.0541425943374634,
"step": 1268
},
{
"epoch": 2.267857142857143,
"grad_norm": 1.1421626806259155,
"learning_rate": 4.7881234061186e-06,
"loss": 0.977279782295227,
"step": 1270
},
{
"epoch": 2.2714285714285714,
"grad_norm": 0.5860005617141724,
"learning_rate": 4.7625330061595025e-06,
"loss": 1.0465322732925415,
"step": 1272
},
{
"epoch": 2.275,
"grad_norm": 0.753535270690918,
"learning_rate": 4.73703927841381e-06,
"loss": 1.0138700008392334,
"step": 1274
},
{
"epoch": 2.2785714285714285,
"grad_norm": 0.5131919384002686,
"learning_rate": 4.711642617999461e-06,
"loss": 1.0608528852462769,
"step": 1276
},
{
"epoch": 2.282142857142857,
"grad_norm": 2.5389108657836914,
"learning_rate": 4.6863434185299784e-06,
"loss": 0.9582719802856445,
"step": 1278
},
{
"epoch": 2.2857142857142856,
"grad_norm": 0.9607290029525757,
"learning_rate": 4.661142072108371e-06,
"loss": 1.059136152267456,
"step": 1280
},
{
"epoch": 2.289285714285714,
"grad_norm": 0.3775724470615387,
"learning_rate": 4.636038969321073e-06,
"loss": 1.0284271240234375,
"step": 1282
},
{
"epoch": 2.2928571428571427,
"grad_norm": 0.4234330356121063,
"learning_rate": 4.611034499231865e-06,
"loss": 1.0011167526245117,
"step": 1284
},
{
"epoch": 2.2964285714285713,
"grad_norm": 0.36131852865219116,
"learning_rate": 4.586129049375857e-06,
"loss": 1.0314903259277344,
"step": 1286
},
{
"epoch": 2.3,
"grad_norm": 0.7630230188369751,
"learning_rate": 4.561323005753489e-06,
"loss": 0.6767295598983765,
"step": 1288
},
{
"epoch": 2.3035714285714284,
"grad_norm": 1.119634985923767,
"learning_rate": 4.536616752824525e-06,
"loss": 1.0448781251907349,
"step": 1290
},
{
"epoch": 2.307142857142857,
"grad_norm": 1.1402679681777954,
"learning_rate": 4.512010673502125e-06,
"loss": 1.046932578086853,
"step": 1292
},
{
"epoch": 2.310714285714286,
"grad_norm": 0.5093026161193848,
"learning_rate": 4.48750514914688e-06,
"loss": 0.9864715337753296,
"step": 1294
},
{
"epoch": 2.314285714285714,
"grad_norm": 0.5392947196960449,
"learning_rate": 4.463100559560935e-06,
"loss": 0.959062933921814,
"step": 1296
},
{
"epoch": 2.317857142857143,
"grad_norm": 1.170287847518921,
"learning_rate": 4.438797282982069e-06,
"loss": 0.8960469961166382,
"step": 1298
},
{
"epoch": 2.3214285714285716,
"grad_norm": 1.2749667167663574,
"learning_rate": 4.414595696077857e-06,
"loss": 0.829791247844696,
"step": 1300
},
{
"epoch": 2.325,
"grad_norm": 1.079351782798767,
"learning_rate": 4.390496173939808e-06,
"loss": 0.9560054540634155,
"step": 1302
},
{
"epoch": 2.3285714285714287,
"grad_norm": 1.3681961297988892,
"learning_rate": 4.366499090077587e-06,
"loss": 1.0540683269500732,
"step": 1304
},
{
"epoch": 2.3321428571428573,
"grad_norm": 0.41432955861091614,
"learning_rate": 4.342604816413193e-06,
"loss": 1.0061728954315186,
"step": 1306
},
{
"epoch": 2.335714285714286,
"grad_norm": 1.46927809715271,
"learning_rate": 4.318813723275211e-06,
"loss": 1.0353810787200928,
"step": 1308
},
{
"epoch": 2.3392857142857144,
"grad_norm": 0.609827995300293,
"learning_rate": 4.295126179393067e-06,
"loss": 0.6233819127082825,
"step": 1310
},
{
"epoch": 2.342857142857143,
"grad_norm": 0.36403512954711914,
"learning_rate": 4.271542551891319e-06,
"loss": 0.5161199569702148,
"step": 1312
},
{
"epoch": 2.3464285714285715,
"grad_norm": 0.453372985124588,
"learning_rate": 4.248063206283959e-06,
"loss": 1.1536060571670532,
"step": 1314
},
{
"epoch": 2.35,
"grad_norm": 0.2660079896450043,
"learning_rate": 4.224688506468754e-06,
"loss": 1.0320401191711426,
"step": 1316
},
{
"epoch": 2.3535714285714286,
"grad_norm": 0.5758374333381653,
"learning_rate": 4.201418814721599e-06,
"loss": 1.025177240371704,
"step": 1318
},
{
"epoch": 2.357142857142857,
"grad_norm": 0.6305975317955017,
"learning_rate": 4.178254491690923e-06,
"loss": 0.7249777317047119,
"step": 1320
},
{
"epoch": 2.3607142857142858,
"grad_norm": 0.4613308310508728,
"learning_rate": 4.155195896392069e-06,
"loss": 0.9757188558578491,
"step": 1322
},
{
"epoch": 2.3642857142857143,
"grad_norm": 0.8076472878456116,
"learning_rate": 4.132243386201743e-06,
"loss": 0.7056589126586914,
"step": 1324
},
{
"epoch": 2.367857142857143,
"grad_norm": 0.3006990849971771,
"learning_rate": 4.109397316852488e-06,
"loss": 1.2015197277069092,
"step": 1326
},
{
"epoch": 2.3714285714285714,
"grad_norm": 0.4321019649505615,
"learning_rate": 4.086658042427154e-06,
"loss": 1.0216331481933594,
"step": 1328
},
{
"epoch": 2.375,
"grad_norm": 0.21614263951778412,
"learning_rate": 4.0640259153534165e-06,
"loss": 0.7432563900947571,
"step": 1330
},
{
"epoch": 2.3785714285714286,
"grad_norm": 0.5420116782188416,
"learning_rate": 4.041501286398311e-06,
"loss": 1.0380558967590332,
"step": 1332
},
{
"epoch": 2.382142857142857,
"grad_norm": 0.6743971705436707,
"learning_rate": 4.019084504662803e-06,
"loss": 0.9956204295158386,
"step": 1334
},
{
"epoch": 2.3857142857142857,
"grad_norm": 0.8979402780532837,
"learning_rate": 3.99677591757637e-06,
"loss": 0.9765022993087769,
"step": 1336
},
{
"epoch": 2.3892857142857142,
"grad_norm": 0.6747384667396545,
"learning_rate": 3.974575870891622e-06,
"loss": 1.1052476167678833,
"step": 1338
},
{
"epoch": 2.392857142857143,
"grad_norm": 0.27344566583633423,
"learning_rate": 3.952484708678942e-06,
"loss": 1.0951298475265503,
"step": 1340
},
{
"epoch": 2.3964285714285714,
"grad_norm": 0.4220183193683624,
"learning_rate": 3.93050277332115e-06,
"loss": 1.0463672876358032,
"step": 1342
},
{
"epoch": 2.4,
"grad_norm": 0.434129923582077,
"learning_rate": 3.9086304055082005e-06,
"loss": 0.9638611078262329,
"step": 1344
},
{
"epoch": 2.4035714285714285,
"grad_norm": 0.30925750732421875,
"learning_rate": 3.886867944231901e-06,
"loss": 1.068469762802124,
"step": 1346
},
{
"epoch": 2.407142857142857,
"grad_norm": 0.7762970328330994,
"learning_rate": 3.865215726780658e-06,
"loss": 1.0488204956054688,
"step": 1348
},
{
"epoch": 2.4107142857142856,
"grad_norm": 0.9941971898078918,
"learning_rate": 3.8436740887342464e-06,
"loss": 0.9059903621673584,
"step": 1350
},
{
"epoch": 2.414285714285714,
"grad_norm": 0.7018589377403259,
"learning_rate": 3.82224336395862e-06,
"loss": 0.9785033464431763,
"step": 1352
},
{
"epoch": 2.4178571428571427,
"grad_norm": 0.4142831861972809,
"learning_rate": 3.800923884600718e-06,
"loss": 0.9408230781555176,
"step": 1354
},
{
"epoch": 2.4214285714285713,
"grad_norm": 0.3824380040168762,
"learning_rate": 3.7797159810833356e-06,
"loss": 0.979665994644165,
"step": 1356
},
{
"epoch": 2.425,
"grad_norm": 0.4297451972961426,
"learning_rate": 3.758619982099985e-06,
"loss": 0.9747478365898132,
"step": 1358
},
{
"epoch": 2.4285714285714284,
"grad_norm": 0.6103563904762268,
"learning_rate": 3.737636214609825e-06,
"loss": 1.060492992401123,
"step": 1360
},
{
"epoch": 2.432142857142857,
"grad_norm": 0.26877671480178833,
"learning_rate": 3.7167650038325685e-06,
"loss": 0.9182797074317932,
"step": 1362
},
{
"epoch": 2.435714285714286,
"grad_norm": 0.7906038761138916,
"learning_rate": 3.696006673243458e-06,
"loss": 1.034062147140503,
"step": 1364
},
{
"epoch": 2.439285714285714,
"grad_norm": 0.4611876308917999,
"learning_rate": 3.6753615445682463e-06,
"loss": 1.0039832592010498,
"step": 1366
},
{
"epoch": 2.442857142857143,
"grad_norm": 0.9368420243263245,
"learning_rate": 3.6548299377782113e-06,
"loss": 1.0611416101455688,
"step": 1368
},
{
"epoch": 2.4464285714285716,
"grad_norm": 0.7252188324928284,
"learning_rate": 3.634412171085197e-06,
"loss": 0.5861155986785889,
"step": 1370
},
{
"epoch": 2.45,
"grad_norm": 1.0302289724349976,
"learning_rate": 3.614108560936681e-06,
"loss": 1.044318675994873,
"step": 1372
},
{
"epoch": 2.4535714285714287,
"grad_norm": 0.5836187601089478,
"learning_rate": 3.5939194220108687e-06,
"loss": 1.0356674194335938,
"step": 1374
},
{
"epoch": 2.4571428571428573,
"grad_norm": 0.49724525213241577,
"learning_rate": 3.5738450672118265e-06,
"loss": 0.9499672651290894,
"step": 1376
},
{
"epoch": 2.460714285714286,
"grad_norm": 0.6018553376197815,
"learning_rate": 3.5538858076646115e-06,
"loss": 0.501020073890686,
"step": 1378
},
{
"epoch": 2.4642857142857144,
"grad_norm": 0.47356775403022766,
"learning_rate": 3.5340419527104685e-06,
"loss": 1.16387140750885,
"step": 1380
},
{
"epoch": 2.467857142857143,
"grad_norm": 0.3433539867401123,
"learning_rate": 3.514313809902028e-06,
"loss": 1.0770752429962158,
"step": 1382
},
{
"epoch": 2.4714285714285715,
"grad_norm": 0.5331788063049316,
"learning_rate": 3.494701684998542e-06,
"loss": 0.9788726568222046,
"step": 1384
},
{
"epoch": 2.475,
"grad_norm": 0.6456915140151978,
"learning_rate": 3.4752058819611417e-06,
"loss": 1.0846669673919678,
"step": 1386
},
{
"epoch": 2.4785714285714286,
"grad_norm": 0.5334643721580505,
"learning_rate": 3.455826702948129e-06,
"loss": 0.6731972694396973,
"step": 1388
},
{
"epoch": 2.482142857142857,
"grad_norm": 0.543170154094696,
"learning_rate": 3.436564448310287e-06,
"loss": 1.0429730415344238,
"step": 1390
},
{
"epoch": 2.4857142857142858,
"grad_norm": 0.9272890686988831,
"learning_rate": 3.417419416586242e-06,
"loss": 1.0558149814605713,
"step": 1392
},
{
"epoch": 2.4892857142857143,
"grad_norm": 0.6048328876495361,
"learning_rate": 3.3983919044978163e-06,
"loss": 1.000851035118103,
"step": 1394
},
{
"epoch": 2.492857142857143,
"grad_norm": 0.4908715486526489,
"learning_rate": 3.3794822069454434e-06,
"loss": 1.0060486793518066,
"step": 1396
},
{
"epoch": 2.4964285714285714,
"grad_norm": 1.0622693300247192,
"learning_rate": 3.36069061700359e-06,
"loss": 0.6252748966217041,
"step": 1398
},
{
"epoch": 2.5,
"grad_norm": 0.41150182485580444,
"learning_rate": 3.3420174259162173e-06,
"loss": 0.9904586672782898,
"step": 1400
},
{
"epoch": 2.5035714285714286,
"grad_norm": 0.5609564185142517,
"learning_rate": 3.323462923092265e-06,
"loss": 1.0023245811462402,
"step": 1402
},
{
"epoch": 2.507142857142857,
"grad_norm": 0.529987633228302,
"learning_rate": 3.305027396101167e-06,
"loss": 0.9853721857070923,
"step": 1404
},
{
"epoch": 2.5107142857142857,
"grad_norm": 1.71652090549469,
"learning_rate": 3.286711130668395e-06,
"loss": 0.9943090677261353,
"step": 1406
},
{
"epoch": 2.5142857142857142,
"grad_norm": 0.6390452980995178,
"learning_rate": 3.268514410671033e-06,
"loss": 0.9306870102882385,
"step": 1408
},
{
"epoch": 2.517857142857143,
"grad_norm": 1.2343145608901978,
"learning_rate": 3.25043751813337e-06,
"loss": 0.6592239141464233,
"step": 1410
},
{
"epoch": 2.5214285714285714,
"grad_norm": 0.4998447000980377,
"learning_rate": 3.2324807332225307e-06,
"loss": 1.1522630453109741,
"step": 1412
},
{
"epoch": 2.525,
"grad_norm": 0.384732186794281,
"learning_rate": 3.2146443342441384e-06,
"loss": 0.9651750922203064,
"step": 1414
},
{
"epoch": 2.5285714285714285,
"grad_norm": 1.3452273607254028,
"learning_rate": 3.1969285976380044e-06,
"loss": 1.0436429977416992,
"step": 1416
},
{
"epoch": 2.532142857142857,
"grad_norm": 0.4325752556324005,
"learning_rate": 3.1793337979738303e-06,
"loss": 1.0330166816711426,
"step": 1418
},
{
"epoch": 2.5357142857142856,
"grad_norm": 0.6546037793159485,
"learning_rate": 3.161860207946963e-06,
"loss": 1.0312299728393555,
"step": 1420
},
{
"epoch": 2.539285714285714,
"grad_norm": 2.958420515060425,
"learning_rate": 3.1445080983741676e-06,
"loss": 1.0116913318634033,
"step": 1422
},
{
"epoch": 2.5428571428571427,
"grad_norm": 0.3540860712528229,
"learning_rate": 3.127277738189425e-06,
"loss": 0.994167149066925,
"step": 1424
},
{
"epoch": 2.5464285714285713,
"grad_norm": 1.0055928230285645,
"learning_rate": 3.1101693944397702e-06,
"loss": 1.0106935501098633,
"step": 1426
},
{
"epoch": 2.55,
"grad_norm": 0.5099295377731323,
"learning_rate": 3.0931833322811467e-06,
"loss": 1.0180671215057373,
"step": 1428
},
{
"epoch": 2.553571428571429,
"grad_norm": 0.4464087188243866,
"learning_rate": 3.0763198149743024e-06,
"loss": 1.0863144397735596,
"step": 1430
},
{
"epoch": 2.557142857142857,
"grad_norm": 0.4935389459133148,
"learning_rate": 3.0595791038807127e-06,
"loss": 0.9617230892181396,
"step": 1432
},
{
"epoch": 2.560714285714286,
"grad_norm": 0.3790127635002136,
"learning_rate": 3.0429614584585134e-06,
"loss": 0.9597178101539612,
"step": 1434
},
{
"epoch": 2.564285714285714,
"grad_norm": 0.44204089045524597,
"learning_rate": 3.0264671362584983e-06,
"loss": 0.9430738687515259,
"step": 1436
},
{
"epoch": 2.567857142857143,
"grad_norm": 0.6700682640075684,
"learning_rate": 3.0100963929201165e-06,
"loss": 1.0032017230987549,
"step": 1438
},
{
"epoch": 2.571428571428571,
"grad_norm": 0.6156457662582397,
"learning_rate": 2.993849482167518e-06,
"loss": 1.018100380897522,
"step": 1440
},
{
"epoch": 2.575,
"grad_norm": 0.21810433268547058,
"learning_rate": 2.9777266558056136e-06,
"loss": 0.8953680992126465,
"step": 1442
},
{
"epoch": 2.5785714285714287,
"grad_norm": 0.3726331293582916,
"learning_rate": 2.961728163716177e-06,
"loss": 1.0808253288269043,
"step": 1444
},
{
"epoch": 2.5821428571428573,
"grad_norm": 0.46072059869766235,
"learning_rate": 2.9458542538539646e-06,
"loss": 1.1591756343841553,
"step": 1446
},
{
"epoch": 2.585714285714286,
"grad_norm": 0.5146509408950806,
"learning_rate": 2.9301051722428908e-06,
"loss": 0.9750396013259888,
"step": 1448
},
{
"epoch": 2.5892857142857144,
"grad_norm": 0.27188050746917725,
"learning_rate": 2.914481162972191e-06,
"loss": 1.0626856088638306,
"step": 1450
},
{
"epoch": 2.592857142857143,
"grad_norm": 2.597723960876465,
"learning_rate": 2.8989824681926573e-06,
"loss": 1.0312139987945557,
"step": 1452
},
{
"epoch": 2.5964285714285715,
"grad_norm": 0.5178307890892029,
"learning_rate": 2.883609328112874e-06,
"loss": 1.0592162609100342,
"step": 1454
},
{
"epoch": 2.6,
"grad_norm": 0.3589671850204468,
"learning_rate": 2.8683619809955022e-06,
"loss": 0.9793047904968262,
"step": 1456
},
{
"epoch": 2.6035714285714286,
"grad_norm": 1.327312707901001,
"learning_rate": 2.8532406631535835e-06,
"loss": 0.9267488718032837,
"step": 1458
},
{
"epoch": 2.607142857142857,
"grad_norm": 0.6031576991081238,
"learning_rate": 2.8382456089468774e-06,
"loss": 0.7647952437400818,
"step": 1460
},
{
"epoch": 2.6107142857142858,
"grad_norm": 0.740556001663208,
"learning_rate": 2.8233770507782284e-06,
"loss": 0.9490369558334351,
"step": 1462
},
{
"epoch": 2.6142857142857143,
"grad_norm": 0.4929530918598175,
"learning_rate": 2.8086352190899685e-06,
"loss": 1.063023567199707,
"step": 1464
},
{
"epoch": 2.617857142857143,
"grad_norm": 0.36942094564437866,
"learning_rate": 2.7940203423603424e-06,
"loss": 0.8732975721359253,
"step": 1466
},
{
"epoch": 2.6214285714285714,
"grad_norm": 1.9331592321395874,
"learning_rate": 2.7795326470999594e-06,
"loss": 0.9476880431175232,
"step": 1468
},
{
"epoch": 2.625,
"grad_norm": 1.912407636642456,
"learning_rate": 2.765172357848298e-06,
"loss": 0.5515610575675964,
"step": 1470
},
{
"epoch": 2.6285714285714286,
"grad_norm": 0.522169291973114,
"learning_rate": 2.7509396971702148e-06,
"loss": 0.9627550840377808,
"step": 1472
},
{
"epoch": 2.632142857142857,
"grad_norm": 0.4970167279243469,
"learning_rate": 2.736834885652498e-06,
"loss": 0.9455366134643555,
"step": 1474
},
{
"epoch": 2.6357142857142857,
"grad_norm": 0.6564792394638062,
"learning_rate": 2.7228581419004484e-06,
"loss": 1.0734994411468506,
"step": 1476
},
{
"epoch": 2.6392857142857142,
"grad_norm": 1.9194797277450562,
"learning_rate": 2.7090096825344918e-06,
"loss": 0.7485166192054749,
"step": 1478
},
{
"epoch": 2.642857142857143,
"grad_norm": 0.6008222699165344,
"learning_rate": 2.6952897221868208e-06,
"loss": 0.7062904834747314,
"step": 1480
},
{
"epoch": 2.6464285714285714,
"grad_norm": 0.586191713809967,
"learning_rate": 2.681698473498069e-06,
"loss": 1.01906156539917,
"step": 1482
},
{
"epoch": 2.65,
"grad_norm": 0.5794109106063843,
"learning_rate": 2.6682361471140162e-06,
"loss": 1.0513708591461182,
"step": 1484
},
{
"epoch": 2.6535714285714285,
"grad_norm": 0.5425417423248291,
"learning_rate": 2.654902951682319e-06,
"loss": 1.0853989124298096,
"step": 1486
},
{
"epoch": 2.657142857142857,
"grad_norm": 0.755917489528656,
"learning_rate": 2.641699093849289e-06,
"loss": 1.0674422979354858,
"step": 1488
},
{
"epoch": 2.6607142857142856,
"grad_norm": 0.7524024844169617,
"learning_rate": 2.628624778256671e-06,
"loss": 0.8497341275215149,
"step": 1490
},
{
"epoch": 2.664285714285714,
"grad_norm": 0.8485654592514038,
"learning_rate": 2.6156802075384903e-06,
"loss": 0.9786584377288818,
"step": 1492
},
{
"epoch": 2.6678571428571427,
"grad_norm": 0.47498440742492676,
"learning_rate": 2.602865582317899e-06,
"loss": 1.0709497928619385,
"step": 1494
},
{
"epoch": 2.6714285714285713,
"grad_norm": 0.4130082428455353,
"learning_rate": 2.5901811012040766e-06,
"loss": 1.049452781677246,
"step": 1496
},
{
"epoch": 2.675,
"grad_norm": 1.847033977508545,
"learning_rate": 2.5776269607891426e-06,
"loss": 0.5213537216186523,
"step": 1498
},
{
"epoch": 2.678571428571429,
"grad_norm": 0.40808621048927307,
"learning_rate": 2.5652033556451156e-06,
"loss": 1.0305453538894653,
"step": 1500
},
{
"epoch": 2.682142857142857,
"grad_norm": 0.6981632709503174,
"learning_rate": 2.5529104783208936e-06,
"loss": 1.158233404159546,
"step": 1502
},
{
"epoch": 2.685714285714286,
"grad_norm": 1.0872377157211304,
"learning_rate": 2.5407485193392765e-06,
"loss": 1.0444018840789795,
"step": 1504
},
{
"epoch": 2.689285714285714,
"grad_norm": 1.1252286434173584,
"learning_rate": 2.5287176671940043e-06,
"loss": 0.6784479022026062,
"step": 1506
},
{
"epoch": 2.692857142857143,
"grad_norm": 0.35642820596694946,
"learning_rate": 2.5168181083468433e-06,
"loss": 1.0736439228057861,
"step": 1508
},
{
"epoch": 2.696428571428571,
"grad_norm": 0.5642552375793457,
"learning_rate": 2.505050027224692e-06,
"loss": 1.065712809562683,
"step": 1510
},
{
"epoch": 2.7,
"grad_norm": 0.44292396306991577,
"learning_rate": 2.4934136062167245e-06,
"loss": 1.0252857208251953,
"step": 1512
},
{
"epoch": 2.7035714285714287,
"grad_norm": 0.8530491590499878,
"learning_rate": 2.481909025671561e-06,
"loss": 0.7706226706504822,
"step": 1514
},
{
"epoch": 2.7071428571428573,
"grad_norm": 1.9386380910873413,
"learning_rate": 2.470536463894476e-06,
"loss": 1.1056604385375977,
"step": 1516
},
{
"epoch": 2.710714285714286,
"grad_norm": 1.086133360862732,
"learning_rate": 2.459296097144634e-06,
"loss": 1.1189930438995361,
"step": 1518
},
{
"epoch": 2.7142857142857144,
"grad_norm": 0.3476833999156952,
"learning_rate": 2.4481880996323595e-06,
"loss": 0.9641977548599243,
"step": 1520
},
{
"epoch": 2.717857142857143,
"grad_norm": 1.6515637636184692,
"learning_rate": 2.4372126435164287e-06,
"loss": 0.7884814143180847,
"step": 1522
},
{
"epoch": 2.7214285714285715,
"grad_norm": 0.8657196760177612,
"learning_rate": 2.426369898901412e-06,
"loss": 1.0643302202224731,
"step": 1524
},
{
"epoch": 2.725,
"grad_norm": 1.4171736240386963,
"learning_rate": 2.4156600338350315e-06,
"loss": 0.894715428352356,
"step": 1526
},
{
"epoch": 2.7285714285714286,
"grad_norm": 0.8196776509284973,
"learning_rate": 2.40508321430556e-06,
"loss": 0.7902459502220154,
"step": 1528
},
{
"epoch": 2.732142857142857,
"grad_norm": 0.982892632484436,
"learning_rate": 2.3946396042392415e-06,
"loss": 1.0700161457061768,
"step": 1530
},
{
"epoch": 2.7357142857142858,
"grad_norm": 0.47785484790802,
"learning_rate": 2.3843293654977613e-06,
"loss": 0.8019880056381226,
"step": 1532
},
{
"epoch": 2.7392857142857143,
"grad_norm": 0.591884970664978,
"learning_rate": 2.374152657875726e-06,
"loss": 0.5509794354438782,
"step": 1534
},
{
"epoch": 2.742857142857143,
"grad_norm": 0.584327220916748,
"learning_rate": 2.364109639098196e-06,
"loss": 1.0576118230819702,
"step": 1536
},
{
"epoch": 2.7464285714285714,
"grad_norm": 0.3715934455394745,
"learning_rate": 2.3542004648182366e-06,
"loss": 0.9892575740814209,
"step": 1538
},
{
"epoch": 2.75,
"grad_norm": 0.7555781602859497,
"learning_rate": 2.344425288614504e-06,
"loss": 1.0496368408203125,
"step": 1540
},
{
"epoch": 2.7535714285714286,
"grad_norm": 0.47145044803619385,
"learning_rate": 2.334784261988871e-06,
"loss": 1.0199828147888184,
"step": 1542
},
{
"epoch": 2.757142857142857,
"grad_norm": 0.4803292155265808,
"learning_rate": 2.3252775343640726e-06,
"loss": 1.2034311294555664,
"step": 1544
},
{
"epoch": 2.7607142857142857,
"grad_norm": 0.5480961203575134,
"learning_rate": 2.3159052530813944e-06,
"loss": 1.0263570547103882,
"step": 1546
},
{
"epoch": 2.7642857142857142,
"grad_norm": 0.7437337040901184,
"learning_rate": 2.3066675633983863e-06,
"loss": 1.065530776977539,
"step": 1548
},
{
"epoch": 2.767857142857143,
"grad_norm": 0.33723706007003784,
"learning_rate": 2.2975646084866126e-06,
"loss": 0.8913886547088623,
"step": 1550
},
{
"epoch": 2.7714285714285714,
"grad_norm": 0.7276679277420044,
"learning_rate": 2.2885965294294334e-06,
"loss": 0.8413328528404236,
"step": 1552
},
{
"epoch": 2.775,
"grad_norm": 0.4004630744457245,
"learning_rate": 2.2797634652198187e-06,
"loss": 0.957332968711853,
"step": 1554
},
{
"epoch": 2.7785714285714285,
"grad_norm": 0.6412128806114197,
"learning_rate": 2.2710655527581886e-06,
"loss": 0.3783353269100189,
"step": 1556
},
{
"epoch": 2.782142857142857,
"grad_norm": 1.0582815408706665,
"learning_rate": 2.2625029268502984e-06,
"loss": 0.8345463275909424,
"step": 1558
},
{
"epoch": 2.7857142857142856,
"grad_norm": 0.5835173726081848,
"learning_rate": 2.25407572020515e-06,
"loss": 1.0623098611831665,
"step": 1560
},
{
"epoch": 2.789285714285714,
"grad_norm": 2.572283983230591,
"learning_rate": 2.245784063432925e-06,
"loss": 0.7072303295135498,
"step": 1562
},
{
"epoch": 2.7928571428571427,
"grad_norm": 0.5214061737060547,
"learning_rate": 2.237628085042972e-06,
"loss": 1.0455104112625122,
"step": 1564
},
{
"epoch": 2.7964285714285713,
"grad_norm": 1.3393778800964355,
"learning_rate": 2.2296079114418113e-06,
"loss": 0.9467559456825256,
"step": 1566
},
{
"epoch": 2.8,
"grad_norm": 0.25553128123283386,
"learning_rate": 2.2217236669311687e-06,
"loss": 0.928917407989502,
"step": 1568
},
{
"epoch": 2.803571428571429,
"grad_norm": 0.4923466145992279,
"learning_rate": 2.2139754737060606e-06,
"loss": 1.0020575523376465,
"step": 1570
},
{
"epoch": 2.807142857142857,
"grad_norm": 0.5088347792625427,
"learning_rate": 2.206363451852891e-06,
"loss": 1.0174009799957275,
"step": 1572
},
{
"epoch": 2.810714285714286,
"grad_norm": 1.0624254941940308,
"learning_rate": 2.1988877193475942e-06,
"loss": 0.943864107131958,
"step": 1574
},
{
"epoch": 2.814285714285714,
"grad_norm": 1.2148317098617554,
"learning_rate": 2.1915483920538098e-06,
"loss": 0.9655317068099976,
"step": 1576
},
{
"epoch": 2.817857142857143,
"grad_norm": 0.40001875162124634,
"learning_rate": 2.184345583721078e-06,
"loss": 1.03749680519104,
"step": 1578
},
{
"epoch": 2.821428571428571,
"grad_norm": 0.5738964080810547,
"learning_rate": 2.177279405983083e-06,
"loss": 1.0436820983886719,
"step": 1580
},
{
"epoch": 2.825,
"grad_norm": 1.9478952884674072,
"learning_rate": 2.1703499683559218e-06,
"loss": 0.823712944984436,
"step": 1582
},
{
"epoch": 2.8285714285714287,
"grad_norm": 0.5372344255447388,
"learning_rate": 2.1635573782364083e-06,
"loss": 1.0900218486785889,
"step": 1584
},
{
"epoch": 2.8321428571428573,
"grad_norm": 0.4769842028617859,
"learning_rate": 2.1569017409004058e-06,
"loss": 1.032717227935791,
"step": 1586
},
{
"epoch": 2.835714285714286,
"grad_norm": 3.2297580242156982,
"learning_rate": 2.1503831595011972e-06,
"loss": 0.6889293193817139,
"step": 1588
},
{
"epoch": 2.8392857142857144,
"grad_norm": 1.6764355897903442,
"learning_rate": 2.1440017350678806e-06,
"loss": 0.5947354435920715,
"step": 1590
},
{
"epoch": 2.842857142857143,
"grad_norm": 0.44973084330558777,
"learning_rate": 2.137757566503817e-06,
"loss": 1.1642112731933594,
"step": 1592
},
{
"epoch": 2.8464285714285715,
"grad_norm": 0.27710893750190735,
"learning_rate": 2.1316507505850835e-06,
"loss": 0.9468015432357788,
"step": 1594
},
{
"epoch": 2.85,
"grad_norm": 0.4122758209705353,
"learning_rate": 2.125681381958982e-06,
"loss": 1.0558395385742188,
"step": 1596
},
{
"epoch": 2.8535714285714286,
"grad_norm": 0.8191478848457336,
"learning_rate": 2.1198495531425663e-06,
"loss": 1.0057258605957031,
"step": 1598
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.8358668088912964,
"learning_rate": 2.114155354521211e-06,
"loss": 1.0604225397109985,
"step": 1600
},
{
"epoch": 2.8607142857142858,
"grad_norm": 0.897541880607605,
"learning_rate": 2.1085988743472153e-06,
"loss": 1.0604546070098877,
"step": 1602
},
{
"epoch": 2.8642857142857143,
"grad_norm": 0.9542982578277588,
"learning_rate": 2.1031801987384255e-06,
"loss": 1.0365536212921143,
"step": 1604
},
{
"epoch": 2.867857142857143,
"grad_norm": 0.7912425398826599,
"learning_rate": 2.097899411676908e-06,
"loss": 0.9559098482131958,
"step": 1606
},
{
"epoch": 2.8714285714285714,
"grad_norm": 1.4533398151397705,
"learning_rate": 2.092756595007645e-06,
"loss": 0.7953978776931763,
"step": 1608
},
{
"epoch": 2.875,
"grad_norm": 0.358332097530365,
"learning_rate": 2.087751828437267e-06,
"loss": 1.0427325963974,
"step": 1610
},
{
"epoch": 2.8785714285714286,
"grad_norm": 0.2145242542028427,
"learning_rate": 2.082885189532815e-06,
"loss": 1.0498052835464478,
"step": 1612
},
{
"epoch": 2.882142857142857,
"grad_norm": 1.0103882551193237,
"learning_rate": 2.0781567537205387e-06,
"loss": 1.0514143705368042,
"step": 1614
},
{
"epoch": 2.8857142857142857,
"grad_norm": 0.669739305973053,
"learning_rate": 2.0735665942847328e-06,
"loss": 1.0135245323181152,
"step": 1616
},
{
"epoch": 2.8892857142857142,
"grad_norm": 0.576935887336731,
"learning_rate": 2.0691147823665953e-06,
"loss": 1.0457009077072144,
"step": 1618
},
{
"epoch": 2.892857142857143,
"grad_norm": 1.905529499053955,
"learning_rate": 2.0648013869631246e-06,
"loss": 1.1598899364471436,
"step": 1620
},
{
"epoch": 2.8964285714285714,
"grad_norm": 0.44723814725875854,
"learning_rate": 2.060626474926055e-06,
"loss": 1.0284682512283325,
"step": 1622
},
{
"epoch": 2.9,
"grad_norm": 0.4723893702030182,
"learning_rate": 2.056590110960817e-06,
"loss": 0.5460047721862793,
"step": 1624
},
{
"epoch": 2.9035714285714285,
"grad_norm": 0.8015878796577454,
"learning_rate": 2.0526923576255363e-06,
"loss": 0.7874211668968201,
"step": 1626
},
{
"epoch": 2.907142857142857,
"grad_norm": 0.6421289443969727,
"learning_rate": 2.0489332753300613e-06,
"loss": 1.013157844543457,
"step": 1628
},
{
"epoch": 2.9107142857142856,
"grad_norm": 0.5567857027053833,
"learning_rate": 2.0453129223350293e-06,
"loss": 0.9200034737586975,
"step": 1630
},
{
"epoch": 2.914285714285714,
"grad_norm": 0.8033483624458313,
"learning_rate": 2.041831354750966e-06,
"loss": 0.9019091129302979,
"step": 1632
},
{
"epoch": 2.9178571428571427,
"grad_norm": 0.24038927257061005,
"learning_rate": 2.0384886265374078e-06,
"loss": 0.9200948476791382,
"step": 1634
},
{
"epoch": 2.9214285714285713,
"grad_norm": 0.48659539222717285,
"learning_rate": 2.0352847895020754e-06,
"loss": 1.117785096168518,
"step": 1636
},
{
"epoch": 2.925,
"grad_norm": 0.6561591029167175,
"learning_rate": 2.032219893300064e-06,
"loss": 0.909339189529419,
"step": 1638
},
{
"epoch": 2.928571428571429,
"grad_norm": 0.4943861663341522,
"learning_rate": 2.029293985433074e-06,
"loss": 1.068084478378296,
"step": 1640
},
{
"epoch": 2.932142857142857,
"grad_norm": 0.4287692606449127,
"learning_rate": 2.02650711124868e-06,
"loss": 0.9846458435058594,
"step": 1642
},
{
"epoch": 2.935714285714286,
"grad_norm": 0.9221391677856445,
"learning_rate": 2.0238593139396225e-06,
"loss": 0.9131702184677124,
"step": 1644
},
{
"epoch": 2.939285714285714,
"grad_norm": 0.5019553303718567,
"learning_rate": 2.02135063454314e-06,
"loss": 0.990814208984375,
"step": 1646
},
{
"epoch": 2.942857142857143,
"grad_norm": 0.3528009057044983,
"learning_rate": 2.0189811119403356e-06,
"loss": 1.043567419052124,
"step": 1648
},
{
"epoch": 2.946428571428571,
"grad_norm": 1.9814422130584717,
"learning_rate": 2.0167507828555718e-06,
"loss": 0.600018322467804,
"step": 1650
},
{
"epoch": 2.95,
"grad_norm": 0.4034280776977539,
"learning_rate": 2.014659681855898e-06,
"loss": 1.066345453262329,
"step": 1652
},
{
"epoch": 2.9535714285714287,
"grad_norm": 0.35513538122177124,
"learning_rate": 2.012707841350526e-06,
"loss": 1.023656964302063,
"step": 1654
},
{
"epoch": 2.9571428571428573,
"grad_norm": 0.3866799771785736,
"learning_rate": 2.0108952915903134e-06,
"loss": 0.89092618227005,
"step": 1656
},
{
"epoch": 2.960714285714286,
"grad_norm": 0.4807373583316803,
"learning_rate": 2.0092220606673037e-06,
"loss": 1.0590665340423584,
"step": 1658
},
{
"epoch": 2.9642857142857144,
"grad_norm": 1.271697998046875,
"learning_rate": 2.0076881745142912e-06,
"loss": 1.0663166046142578,
"step": 1660
},
{
"epoch": 2.967857142857143,
"grad_norm": 0.5819157361984253,
"learning_rate": 2.0062936569044127e-06,
"loss": 1.0056861639022827,
"step": 1662
},
{
"epoch": 2.9714285714285715,
"grad_norm": 1.1669540405273438,
"learning_rate": 2.0050385294507877e-06,
"loss": 1.175034761428833,
"step": 1664
},
{
"epoch": 2.975,
"grad_norm": 1.3715438842773438,
"learning_rate": 2.0039228116061747e-06,
"loss": 1.0603545904159546,
"step": 1666
},
{
"epoch": 2.9785714285714286,
"grad_norm": 0.3443331718444824,
"learning_rate": 2.002946520662675e-06,
"loss": 1.029945731163025,
"step": 1668
},
{
"epoch": 2.982142857142857,
"grad_norm": 0.9261486530303955,
"learning_rate": 2.0021096717514655e-06,
"loss": 0.5534359812736511,
"step": 1670
},
{
"epoch": 2.9857142857142858,
"grad_norm": 0.34926119446754456,
"learning_rate": 2.001412277842559e-06,
"loss": 0.4894912838935852,
"step": 1672
},
{
"epoch": 2.9892857142857143,
"grad_norm": 0.3799968361854553,
"learning_rate": 2.0008543497446085e-06,
"loss": 1.021775722503662,
"step": 1674
},
{
"epoch": 2.992857142857143,
"grad_norm": 0.33220550417900085,
"learning_rate": 2.0004358961047375e-06,
"loss": 1.0939745903015137,
"step": 1676
},
{
"epoch": 2.9964285714285714,
"grad_norm": 1.253113031387329,
"learning_rate": 2.0001569234084046e-06,
"loss": 0.5844765901565552,
"step": 1678
},
{
"epoch": 3.0,
"grad_norm": 1.1644887924194336,
"learning_rate": 2.0000174359793066e-06,
"loss": 0.9837155342102051,
"step": 1680
},
{
"epoch": 3.0,
"step": 1680,
"total_flos": 2.50959750918203e+18,
"train_loss": 1.167248018547183,
"train_runtime": 8368.8806,
"train_samples_per_second": 3.212,
"train_steps_per_second": 0.201
}
],
"logging_steps": 2,
"max_steps": 1680,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 9999999,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.50959750918203e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}