| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 49.50563607085346, |
| "eval_steps": 100, |
| "global_step": 1900, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.2576489533011272, |
| "grad_norm": 5.974159240722656, |
| "learning_rate": 1.0526315789473685e-06, |
| "loss": 8.6521, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.5152979066022544, |
| "grad_norm": 3.2873778343200684, |
| "learning_rate": 2.105263157894737e-06, |
| "loss": 8.3497, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.7729468599033816, |
| "grad_norm": 2.522794485092163, |
| "learning_rate": 3.157894736842105e-06, |
| "loss": 7.9896, |
| "step": 30 |
| }, |
| { |
| "epoch": 1.0418679549114331, |
| "grad_norm": 1.5867993831634521, |
| "learning_rate": 4.210526315789474e-06, |
| "loss": 8.1084, |
| "step": 40 |
| }, |
| { |
| "epoch": 1.2995169082125604, |
| "grad_norm": 1.300277829170227, |
| "learning_rate": 5.263157894736842e-06, |
| "loss": 7.3393, |
| "step": 50 |
| }, |
| { |
| "epoch": 1.5571658615136876, |
| "grad_norm": 1.1117545366287231, |
| "learning_rate": 6.31578947368421e-06, |
| "loss": 7.1266, |
| "step": 60 |
| }, |
| { |
| "epoch": 1.8148148148148149, |
| "grad_norm": 0.9784226417541504, |
| "learning_rate": 7.368421052631579e-06, |
| "loss": 6.9662, |
| "step": 70 |
| }, |
| { |
| "epoch": 2.0837359098228663, |
| "grad_norm": 0.9447280764579773, |
| "learning_rate": 8.421052631578948e-06, |
| "loss": 7.2558, |
| "step": 80 |
| }, |
| { |
| "epoch": 2.3413848631239937, |
| "grad_norm": 1.1698005199432373, |
| "learning_rate": 9.473684210526315e-06, |
| "loss": 6.6472, |
| "step": 90 |
| }, |
| { |
| "epoch": 2.5990338164251208, |
| "grad_norm": 1.544875144958496, |
| "learning_rate": 1.0526315789473684e-05, |
| "loss": 6.392, |
| "step": 100 |
| }, |
| { |
| "epoch": 2.5990338164251208, |
| "eval_loss": 6.279848575592041, |
| "eval_runtime": 8.7699, |
| "eval_samples_per_second": 183.012, |
| "eval_steps_per_second": 5.815, |
| "step": 100 |
| }, |
| { |
| "epoch": 2.8566827697262482, |
| "grad_norm": 1.4024465084075928, |
| "learning_rate": 1.1578947368421053e-05, |
| "loss": 6.1704, |
| "step": 110 |
| }, |
| { |
| "epoch": 3.1256038647342996, |
| "grad_norm": 1.2708696126937866, |
| "learning_rate": 1.263157894736842e-05, |
| "loss": 6.369, |
| "step": 120 |
| }, |
| { |
| "epoch": 3.3832528180354267, |
| "grad_norm": 0.7357354760169983, |
| "learning_rate": 1.3684210526315791e-05, |
| "loss": 5.8538, |
| "step": 130 |
| }, |
| { |
| "epoch": 3.640901771336554, |
| "grad_norm": 0.5254281163215637, |
| "learning_rate": 1.4736842105263159e-05, |
| "loss": 5.7556, |
| "step": 140 |
| }, |
| { |
| "epoch": 3.898550724637681, |
| "grad_norm": 0.8618260025978088, |
| "learning_rate": 1.578947368421053e-05, |
| "loss": 5.6495, |
| "step": 150 |
| }, |
| { |
| "epoch": 4.1674718196457325, |
| "grad_norm": 1.0980093479156494, |
| "learning_rate": 1.6842105263157896e-05, |
| "loss": 5.9024, |
| "step": 160 |
| }, |
| { |
| "epoch": 4.42512077294686, |
| "grad_norm": 1.3449251651763916, |
| "learning_rate": 1.7894736842105264e-05, |
| "loss": 5.4768, |
| "step": 170 |
| }, |
| { |
| "epoch": 4.6827697262479875, |
| "grad_norm": 1.2047125101089478, |
| "learning_rate": 1.894736842105263e-05, |
| "loss": 5.3661, |
| "step": 180 |
| }, |
| { |
| "epoch": 4.940418679549114, |
| "grad_norm": 1.6401121616363525, |
| "learning_rate": 2e-05, |
| "loss": 5.2836, |
| "step": 190 |
| }, |
| { |
| "epoch": 5.209339774557166, |
| "grad_norm": 1.8774281740188599, |
| "learning_rate": 1.999831241633323e-05, |
| "loss": 5.5386, |
| "step": 200 |
| }, |
| { |
| "epoch": 5.209339774557166, |
| "eval_loss": 5.1901116371154785, |
| "eval_runtime": 8.0613, |
| "eval_samples_per_second": 199.099, |
| "eval_steps_per_second": 6.326, |
| "step": 200 |
| }, |
| { |
| "epoch": 5.466988727858293, |
| "grad_norm": 2.0122838020324707, |
| "learning_rate": 1.9993250234920638e-05, |
| "loss": 5.145, |
| "step": 210 |
| }, |
| { |
| "epoch": 5.72463768115942, |
| "grad_norm": 1.5243710279464722, |
| "learning_rate": 1.9984815164333163e-05, |
| "loss": 5.0726, |
| "step": 220 |
| }, |
| { |
| "epoch": 5.982286634460547, |
| "grad_norm": 1.613467812538147, |
| "learning_rate": 1.9973010051548274e-05, |
| "loss": 5.1139, |
| "step": 230 |
| }, |
| { |
| "epoch": 6.251207729468599, |
| "grad_norm": 1.169236183166504, |
| "learning_rate": 1.9957838880989076e-05, |
| "loss": 5.1586, |
| "step": 240 |
| }, |
| { |
| "epoch": 6.508856682769727, |
| "grad_norm": 1.2285501956939697, |
| "learning_rate": 1.9939306773179498e-05, |
| "loss": 4.9011, |
| "step": 250 |
| }, |
| { |
| "epoch": 6.766505636070853, |
| "grad_norm": 1.4358283281326294, |
| "learning_rate": 1.9917419983016025e-05, |
| "loss": 4.8479, |
| "step": 260 |
| }, |
| { |
| "epoch": 7.035426731078905, |
| "grad_norm": 1.3988443613052368, |
| "learning_rate": 1.989218589765658e-05, |
| "loss": 5.097, |
| "step": 270 |
| }, |
| { |
| "epoch": 7.293075684380033, |
| "grad_norm": 1.1683136224746704, |
| "learning_rate": 1.9863613034027224e-05, |
| "loss": 4.7351, |
| "step": 280 |
| }, |
| { |
| "epoch": 7.550724637681159, |
| "grad_norm": 1.2952402830123901, |
| "learning_rate": 1.9831711035947552e-05, |
| "loss": 4.6976, |
| "step": 290 |
| }, |
| { |
| "epoch": 7.808373590982287, |
| "grad_norm": 1.4266207218170166, |
| "learning_rate": 1.979649067087574e-05, |
| "loss": 4.6391, |
| "step": 300 |
| }, |
| { |
| "epoch": 7.808373590982287, |
| "eval_loss": 4.603694438934326, |
| "eval_runtime": 8.3521, |
| "eval_samples_per_second": 192.167, |
| "eval_steps_per_second": 6.106, |
| "step": 300 |
| }, |
| { |
| "epoch": 8.077294685990339, |
| "grad_norm": 2.173689126968384, |
| "learning_rate": 1.9757963826274357e-05, |
| "loss": 4.8403, |
| "step": 310 |
| }, |
| { |
| "epoch": 8.334943639291465, |
| "grad_norm": 1.874500036239624, |
| "learning_rate": 1.971614350559814e-05, |
| "loss": 4.4785, |
| "step": 320 |
| }, |
| { |
| "epoch": 8.592592592592592, |
| "grad_norm": 1.9841059446334839, |
| "learning_rate": 1.967104382390511e-05, |
| "loss": 4.4265, |
| "step": 330 |
| }, |
| { |
| "epoch": 8.85024154589372, |
| "grad_norm": 2.523819923400879, |
| "learning_rate": 1.9622680003092503e-05, |
| "loss": 4.3497, |
| "step": 340 |
| }, |
| { |
| "epoch": 9.119162640901772, |
| "grad_norm": 2.2171430587768555, |
| "learning_rate": 1.9571068366759143e-05, |
| "loss": 4.5573, |
| "step": 350 |
| }, |
| { |
| "epoch": 9.376811594202898, |
| "grad_norm": 2.516089916229248, |
| "learning_rate": 1.951622633469592e-05, |
| "loss": 4.2197, |
| "step": 360 |
| }, |
| { |
| "epoch": 9.634460547504025, |
| "grad_norm": 3.2976479530334473, |
| "learning_rate": 1.9458172417006347e-05, |
| "loss": 4.1526, |
| "step": 370 |
| }, |
| { |
| "epoch": 9.892109500805153, |
| "grad_norm": 2.310131311416626, |
| "learning_rate": 1.9396926207859085e-05, |
| "loss": 4.1294, |
| "step": 380 |
| }, |
| { |
| "epoch": 10.161030595813205, |
| "grad_norm": 2.445627450942993, |
| "learning_rate": 1.933250837887457e-05, |
| "loss": 4.3354, |
| "step": 390 |
| }, |
| { |
| "epoch": 10.418679549114332, |
| "grad_norm": 2.188768148422241, |
| "learning_rate": 1.9264940672148018e-05, |
| "loss": 4.0545, |
| "step": 400 |
| }, |
| { |
| "epoch": 10.418679549114332, |
| "eval_loss": 4.0533294677734375, |
| "eval_runtime": 8.3508, |
| "eval_samples_per_second": 192.197, |
| "eval_steps_per_second": 6.107, |
| "step": 400 |
| }, |
| { |
| "epoch": 10.676328502415458, |
| "grad_norm": 3.4046390056610107, |
| "learning_rate": 1.9194245892911077e-05, |
| "loss": 4.0121, |
| "step": 410 |
| }, |
| { |
| "epoch": 10.933977455716587, |
| "grad_norm": 2.701680898666382, |
| "learning_rate": 1.9120447901834708e-05, |
| "loss": 3.9847, |
| "step": 420 |
| }, |
| { |
| "epoch": 11.202898550724637, |
| "grad_norm": 2.2474334239959717, |
| "learning_rate": 1.9043571606975776e-05, |
| "loss": 4.1875, |
| "step": 430 |
| }, |
| { |
| "epoch": 11.460547504025765, |
| "grad_norm": 2.2355682849884033, |
| "learning_rate": 1.8963642955370203e-05, |
| "loss": 3.9325, |
| "step": 440 |
| }, |
| { |
| "epoch": 11.718196457326892, |
| "grad_norm": 2.977954626083374, |
| "learning_rate": 1.888068892427538e-05, |
| "loss": 3.8992, |
| "step": 450 |
| }, |
| { |
| "epoch": 11.97584541062802, |
| "grad_norm": 2.2921981811523438, |
| "learning_rate": 1.879473751206489e-05, |
| "loss": 3.858, |
| "step": 460 |
| }, |
| { |
| "epoch": 12.24476650563607, |
| "grad_norm": 1.8335373401641846, |
| "learning_rate": 1.8705817728778626e-05, |
| "loss": 4.0718, |
| "step": 470 |
| }, |
| { |
| "epoch": 12.502415458937199, |
| "grad_norm": 2.2911853790283203, |
| "learning_rate": 1.8613959586331364e-05, |
| "loss": 3.8305, |
| "step": 480 |
| }, |
| { |
| "epoch": 12.760064412238325, |
| "grad_norm": 2.170738697052002, |
| "learning_rate": 1.851919408838327e-05, |
| "loss": 3.7923, |
| "step": 490 |
| }, |
| { |
| "epoch": 13.028985507246377, |
| "grad_norm": 1.8763988018035889, |
| "learning_rate": 1.842155321987566e-05, |
| "loss": 4.014, |
| "step": 500 |
| }, |
| { |
| "epoch": 13.028985507246377, |
| "eval_loss": 3.779900550842285, |
| "eval_runtime": 8.1613, |
| "eval_samples_per_second": 196.661, |
| "eval_steps_per_second": 6.249, |
| "step": 500 |
| }, |
| { |
| "epoch": 13.286634460547504, |
| "grad_norm": 2.2329862117767334, |
| "learning_rate": 1.8321069936235503e-05, |
| "loss": 3.753, |
| "step": 510 |
| }, |
| { |
| "epoch": 13.544283413848632, |
| "grad_norm": 2.3311359882354736, |
| "learning_rate": 1.821777815225245e-05, |
| "loss": 3.7099, |
| "step": 520 |
| }, |
| { |
| "epoch": 13.801932367149758, |
| "grad_norm": 2.5093774795532227, |
| "learning_rate": 1.8111712730632024e-05, |
| "loss": 3.6944, |
| "step": 530 |
| }, |
| { |
| "epoch": 14.07085346215781, |
| "grad_norm": 2.2526538372039795, |
| "learning_rate": 1.800290947022884e-05, |
| "loss": 3.9225, |
| "step": 540 |
| }, |
| { |
| "epoch": 14.328502415458937, |
| "grad_norm": 2.5449490547180176, |
| "learning_rate": 1.789140509396394e-05, |
| "loss": 3.6411, |
| "step": 550 |
| }, |
| { |
| "epoch": 14.586151368760065, |
| "grad_norm": 2.722266435623169, |
| "learning_rate": 1.777723723643014e-05, |
| "loss": 3.6571, |
| "step": 560 |
| }, |
| { |
| "epoch": 14.843800322061192, |
| "grad_norm": 2.3767805099487305, |
| "learning_rate": 1.766044443118978e-05, |
| "loss": 3.6036, |
| "step": 570 |
| }, |
| { |
| "epoch": 15.112721417069244, |
| "grad_norm": 2.4693193435668945, |
| "learning_rate": 1.7541066097768965e-05, |
| "loss": 3.8201, |
| "step": 580 |
| }, |
| { |
| "epoch": 15.37037037037037, |
| "grad_norm": 2.3329803943634033, |
| "learning_rate": 1.7419142528352815e-05, |
| "loss": 3.5643, |
| "step": 590 |
| }, |
| { |
| "epoch": 15.628019323671497, |
| "grad_norm": 2.396742343902588, |
| "learning_rate": 1.729471487418621e-05, |
| "loss": 3.5476, |
| "step": 600 |
| }, |
| { |
| "epoch": 15.628019323671497, |
| "eval_loss": 3.5582447052001953, |
| "eval_runtime": 8.3607, |
| "eval_samples_per_second": 191.969, |
| "eval_steps_per_second": 6.1, |
| "step": 600 |
| }, |
| { |
| "epoch": 15.885668276972625, |
| "grad_norm": 2.3643219470977783, |
| "learning_rate": 1.7167825131684516e-05, |
| "loss": 3.5246, |
| "step": 610 |
| }, |
| { |
| "epoch": 16.154589371980677, |
| "grad_norm": 3.809258460998535, |
| "learning_rate": 1.7038516128259118e-05, |
| "loss": 3.7499, |
| "step": 620 |
| }, |
| { |
| "epoch": 16.412238325281802, |
| "grad_norm": 2.6895406246185303, |
| "learning_rate": 1.6906831507862446e-05, |
| "loss": 3.499, |
| "step": 630 |
| }, |
| { |
| "epoch": 16.66988727858293, |
| "grad_norm": 2.7662012577056885, |
| "learning_rate": 1.6772815716257414e-05, |
| "loss": 3.4918, |
| "step": 640 |
| }, |
| { |
| "epoch": 16.92753623188406, |
| "grad_norm": 2.217256784439087, |
| "learning_rate": 1.6636513986016215e-05, |
| "loss": 3.4541, |
| "step": 650 |
| }, |
| { |
| "epoch": 17.19645732689211, |
| "grad_norm": 3.010216474533081, |
| "learning_rate": 1.64979723212536e-05, |
| "loss": 3.6718, |
| "step": 660 |
| }, |
| { |
| "epoch": 17.454106280193237, |
| "grad_norm": 3.430988073348999, |
| "learning_rate": 1.6357237482099682e-05, |
| "loss": 3.4517, |
| "step": 670 |
| }, |
| { |
| "epoch": 17.711755233494365, |
| "grad_norm": 2.378178119659424, |
| "learning_rate": 1.621435696891765e-05, |
| "loss": 3.4259, |
| "step": 680 |
| }, |
| { |
| "epoch": 17.96940418679549, |
| "grad_norm": 2.877561569213867, |
| "learning_rate": 1.606937900627157e-05, |
| "loss": 3.4104, |
| "step": 690 |
| }, |
| { |
| "epoch": 18.238325281803544, |
| "grad_norm": 2.5511293411254883, |
| "learning_rate": 1.5922352526649803e-05, |
| "loss": 3.6157, |
| "step": 700 |
| }, |
| { |
| "epoch": 18.238325281803544, |
| "eval_loss": 3.421673059463501, |
| "eval_runtime": 8.3529, |
| "eval_samples_per_second": 192.149, |
| "eval_steps_per_second": 6.106, |
| "step": 700 |
| }, |
| { |
| "epoch": 18.49597423510467, |
| "grad_norm": 2.8884575366973877, |
| "learning_rate": 1.5773327153949465e-05, |
| "loss": 3.4023, |
| "step": 710 |
| }, |
| { |
| "epoch": 18.753623188405797, |
| "grad_norm": 2.9396398067474365, |
| "learning_rate": 1.5622353186727542e-05, |
| "loss": 3.3875, |
| "step": 720 |
| }, |
| { |
| "epoch": 19.022544283413847, |
| "grad_norm": 3.390272855758667, |
| "learning_rate": 1.5469481581224274e-05, |
| "loss": 3.596, |
| "step": 730 |
| }, |
| { |
| "epoch": 19.280193236714975, |
| "grad_norm": 2.9179751873016357, |
| "learning_rate": 1.531476393416456e-05, |
| "loss": 3.3614, |
| "step": 740 |
| }, |
| { |
| "epoch": 19.537842190016104, |
| "grad_norm": 4.342045307159424, |
| "learning_rate": 1.5158252465343242e-05, |
| "loss": 3.3597, |
| "step": 750 |
| }, |
| { |
| "epoch": 19.79549114331723, |
| "grad_norm": 2.955822706222534, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 3.3584, |
| "step": 760 |
| }, |
| { |
| "epoch": 20.064412238325282, |
| "grad_norm": 2.6151533126831055, |
| "learning_rate": 1.4840059950989992e-05, |
| "loss": 3.5532, |
| "step": 770 |
| }, |
| { |
| "epoch": 20.32206119162641, |
| "grad_norm": 3.1554718017578125, |
| "learning_rate": 1.467848630075608e-05, |
| "loss": 3.3281, |
| "step": 780 |
| }, |
| { |
| "epoch": 20.579710144927535, |
| "grad_norm": 2.572737455368042, |
| "learning_rate": 1.4515333583108896e-05, |
| "loss": 3.3162, |
| "step": 790 |
| }, |
| { |
| "epoch": 20.837359098228664, |
| "grad_norm": 3.3176584243774414, |
| "learning_rate": 1.4350656864820733e-05, |
| "loss": 3.3161, |
| "step": 800 |
| }, |
| { |
| "epoch": 20.837359098228664, |
| "eval_loss": 3.3334078788757324, |
| "eval_runtime": 8.7701, |
| "eval_samples_per_second": 183.009, |
| "eval_steps_per_second": 5.815, |
| "step": 800 |
| }, |
| { |
| "epoch": 21.106280193236714, |
| "grad_norm": 3.9218039512634277, |
| "learning_rate": 1.4184511727039612e-05, |
| "loss": 3.5253, |
| "step": 810 |
| }, |
| { |
| "epoch": 21.363929146537842, |
| "grad_norm": 3.392868995666504, |
| "learning_rate": 1.4016954246529697e-05, |
| "loss": 3.2947, |
| "step": 820 |
| }, |
| { |
| "epoch": 21.62157809983897, |
| "grad_norm": 3.4435510635375977, |
| "learning_rate": 1.3848040976744459e-05, |
| "loss": 3.3021, |
| "step": 830 |
| }, |
| { |
| "epoch": 21.879227053140095, |
| "grad_norm": 3.319772243499756, |
| "learning_rate": 1.3677828928738934e-05, |
| "loss": 3.3022, |
| "step": 840 |
| }, |
| { |
| "epoch": 22.14814814814815, |
| "grad_norm": 3.0634384155273438, |
| "learning_rate": 1.3506375551927546e-05, |
| "loss": 3.4676, |
| "step": 850 |
| }, |
| { |
| "epoch": 22.405797101449274, |
| "grad_norm": 4.093255996704102, |
| "learning_rate": 1.3333738714693958e-05, |
| "loss": 3.2705, |
| "step": 860 |
| }, |
| { |
| "epoch": 22.663446054750402, |
| "grad_norm": 3.1328248977661133, |
| "learning_rate": 1.3159976684859528e-05, |
| "loss": 3.2759, |
| "step": 870 |
| }, |
| { |
| "epoch": 22.92109500805153, |
| "grad_norm": 3.3607850074768066, |
| "learning_rate": 1.2985148110016947e-05, |
| "loss": 3.26, |
| "step": 880 |
| }, |
| { |
| "epoch": 23.19001610305958, |
| "grad_norm": 4.250182628631592, |
| "learning_rate": 1.2809311997735697e-05, |
| "loss": 3.4547, |
| "step": 890 |
| }, |
| { |
| "epoch": 23.44766505636071, |
| "grad_norm": 3.2897439002990723, |
| "learning_rate": 1.2632527695645993e-05, |
| "loss": 3.2482, |
| "step": 900 |
| }, |
| { |
| "epoch": 23.44766505636071, |
| "eval_loss": 3.2658839225769043, |
| "eval_runtime": 8.3079, |
| "eval_samples_per_second": 193.189, |
| "eval_steps_per_second": 6.139, |
| "step": 900 |
| }, |
| { |
| "epoch": 23.705314009661837, |
| "grad_norm": 4.053313255310059, |
| "learning_rate": 1.2454854871407993e-05, |
| "loss": 3.253, |
| "step": 910 |
| }, |
| { |
| "epoch": 23.962962962962962, |
| "grad_norm": 3.2929084300994873, |
| "learning_rate": 1.2276353492572937e-05, |
| "loss": 3.2319, |
| "step": 920 |
| }, |
| { |
| "epoch": 24.231884057971016, |
| "grad_norm": 3.831101655960083, |
| "learning_rate": 1.2097083806343104e-05, |
| "loss": 3.4266, |
| "step": 930 |
| }, |
| { |
| "epoch": 24.48953301127214, |
| "grad_norm": 3.9928274154663086, |
| "learning_rate": 1.1917106319237386e-05, |
| "loss": 3.2313, |
| "step": 940 |
| }, |
| { |
| "epoch": 24.74718196457327, |
| "grad_norm": 3.4913885593414307, |
| "learning_rate": 1.1736481776669307e-05, |
| "loss": 3.2157, |
| "step": 950 |
| }, |
| { |
| "epoch": 25.01610305958132, |
| "grad_norm": 5.290629863739014, |
| "learning_rate": 1.1555271142444433e-05, |
| "loss": 3.4133, |
| "step": 960 |
| }, |
| { |
| "epoch": 25.273752012882447, |
| "grad_norm": 3.4766597747802734, |
| "learning_rate": 1.1373535578184083e-05, |
| "loss": 3.1999, |
| "step": 970 |
| }, |
| { |
| "epoch": 25.531400966183575, |
| "grad_norm": 2.469715118408203, |
| "learning_rate": 1.1191336422682237e-05, |
| "loss": 3.2024, |
| "step": 980 |
| }, |
| { |
| "epoch": 25.789049919484704, |
| "grad_norm": 3.1561241149902344, |
| "learning_rate": 1.1008735171202685e-05, |
| "loss": 3.2053, |
| "step": 990 |
| }, |
| { |
| "epoch": 26.057971014492754, |
| "grad_norm": 3.2899832725524902, |
| "learning_rate": 1.0825793454723325e-05, |
| "loss": 3.3858, |
| "step": 1000 |
| }, |
| { |
| "epoch": 26.057971014492754, |
| "eval_loss": 3.2082736492156982, |
| "eval_runtime": 8.3503, |
| "eval_samples_per_second": 192.209, |
| "eval_steps_per_second": 6.108, |
| "step": 1000 |
| }, |
| { |
| "epoch": 26.315619967793882, |
| "grad_norm": 3.8740477561950684, |
| "learning_rate": 1.0642573019134703e-05, |
| "loss": 3.1914, |
| "step": 1010 |
| }, |
| { |
| "epoch": 26.573268921095007, |
| "grad_norm": 3.173562526702881, |
| "learning_rate": 1.045913570439972e-05, |
| "loss": 3.1961, |
| "step": 1020 |
| }, |
| { |
| "epoch": 26.830917874396135, |
| "grad_norm": 3.0643789768218994, |
| "learning_rate": 1.0275543423681622e-05, |
| "loss": 3.1666, |
| "step": 1030 |
| }, |
| { |
| "epoch": 27.099838969404185, |
| "grad_norm": 3.262002468109131, |
| "learning_rate": 1.0091858142447266e-05, |
| "loss": 3.3683, |
| "step": 1040 |
| }, |
| { |
| "epoch": 27.357487922705314, |
| "grad_norm": 3.463393211364746, |
| "learning_rate": 9.908141857552737e-06, |
| "loss": 3.1575, |
| "step": 1050 |
| }, |
| { |
| "epoch": 27.615136876006442, |
| "grad_norm": 2.501368761062622, |
| "learning_rate": 9.724456576318383e-06, |
| "loss": 3.1672, |
| "step": 1060 |
| }, |
| { |
| "epoch": 27.872785829307567, |
| "grad_norm": 2.9846651554107666, |
| "learning_rate": 9.540864295600282e-06, |
| "loss": 3.1676, |
| "step": 1070 |
| }, |
| { |
| "epoch": 28.14170692431562, |
| "grad_norm": 3.304994821548462, |
| "learning_rate": 9.3574269808653e-06, |
| "loss": 3.345, |
| "step": 1080 |
| }, |
| { |
| "epoch": 28.39935587761675, |
| "grad_norm": 2.8297245502471924, |
| "learning_rate": 9.174206545276678e-06, |
| "loss": 3.1372, |
| "step": 1090 |
| }, |
| { |
| "epoch": 28.657004830917874, |
| "grad_norm": 2.5740602016448975, |
| "learning_rate": 8.991264828797319e-06, |
| "loss": 3.1598, |
| "step": 1100 |
| }, |
| { |
| "epoch": 28.657004830917874, |
| "eval_loss": 3.1645395755767822, |
| "eval_runtime": 8.7682, |
| "eval_samples_per_second": 183.048, |
| "eval_steps_per_second": 5.816, |
| "step": 1100 |
| }, |
| { |
| "epoch": 28.914653784219002, |
| "grad_norm": 2.494509220123291, |
| "learning_rate": 8.808663577317765e-06, |
| "loss": 3.1424, |
| "step": 1110 |
| }, |
| { |
| "epoch": 29.183574879227052, |
| "grad_norm": 2.84696888923645, |
| "learning_rate": 8.626464421815919e-06, |
| "loss": 3.3423, |
| "step": 1120 |
| }, |
| { |
| "epoch": 29.44122383252818, |
| "grad_norm": 2.335641860961914, |
| "learning_rate": 8.444728857555572e-06, |
| "loss": 3.1343, |
| "step": 1130 |
| }, |
| { |
| "epoch": 29.69887278582931, |
| "grad_norm": 2.024683952331543, |
| "learning_rate": 8.263518223330698e-06, |
| "loss": 3.1198, |
| "step": 1140 |
| }, |
| { |
| "epoch": 29.956521739130434, |
| "grad_norm": 2.1325879096984863, |
| "learning_rate": 8.082893680762619e-06, |
| "loss": 3.1287, |
| "step": 1150 |
| }, |
| { |
| "epoch": 30.225442834138487, |
| "grad_norm": 2.7636237144470215, |
| "learning_rate": 7.902916193656898e-06, |
| "loss": 3.3219, |
| "step": 1160 |
| }, |
| { |
| "epoch": 30.483091787439612, |
| "grad_norm": 3.356170177459717, |
| "learning_rate": 7.72364650742707e-06, |
| "loss": 3.1231, |
| "step": 1170 |
| }, |
| { |
| "epoch": 30.74074074074074, |
| "grad_norm": 2.864854335784912, |
| "learning_rate": 7.545145128592009e-06, |
| "loss": 3.1182, |
| "step": 1180 |
| }, |
| { |
| "epoch": 31.00966183574879, |
| "grad_norm": 3.7839581966400146, |
| "learning_rate": 7.367472304354011e-06, |
| "loss": 3.3005, |
| "step": 1190 |
| }, |
| { |
| "epoch": 31.26731078904992, |
| "grad_norm": 2.1702237129211426, |
| "learning_rate": 7.190688002264308e-06, |
| "loss": 3.1283, |
| "step": 1200 |
| }, |
| { |
| "epoch": 31.26731078904992, |
| "eval_loss": 3.1257264614105225, |
| "eval_runtime": 8.1258, |
| "eval_samples_per_second": 197.518, |
| "eval_steps_per_second": 6.276, |
| "step": 1200 |
| }, |
| { |
| "epoch": 31.524959742351047, |
| "grad_norm": 2.120652675628662, |
| "learning_rate": 7.014851889983058e-06, |
| "loss": 3.102, |
| "step": 1210 |
| }, |
| { |
| "epoch": 31.782608695652176, |
| "grad_norm": 1.8794561624526978, |
| "learning_rate": 6.840023315140476e-06, |
| "loss": 3.0918, |
| "step": 1220 |
| }, |
| { |
| "epoch": 32.051529790660226, |
| "grad_norm": 1.735843539237976, |
| "learning_rate": 6.666261285306048e-06, |
| "loss": 3.2699, |
| "step": 1230 |
| }, |
| { |
| "epoch": 32.309178743961354, |
| "grad_norm": 2.205563545227051, |
| "learning_rate": 6.4936244480724575e-06, |
| "loss": 3.0909, |
| "step": 1240 |
| }, |
| { |
| "epoch": 32.56682769726248, |
| "grad_norm": 2.3051886558532715, |
| "learning_rate": 6.322171071261071e-06, |
| "loss": 3.0824, |
| "step": 1250 |
| }, |
| { |
| "epoch": 32.824476650563604, |
| "grad_norm": 1.977542519569397, |
| "learning_rate": 6.151959023255545e-06, |
| "loss": 3.0749, |
| "step": 1260 |
| }, |
| { |
| "epoch": 33.09339774557166, |
| "grad_norm": 1.780962586402893, |
| "learning_rate": 5.983045753470308e-06, |
| "loss": 3.2847, |
| "step": 1270 |
| }, |
| { |
| "epoch": 33.351046698872786, |
| "grad_norm": 1.8276355266571045, |
| "learning_rate": 5.815488272960388e-06, |
| "loss": 3.0734, |
| "step": 1280 |
| }, |
| { |
| "epoch": 33.608695652173914, |
| "grad_norm": 2.030712366104126, |
| "learning_rate": 5.649343135179271e-06, |
| "loss": 3.08, |
| "step": 1290 |
| }, |
| { |
| "epoch": 33.86634460547504, |
| "grad_norm": 1.5411536693572998, |
| "learning_rate": 5.484666416891109e-06, |
| "loss": 3.0715, |
| "step": 1300 |
| }, |
| { |
| "epoch": 33.86634460547504, |
| "eval_loss": 3.0873820781707764, |
| "eval_runtime": 8.3167, |
| "eval_samples_per_second": 192.985, |
| "eval_steps_per_second": 6.132, |
| "step": 1300 |
| }, |
| { |
| "epoch": 34.13526570048309, |
| "grad_norm": 1.5792146921157837, |
| "learning_rate": 5.321513699243924e-06, |
| "loss": 3.2557, |
| "step": 1310 |
| }, |
| { |
| "epoch": 34.39291465378422, |
| "grad_norm": 1.6712641716003418, |
| "learning_rate": 5.159940049010015e-06, |
| "loss": 3.069, |
| "step": 1320 |
| }, |
| { |
| "epoch": 34.650563607085346, |
| "grad_norm": 1.3715860843658447, |
| "learning_rate": 5.000000000000003e-06, |
| "loss": 3.0583, |
| "step": 1330 |
| }, |
| { |
| "epoch": 34.908212560386474, |
| "grad_norm": 1.3755162954330444, |
| "learning_rate": 4.8417475346567635e-06, |
| "loss": 3.0383, |
| "step": 1340 |
| }, |
| { |
| "epoch": 35.17713365539453, |
| "grad_norm": 1.6564936637878418, |
| "learning_rate": 4.685236065835443e-06, |
| "loss": 3.2558, |
| "step": 1350 |
| }, |
| { |
| "epoch": 35.43478260869565, |
| "grad_norm": 1.6100966930389404, |
| "learning_rate": 4.530518418775734e-06, |
| "loss": 3.0465, |
| "step": 1360 |
| }, |
| { |
| "epoch": 35.69243156199678, |
| "grad_norm": 1.8294345140457153, |
| "learning_rate": 4.3776468132724605e-06, |
| "loss": 3.0302, |
| "step": 1370 |
| }, |
| { |
| "epoch": 35.950080515297905, |
| "grad_norm": 1.43662428855896, |
| "learning_rate": 4.226672846050538e-06, |
| "loss": 3.0423, |
| "step": 1380 |
| }, |
| { |
| "epoch": 36.21900161030596, |
| "grad_norm": 1.3078149557113647, |
| "learning_rate": 4.077647473350201e-06, |
| "loss": 3.2217, |
| "step": 1390 |
| }, |
| { |
| "epoch": 36.47665056360709, |
| "grad_norm": 1.55823814868927, |
| "learning_rate": 3.930620993728434e-06, |
| "loss": 3.0355, |
| "step": 1400 |
| }, |
| { |
| "epoch": 36.47665056360709, |
| "eval_loss": 3.0503032207489014, |
| "eval_runtime": 8.1448, |
| "eval_samples_per_second": 197.057, |
| "eval_steps_per_second": 6.262, |
| "step": 1400 |
| }, |
| { |
| "epoch": 36.734299516908216, |
| "grad_norm": 1.2544021606445312, |
| "learning_rate": 3.7856430310823546e-06, |
| "loss": 3.0319, |
| "step": 1410 |
| }, |
| { |
| "epoch": 37.00322061191626, |
| "grad_norm": 2.1173486709594727, |
| "learning_rate": 3.6427625179003223e-06, |
| "loss": 3.2062, |
| "step": 1420 |
| }, |
| { |
| "epoch": 37.26086956521739, |
| "grad_norm": 1.023248314857483, |
| "learning_rate": 3.5020276787464058e-06, |
| "loss": 3.0102, |
| "step": 1430 |
| }, |
| { |
| "epoch": 37.51851851851852, |
| "grad_norm": 1.3972448110580444, |
| "learning_rate": 3.3634860139837877e-06, |
| "loss": 3.0139, |
| "step": 1440 |
| }, |
| { |
| "epoch": 37.77616747181965, |
| "grad_norm": 1.1301438808441162, |
| "learning_rate": 3.2271842837425917e-06, |
| "loss": 3.0306, |
| "step": 1450 |
| }, |
| { |
| "epoch": 38.045088566827694, |
| "grad_norm": 1.2840826511383057, |
| "learning_rate": 3.0931684921375572e-06, |
| "loss": 3.1895, |
| "step": 1460 |
| }, |
| { |
| "epoch": 38.30273752012882, |
| "grad_norm": 1.4581588506698608, |
| "learning_rate": 2.9614838717408866e-06, |
| "loss": 3.0103, |
| "step": 1470 |
| }, |
| { |
| "epoch": 38.56038647342995, |
| "grad_norm": 1.152060627937317, |
| "learning_rate": 2.8321748683154893e-06, |
| "loss": 3.0104, |
| "step": 1480 |
| }, |
| { |
| "epoch": 38.81803542673108, |
| "grad_norm": 1.0674842596054077, |
| "learning_rate": 2.7052851258137936e-06, |
| "loss": 3.0031, |
| "step": 1490 |
| }, |
| { |
| "epoch": 39.08695652173913, |
| "grad_norm": 1.0400310754776, |
| "learning_rate": 2.580857471647186e-06, |
| "loss": 3.1836, |
| "step": 1500 |
| }, |
| { |
| "epoch": 39.08695652173913, |
| "eval_loss": 3.0163846015930176, |
| "eval_runtime": 8.3605, |
| "eval_samples_per_second": 191.973, |
| "eval_steps_per_second": 6.1, |
| "step": 1500 |
| }, |
| { |
| "epoch": 39.34460547504026, |
| "grad_norm": 0.9005797505378723, |
| "learning_rate": 2.4589339022310386e-06, |
| "loss": 2.9967, |
| "step": 1510 |
| }, |
| { |
| "epoch": 39.60225442834138, |
| "grad_norm": 1.032657265663147, |
| "learning_rate": 2.339555568810221e-06, |
| "loss": 2.9843, |
| "step": 1520 |
| }, |
| { |
| "epoch": 39.85990338164251, |
| "grad_norm": 1.0777302980422974, |
| "learning_rate": 2.2227627635698624e-06, |
| "loss": 2.9965, |
| "step": 1530 |
| }, |
| { |
| "epoch": 40.128824476650564, |
| "grad_norm": 1.085571050643921, |
| "learning_rate": 2.1085949060360654e-06, |
| "loss": 3.177, |
| "step": 1540 |
| }, |
| { |
| "epoch": 40.38647342995169, |
| "grad_norm": 0.7760082483291626, |
| "learning_rate": 1.9970905297711606e-06, |
| "loss": 2.9783, |
| "step": 1550 |
| }, |
| { |
| "epoch": 40.64412238325282, |
| "grad_norm": 0.6137647032737732, |
| "learning_rate": 1.8882872693679787e-06, |
| "loss": 2.9734, |
| "step": 1560 |
| }, |
| { |
| "epoch": 40.90177133655394, |
| "grad_norm": 0.5791866779327393, |
| "learning_rate": 1.7822218477475496e-06, |
| "loss": 2.9795, |
| "step": 1570 |
| }, |
| { |
| "epoch": 41.170692431561996, |
| "grad_norm": 0.5843685865402222, |
| "learning_rate": 1.6789300637645e-06, |
| "loss": 3.1678, |
| "step": 1580 |
| }, |
| { |
| "epoch": 41.428341384863124, |
| "grad_norm": 0.5474081039428711, |
| "learning_rate": 1.578446780124344e-06, |
| "loss": 2.9794, |
| "step": 1590 |
| }, |
| { |
| "epoch": 41.68599033816425, |
| "grad_norm": 0.44598644971847534, |
| "learning_rate": 1.4808059116167306e-06, |
| "loss": 2.9781, |
| "step": 1600 |
| }, |
| { |
| "epoch": 41.68599033816425, |
| "eval_loss": 2.992079734802246, |
| "eval_runtime": 8.1194, |
| "eval_samples_per_second": 197.675, |
| "eval_steps_per_second": 6.281, |
| "step": 1600 |
| }, |
| { |
| "epoch": 41.94363929146538, |
| "grad_norm": 0.4411242604255676, |
| "learning_rate": 1.3860404136686411e-06, |
| "loss": 2.963, |
| "step": 1610 |
| }, |
| { |
| "epoch": 42.21256038647343, |
| "grad_norm": 0.563995361328125, |
| "learning_rate": 1.294182271221377e-06, |
| "loss": 3.1584, |
| "step": 1620 |
| }, |
| { |
| "epoch": 42.470209339774556, |
| "grad_norm": 0.4634499251842499, |
| "learning_rate": 1.2052624879351105e-06, |
| "loss": 2.9709, |
| "step": 1630 |
| }, |
| { |
| "epoch": 42.727858293075684, |
| "grad_norm": 0.477427214384079, |
| "learning_rate": 1.1193110757246251e-06, |
| "loss": 2.9687, |
| "step": 1640 |
| }, |
| { |
| "epoch": 42.98550724637681, |
| "grad_norm": 0.6824309825897217, |
| "learning_rate": 1.0363570446297999e-06, |
| "loss": 3.0892, |
| "step": 1650 |
| }, |
| { |
| "epoch": 43.254428341384866, |
| "grad_norm": 0.5210818648338318, |
| "learning_rate": 9.564283930242258e-07, |
| "loss": 3.0266, |
| "step": 1660 |
| }, |
| { |
| "epoch": 43.51207729468599, |
| "grad_norm": 0.43750736117362976, |
| "learning_rate": 8.79552098165296e-07, |
| "loss": 2.9497, |
| "step": 1670 |
| }, |
| { |
| "epoch": 43.769726247987116, |
| "grad_norm": 0.4109102189540863, |
| "learning_rate": 8.057541070889229e-07, |
| "loss": 2.9692, |
| "step": 1680 |
| }, |
| { |
| "epoch": 44.03864734299517, |
| "grad_norm": 0.400216668844223, |
| "learning_rate": 7.350593278519824e-07, |
| "loss": 3.1485, |
| "step": 1690 |
| }, |
| { |
| "epoch": 44.2962962962963, |
| "grad_norm": 0.3974422514438629, |
| "learning_rate": 6.67491621125429e-07, |
| "loss": 2.9547, |
| "step": 1700 |
| }, |
| { |
| "epoch": 44.2962962962963, |
| "eval_loss": 2.979264736175537, |
| "eval_runtime": 8.314, |
| "eval_samples_per_second": 193.047, |
| "eval_steps_per_second": 6.134, |
| "step": 1700 |
| }, |
| { |
| "epoch": 44.553945249597426, |
| "grad_norm": 0.38310766220092773, |
| "learning_rate": 6.030737921409169e-07, |
| "loss": 2.9513, |
| "step": 1710 |
| }, |
| { |
| "epoch": 44.81159420289855, |
| "grad_norm": 0.33993130922317505, |
| "learning_rate": 5.418275829936537e-07, |
| "loss": 2.9661, |
| "step": 1720 |
| }, |
| { |
| "epoch": 45.0805152979066, |
| "grad_norm": 0.43745213747024536, |
| "learning_rate": 4.837736653040825e-07, |
| "loss": 3.162, |
| "step": 1730 |
| }, |
| { |
| "epoch": 45.33816425120773, |
| "grad_norm": 0.3880692720413208, |
| "learning_rate": 4.2893163324085886e-07, |
| "loss": 2.9594, |
| "step": 1740 |
| }, |
| { |
| "epoch": 45.59581320450886, |
| "grad_norm": 0.37400293350219727, |
| "learning_rate": 3.773199969074959e-07, |
| "loss": 2.9473, |
| "step": 1750 |
| }, |
| { |
| "epoch": 45.853462157809986, |
| "grad_norm": 0.2918168306350708, |
| "learning_rate": 3.2895617609489337e-07, |
| "loss": 2.9553, |
| "step": 1760 |
| }, |
| { |
| "epoch": 46.12238325281803, |
| "grad_norm": 0.3325757384300232, |
| "learning_rate": 2.838564944018618e-07, |
| "loss": 3.148, |
| "step": 1770 |
| }, |
| { |
| "epoch": 46.38003220611916, |
| "grad_norm": 0.2728801965713501, |
| "learning_rate": 2.420361737256438e-07, |
| "loss": 2.9573, |
| "step": 1780 |
| }, |
| { |
| "epoch": 46.63768115942029, |
| "grad_norm": 0.29378488659858704, |
| "learning_rate": 2.035093291242607e-07, |
| "loss": 2.9527, |
| "step": 1790 |
| }, |
| { |
| "epoch": 46.89533011272142, |
| "grad_norm": 0.2589620053768158, |
| "learning_rate": 1.6828896405244988e-07, |
| "loss": 2.9652, |
| "step": 1800 |
| }, |
| { |
| "epoch": 46.89533011272142, |
| "eval_loss": 2.97452712059021, |
| "eval_runtime": 8.3612, |
| "eval_samples_per_second": 191.958, |
| "eval_steps_per_second": 6.1, |
| "step": 1800 |
| }, |
| { |
| "epoch": 47.16425120772947, |
| "grad_norm": 0.2649601101875305, |
| "learning_rate": 1.3638696597277678e-07, |
| "loss": 3.121, |
| "step": 1810 |
| }, |
| { |
| "epoch": 47.42190016103059, |
| "grad_norm": 0.2711832523345947, |
| "learning_rate": 1.0781410234342093e-07, |
| "loss": 2.9597, |
| "step": 1820 |
| }, |
| { |
| "epoch": 47.67954911433172, |
| "grad_norm": 0.25574928522109985, |
| "learning_rate": 8.258001698397744e-08, |
| "loss": 2.9549, |
| "step": 1830 |
| }, |
| { |
| "epoch": 47.93719806763285, |
| "grad_norm": 0.2450728863477707, |
| "learning_rate": 6.069322682050516e-08, |
| "loss": 2.9592, |
| "step": 1840 |
| }, |
| { |
| "epoch": 48.2061191626409, |
| "grad_norm": 0.24142402410507202, |
| "learning_rate": 4.216111901092501e-08, |
| "loss": 3.1392, |
| "step": 1850 |
| }, |
| { |
| "epoch": 48.46376811594203, |
| "grad_norm": 0.2513294219970703, |
| "learning_rate": 2.6989948451726643e-08, |
| "loss": 2.9471, |
| "step": 1860 |
| }, |
| { |
| "epoch": 48.72141706924316, |
| "grad_norm": 0.20774802565574646, |
| "learning_rate": 1.518483566683826e-08, |
| "loss": 2.9531, |
| "step": 1870 |
| }, |
| { |
| "epoch": 48.97906602254428, |
| "grad_norm": 0.23109908401966095, |
| "learning_rate": 6.749765079363535e-09, |
| "loss": 2.9571, |
| "step": 1880 |
| }, |
| { |
| "epoch": 49.247987117552334, |
| "grad_norm": 0.220932275056839, |
| "learning_rate": 1.6875836667729073e-09, |
| "loss": 3.1546, |
| "step": 1890 |
| }, |
| { |
| "epoch": 49.50563607085346, |
| "grad_norm": 0.23937705159187317, |
| "learning_rate": 0.0, |
| "loss": 2.9684, |
| "step": 1900 |
| }, |
| { |
| "epoch": 49.50563607085346, |
| "eval_loss": 2.9738194942474365, |
| "eval_runtime": 8.1597, |
| "eval_samples_per_second": 196.699, |
| "eval_steps_per_second": 6.25, |
| "step": 1900 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 1900, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 50, |
| "save_steps": 100, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 5.8985668658017075e+19, |
| "train_batch_size": 16, |
| "trial_name": null, |
| "trial_params": null |
| } |
|
|