{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1797,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016711928138709004,
      "grad_norm": 0.5531014800071716,
      "learning_rate": 5e-06,
      "loss": 0.9993,
      "step": 10
    },
    {
      "epoch": 0.03342385627741801,
      "grad_norm": 0.5198462009429932,
      "learning_rate": 1.0555555555555555e-05,
      "loss": 0.9973,
      "step": 20
    },
    {
      "epoch": 0.05013578441612701,
      "grad_norm": 0.9699999094009399,
      "learning_rate": 1.6111111111111115e-05,
      "loss": 0.9474,
      "step": 30
    },
    {
      "epoch": 0.06684771255483601,
      "grad_norm": 0.3459213078022003,
      "learning_rate": 2.1666666666666667e-05,
      "loss": 0.7622,
      "step": 40
    },
    {
      "epoch": 0.08355964069354502,
      "grad_norm": 0.24512118101119995,
      "learning_rate": 2.7222222222222223e-05,
      "loss": 0.7086,
      "step": 50
    },
    {
      "epoch": 0.10027156883225402,
      "grad_norm": 0.21762403845787048,
      "learning_rate": 3.277777777777778e-05,
      "loss": 0.6895,
      "step": 60
    },
    {
      "epoch": 0.11698349697096302,
      "grad_norm": 0.22329938411712646,
      "learning_rate": 3.8333333333333334e-05,
      "loss": 0.6491,
      "step": 70
    },
    {
      "epoch": 0.13369542510967203,
      "grad_norm": 0.19780907034873962,
      "learning_rate": 4.388888888888889e-05,
      "loss": 0.6225,
      "step": 80
    },
    {
      "epoch": 0.15040735324838103,
      "grad_norm": 0.17309053242206573,
      "learning_rate": 4.9444444444444446e-05,
      "loss": 0.6026,
      "step": 90
    },
    {
      "epoch": 0.16711928138709004,
      "grad_norm": 0.20399844646453857,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.5435,
      "step": 100
    },
    {
      "epoch": 0.18383120952579904,
      "grad_norm": 0.21713724732398987,
      "learning_rate": 6.055555555555555e-05,
      "loss": 0.581,
      "step": 110
    },
    {
      "epoch": 0.20054313766450804,
      "grad_norm": 0.24566805362701416,
      "learning_rate": 6.611111111111111e-05,
      "loss": 0.5203,
      "step": 120
    },
    {
      "epoch": 0.21725506580321705,
      "grad_norm": 0.26538822054862976,
      "learning_rate": 7.166666666666667e-05,
      "loss": 0.5975,
      "step": 130
    },
    {
      "epoch": 0.23396699394192605,
      "grad_norm": 0.24092447757720947,
      "learning_rate": 7.722222222222223e-05,
      "loss": 0.5627,
      "step": 140
    },
    {
      "epoch": 0.2506789220806351,
      "grad_norm": 0.24146470427513123,
      "learning_rate": 8.277777777777778e-05,
      "loss": 0.5289,
      "step": 150
    },
    {
      "epoch": 0.26739085021934406,
      "grad_norm": 0.8219459652900696,
      "learning_rate": 8.833333333333333e-05,
      "loss": 0.5347,
      "step": 160
    },
    {
      "epoch": 0.2841027783580531,
      "grad_norm": 0.2518085241317749,
      "learning_rate": 9.388888888888889e-05,
      "loss": 0.5491,
      "step": 170
    },
    {
      "epoch": 0.30081470649676206,
      "grad_norm": 0.2775290012359619,
      "learning_rate": 9.944444444444446e-05,
      "loss": 0.5701,
      "step": 180
    },
    {
      "epoch": 0.3175266346354711,
      "grad_norm": 0.2761514186859131,
      "learning_rate": 9.999235647539953e-05,
      "loss": 0.5489,
      "step": 190
    },
    {
      "epoch": 0.33423856277418007,
      "grad_norm": 0.32875457406044006,
      "learning_rate": 9.996593741531468e-05,
      "loss": 0.5374,
      "step": 200
    },
    {
      "epoch": 0.3509504909128891,
      "grad_norm": 0.2761303186416626,
      "learning_rate": 9.992065842489567e-05,
      "loss": 0.4916,
      "step": 210
    },
    {
      "epoch": 0.3676624190515981,
      "grad_norm": 0.2781451940536499,
      "learning_rate": 9.985653659495773e-05,
      "loss": 0.5826,
      "step": 220
    },
    {
      "epoch": 0.3843743471903071,
      "grad_norm": 0.32615798711776733,
      "learning_rate": 9.977359612865423e-05,
      "loss": 0.532,
      "step": 230
    },
    {
      "epoch": 0.4010862753290161,
      "grad_norm": 0.2824324667453766,
      "learning_rate": 9.967186833234101e-05,
      "loss": 0.5482,
      "step": 240
    },
    {
      "epoch": 0.4177982034677251,
      "grad_norm": 0.2630137503147125,
      "learning_rate": 9.955139160375959e-05,
      "loss": 0.4917,
      "step": 250
    },
    {
      "epoch": 0.4345101316064341,
      "grad_norm": 0.3076798915863037,
      "learning_rate": 9.941221141754385e-05,
      "loss": 0.5285,
      "step": 260
    },
    {
      "epoch": 0.4512220597451431,
      "grad_norm": 0.3244406282901764,
      "learning_rate": 9.925438030805518e-05,
      "loss": 0.5178,
      "step": 270
    },
    {
      "epoch": 0.4679339878838521,
      "grad_norm": 0.3224153220653534,
      "learning_rate": 9.907795784955327e-05,
      "loss": 0.5184,
      "step": 280
    },
    {
      "epoch": 0.48464591602256113,
      "grad_norm": 0.3541305959224701,
      "learning_rate": 9.888301063370934e-05,
      "loss": 0.5327,
      "step": 290
    },
    {
      "epoch": 0.5013578441612702,
      "grad_norm": 0.28691530227661133,
      "learning_rate": 9.866961224447075e-05,
      "loss": 0.5461,
      "step": 300
    },
    {
      "epoch": 0.5180697722999791,
      "grad_norm": 0.4132674038410187,
      "learning_rate": 9.843784323028638e-05,
      "loss": 0.5242,
      "step": 310
    },
    {
      "epoch": 0.5347817004386881,
      "grad_norm": 0.33508822321891785,
      "learning_rate": 9.818779107370309e-05,
      "loss": 0.5285,
      "step": 320
    },
    {
      "epoch": 0.5514936285773971,
      "grad_norm": 0.2662738561630249,
      "learning_rate": 9.791955015834492e-05,
      "loss": 0.5287,
      "step": 330
    },
    {
      "epoch": 0.5682055567161062,
      "grad_norm": 0.30024951696395874,
      "learning_rate": 9.763322173328753e-05,
      "loss": 0.5329,
      "step": 340
    },
    {
      "epoch": 0.5849174848548151,
      "grad_norm": 0.27861520648002625,
      "learning_rate": 9.732891387484104e-05,
      "loss": 0.4656,
      "step": 350
    },
    {
      "epoch": 0.6016294129935241,
      "grad_norm": 0.33017635345458984,
      "learning_rate": 9.700674144575614e-05,
      "loss": 0.5334,
      "step": 360
    },
    {
      "epoch": 0.6183413411322332,
      "grad_norm": 0.3784632384777069,
      "learning_rate": 9.666682605186835e-05,
      "loss": 0.4669,
      "step": 370
    },
    {
      "epoch": 0.6350532692709422,
      "grad_norm": 0.3362145721912384,
      "learning_rate": 9.63092959961973e-05,
      "loss": 0.4938,
      "step": 380
    },
    {
      "epoch": 0.6517651974096511,
      "grad_norm": 0.2863125801086426,
      "learning_rate": 9.593428623051792e-05,
      "loss": 0.4815,
      "step": 390
    },
    {
      "epoch": 0.6684771255483601,
      "grad_norm": 0.23034977912902832,
      "learning_rate": 9.554193830442229e-05,
      "loss": 0.5249,
      "step": 400
    },
    {
      "epoch": 0.6851890536870692,
      "grad_norm": 0.29409608244895935,
      "learning_rate": 9.513240031189067e-05,
      "loss": 0.4899,
      "step": 410
    },
    {
      "epoch": 0.7019009818257782,
      "grad_norm": 0.3073919415473938,
      "learning_rate": 9.470582683539285e-05,
      "loss": 0.5115,
      "step": 420
    },
    {
      "epoch": 0.7186129099644871,
      "grad_norm": 0.33221715688705444,
      "learning_rate": 9.42623788875399e-05,
      "loss": 0.5021,
      "step": 430
    },
    {
      "epoch": 0.7353248381031962,
      "grad_norm": 0.24137817323207855,
      "learning_rate": 9.380222385030915e-05,
      "loss": 0.4757,
      "step": 440
    },
    {
      "epoch": 0.7520367662419052,
      "grad_norm": 0.28623566031455994,
      "learning_rate": 9.332553541186485e-05,
      "loss": 0.515,
      "step": 450
    },
    {
      "epoch": 0.7687486943806142,
      "grad_norm": 0.30202344059944153,
      "learning_rate": 9.283249350099859e-05,
      "loss": 0.5259,
      "step": 460
    },
    {
      "epoch": 0.7854606225193231,
      "grad_norm": 0.264213502407074,
      "learning_rate": 9.23232842192142e-05,
      "loss": 0.5224,
      "step": 470
    },
    {
      "epoch": 0.8021725506580322,
      "grad_norm": 0.2509612739086151,
      "learning_rate": 9.179809977048248e-05,
      "loss": 0.5318,
      "step": 480
    },
    {
      "epoch": 0.8188844787967412,
      "grad_norm": 0.28840652108192444,
      "learning_rate": 9.125713838869299e-05,
      "loss": 0.5287,
      "step": 490
    },
    {
      "epoch": 0.8355964069354502,
      "grad_norm": 0.30437350273132324,
      "learning_rate": 9.070060426282925e-05,
      "loss": 0.5256,
      "step": 500
    },
    {
      "epoch": 0.8523083350741592,
      "grad_norm": 0.3300555646419525,
      "learning_rate": 9.012870745989663e-05,
      "loss": 0.4876,
      "step": 510
    },
    {
      "epoch": 0.8690202632128682,
      "grad_norm": 0.27293577790260315,
      "learning_rate": 8.954166384563127e-05,
      "loss": 0.5136,
      "step": 520
    },
    {
      "epoch": 0.8857321913515772,
      "grad_norm": 0.29159626364707947,
      "learning_rate": 8.893969500302031e-05,
      "loss": 0.5224,
      "step": 530
    },
    {
      "epoch": 0.9024441194902862,
      "grad_norm": 0.34646981954574585,
      "learning_rate": 8.832302814866416e-05,
      "loss": 0.5117,
      "step": 540
    },
    {
      "epoch": 0.9191560476289952,
      "grad_norm": 0.2947717607021332,
      "learning_rate": 8.76918960470122e-05,
      "loss": 0.534,
      "step": 550
    },
    {
      "epoch": 0.9358679757677042,
      "grad_norm": 0.2506949305534363,
      "learning_rate": 8.704653692250466e-05,
      "loss": 0.5013,
      "step": 560
    },
    {
      "epoch": 0.9525799039064132,
      "grad_norm": 0.28196585178375244,
      "learning_rate": 8.638719436965325e-05,
      "loss": 0.5089,
      "step": 570
    },
    {
      "epoch": 0.9692918320451223,
      "grad_norm": 0.26513898372650146,
      "learning_rate": 8.571411726109519e-05,
      "loss": 0.5346,
      "step": 580
    },
    {
      "epoch": 0.9860037601838312,
      "grad_norm": 0.3039480149745941,
      "learning_rate": 8.50275596536546e-05,
      "loss": 0.5097,
      "step": 590
    },
    {
      "epoch": 1.0016711928138708,
      "grad_norm": 0.2783941328525543,
      "learning_rate": 8.432778069244749e-05,
      "loss": 0.5139,
      "step": 600
    },
    {
      "epoch": 1.01838312095258,
      "grad_norm": 0.2535961866378784,
      "learning_rate": 8.361504451306585e-05,
      "loss": 0.4471,
      "step": 610
    },
    {
      "epoch": 1.035095049091289,
      "grad_norm": 0.32639509439468384,
      "learning_rate": 8.288962014187811e-05,
      "loss": 0.4725,
      "step": 620
    },
    {
      "epoch": 1.0518069772299978,
      "grad_norm": 0.3780384361743927,
      "learning_rate": 8.21517813944837e-05,
      "loss": 0.4459,
      "step": 630
    },
    {
      "epoch": 1.068518905368707,
      "grad_norm": 0.297475129365921,
      "learning_rate": 8.14018067723597e-05,
      "loss": 0.4515,
      "step": 640
    },
    {
      "epoch": 1.085230833507416,
      "grad_norm": 0.32445859909057617,
      "learning_rate": 8.063997935773885e-05,
      "loss": 0.4648,
      "step": 650
    },
    {
      "epoch": 1.101942761646125,
      "grad_norm": 0.333881676197052,
      "learning_rate": 7.986658670675861e-05,
      "loss": 0.4549,
      "step": 660
    },
    {
      "epoch": 1.118654689784834,
      "grad_norm": 0.3279685974121094,
      "learning_rate": 7.908192074092136e-05,
      "loss": 0.4768,
      "step": 670
    },
    {
      "epoch": 1.1353666179235429,
      "grad_norm": 0.36196866631507874,
      "learning_rate": 7.828627763690697e-05,
      "loss": 0.5039,
      "step": 680
    },
    {
      "epoch": 1.152078546062252,
      "grad_norm": 0.3395329713821411,
      "learning_rate": 7.747995771477928e-05,
      "loss": 0.4628,
      "step": 690
    },
    {
      "epoch": 1.168790474200961,
      "grad_norm": 0.3666079342365265,
      "learning_rate": 7.666326532462842e-05,
      "loss": 0.444,
      "step": 700
    },
    {
      "epoch": 1.1855024023396699,
      "grad_norm": 0.3585106432437897,
      "learning_rate": 7.583650873169232e-05,
      "loss": 0.5077,
      "step": 710
    },
    {
      "epoch": 1.202214330478379,
      "grad_norm": 0.3411233127117157,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.4689,
      "step": 720
    },
    {
      "epoch": 1.218926258617088,
      "grad_norm": 0.36491307616233826,
      "learning_rate": 7.41540548745814e-05,
      "loss": 0.4509,
      "step": 730
    },
    {
      "epoch": 1.2356381867557968,
      "grad_norm": 0.3584769070148468,
      "learning_rate": 7.329899266228748e-05,
      "loss": 0.4782,
      "step": 740
    },
    {
      "epoch": 1.252350114894506,
      "grad_norm": 0.3567698299884796,
      "learning_rate": 7.243513611126608e-05,
      "loss": 0.4287,
      "step": 750
    },
    {
      "epoch": 1.269062043033215,
      "grad_norm": 0.36464792490005493,
      "learning_rate": 7.156281128913871e-05,
      "loss": 0.4642,
      "step": 760
    },
    {
      "epoch": 1.285773971171924,
      "grad_norm": 0.41288235783576965,
      "learning_rate": 7.068234745992456e-05,
      "loss": 0.4659,
      "step": 770
    },
    {
      "epoch": 1.302485899310633,
      "grad_norm": 0.8390935063362122,
      "learning_rate": 6.979407695975776e-05,
      "loss": 0.4932,
      "step": 780
    },
    {
      "epoch": 1.3191978274493419,
      "grad_norm": 0.3990253806114197,
      "learning_rate": 6.889833507144532e-05,
      "loss": 0.494,
      "step": 790
    },
    {
      "epoch": 1.335909755588051,
      "grad_norm": 0.39918258786201477,
      "learning_rate": 6.799545989791268e-05,
      "loss": 0.4307,
      "step": 800
    },
    {
      "epoch": 1.35262168372676,
      "grad_norm": 0.3962526321411133,
      "learning_rate": 6.708579223458475e-05,
      "loss": 0.4515,
      "step": 810
    },
    {
      "epoch": 1.369333611865469,
      "grad_norm": 0.366760790348053,
      "learning_rate": 6.616967544075077e-05,
      "loss": 0.4783,
      "step": 820
    },
    {
      "epoch": 1.386045540004178,
      "grad_norm": 0.3852802515029907,
      "learning_rate": 6.524745530996137e-05,
      "loss": 0.4933,
      "step": 830
    },
    {
      "epoch": 1.402757468142887,
      "grad_norm": 0.3308377265930176,
      "learning_rate": 6.431947993950682e-05,
      "loss": 0.525,
      "step": 840
    },
    {
      "epoch": 1.4194693962815959,
      "grad_norm": 0.3748877942562103,
      "learning_rate": 6.338609959902569e-05,
      "loss": 0.4752,
      "step": 850
    },
    {
      "epoch": 1.436181324420305,
      "grad_norm": 0.37608233094215393,
      "learning_rate": 6.244766659829351e-05,
      "loss": 0.4818,
      "step": 860
    },
    {
      "epoch": 1.452893252559014,
      "grad_norm": 0.36020001769065857,
      "learning_rate": 6.150453515424153e-05,
      "loss": 0.4888,
      "step": 870
    },
    {
      "epoch": 1.469605180697723,
      "grad_norm": 0.40768539905548096,
      "learning_rate": 6.055706125725542e-05,
      "loss": 0.5247,
      "step": 880
    },
    {
      "epoch": 1.486317108836432,
      "grad_norm": 0.397224098443985,
      "learning_rate": 5.9605602536804673e-05,
      "loss": 0.4356,
      "step": 890
    },
    {
      "epoch": 1.503029036975141,
      "grad_norm": 0.38998743891716003,
      "learning_rate": 5.865051812645329e-05,
      "loss": 0.473,
      "step": 900
    },
    {
      "epoch": 1.51974096511385,
      "grad_norm": 0.3835464417934418,
      "learning_rate": 5.7692168528302807e-05,
      "loss": 0.4716,
      "step": 910
    },
    {
      "epoch": 1.536452893252559,
      "grad_norm": 0.3747701644897461,
      "learning_rate": 5.673091547691866e-05,
      "loss": 0.432,
      "step": 920
    },
    {
      "epoch": 1.553164821391268,
      "grad_norm": 0.3856564462184906,
      "learning_rate": 5.576712180279133e-05,
      "loss": 0.5033,
      "step": 930
    },
    {
      "epoch": 1.569876749529977,
      "grad_norm": 0.3968600332736969,
      "learning_rate": 5.480115129538409e-05,
      "loss": 0.5051,
      "step": 940
    },
    {
      "epoch": 1.586588677668686,
      "grad_norm": 0.4307195842266083,
      "learning_rate": 5.383336856581833e-05,
      "loss": 0.4576,
      "step": 950
    },
    {
      "epoch": 1.6033006058073949,
      "grad_norm": 0.401885449886322,
      "learning_rate": 5.2864138909249176e-05,
      "loss": 0.4866,
      "step": 960
    },
    {
      "epoch": 1.620012533946104,
      "grad_norm": 0.3600904941558838,
      "learning_rate": 5.189382816698263e-05,
      "loss": 0.4844,
      "step": 970
    },
    {
      "epoch": 1.6367244620848131,
      "grad_norm": 0.35338348150253296,
      "learning_rate": 5.0922802588386766e-05,
      "loss": 0.4991,
      "step": 980
    },
    {
      "epoch": 1.653436390223522,
      "grad_norm": 0.37117472290992737,
      "learning_rate": 4.9951428692648664e-05,
      "loss": 0.4709,
      "step": 990
    },
    {
      "epoch": 1.670148318362231,
      "grad_norm": 0.3381313383579254,
      "learning_rate": 4.898007313042975e-05,
      "loss": 0.4474,
      "step": 1000
    },
    {
      "epoch": 1.68686024650094,
      "grad_norm": 0.4141668677330017,
      "learning_rate": 4.8009102545471355e-05,
      "loss": 0.4849,
      "step": 1010
    },
    {
      "epoch": 1.703572174639649,
      "grad_norm": 0.34374451637268066,
      "learning_rate": 4.7038883436202955e-05,
      "loss": 0.4409,
      "step": 1020
    },
    {
      "epoch": 1.7202841027783582,
      "grad_norm": 0.38313835859298706,
      "learning_rate": 4.606978201740518e-05,
      "loss": 0.5049,
      "step": 1030
    },
    {
      "epoch": 1.7369960309170671,
      "grad_norm": 0.4117642939090729,
      "learning_rate": 4.510216408197996e-05,
      "loss": 0.4487,
      "step": 1040
    },
    {
      "epoch": 1.753707959055776,
      "grad_norm": 0.3720339238643646,
      "learning_rate": 4.4136394862879914e-05,
      "loss": 0.4641,
      "step": 1050
    },
    {
      "epoch": 1.770419887194485,
      "grad_norm": 0.389915406703949,
      "learning_rate": 4.3172838895249036e-05,
      "loss": 0.4522,
      "step": 1060
    },
    {
      "epoch": 1.787131815333194,
      "grad_norm": 0.4209120273590088,
      "learning_rate": 4.221185987882684e-05,
      "loss": 0.485,
      "step": 1070
    },
    {
      "epoch": 1.803843743471903,
      "grad_norm": 0.35964861512184143,
      "learning_rate": 4.125382054066781e-05,
      "loss": 0.4153,
      "step": 1080
    },
    {
      "epoch": 1.8205556716106122,
      "grad_norm": 0.39036113023757935,
      "learning_rate": 4.029908249822795e-05,
      "loss": 0.4543,
      "step": 1090
    },
    {
      "epoch": 1.837267599749321,
      "grad_norm": 0.41336700320243835,
      "learning_rate": 3.934800612287019e-05,
      "loss": 0.4304,
      "step": 1100
    },
    {
      "epoch": 1.85397952788803,
      "grad_norm": 0.3797999620437622,
      "learning_rate": 3.840095040384023e-05,
      "loss": 0.4792,
      "step": 1110
    },
    {
      "epoch": 1.870691456026739,
      "grad_norm": 0.3731176555156708,
      "learning_rate": 3.7458272812763875e-05,
      "loss": 0.4559,
      "step": 1120
    },
    {
      "epoch": 1.887403384165448,
      "grad_norm": 0.46234622597694397,
      "learning_rate": 3.652032916871737e-05,
      "loss": 0.4722,
      "step": 1130
    },
    {
      "epoch": 1.9041153123041572,
      "grad_norm": 0.41888976097106934,
      "learning_rate": 3.558747350392146e-05,
      "loss": 0.4874,
      "step": 1140
    },
    {
      "epoch": 1.9208272404428661,
      "grad_norm": 0.4406299889087677,
      "learning_rate": 3.466005793010985e-05,
      "loss": 0.49,
      "step": 1150
    },
    {
      "epoch": 1.937539168581575,
      "grad_norm": 0.3967576324939728,
      "learning_rate": 3.373843250562265e-05,
      "loss": 0.4246,
      "step": 1160
    },
    {
      "epoch": 1.954251096720284,
      "grad_norm": 0.4108339548110962,
      "learning_rate": 3.282294510327478e-05,
      "loss": 0.4895,
      "step": 1170
    },
    {
      "epoch": 1.970963024858993,
      "grad_norm": 0.4071328341960907,
      "learning_rate": 3.1913941279049467e-05,
      "loss": 0.5052,
      "step": 1180
    },
    {
      "epoch": 1.9876749529977022,
      "grad_norm": 0.36455124616622925,
      "learning_rate": 3.101176414166605e-05,
      "loss": 0.4732,
      "step": 1190
    },
    {
      "epoch": 2.0033423856277417,
      "grad_norm": 0.3723292648792267,
      "learning_rate": 3.011675422307172e-05,
      "loss": 0.4492,
      "step": 1200
    },
    {
      "epoch": 2.0200543137664506,
      "grad_norm": 0.3702923059463501,
      "learning_rate": 2.9229249349905684e-05,
      "loss": 0.4607,
      "step": 1210
    },
    {
      "epoch": 2.03676624190516,
      "grad_norm": 0.3676780164241791,
      "learning_rate": 2.834958451598465e-05,
      "loss": 0.4592,
      "step": 1220
    },
    {
      "epoch": 2.053478170043869,
      "grad_norm": 0.3695000112056732,
      "learning_rate": 2.7478091755857422e-05,
      "loss": 0.4135,
      "step": 1230
    },
    {
      "epoch": 2.070190098182578,
      "grad_norm": 0.44124835729599,
      "learning_rate": 2.6615100019476535e-05,
      "loss": 0.4485,
      "step": 1240
    },
    {
      "epoch": 2.0869020263212867,
      "grad_norm": 0.7185173034667969,
      "learning_rate": 2.576093504803432e-05,
      "loss": 0.4462,
      "step": 1250
    },
    {
      "epoch": 2.1036139544599957,
      "grad_norm": 0.44661805033683777,
      "learning_rate": 2.491591925100985e-05,
      "loss": 0.4683,
      "step": 1260
    },
    {
      "epoch": 2.120325882598705,
      "grad_norm": 0.4305351972579956,
      "learning_rate": 2.4080371584473748e-05,
      "loss": 0.4428,
      "step": 1270
    },
    {
      "epoch": 2.137037810737414,
      "grad_norm": 0.3181787133216858,
      "learning_rate": 2.325460743069639e-05,
      "loss": 0.4265,
      "step": 1280
    },
    {
      "epoch": 2.153749738876123,
      "grad_norm": 0.4223162829875946,
      "learning_rate": 2.2438938479104952e-05,
      "loss": 0.4507,
      "step": 1290
    },
    {
      "epoch": 2.170461667014832,
      "grad_norm": 0.4869953989982605,
      "learning_rate": 2.1633672608634524e-05,
      "loss": 0.4231,
      "step": 1300
    },
    {
      "epoch": 2.1871735951535407,
      "grad_norm": 0.47924432158470154,
      "learning_rate": 2.0839113771517467e-05,
      "loss": 0.4123,
      "step": 1310
    },
    {
      "epoch": 2.20388552329225,
      "grad_norm": 0.42630112171173096,
      "learning_rate": 2.0055561878554792e-05,
      "loss": 0.3807,
      "step": 1320
    },
    {
      "epoch": 2.220597451430959,
      "grad_norm": 0.40105777978897095,
      "learning_rate": 1.928331268591315e-05,
      "loss": 0.4123,
      "step": 1330
    },
    {
      "epoch": 2.237309379569668,
      "grad_norm": 0.39029476046562195,
      "learning_rate": 1.852265768349006e-05,
      "loss": 0.4407,
      "step": 1340
    },
    {
      "epoch": 2.254021307708377,
      "grad_norm": 0.466331422328949,
      "learning_rate": 1.777388398488918e-05,
      "loss": 0.4078,
      "step": 1350
    },
    {
      "epoch": 2.2707332358470858,
      "grad_norm": 0.44317781925201416,
      "learning_rate": 1.7037274219047798e-05,
      "loss": 0.417,
      "step": 1360
    },
    {
      "epoch": 2.2874451639857947,
      "grad_norm": 0.43465059995651245,
      "learning_rate": 1.6313106423556878e-05,
      "loss": 0.4599,
      "step": 1370
    },
    {
      "epoch": 2.304157092124504,
      "grad_norm": 0.494806706905365,
      "learning_rate": 1.5601653939714074e-05,
      "loss": 0.4595,
      "step": 1380
    },
    {
      "epoch": 2.320869020263213,
      "grad_norm": 0.3743044435977936,
      "learning_rate": 1.490318530934957e-05,
      "loss": 0.4418,
      "step": 1390
    },
    {
      "epoch": 2.337580948401922,
      "grad_norm": 0.4087362587451935,
      "learning_rate": 1.4217964173463472e-05,
      "loss": 0.4198,
      "step": 1400
    },
    {
      "epoch": 2.354292876540631,
      "grad_norm": 0.46890708804130554,
      "learning_rate": 1.3546249172712849e-05,
      "loss": 0.4463,
      "step": 1410
    },
    {
      "epoch": 2.3710048046793397,
      "grad_norm": 0.43129852414131165,
      "learning_rate": 1.2888293849786503e-05,
      "loss": 0.4485,
      "step": 1420
    },
    {
      "epoch": 2.387716732818049,
      "grad_norm": 0.456182599067688,
      "learning_rate": 1.2244346553703667e-05,
      "loss": 0.4389,
      "step": 1430
    },
    {
      "epoch": 2.404428660956758,
      "grad_norm": 0.4355461895465851,
      "learning_rate": 1.161465034607332e-05,
      "loss": 0.3875,
      "step": 1440
    },
    {
      "epoch": 2.421140589095467,
      "grad_norm": 0.37242934107780457,
      "learning_rate": 1.0999442909349217e-05,
      "loss": 0.405,
      "step": 1450
    },
    {
      "epoch": 2.437852517234176,
      "grad_norm": 0.4564312696456909,
      "learning_rate": 1.0398956457115194e-05,
      "loss": 0.4153,
      "step": 1460
    },
    {
      "epoch": 2.4545644453728848,
      "grad_norm": 0.4565236270427704,
      "learning_rate": 9.813417646434864e-06,
      "loss": 0.4386,
      "step": 1470
    },
    {
      "epoch": 2.4712763735115937,
      "grad_norm": 0.4497516453266144,
      "learning_rate": 9.243047492298634e-06,
      "loss": 0.4188,
      "step": 1480
    },
    {
      "epoch": 2.487988301650303,
      "grad_norm": 0.5003072619438171,
      "learning_rate": 8.688061284200266e-06,
      "loss": 0.3996,
      "step": 1490
    },
    {
      "epoch": 2.504700229789012,
      "grad_norm": 0.45389190316200256,
      "learning_rate": 8.148668504874623e-06,
      "loss": 0.377,
      "step": 1500
    },
    {
      "epoch": 2.521412157927721,
      "grad_norm": 0.4920840263366699,
      "learning_rate": 7.625072751227297e-06,
      "loss": 0.3939,
      "step": 1510
    },
    {
      "epoch": 2.53812408606643,
      "grad_norm": 0.38830721378326416,
      "learning_rate": 7.117471657485663e-06,
      "loss": 0.4011,
      "step": 1520
    },
    {
      "epoch": 2.554836014205139,
      "grad_norm": 0.47151267528533936,
      "learning_rate": 6.626056820600768e-06,
      "loss": 0.4485,
      "step": 1530
    },
    {
      "epoch": 2.571547942343848,
      "grad_norm": 0.47489333152770996,
      "learning_rate": 6.151013727927984e-06,
      "loss": 0.4418,
      "step": 1540
    },
    {
      "epoch": 2.588259870482557,
      "grad_norm": 0.4408549666404724,
      "learning_rate": 5.69252168721367e-06,
      "loss": 0.4077,
      "step": 1550
    },
    {
      "epoch": 2.604971798621266,
      "grad_norm": 0.45609548687934875,
      "learning_rate": 5.250753758914506e-06,
      "loss": 0.398,
      "step": 1560
    },
    {
      "epoch": 2.621683726759975,
      "grad_norm": 0.4423038363456726,
      "learning_rate": 4.82587669087477e-06,
      "loss": 0.4327,
      "step": 1570
    },
    {
      "epoch": 2.6383956548986838,
      "grad_norm": 0.4574050307273865,
      "learning_rate": 4.418050855386413e-06,
      "loss": 0.388,
      "step": 1580
    },
    {
      "epoch": 2.6551075830373927,
      "grad_norm": 0.4758591949939728,
      "learning_rate": 4.027430188655684e-06,
      "loss": 0.4684,
      "step": 1590
    },
    {
      "epoch": 2.671819511176102,
      "grad_norm": 0.44531774520874023,
      "learning_rate": 3.654162132698918e-06,
      "loss": 0.4378,
      "step": 1600
    },
    {
      "epoch": 2.688531439314811,
      "grad_norm": 0.45729854702949524,
      "learning_rate": 3.298387579689771e-06,
      "loss": 0.4334,
      "step": 1610
    },
    {
      "epoch": 2.70524336745352,
      "grad_norm": 0.3810584247112274,
      "learning_rate": 2.960240818778659e-06,
      "loss": 0.3903,
      "step": 1620
    },
    {
      "epoch": 2.721955295592229,
      "grad_norm": 0.39533549547195435,
      "learning_rate": 2.639849485404505e-06,
      "loss": 0.4296,
      "step": 1630
    },
    {
      "epoch": 2.738667223730938,
      "grad_norm": 0.36610159277915955,
      "learning_rate": 2.3373345131180224e-06,
      "loss": 0.4175,
      "step": 1640
    },
    {
      "epoch": 2.755379151869647,
      "grad_norm": 0.45609691739082336,
      "learning_rate": 2.052810087934698e-06,
      "loss": 0.3886,
      "step": 1650
    },
    {
      "epoch": 2.772091080008356,
      "grad_norm": 0.44251516461372375,
      "learning_rate": 1.7863836052345429e-06,
      "loss": 0.4099,
      "step": 1660
    },
    {
      "epoch": 2.788803008147065,
      "grad_norm": 0.4892220199108124,
      "learning_rate": 1.5381556292251632e-06,
      "loss": 0.4733,
      "step": 1670
    },
    {
      "epoch": 2.805514936285774,
      "grad_norm": 0.43403947353363037,
      "learning_rate": 1.3082198549831836e-06,
      "loss": 0.4303,
      "step": 1680
    },
    {
      "epoch": 2.822226864424483,
      "grad_norm": 0.5250228643417358,
      "learning_rate": 1.0966630730884887e-06,
      "loss": 0.438,
      "step": 1690
    },
    {
      "epoch": 2.8389387925631917,
      "grad_norm": 0.46305549144744873,
      "learning_rate": 9.035651368646648e-07,
      "loss": 0.4379,
      "step": 1700
    },
    {
      "epoch": 2.855650720701901,
      "grad_norm": 0.48225823044776917,
      "learning_rate": 7.289989322378732e-07,
      "loss": 0.4258,
      "step": 1710
    },
    {
      "epoch": 2.87236264884061,
      "grad_norm": 0.4875478744506836,
      "learning_rate": 5.730303502256341e-07,
      "loss": 0.4194,
      "step": 1720
    },
    {
      "epoch": 2.889074576979319,
      "grad_norm": 0.4659990668296814,
      "learning_rate": 4.3571826206590396e-07,
      "loss": 0.4083,
      "step": 1730
    },
    {
      "epoch": 2.905786505118028,
      "grad_norm": 0.467338889837265,
      "learning_rate": 3.1711449699576845e-07,
      "loss": 0.4081,
      "step": 1740
    },
    {
      "epoch": 2.922498433256737,
      "grad_norm": 0.4802572727203369,
      "learning_rate": 2.172638226882129e-07,
      "loss": 0.3996,
      "step": 1750
    },
    {
      "epoch": 2.939210361395446,
      "grad_norm": 0.41945475339889526,
      "learning_rate": 1.3620392835430596e-07,
      "loss": 0.3647,
      "step": 1760
    },
    {
      "epoch": 2.955922289534155,
      "grad_norm": 0.47264522314071655,
      "learning_rate": 7.396541051717942e-08,
      "loss": 0.4691,
      "step": 1770
    },
    {
      "epoch": 2.972634217672864,
      "grad_norm": 0.45545369386672974,
      "learning_rate": 3.057176146319951e-08,
      "loss": 0.4034,
      "step": 1780
    },
    {
      "epoch": 2.989346145811573,
      "grad_norm": 0.5081713795661926,
      "learning_rate": 6.0393603746822235e-09,
      "loss": 0.4652,
      "step": 1790
    },
    {
      "epoch": 3.0,
      "step": 1797,
      "total_flos": 6.751297376408863e+17,
      "train_loss": 0.4856139095743696,
      "train_runtime": 18496.859,
      "train_samples_per_second": 0.776,
      "train_steps_per_second": 0.097
    }
  ],
  "logging_steps": 10,
  "max_steps": 1797,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.751297376408863e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}