{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 2094,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.014344629729245113,
      "grad_norm": 3.3866311674749072,
      "learning_rate": 4.285714285714286e-06,
      "loss": 1.5266,
      "step": 10
    },
    {
      "epoch": 0.028689259458490227,
      "grad_norm": 1.8728384521077752,
      "learning_rate": 9.047619047619047e-06,
      "loss": 1.3066,
      "step": 20
    },
    {
      "epoch": 0.04303388918773534,
      "grad_norm": 1.1126841032649692,
      "learning_rate": 1.3809523809523811e-05,
      "loss": 1.1436,
      "step": 30
    },
    {
      "epoch": 0.05737851891698045,
      "grad_norm": 1.1207104465369435,
      "learning_rate": 1.8571428571428572e-05,
      "loss": 1.0535,
      "step": 40
    },
    {
      "epoch": 0.07172314864622557,
      "grad_norm": 1.1391226479086436,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.0172,
      "step": 50
    },
    {
      "epoch": 0.08606777837547068,
      "grad_norm": 1.146545389971907,
      "learning_rate": 2.8095238095238096e-05,
      "loss": 0.9859,
      "step": 60
    },
    {
      "epoch": 0.1004124081047158,
      "grad_norm": 1.1888015259911366,
      "learning_rate": 3.285714285714286e-05,
      "loss": 0.9528,
      "step": 70
    },
    {
      "epoch": 0.1147570378339609,
      "grad_norm": 1.2903823343286538,
      "learning_rate": 3.761904761904762e-05,
      "loss": 0.9397,
      "step": 80
    },
    {
      "epoch": 0.129101667563206,
      "grad_norm": 1.2943713329149742,
      "learning_rate": 4.2380952380952385e-05,
      "loss": 0.9282,
      "step": 90
    },
    {
      "epoch": 0.14344629729245115,
      "grad_norm": 1.2718909502396083,
      "learning_rate": 4.714285714285714e-05,
      "loss": 0.9186,
      "step": 100
    },
    {
      "epoch": 0.15779092702169625,
      "grad_norm": 1.4791012823315328,
      "learning_rate": 5.1904761904761913e-05,
      "loss": 0.9074,
      "step": 110
    },
    {
      "epoch": 0.17213555675094136,
      "grad_norm": 1.336532460075517,
      "learning_rate": 5.666666666666667e-05,
      "loss": 0.9002,
      "step": 120
    },
    {
      "epoch": 0.1864801864801865,
      "grad_norm": 1.6094057366302217,
      "learning_rate": 6.142857142857143e-05,
      "loss": 0.8887,
      "step": 130
    },
    {
      "epoch": 0.2008248162094316,
      "grad_norm": 1.070539230983606,
      "learning_rate": 6.619047619047619e-05,
      "loss": 0.884,
      "step": 140
    },
    {
      "epoch": 0.2151694459386767,
      "grad_norm": 1.1200792510458928,
      "learning_rate": 7.095238095238096e-05,
      "loss": 0.8809,
      "step": 150
    },
    {
      "epoch": 0.2295140756679218,
      "grad_norm": 1.087537431311211,
      "learning_rate": 7.571428571428571e-05,
      "loss": 0.8799,
      "step": 160
    },
    {
      "epoch": 0.24385870539716695,
      "grad_norm": 0.9999716597796505,
      "learning_rate": 8.047619047619048e-05,
      "loss": 0.8802,
      "step": 170
    },
    {
      "epoch": 0.258203335126412,
      "grad_norm": 1.2050995451812405,
      "learning_rate": 8.523809523809524e-05,
      "loss": 0.8692,
      "step": 180
    },
    {
      "epoch": 0.2725479648556572,
      "grad_norm": 0.9483149209977663,
      "learning_rate": 9e-05,
      "loss": 0.8675,
      "step": 190
    },
    {
      "epoch": 0.2868925945849023,
      "grad_norm": 0.9047494721372505,
      "learning_rate": 9.476190476190476e-05,
      "loss": 0.8651,
      "step": 200
    },
    {
      "epoch": 0.3012372243141474,
      "grad_norm": 0.9614336061705705,
      "learning_rate": 9.952380952380953e-05,
      "loss": 0.8743,
      "step": 210
    },
    {
      "epoch": 0.3155818540433925,
      "grad_norm": 0.840798357905234,
      "learning_rate": 9.999436939807164e-05,
      "loss": 0.8831,
      "step": 220
    },
    {
      "epoch": 0.3299264837726376,
      "grad_norm": 0.894805005888125,
      "learning_rate": 9.99749072170404e-05,
      "loss": 0.8753,
      "step": 230
    },
    {
      "epoch": 0.3442711135018827,
      "grad_norm": 0.9621522988496558,
      "learning_rate": 9.994154935353517e-05,
      "loss": 0.8553,
      "step": 240
    },
    {
      "epoch": 0.3586157432311278,
      "grad_norm": 0.8338221919665476,
      "learning_rate": 9.98943050828164e-05,
      "loss": 0.8577,
      "step": 250
    },
    {
      "epoch": 0.372960372960373,
      "grad_norm": 0.8401540225438021,
      "learning_rate": 9.983318754130435e-05,
      "loss": 0.842,
      "step": 260
    },
    {
      "epoch": 0.3873050026896181,
      "grad_norm": 0.7967631358112257,
      "learning_rate": 9.975821372292653e-05,
      "loss": 0.8346,
      "step": 270
    },
    {
      "epoch": 0.4016496324188632,
      "grad_norm": 0.7572726108984663,
      "learning_rate": 9.966940447439245e-05,
      "loss": 0.8272,
      "step": 280
    },
    {
      "epoch": 0.4159942621481083,
      "grad_norm": 0.7862046452006615,
      "learning_rate": 9.956678448939718e-05,
      "loss": 0.8161,
      "step": 290
    },
    {
      "epoch": 0.4303388918773534,
      "grad_norm": 0.8007930832524255,
      "learning_rate": 9.945038230175509e-05,
      "loss": 0.8263,
      "step": 300
    },
    {
      "epoch": 0.4446835216065985,
      "grad_norm": 0.6882798627741967,
      "learning_rate": 9.932023027746602e-05,
      "loss": 0.8119,
      "step": 310
    },
    {
      "epoch": 0.4590281513358436,
      "grad_norm": 0.7191586339334445,
      "learning_rate": 9.917636460571578e-05,
      "loss": 0.8195,
      "step": 320
    },
    {
      "epoch": 0.47337278106508873,
      "grad_norm": 0.6479226154771014,
      "learning_rate": 9.901882528881363e-05,
      "loss": 0.8152,
      "step": 330
    },
    {
      "epoch": 0.4877174107943339,
      "grad_norm": 0.7520756414640943,
      "learning_rate": 9.884765613106948e-05,
      "loss": 0.8056,
      "step": 340
    },
    {
      "epoch": 0.502062040523579,
      "grad_norm": 0.7496882676872738,
      "learning_rate": 9.866290472661406e-05,
      "loss": 0.8143,
      "step": 350
    },
    {
      "epoch": 0.516406670252824,
      "grad_norm": 0.8849980877887388,
      "learning_rate": 9.846462244616508e-05,
      "loss": 0.8139,
      "step": 360
    },
    {
      "epoch": 0.5307512999820692,
      "grad_norm": 0.689970676364689,
      "learning_rate": 9.825286442274357e-05,
      "loss": 0.8052,
      "step": 370
    },
    {
      "epoch": 0.5450959297113144,
      "grad_norm": 0.7192552702117557,
      "learning_rate": 9.802768953634388e-05,
      "loss": 0.7918,
      "step": 380
    },
    {
      "epoch": 0.5594405594405595,
      "grad_norm": 0.6975024662772501,
      "learning_rate": 9.778916039756193e-05,
      "loss": 0.7859,
      "step": 390
    },
    {
      "epoch": 0.5737851891698046,
      "grad_norm": 0.6553934247634706,
      "learning_rate": 9.753734333018616e-05,
      "loss": 0.776,
      "step": 400
    },
    {
      "epoch": 0.5881298188990497,
      "grad_norm": 0.757809076489788,
      "learning_rate": 9.727230835275598e-05,
      "loss": 0.7811,
      "step": 410
    },
    {
      "epoch": 0.6024744486282948,
      "grad_norm": 0.7082602077432336,
      "learning_rate": 9.699412915909284e-05,
      "loss": 0.7819,
      "step": 420
    },
    {
      "epoch": 0.6168190783575399,
      "grad_norm": 0.6348735880274488,
      "learning_rate": 9.670288309780953e-05,
      "loss": 0.7679,
      "step": 430
    },
    {
      "epoch": 0.631163708086785,
      "grad_norm": 0.6405240862226912,
      "learning_rate": 9.639865115080304e-05,
      "loss": 0.779,
      "step": 440
    },
    {
      "epoch": 0.6455083378160301,
      "grad_norm": 0.6634767779062684,
      "learning_rate": 9.608151791073737e-05,
      "loss": 0.7739,
      "step": 450
    },
    {
      "epoch": 0.6598529675452752,
      "grad_norm": 0.5759274474197833,
      "learning_rate": 9.575157155752222e-05,
      "loss": 0.7643,
      "step": 460
    },
    {
      "epoch": 0.6741975972745203,
      "grad_norm": 0.6640199963749864,
      "learning_rate": 9.54089038337943e-05,
      "loss": 0.7556,
      "step": 470
    },
    {
      "epoch": 0.6885422270037654,
      "grad_norm": 0.6203004765144989,
      "learning_rate": 9.5053610019408e-05,
      "loss": 0.7596,
      "step": 480
    },
    {
      "epoch": 0.7028868567330105,
      "grad_norm": 0.5775087991410168,
      "learning_rate": 9.468578890494256e-05,
      "loss": 0.7534,
      "step": 490
    },
    {
      "epoch": 0.7172314864622557,
      "grad_norm": 0.6714966253079345,
      "learning_rate": 9.430554276423292e-05,
      "loss": 0.7552,
      "step": 500
    },
    {
      "epoch": 0.7315761161915008,
      "grad_norm": 0.5888288333456809,
      "learning_rate": 9.391297732593229e-05,
      "loss": 0.7526,
      "step": 510
    },
    {
      "epoch": 0.745920745920746,
      "grad_norm": 0.6259226612543665,
      "learning_rate": 9.350820174411386e-05,
      "loss": 0.7537,
      "step": 520
    },
    {
      "epoch": 0.7602653756499911,
      "grad_norm": 0.5499968625723676,
      "learning_rate": 9.309132856792023e-05,
      "loss": 0.7499,
      "step": 530
    },
    {
      "epoch": 0.7746100053792362,
      "grad_norm": 0.6500285934875327,
      "learning_rate": 9.266247371026873e-05,
      "loss": 0.7447,
      "step": 540
    },
    {
      "epoch": 0.7889546351084813,
      "grad_norm": 0.5672171674827838,
      "learning_rate": 9.222175641562143e-05,
      "loss": 0.7496,
      "step": 550
    },
    {
      "epoch": 0.8032992648377264,
      "grad_norm": 0.6465733807975256,
      "learning_rate": 9.176929922682891e-05,
      "loss": 0.74,
      "step": 560
    },
    {
      "epoch": 0.8176438945669715,
      "grad_norm": 0.6101304041389433,
      "learning_rate": 9.130522795105676e-05,
      "loss": 0.7411,
      "step": 570
    },
    {
      "epoch": 0.8319885242962166,
      "grad_norm": 0.5489063036723607,
      "learning_rate": 9.082967162480459e-05,
      "loss": 0.7323,
      "step": 580
    },
    {
      "epoch": 0.8463331540254617,
      "grad_norm": 0.5516558559909698,
      "learning_rate": 9.034276247802688e-05,
      "loss": 0.7231,
      "step": 590
    },
    {
      "epoch": 0.8606777837547068,
      "grad_norm": 0.6477118047968917,
      "learning_rate": 8.984463589736614e-05,
      "loss": 0.7288,
      "step": 600
    },
    {
      "epoch": 0.8750224134839519,
      "grad_norm": 0.5962486006428408,
      "learning_rate": 8.933543038850816e-05,
      "loss": 0.732,
      "step": 610
    },
    {
      "epoch": 0.889367043213197,
      "grad_norm": 0.5713715183887016,
      "learning_rate": 8.881528753767007e-05,
      "loss": 0.728,
      "step": 620
    },
    {
      "epoch": 0.9037116729424421,
      "grad_norm": 0.6301745705273146,
      "learning_rate": 8.82843519722319e-05,
      "loss": 0.7263,
      "step": 630
    },
    {
      "epoch": 0.9180563026716873,
      "grad_norm": 0.5819941044165688,
      "learning_rate": 8.774277132052237e-05,
      "loss": 0.7189,
      "step": 640
    },
    {
      "epoch": 0.9324009324009324,
      "grad_norm": 0.5336149445366645,
      "learning_rate": 8.719069617077046e-05,
      "loss": 0.7254,
      "step": 650
    },
    {
      "epoch": 0.9467455621301775,
      "grad_norm": 0.6083401813961216,
      "learning_rate": 8.662828002923378e-05,
      "loss": 0.7218,
      "step": 660
    },
    {
      "epoch": 0.9610901918594227,
      "grad_norm": 0.5371262960983777,
      "learning_rate": 8.605567927751576e-05,
      "loss": 0.719,
      "step": 670
    },
    {
      "epoch": 0.9754348215886678,
      "grad_norm": 0.5551868136565808,
      "learning_rate": 8.547305312908318e-05,
      "loss": 0.7089,
      "step": 680
    },
    {
      "epoch": 0.9897794513179129,
      "grad_norm": 0.5899796664828114,
      "learning_rate": 8.48805635849964e-05,
      "loss": 0.7075,
      "step": 690
    },
    {
      "epoch": 1.002868925945849,
      "grad_norm": 0.608174630463498,
      "learning_rate": 8.427837538886437e-05,
      "loss": 0.6894,
      "step": 700
    },
    {
      "epoch": 1.017213555675094,
      "grad_norm": 0.6116990279567732,
      "learning_rate": 8.366665598103727e-05,
      "loss": 0.5865,
      "step": 710
    },
    {
      "epoch": 1.0315581854043392,
      "grad_norm": 0.5768126568975168,
      "learning_rate": 8.304557545204908e-05,
      "loss": 0.5931,
      "step": 720
    },
    {
      "epoch": 1.0459028151335843,
      "grad_norm": 0.5727241793491287,
      "learning_rate": 8.241530649532339e-05,
      "loss": 0.5859,
      "step": 730
    },
    {
      "epoch": 1.0602474448628294,
      "grad_norm": 0.5834066080830065,
      "learning_rate": 8.177602435915546e-05,
      "loss": 0.5833,
      "step": 740
    },
    {
      "epoch": 1.0745920745920745,
      "grad_norm": 0.6168394884857555,
      "learning_rate": 8.11279067979839e-05,
      "loss": 0.5805,
      "step": 750
    },
    {
      "epoch": 1.0889367043213196,
      "grad_norm": 0.4885078172344663,
      "learning_rate": 8.04711340229654e-05,
      "loss": 0.5733,
      "step": 760
    },
    {
      "epoch": 1.1032813340505647,
      "grad_norm": 0.5289947493535304,
      "learning_rate": 7.980588865186649e-05,
      "loss": 0.5812,
      "step": 770
    },
    {
      "epoch": 1.11762596377981,
      "grad_norm": 0.5672904537751536,
      "learning_rate": 7.913235565828613e-05,
      "loss": 0.5811,
      "step": 780
    },
    {
      "epoch": 1.1319705935090552,
      "grad_norm": 0.6107186114378813,
      "learning_rate": 7.845072232022311e-05,
      "loss": 0.5755,
      "step": 790
    },
    {
      "epoch": 1.1463152232383003,
      "grad_norm": 0.6696900109358123,
      "learning_rate": 7.776117816800288e-05,
      "loss": 0.5916,
      "step": 800
    },
    {
      "epoch": 1.1606598529675454,
      "grad_norm": 0.5412095989289454,
      "learning_rate": 7.706391493157805e-05,
      "loss": 0.5822,
      "step": 810
    },
    {
      "epoch": 1.1750044826967905,
      "grad_norm": 0.5408330429359739,
      "learning_rate": 7.635912648721718e-05,
      "loss": 0.5811,
      "step": 820
    },
    {
      "epoch": 1.1893491124260356,
      "grad_norm": 0.5952577265784902,
      "learning_rate": 7.564700880359696e-05,
      "loss": 0.5799,
      "step": 830
    },
    {
      "epoch": 1.2036937421552807,
      "grad_norm": 0.5026109808475954,
      "learning_rate": 7.492775988731243e-05,
      "loss": 0.5673,
      "step": 840
    },
    {
      "epoch": 1.2180383718845258,
      "grad_norm": 0.5302543029902266,
      "learning_rate": 7.420157972782063e-05,
      "loss": 0.5754,
      "step": 850
    },
    {
      "epoch": 1.232383001613771,
      "grad_norm": 0.5532266667153597,
      "learning_rate": 7.346867024183291e-05,
      "loss": 0.579,
      "step": 860
    },
    {
      "epoch": 1.246727631343016,
      "grad_norm": 0.5669382050811871,
      "learning_rate": 7.272923521717133e-05,
      "loss": 0.5756,
      "step": 870
    },
    {
      "epoch": 1.2610722610722611,
      "grad_norm": 0.5338735116713776,
      "learning_rate": 7.198348025610481e-05,
      "loss": 0.5769,
      "step": 880
    },
    {
      "epoch": 1.2754168908015062,
      "grad_norm": 0.5863122040109188,
      "learning_rate": 7.12316127181808e-05,
      "loss": 0.5685,
      "step": 890
    },
    {
      "epoch": 1.2897615205307513,
      "grad_norm": 0.5541827820189409,
      "learning_rate": 7.047384166256815e-05,
      "loss": 0.5801,
      "step": 900
    },
    {
      "epoch": 1.3041061502599964,
      "grad_norm": 0.5753955062923616,
      "learning_rate": 6.971037778992775e-05,
      "loss": 0.5795,
      "step": 910
    },
    {
      "epoch": 1.3184507799892415,
      "grad_norm": 0.47433414173616173,
      "learning_rate": 6.894143338382639e-05,
      "loss": 0.5768,
      "step": 920
    },
    {
      "epoch": 1.3327954097184866,
      "grad_norm": 0.5274791181928579,
      "learning_rate": 6.81672222517107e-05,
      "loss": 0.578,
      "step": 930
    },
    {
      "epoch": 1.3471400394477318,
      "grad_norm": 0.5774970902851654,
      "learning_rate": 6.73879596654573e-05,
      "loss": 0.5682,
      "step": 940
    },
    {
      "epoch": 1.3614846691769769,
      "grad_norm": 0.5662099451295628,
      "learning_rate": 6.660386230151571e-05,
      "loss": 0.5735,
      "step": 950
    },
    {
      "epoch": 1.375829298906222,
      "grad_norm": 0.5574084243877258,
      "learning_rate": 6.581514818066088e-05,
      "loss": 0.5739,
      "step": 960
    },
    {
      "epoch": 1.390173928635467,
      "grad_norm": 0.539589642853108,
      "learning_rate": 6.502203660737169e-05,
      "loss": 0.5688,
      "step": 970
    },
    {
      "epoch": 1.4045185583647122,
      "grad_norm": 0.49293926542133193,
      "learning_rate": 6.422474810885278e-05,
      "loss": 0.5669,
      "step": 980
    },
    {
      "epoch": 1.4188631880939573,
      "grad_norm": 0.49620916916034896,
      "learning_rate": 6.342350437371614e-05,
      "loss": 0.5702,
      "step": 990
    },
    {
      "epoch": 1.4332078178232024,
      "grad_norm": 0.5152950617397968,
      "learning_rate": 6.26185281903399e-05,
      "loss": 0.5685,
      "step": 1000
    },
    {
      "epoch": 1.4475524475524475,
      "grad_norm": 0.545911912027696,
      "learning_rate": 6.181004338492141e-05,
      "loss": 0.5651,
      "step": 1010
    },
    {
      "epoch": 1.4618970772816926,
      "grad_norm": 0.5074263813146201,
      "learning_rate": 6.09982747592415e-05,
      "loss": 0.5615,
      "step": 1020
    },
    {
      "epoch": 1.4762417070109377,
      "grad_norm": 0.5094909684862107,
      "learning_rate": 6.018344802815778e-05,
      "loss": 0.5679,
      "step": 1030
    },
    {
      "epoch": 1.4905863367401828,
      "grad_norm": 0.5103259058296251,
      "learning_rate": 5.936578975684378e-05,
      "loss": 0.5701,
      "step": 1040
    },
    {
      "epoch": 1.504930966469428,
      "grad_norm": 0.5278709818430807,
      "learning_rate": 5.854552729779184e-05,
      "loss": 0.5627,
      "step": 1050
    },
    {
      "epoch": 1.519275596198673,
      "grad_norm": 0.510686482301255,
      "learning_rate": 5.772288872759702e-05,
      "loss": 0.5625,
      "step": 1060
    },
    {
      "epoch": 1.5336202259279181,
      "grad_norm": 0.5278526048784437,
      "learning_rate": 5.6898102783539665e-05,
      "loss": 0.5606,
      "step": 1070
    },
    {
      "epoch": 1.5479648556571632,
      "grad_norm": 0.5559268236737518,
      "learning_rate": 5.607139879998427e-05,
      "loss": 0.5595,
      "step": 1080
    },
    {
      "epoch": 1.5623094853864083,
      "grad_norm": 0.5583305948616533,
      "learning_rate": 5.524300664461235e-05,
      "loss": 0.5575,
      "step": 1090
    },
    {
      "epoch": 1.5766541151156535,
      "grad_norm": 0.5186185461448056,
      "learning_rate": 5.441315665450697e-05,
      "loss": 0.5589,
      "step": 1100
    },
    {
      "epoch": 1.5909987448448986,
      "grad_norm": 0.5168193702732372,
      "learning_rate": 5.3582079572106794e-05,
      "loss": 0.5534,
      "step": 1110
    },
    {
      "epoch": 1.6053433745741437,
      "grad_norm": 0.5037158486433893,
      "learning_rate": 5.275000648104743e-05,
      "loss": 0.5569,
      "step": 1120
    },
    {
      "epoch": 1.6196880043033888,
      "grad_norm": 0.49884254214328216,
      "learning_rate": 5.191716874190785e-05,
      "loss": 0.5564,
      "step": 1130
    },
    {
      "epoch": 1.6340326340326339,
      "grad_norm": 0.5071669705468655,
      "learning_rate": 5.1083797927879896e-05,
      "loss": 0.5552,
      "step": 1140
    },
    {
      "epoch": 1.648377263761879,
      "grad_norm": 0.5410568223255828,
      "learning_rate": 5.025012576037855e-05,
      "loss": 0.564,
      "step": 1150
    },
    {
      "epoch": 1.6627218934911243,
      "grad_norm": 0.4850296619715352,
      "learning_rate": 4.9416384044611124e-05,
      "loss": 0.5579,
      "step": 1160
    },
    {
      "epoch": 1.6770665232203694,
      "grad_norm": 0.5039355867374112,
      "learning_rate": 4.858280460512302e-05,
      "loss": 0.5551,
      "step": 1170
    },
    {
      "epoch": 1.6914111529496145,
      "grad_norm": 0.5354526153870778,
      "learning_rate": 4.7749619221338227e-05,
      "loss": 0.5553,
      "step": 1180
    },
    {
      "epoch": 1.7057557826788596,
      "grad_norm": 0.5149848285267117,
      "learning_rate": 4.691705956311225e-05,
      "loss": 0.554,
      "step": 1190
    },
    {
      "epoch": 1.7201004124081047,
      "grad_norm": 0.5076867052953596,
      "learning_rate": 4.608535712631566e-05,
      "loss": 0.553,
      "step": 1200
    },
    {
      "epoch": 1.7344450421373498,
      "grad_norm": 0.5245034600250137,
      "learning_rate": 4.525474316846581e-05,
      "loss": 0.5455,
      "step": 1210
    },
    {
      "epoch": 1.748789671866595,
      "grad_norm": 0.5044035057693634,
      "learning_rate": 4.4425448644425066e-05,
      "loss": 0.5487,
      "step": 1220
    },
    {
      "epoch": 1.76313430159584,
      "grad_norm": 0.5182301310051464,
      "learning_rate": 4.359770414218296e-05,
      "loss": 0.5401,
      "step": 1230
    },
    {
      "epoch": 1.7774789313250852,
      "grad_norm": 0.5066048824304953,
      "learning_rate": 4.2771739818740565e-05,
      "loss": 0.5496,
      "step": 1240
    },
    {
      "epoch": 1.7918235610543303,
      "grad_norm": 0.5337687629976401,
      "learning_rate": 4.194778533611451e-05,
      "loss": 0.5413,
      "step": 1250
    },
    {
      "epoch": 1.8061681907835754,
      "grad_norm": 0.5207289329535878,
      "learning_rate": 4.112606979747881e-05,
      "loss": 0.544,
      "step": 1260
    },
    {
      "epoch": 1.8205128205128205,
      "grad_norm": 0.5450234045888507,
      "learning_rate": 4.030682168346192e-05,
      "loss": 0.5358,
      "step": 1270
    },
    {
      "epoch": 1.8348574502420656,
      "grad_norm": 0.48504143892984203,
      "learning_rate": 3.949026878861704e-05,
      "loss": 0.539,
      "step": 1280
    },
    {
      "epoch": 1.8492020799713107,
      "grad_norm": 0.49111142317889844,
      "learning_rate": 3.867663815808303e-05,
      "loss": 0.5389,
      "step": 1290
    },
    {
      "epoch": 1.8635467097005558,
      "grad_norm": 0.5008380643048362,
      "learning_rate": 3.78661560244539e-05,
      "loss": 0.5363,
      "step": 1300
    },
    {
      "epoch": 1.8778913394298011,
      "grad_norm": 0.4952709952315973,
      "learning_rate": 3.705904774487396e-05,
      "loss": 0.5319,
      "step": 1310
    },
    {
      "epoch": 1.8922359691590462,
      "grad_norm": 0.5164716516662802,
      "learning_rate": 3.6255537738376706e-05,
      "loss": 0.5402,
      "step": 1320
    },
    {
      "epoch": 1.9065805988882913,
      "grad_norm": 0.5240016137645628,
      "learning_rate": 3.545584942348426e-05,
      "loss": 0.5309,
      "step": 1330
    },
    {
      "epoch": 1.9209252286175365,
      "grad_norm": 0.5058856022275582,
      "learning_rate": 3.466020515608525e-05,
      "loss": 0.5298,
      "step": 1340
    },
    {
      "epoch": 1.9352698583467816,
      "grad_norm": 0.5019184399441088,
      "learning_rate": 3.386882616760794e-05,
      "loss": 0.5319,
      "step": 1350
    },
    {
      "epoch": 1.9496144880760267,
      "grad_norm": 0.471920743317965,
      "learning_rate": 3.30819325035062e-05,
      "loss": 0.5287,
      "step": 1360
    },
    {
      "epoch": 1.9639591178052718,
      "grad_norm": 0.5094525188470136,
      "learning_rate": 3.229974296207513e-05,
      "loss": 0.5385,
      "step": 1370
    },
    {
      "epoch": 1.9783037475345169,
      "grad_norm": 0.5606826371060244,
      "learning_rate": 3.152247503361353e-05,
      "loss": 0.527,
      "step": 1380
    },
    {
      "epoch": 1.992648377263762,
      "grad_norm": 0.5085670056810321,
      "learning_rate": 3.075034483994997e-05,
      "loss": 0.5257,
      "step": 1390
    },
    {
      "epoch": 2.005737851891698,
      "grad_norm": 0.5719992437170487,
      "learning_rate": 2.998356707434947e-05,
      "loss": 0.4623,
      "step": 1400
    },
    {
      "epoch": 2.020082481620943,
      "grad_norm": 0.5387964274909228,
      "learning_rate": 2.9222354941817375e-05,
      "loss": 0.3606,
      "step": 1410
    },
    {
      "epoch": 2.034427111350188,
      "grad_norm": 0.5698349863645911,
      "learning_rate": 2.846692009981693e-05,
      "loss": 0.3527,
      "step": 1420
    },
    {
      "epoch": 2.0487717410794333,
      "grad_norm": 0.5324199413342471,
      "learning_rate": 2.771747259941734e-05,
      "loss": 0.3462,
      "step": 1430
    },
    {
      "epoch": 2.0631163708086784,
      "grad_norm": 0.53215786873291,
      "learning_rate": 2.6974220826888374e-05,
      "loss": 0.342,
      "step": 1440
    },
    {
      "epoch": 2.0774610005379235,
      "grad_norm": 0.5110033203467198,
      "learning_rate": 2.623737144575787e-05,
      "loss": 0.3462,
      "step": 1450
    },
    {
      "epoch": 2.0918056302671686,
      "grad_norm": 0.5246624681955846,
      "learning_rate": 2.5507129339348335e-05,
      "loss": 0.3487,
      "step": 1460
    },
    {
      "epoch": 2.1061502599964137,
      "grad_norm": 0.5070575905086573,
      "learning_rate": 2.478369755380839e-05,
      "loss": 0.3465,
      "step": 1470
    },
    {
      "epoch": 2.120494889725659,
      "grad_norm": 0.5132555087524301,
      "learning_rate": 2.406727724165524e-05,
      "loss": 0.3451,
      "step": 1480
    },
    {
      "epoch": 2.134839519454904,
      "grad_norm": 0.5143913782142077,
      "learning_rate": 2.3358067605843537e-05,
      "loss": 0.3442,
      "step": 1490
    },
    {
      "epoch": 2.149184149184149,
      "grad_norm": 0.4894675520933593,
      "learning_rate": 2.2656265844376367e-05,
      "loss": 0.3449,
      "step": 1500
    },
    {
      "epoch": 2.163528778913394,
      "grad_norm": 0.5118938379622366,
      "learning_rate": 2.1962067095473648e-05,
      "loss": 0.3443,
      "step": 1510
    },
    {
      "epoch": 2.1778734086426392,
      "grad_norm": 0.5264199925745078,
      "learning_rate": 2.127566438331345e-05,
      "loss": 0.3443,
      "step": 1520
    },
    {
      "epoch": 2.1922180383718843,
      "grad_norm": 0.5517034745554297,
      "learning_rate": 2.059724856436092e-05,
      "loss": 0.34,
      "step": 1530
    },
    {
      "epoch": 2.2065626681011294,
      "grad_norm": 0.5153912300862288,
      "learning_rate": 1.992700827430007e-05,
      "loss": 0.3447,
      "step": 1540
    },
    {
      "epoch": 2.2209072978303745,
      "grad_norm": 0.49999210085621454,
      "learning_rate": 1.9265129875582954e-05,
      "loss": 0.3402,
      "step": 1550
    },
    {
      "epoch": 2.23525192755962,
      "grad_norm": 0.5117332103404209,
      "learning_rate": 1.8611797405611097e-05,
      "loss": 0.3434,
      "step": 1560
    },
    {
      "epoch": 2.249596557288865,
      "grad_norm": 0.5334345620530477,
      "learning_rate": 1.7967192525563254e-05,
      "loss": 0.3429,
      "step": 1570
    },
    {
      "epoch": 2.2639411870181103,
      "grad_norm": 0.5052797787039551,
      "learning_rate": 1.733149446988394e-05,
      "loss": 0.3402,
      "step": 1580
    },
    {
      "epoch": 2.2782858167473554,
      "grad_norm": 0.5115033156660913,
      "learning_rate": 1.670487999644669e-05,
      "loss": 0.3413,
      "step": 1590
    },
    {
      "epoch": 2.2926304464766005,
      "grad_norm": 0.4961174768714891,
      "learning_rate": 1.6087523337406024e-05,
      "loss": 0.3407,
      "step": 1600
    },
    {
      "epoch": 2.3069750762058456,
      "grad_norm": 0.5181202897712733,
      "learning_rate": 1.547959615075164e-05,
      "loss": 0.3343,
      "step": 1610
    },
    {
      "epoch": 2.3213197059350907,
      "grad_norm": 0.5076475142323975,
      "learning_rate": 1.4881267472578325e-05,
      "loss": 0.3415,
      "step": 1620
    },
    {
      "epoch": 2.335664335664336,
      "grad_norm": 0.4921735423850851,
      "learning_rate": 1.4292703670084916e-05,
      "loss": 0.3392,
      "step": 1630
    },
    {
      "epoch": 2.350008965393581,
      "grad_norm": 0.5151996030919073,
      "learning_rate": 1.3714068395315427e-05,
      "loss": 0.3349,
      "step": 1640
    },
    {
      "epoch": 2.364353595122826,
      "grad_norm": 0.5104074354031792,
      "learning_rate": 1.3145522539655041e-05,
      "loss": 0.3354,
      "step": 1650
    },
    {
      "epoch": 2.378698224852071,
      "grad_norm": 0.5380960601841327,
      "learning_rate": 1.2587224189093755e-05,
      "loss": 0.3302,
      "step": 1660
    },
    {
      "epoch": 2.3930428545813163,
      "grad_norm": 0.5049847193289703,
      "learning_rate": 1.2039328580270065e-05,
      "loss": 0.3333,
      "step": 1670
    },
    {
      "epoch": 2.4073874843105614,
      "grad_norm": 0.5040716684214055,
      "learning_rate": 1.150198805730689e-05,
      "loss": 0.3329,
      "step": 1680
    },
    {
      "epoch": 2.4217321140398065,
      "grad_norm": 0.5040857331959766,
      "learning_rate": 1.0975352029451863e-05,
      "loss": 0.3337,
      "step": 1690
    },
    {
      "epoch": 2.4360767437690516,
      "grad_norm": 0.5091871791617659,
      "learning_rate": 1.0459566929533588e-05,
      "loss": 0.3345,
      "step": 1700
    },
    {
      "epoch": 2.4504213734982967,
      "grad_norm": 0.4986671760440932,
      "learning_rate": 9.954776173245511e-06,
      "loss": 0.3369,
      "step": 1710
    },
    {
      "epoch": 2.464766003227542,
      "grad_norm": 0.5146311314437344,
      "learning_rate": 9.461120119268713e-06,
      "loss": 0.3338,
      "step": 1720
    },
    {
      "epoch": 2.479110632956787,
      "grad_norm": 0.5052152754810867,
      "learning_rate": 8.978736030244783e-06,
      "loss": 0.331,
      "step": 1730
    },
    {
      "epoch": 2.493455262686032,
      "grad_norm": 0.5060074098914322,
      "learning_rate": 8.50775803460948e-06,
      "loss": 0.3336,
      "step": 1740
    },
    {
      "epoch": 2.507799892415277,
      "grad_norm": 0.5047637166351485,
      "learning_rate": 8.048317089297875e-06,
      "loss": 0.331,
      "step": 1750
    },
    {
      "epoch": 2.5221445221445222,
      "grad_norm": 0.5051085483525463,
      "learning_rate": 7.600540943331347e-06,
      "loss": 0.3316,
      "step": 1760
    },
    {
      "epoch": 2.5364891518737673,
      "grad_norm": 0.5291232815833027,
      "learning_rate": 7.164554102296617e-06,
      "loss": 0.3322,
      "step": 1770
    },
    {
      "epoch": 2.5508337816030124,
      "grad_norm": 0.4950112167812311,
      "learning_rate": 6.740477793726529e-06,
      "loss": 0.3302,
      "step": 1780
    },
    {
      "epoch": 2.5651784113322575,
      "grad_norm": 0.5171790095780296,
      "learning_rate": 6.32842993339236e-06,
      "loss": 0.3278,
      "step": 1790
    },
    {
      "epoch": 2.5795230410615027,
      "grad_norm": 0.5041803659831317,
      "learning_rate": 5.928525092516934e-06,
      "loss": 0.3334,
      "step": 1800
    },
    {
      "epoch": 2.5938676707907478,
      "grad_norm": 0.5073754782012652,
      "learning_rate": 5.540874465917778e-06,
      "loss": 0.3325,
      "step": 1810
    },
    {
      "epoch": 2.608212300519993,
      "grad_norm": 0.48715355421051987,
      "learning_rate": 5.165585841089021e-06,
      "loss": 0.3246,
      "step": 1820
    },
    {
      "epoch": 2.622556930249238,
      "grad_norm": 0.5162975920861064,
      "learning_rate": 4.8027635682307445e-06,
      "loss": 0.325,
      "step": 1830
    },
    {
      "epoch": 2.636901559978483,
      "grad_norm": 0.5030085273093482,
      "learning_rate": 4.45250853123404e-06,
      "loss": 0.3267,
      "step": 1840
    },
    {
      "epoch": 2.651246189707728,
      "grad_norm": 0.5261872357667398,
      "learning_rate": 4.1149181196299905e-06,
      "loss": 0.3277,
      "step": 1850
    },
    {
      "epoch": 2.6655908194369733,
      "grad_norm": 0.49678770417028423,
      "learning_rate": 3.7900862015101457e-06,
      "loss": 0.3285,
      "step": 1860
    },
    {
      "epoch": 2.6799354491662184,
      "grad_norm": 0.4889164865698146,
      "learning_rate": 3.4781030974262108e-06,
      "loss": 0.3268,
      "step": 1870
    },
    {
      "epoch": 2.6942800788954635,
      "grad_norm": 0.4884408260663131,
      "learning_rate": 3.1790555552761615e-06,
      "loss": 0.3246,
      "step": 1880
    },
    {
      "epoch": 2.7086247086247086,
      "grad_norm": 0.49292610830996564,
      "learning_rate": 2.8930267261836395e-06,
      "loss": 0.3231,
      "step": 1890
    },
    {
      "epoch": 2.7229693383539537,
      "grad_norm": 0.4890311152459734,
      "learning_rate": 2.6200961413776094e-06,
      "loss": 0.324,
      "step": 1900
    },
    {
      "epoch": 2.737313968083199,
      "grad_norm": 0.5165429385071639,
      "learning_rate": 2.3603396900783724e-06,
      "loss": 0.3251,
      "step": 1910
    },
    {
      "epoch": 2.751658597812444,
      "grad_norm": 0.5024981727502739,
      "learning_rate": 2.113829598396383e-06,
      "loss": 0.322,
      "step": 1920
    },
    {
      "epoch": 2.766003227541689,
      "grad_norm": 0.49038298209467507,
      "learning_rate": 1.8806344092494932e-06,
      "loss": 0.3248,
      "step": 1930
    },
    {
      "epoch": 2.780347857270934,
      "grad_norm": 0.4917027026533285,
      "learning_rate": 1.6608189633044113e-06,
      "loss": 0.325,
      "step": 1940
    },
    {
      "epoch": 2.7946924870001792,
      "grad_norm": 0.4869252686371276,
      "learning_rate": 1.4544443809475561e-06,
      "loss": 0.3247,
      "step": 1950
    },
    {
      "epoch": 2.8090371167294244,
      "grad_norm": 0.48253239623178734,
      "learning_rate": 1.2615680452903067e-06,
      "loss": 0.3234,
      "step": 1960
    },
    {
      "epoch": 2.8233817464586695,
      "grad_norm": 0.5096600620224961,
      "learning_rate": 1.082243586213455e-06,
      "loss": 0.3219,
      "step": 1970
    },
    {
      "epoch": 2.8377263761879146,
      "grad_norm": 0.48655701646749633,
      "learning_rate": 9.165208654552671e-07,
      "loss": 0.321,
      "step": 1980
    },
    {
      "epoch": 2.8520710059171597,
      "grad_norm": 0.47440137878879957,
      "learning_rate": 7.6444596274724e-07,
      "loss": 0.3229,
      "step": 1990
    },
    {
      "epoch": 2.866415635646405,
      "grad_norm": 0.5034996226878241,
      "learning_rate": 6.260611630015067e-07,
      "loss": 0.3221,
      "step": 2000
    },
    {
      "epoch": 2.88076026537565,
      "grad_norm": 0.47683170425958754,
      "learning_rate": 5.01404944553363e-07,
      "loss": 0.3219,
      "step": 2010
    },
    {
      "epoch": 2.895104895104895,
      "grad_norm": 0.4909218835510976,
      "learning_rate": 3.9051196846225935e-07,
      "loss": 0.3251,
      "step": 2020
    },
    {
      "epoch": 2.90944952483414,
      "grad_norm": 0.48456217220790176,
      "learning_rate": 2.9341306887417653e-07,
      "loss": 0.3213,
      "step": 2030
    },
    {
      "epoch": 2.923794154563385,
      "grad_norm": 0.4779578566731188,
      "learning_rate": 2.101352444480842e-07,
      "loss": 0.3241,
      "step": 2040
    },
    {
      "epoch": 2.9381387842926303,
      "grad_norm": 0.49966345259743034,
      "learning_rate": 1.4070165084889008e-07,
      "loss": 0.3199,
      "step": 2050
    },
    {
      "epoch": 2.9524834140218754,
      "grad_norm": 0.4754785997240394,
      "learning_rate": 8.513159430892925e-08,
      "loss": 0.3225,
      "step": 2060
    },
    {
      "epoch": 2.9668280437511205,
      "grad_norm": 0.4903628137644153,
      "learning_rate": 4.344052625981365e-08,
      "loss": 0.3219,
      "step": 2070
    },
    {
      "epoch": 2.9811726734803656,
      "grad_norm": 0.4992880216120249,
      "learning_rate": 1.5640039036085575e-08,
      "loss": 0.3187,
      "step": 2080
    },
    {
      "epoch": 2.9955173032096107,
      "grad_norm": 0.4963785468111048,
      "learning_rate": 1.7378626519626296e-09,
      "loss": 0.3238,
      "step": 2090
    },
    {
      "epoch": 3.0,
      "step": 2094,
      "total_flos": 306663055425536.0,
      "train_loss": 0.5762376924754328,
      "train_runtime": 90627.2697,
      "train_samples_per_second": 1.477,
      "train_steps_per_second": 0.023
    }
  ],
  "logging_steps": 10,
  "max_steps": 2094,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 306663055425536.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}