{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1073,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004659832246039142,
      "grad_norm": 55.73911101293821,
      "learning_rate": 7.763975155279503e-07,
      "loss": 11.0693,
      "step": 5
    },
    {
      "epoch": 0.009319664492078284,
      "grad_norm": 57.1718581665509,
      "learning_rate": 1.5527950310559006e-06,
      "loss": 10.8839,
      "step": 10
    },
    {
      "epoch": 0.013979496738117428,
      "grad_norm": 98.21065880054624,
      "learning_rate": 2.329192546583851e-06,
      "loss": 9.4422,
      "step": 15
    },
    {
      "epoch": 0.01863932898415657,
      "grad_norm": 32.82367751669906,
      "learning_rate": 3.1055900621118013e-06,
      "loss": 3.2146,
      "step": 20
    },
    {
      "epoch": 0.023299161230195712,
      "grad_norm": 3.30697302187253,
      "learning_rate": 3.881987577639752e-06,
      "loss": 1.347,
      "step": 25
    },
    {
      "epoch": 0.027958993476234855,
      "grad_norm": 1.2985174034570703,
      "learning_rate": 4.658385093167702e-06,
      "loss": 1.0061,
      "step": 30
    },
    {
      "epoch": 0.032618825722273995,
      "grad_norm": 0.8623629897688332,
      "learning_rate": 5.4347826086956525e-06,
      "loss": 0.8411,
      "step": 35
    },
    {
      "epoch": 0.03727865796831314,
      "grad_norm": 0.6188703104605264,
      "learning_rate": 6.2111801242236025e-06,
      "loss": 0.7755,
      "step": 40
    },
    {
      "epoch": 0.04193849021435228,
      "grad_norm": 0.461009679090195,
      "learning_rate": 6.9875776397515525e-06,
      "loss": 0.7136,
      "step": 45
    },
    {
      "epoch": 0.046598322460391424,
      "grad_norm": 0.4286257732967241,
      "learning_rate": 7.763975155279503e-06,
      "loss": 0.6831,
      "step": 50
    },
    {
      "epoch": 0.05125815470643057,
      "grad_norm": 0.44241392108033467,
      "learning_rate": 8.540372670807453e-06,
      "loss": 0.646,
      "step": 55
    },
    {
      "epoch": 0.05591798695246971,
      "grad_norm": 0.3111820442035257,
      "learning_rate": 9.316770186335403e-06,
      "loss": 0.6088,
      "step": 60
    },
    {
      "epoch": 0.06057781919850885,
      "grad_norm": 0.30251919432888047,
      "learning_rate": 1.0093167701863353e-05,
      "loss": 0.5932,
      "step": 65
    },
    {
      "epoch": 0.06523765144454799,
      "grad_norm": 0.33324119756936554,
      "learning_rate": 1.0869565217391305e-05,
      "loss": 0.5892,
      "step": 70
    },
    {
      "epoch": 0.06989748369058714,
      "grad_norm": 0.3063487771822568,
      "learning_rate": 1.1645962732919255e-05,
      "loss": 0.5713,
      "step": 75
    },
    {
      "epoch": 0.07455731593662628,
      "grad_norm": 0.3071310049244509,
      "learning_rate": 1.2422360248447205e-05,
      "loss": 0.5618,
      "step": 80
    },
    {
      "epoch": 0.07921714818266543,
      "grad_norm": 0.3358130788082856,
      "learning_rate": 1.3198757763975155e-05,
      "loss": 0.5524,
      "step": 85
    },
    {
      "epoch": 0.08387698042870456,
      "grad_norm": 0.2925977780766404,
      "learning_rate": 1.3975155279503105e-05,
      "loss": 0.5628,
      "step": 90
    },
    {
      "epoch": 0.08853681267474371,
      "grad_norm": 0.25884643717759576,
      "learning_rate": 1.4751552795031057e-05,
      "loss": 0.5442,
      "step": 95
    },
    {
      "epoch": 0.09319664492078285,
      "grad_norm": 0.31707838864665394,
      "learning_rate": 1.5527950310559007e-05,
      "loss": 0.5296,
      "step": 100
    },
    {
      "epoch": 0.097856477166822,
      "grad_norm": 0.3021647811110462,
      "learning_rate": 1.630434782608696e-05,
      "loss": 0.5243,
      "step": 105
    },
    {
      "epoch": 0.10251630941286113,
      "grad_norm": 0.30442757517268515,
      "learning_rate": 1.7080745341614907e-05,
      "loss": 0.526,
      "step": 110
    },
    {
      "epoch": 0.10717614165890028,
      "grad_norm": 0.3036499569102043,
      "learning_rate": 1.785714285714286e-05,
      "loss": 0.5231,
      "step": 115
    },
    {
      "epoch": 0.11183597390493942,
      "grad_norm": 0.37387951427884036,
      "learning_rate": 1.8633540372670807e-05,
      "loss": 0.5098,
      "step": 120
    },
    {
      "epoch": 0.11649580615097857,
      "grad_norm": 0.2871555813435635,
      "learning_rate": 1.940993788819876e-05,
      "loss": 0.5057,
      "step": 125
    },
    {
      "epoch": 0.1211556383970177,
      "grad_norm": 0.3209503297683246,
      "learning_rate": 2.0186335403726707e-05,
      "loss": 0.5113,
      "step": 130
    },
    {
      "epoch": 0.12581547064305684,
      "grad_norm": 0.41553906221283793,
      "learning_rate": 2.096273291925466e-05,
      "loss": 0.5105,
      "step": 135
    },
    {
      "epoch": 0.13047530288909598,
      "grad_norm": 0.447991972215783,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.4951,
      "step": 140
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 0.36952665164899706,
      "learning_rate": 2.2515527950310562e-05,
      "loss": 0.522,
      "step": 145
    },
    {
      "epoch": 0.13979496738117428,
      "grad_norm": 0.43854865846501767,
      "learning_rate": 2.329192546583851e-05,
      "loss": 0.4958,
      "step": 150
    },
    {
      "epoch": 0.14445479962721341,
      "grad_norm": 0.4551787765980115,
      "learning_rate": 2.4068322981366462e-05,
      "loss": 0.5041,
      "step": 155
    },
    {
      "epoch": 0.14911463187325255,
      "grad_norm": 0.500295346094291,
      "learning_rate": 2.484472049689441e-05,
      "loss": 0.4847,
      "step": 160
    },
    {
      "epoch": 0.15377446411929171,
      "grad_norm": 0.4867357618004153,
      "learning_rate": 2.5621118012422362e-05,
      "loss": 0.4961,
      "step": 165
    },
    {
      "epoch": 0.15843429636533085,
      "grad_norm": 0.40984119146102166,
      "learning_rate": 2.639751552795031e-05,
      "loss": 0.4719,
      "step": 170
    },
    {
      "epoch": 0.16309412861137,
      "grad_norm": 0.4604406630630476,
      "learning_rate": 2.7173913043478262e-05,
      "loss": 0.4852,
      "step": 175
    },
    {
      "epoch": 0.16775396085740912,
      "grad_norm": 0.40948649685519667,
      "learning_rate": 2.795031055900621e-05,
      "loss": 0.478,
      "step": 180
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 0.3653652404773955,
      "learning_rate": 2.8726708074534165e-05,
      "loss": 0.4809,
      "step": 185
    },
    {
      "epoch": 0.17707362534948742,
      "grad_norm": 0.3788769696744858,
      "learning_rate": 2.9503105590062114e-05,
      "loss": 0.4907,
      "step": 190
    },
    {
      "epoch": 0.18173345759552656,
      "grad_norm": 0.4455345320827965,
      "learning_rate": 3.0279503105590062e-05,
      "loss": 0.4802,
      "step": 195
    },
    {
      "epoch": 0.1863932898415657,
      "grad_norm": 0.4872519077868042,
      "learning_rate": 3.1055900621118014e-05,
      "loss": 0.4818,
      "step": 200
    },
    {
      "epoch": 0.19105312208760486,
      "grad_norm": 0.5176809127208468,
      "learning_rate": 3.183229813664597e-05,
      "loss": 0.4707,
      "step": 205
    },
    {
      "epoch": 0.195712954333644,
      "grad_norm": 0.5721945537346641,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.4848,
      "step": 210
    },
    {
      "epoch": 0.20037278657968313,
      "grad_norm": 0.3770305001602024,
      "learning_rate": 3.3385093167701865e-05,
      "loss": 0.4669,
      "step": 215
    },
    {
      "epoch": 0.20503261882572227,
      "grad_norm": 0.4543945451638326,
      "learning_rate": 3.4161490683229814e-05,
      "loss": 0.4713,
      "step": 220
    },
    {
      "epoch": 0.2096924510717614,
      "grad_norm": 0.49369340193910277,
      "learning_rate": 3.493788819875777e-05,
      "loss": 0.4534,
      "step": 225
    },
    {
      "epoch": 0.21435228331780057,
      "grad_norm": 0.48464327291832504,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.4668,
      "step": 230
    },
    {
      "epoch": 0.2190121155638397,
      "grad_norm": 0.4111365129541678,
      "learning_rate": 3.6490683229813665e-05,
      "loss": 0.4644,
      "step": 235
    },
    {
      "epoch": 0.22367194780987884,
      "grad_norm": 0.6275047885479089,
      "learning_rate": 3.7267080745341614e-05,
      "loss": 0.4622,
      "step": 240
    },
    {
      "epoch": 0.22833178005591798,
      "grad_norm": 0.556206325520564,
      "learning_rate": 3.804347826086957e-05,
      "loss": 0.4674,
      "step": 245
    },
    {
      "epoch": 0.23299161230195714,
      "grad_norm": 0.4727137299853098,
      "learning_rate": 3.881987577639752e-05,
      "loss": 0.4592,
      "step": 250
    },
    {
      "epoch": 0.23765144454799628,
      "grad_norm": 0.5175385392810632,
      "learning_rate": 3.9596273291925465e-05,
      "loss": 0.4679,
      "step": 255
    },
    {
      "epoch": 0.2423112767940354,
      "grad_norm": 0.5522111161928154,
      "learning_rate": 4.0372670807453414e-05,
      "loss": 0.463,
      "step": 260
    },
    {
      "epoch": 0.24697110904007455,
      "grad_norm": 0.533406707658831,
      "learning_rate": 4.114906832298137e-05,
      "loss": 0.4613,
      "step": 265
    },
    {
      "epoch": 0.2516309412861137,
      "grad_norm": 0.5190269056190954,
      "learning_rate": 4.192546583850932e-05,
      "loss": 0.4596,
      "step": 270
    },
    {
      "epoch": 0.25629077353215285,
      "grad_norm": 0.5124788795006116,
      "learning_rate": 4.270186335403727e-05,
      "loss": 0.4654,
      "step": 275
    },
    {
      "epoch": 0.26095060577819196,
      "grad_norm": 0.6274404486819355,
      "learning_rate": 4.347826086956522e-05,
      "loss": 0.4576,
      "step": 280
    },
    {
      "epoch": 0.2656104380242311,
      "grad_norm": 0.4751490435936714,
      "learning_rate": 4.425465838509317e-05,
      "loss": 0.4623,
      "step": 285
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 0.7744692776296426,
      "learning_rate": 4.5031055900621124e-05,
      "loss": 0.4654,
      "step": 290
    },
    {
      "epoch": 0.2749301025163094,
      "grad_norm": 0.7800040335094921,
      "learning_rate": 4.580745341614907e-05,
      "loss": 0.4682,
      "step": 295
    },
    {
      "epoch": 0.27958993476234856,
      "grad_norm": 0.8281886923177356,
      "learning_rate": 4.658385093167702e-05,
      "loss": 0.4703,
      "step": 300
    },
    {
      "epoch": 0.2842497670083877,
      "grad_norm": 0.5858908773046568,
      "learning_rate": 4.736024844720497e-05,
      "loss": 0.4578,
      "step": 305
    },
    {
      "epoch": 0.28890959925442683,
      "grad_norm": 0.45985550866997615,
      "learning_rate": 4.8136645962732924e-05,
      "loss": 0.4497,
      "step": 310
    },
    {
      "epoch": 0.293569431500466,
      "grad_norm": 0.5286612419877652,
      "learning_rate": 4.891304347826087e-05,
      "loss": 0.4575,
      "step": 315
    },
    {
      "epoch": 0.2982292637465051,
      "grad_norm": 0.5552381000432096,
      "learning_rate": 4.968944099378882e-05,
      "loss": 0.4427,
      "step": 320
    },
    {
      "epoch": 0.30288909599254427,
      "grad_norm": 0.3693124262013228,
      "learning_rate": 4.994822229892993e-05,
      "loss": 0.4405,
      "step": 325
    },
    {
      "epoch": 0.30754892823858343,
      "grad_norm": 0.40179673300156293,
      "learning_rate": 4.986192613047981e-05,
      "loss": 0.4512,
      "step": 330
    },
    {
      "epoch": 0.31220876048462254,
      "grad_norm": 0.6909331905128122,
      "learning_rate": 4.977562996202969e-05,
      "loss": 0.4518,
      "step": 335
    },
    {
      "epoch": 0.3168685927306617,
      "grad_norm": 0.36848158859890456,
      "learning_rate": 4.968933379357957e-05,
      "loss": 0.4527,
      "step": 340
    },
    {
      "epoch": 0.32152842497670087,
      "grad_norm": 0.43881556176124087,
      "learning_rate": 4.9603037625129445e-05,
      "loss": 0.4475,
      "step": 345
    },
    {
      "epoch": 0.32618825722274,
      "grad_norm": 0.38991398427916507,
      "learning_rate": 4.951674145667933e-05,
      "loss": 0.461,
      "step": 350
    },
    {
      "epoch": 0.33084808946877914,
      "grad_norm": 0.47212015510204275,
      "learning_rate": 4.94304452882292e-05,
      "loss": 0.4472,
      "step": 355
    },
    {
      "epoch": 0.33550792171481825,
      "grad_norm": 0.44227904867586293,
      "learning_rate": 4.934414911977908e-05,
      "loss": 0.4448,
      "step": 360
    },
    {
      "epoch": 0.3401677539608574,
      "grad_norm": 0.4466830759596917,
      "learning_rate": 4.9257852951328965e-05,
      "loss": 0.4412,
      "step": 365
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 0.4019403755167571,
      "learning_rate": 4.917155678287884e-05,
      "loss": 0.4548,
      "step": 370
    },
    {
      "epoch": 0.3494874184529357,
      "grad_norm": 0.46020164939301034,
      "learning_rate": 4.908526061442872e-05,
      "loss": 0.441,
      "step": 375
    },
    {
      "epoch": 0.35414725069897485,
      "grad_norm": 0.41640820047800664,
      "learning_rate": 4.89989644459786e-05,
      "loss": 0.456,
      "step": 380
    },
    {
      "epoch": 0.35880708294501396,
      "grad_norm": 0.43251075009831547,
      "learning_rate": 4.891266827752848e-05,
      "loss": 0.4575,
      "step": 385
    },
    {
      "epoch": 0.3634669151910531,
      "grad_norm": 0.49587902382827737,
      "learning_rate": 4.882637210907836e-05,
      "loss": 0.4417,
      "step": 390
    },
    {
      "epoch": 0.3681267474370923,
      "grad_norm": 0.45767354851720066,
      "learning_rate": 4.874007594062824e-05,
      "loss": 0.4481,
      "step": 395
    },
    {
      "epoch": 0.3727865796831314,
      "grad_norm": 0.4109939412993998,
      "learning_rate": 4.865377977217811e-05,
      "loss": 0.4328,
      "step": 400
    },
    {
      "epoch": 0.37744641192917056,
      "grad_norm": 0.4267479303093156,
      "learning_rate": 4.8567483603728e-05,
      "loss": 0.4512,
      "step": 405
    },
    {
      "epoch": 0.3821062441752097,
      "grad_norm": 0.46022700390396754,
      "learning_rate": 4.8481187435277875e-05,
      "loss": 0.4473,
      "step": 410
    },
    {
      "epoch": 0.38676607642124883,
      "grad_norm": 0.5818788628854474,
      "learning_rate": 4.839489126682776e-05,
      "loss": 0.4338,
      "step": 415
    },
    {
      "epoch": 0.391425908667288,
      "grad_norm": 0.6348777601747888,
      "learning_rate": 4.830859509837763e-05,
      "loss": 0.4521,
      "step": 420
    },
    {
      "epoch": 0.3960857409133271,
      "grad_norm": 0.3406273073070538,
      "learning_rate": 4.822229892992751e-05,
      "loss": 0.4495,
      "step": 425
    },
    {
      "epoch": 0.40074557315936626,
      "grad_norm": 0.39490973933479806,
      "learning_rate": 4.8136002761477395e-05,
      "loss": 0.4364,
      "step": 430
    },
    {
      "epoch": 0.40540540540540543,
      "grad_norm": 0.5621814721849863,
      "learning_rate": 4.804970659302727e-05,
      "loss": 0.4497,
      "step": 435
    },
    {
      "epoch": 0.41006523765144454,
      "grad_norm": 0.5250828265047137,
      "learning_rate": 4.796341042457715e-05,
      "loss": 0.4438,
      "step": 440
    },
    {
      "epoch": 0.4147250698974837,
      "grad_norm": 0.49605694640709724,
      "learning_rate": 4.787711425612703e-05,
      "loss": 0.4384,
      "step": 445
    },
    {
      "epoch": 0.4193849021435228,
      "grad_norm": 0.49091569267750296,
      "learning_rate": 4.779081808767691e-05,
      "loss": 0.4292,
      "step": 450
    },
    {
      "epoch": 0.424044734389562,
      "grad_norm": 0.5449853039218947,
      "learning_rate": 4.770452191922679e-05,
      "loss": 0.4359,
      "step": 455
    },
    {
      "epoch": 0.42870456663560114,
      "grad_norm": 0.40595214751293,
      "learning_rate": 4.761822575077667e-05,
      "loss": 0.4327,
      "step": 460
    },
    {
      "epoch": 0.43336439888164024,
      "grad_norm": 0.4959979164579244,
      "learning_rate": 4.753192958232654e-05,
      "loss": 0.4297,
      "step": 465
    },
    {
      "epoch": 0.4380242311276794,
      "grad_norm": 0.42025573002718436,
      "learning_rate": 4.744563341387643e-05,
      "loss": 0.4426,
      "step": 470
    },
    {
      "epoch": 0.4426840633737186,
      "grad_norm": 0.4082690281031086,
      "learning_rate": 4.7359337245426306e-05,
      "loss": 0.432,
      "step": 475
    },
    {
      "epoch": 0.4473438956197577,
      "grad_norm": 0.46608619482702135,
      "learning_rate": 4.7273041076976184e-05,
      "loss": 0.4441,
      "step": 480
    },
    {
      "epoch": 0.45200372786579684,
      "grad_norm": 0.41782415569185494,
      "learning_rate": 4.718674490852606e-05,
      "loss": 0.4367,
      "step": 485
    },
    {
      "epoch": 0.45666356011183595,
      "grad_norm": 0.4159441336334524,
      "learning_rate": 4.710044874007594e-05,
      "loss": 0.431,
      "step": 490
    },
    {
      "epoch": 0.4613233923578751,
      "grad_norm": 0.4267635887639151,
      "learning_rate": 4.7014152571625826e-05,
      "loss": 0.437,
      "step": 495
    },
    {
      "epoch": 0.4659832246039143,
      "grad_norm": 0.536368733946403,
      "learning_rate": 4.6927856403175704e-05,
      "loss": 0.4398,
      "step": 500
    },
    {
      "epoch": 0.4706430568499534,
      "grad_norm": 0.38365822324922083,
      "learning_rate": 4.684156023472558e-05,
      "loss": 0.4264,
      "step": 505
    },
    {
      "epoch": 0.47530288909599255,
      "grad_norm": 0.49638181546901267,
      "learning_rate": 4.675526406627546e-05,
      "loss": 0.4349,
      "step": 510
    },
    {
      "epoch": 0.47996272134203166,
      "grad_norm": 0.6039980202672868,
      "learning_rate": 4.666896789782534e-05,
      "loss": 0.4329,
      "step": 515
    },
    {
      "epoch": 0.4846225535880708,
      "grad_norm": 0.531071639674577,
      "learning_rate": 4.658267172937522e-05,
      "loss": 0.4367,
      "step": 520
    },
    {
      "epoch": 0.48928238583411,
      "grad_norm": 0.41927620753314093,
      "learning_rate": 4.64963755609251e-05,
      "loss": 0.4356,
      "step": 525
    },
    {
      "epoch": 0.4939422180801491,
      "grad_norm": 0.49167935938181634,
      "learning_rate": 4.641007939247497e-05,
      "loss": 0.4324,
      "step": 530
    },
    {
      "epoch": 0.49860205032618826,
      "grad_norm": 0.35194515788154196,
      "learning_rate": 4.632378322402486e-05,
      "loss": 0.4327,
      "step": 535
    },
    {
      "epoch": 0.5032618825722274,
      "grad_norm": 0.3880164900857034,
      "learning_rate": 4.6237487055574736e-05,
      "loss": 0.4227,
      "step": 540
    },
    {
      "epoch": 0.5079217148182665,
      "grad_norm": 0.4647026652109177,
      "learning_rate": 4.6151190887124615e-05,
      "loss": 0.4289,
      "step": 545
    },
    {
      "epoch": 0.5125815470643057,
      "grad_norm": 0.4369224822745491,
      "learning_rate": 4.606489471867449e-05,
      "loss": 0.4292,
      "step": 550
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.3516777481993619,
      "learning_rate": 4.597859855022437e-05,
      "loss": 0.4236,
      "step": 555
    },
    {
      "epoch": 0.5219012115563839,
      "grad_norm": 0.4189980745198798,
      "learning_rate": 4.589230238177425e-05,
      "loss": 0.4442,
      "step": 560
    },
    {
      "epoch": 0.5265610438024231,
      "grad_norm": 0.4546880509441404,
      "learning_rate": 4.5806006213324134e-05,
      "loss": 0.4219,
      "step": 565
    },
    {
      "epoch": 0.5312208760484622,
      "grad_norm": 0.5415119247861921,
      "learning_rate": 4.5719710044874006e-05,
      "loss": 0.4218,
      "step": 570
    },
    {
      "epoch": 0.5358807082945014,
      "grad_norm": 0.4163996631718338,
      "learning_rate": 4.563341387642389e-05,
      "loss": 0.428,
      "step": 575
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 0.3619146303574906,
      "learning_rate": 4.554711770797377e-05,
      "loss": 0.428,
      "step": 580
    },
    {
      "epoch": 0.5452003727865797,
      "grad_norm": 0.4528885219717091,
      "learning_rate": 4.546082153952365e-05,
      "loss": 0.4248,
      "step": 585
    },
    {
      "epoch": 0.5498602050326188,
      "grad_norm": 0.42455500708425176,
      "learning_rate": 4.5374525371073526e-05,
      "loss": 0.4212,
      "step": 590
    },
    {
      "epoch": 0.554520037278658,
      "grad_norm": 0.39895122252871523,
      "learning_rate": 4.5288229202623404e-05,
      "loss": 0.4233,
      "step": 595
    },
    {
      "epoch": 0.5591798695246971,
      "grad_norm": 0.3990992719666481,
      "learning_rate": 4.520193303417328e-05,
      "loss": 0.4236,
      "step": 600
    },
    {
      "epoch": 0.5638397017707363,
      "grad_norm": 0.3553585945932701,
      "learning_rate": 4.511563686572317e-05,
      "loss": 0.4223,
      "step": 605
    },
    {
      "epoch": 0.5684995340167754,
      "grad_norm": 0.42046831338605023,
      "learning_rate": 4.5029340697273045e-05,
      "loss": 0.4334,
      "step": 610
    },
    {
      "epoch": 0.5731593662628145,
      "grad_norm": 0.34391259032310467,
      "learning_rate": 4.4943044528822923e-05,
      "loss": 0.42,
      "step": 615
    },
    {
      "epoch": 0.5778191985088537,
      "grad_norm": 0.4087272926784166,
      "learning_rate": 4.48567483603728e-05,
      "loss": 0.417,
      "step": 620
    },
    {
      "epoch": 0.5824790307548928,
      "grad_norm": 0.3589614143705352,
      "learning_rate": 4.477045219192268e-05,
      "loss": 0.4387,
      "step": 625
    },
    {
      "epoch": 0.587138863000932,
      "grad_norm": 0.3629824785143128,
      "learning_rate": 4.4684156023472565e-05,
      "loss": 0.4294,
      "step": 630
    },
    {
      "epoch": 0.5917986952469712,
      "grad_norm": 0.47917310854378126,
      "learning_rate": 4.4597859855022436e-05,
      "loss": 0.4339,
      "step": 635
    },
    {
      "epoch": 0.5964585274930102,
      "grad_norm": 0.4782662682845663,
      "learning_rate": 4.4511563686572315e-05,
      "loss": 0.4247,
      "step": 640
    },
    {
      "epoch": 0.6011183597390494,
      "grad_norm": 0.471784972910924,
      "learning_rate": 4.44252675181222e-05,
      "loss": 0.4243,
      "step": 645
    },
    {
      "epoch": 0.6057781919850885,
      "grad_norm": 0.42122505647880576,
      "learning_rate": 4.433897134967208e-05,
      "loss": 0.433,
      "step": 650
    },
    {
      "epoch": 0.6104380242311277,
      "grad_norm": 0.4025673330141692,
      "learning_rate": 4.4252675181221956e-05,
      "loss": 0.4306,
      "step": 655
    },
    {
      "epoch": 0.6150978564771669,
      "grad_norm": 0.33637741802274024,
      "learning_rate": 4.4166379012771834e-05,
      "loss": 0.4124,
      "step": 660
    },
    {
      "epoch": 0.6197576887232059,
      "grad_norm": 0.46827398668452175,
      "learning_rate": 4.408008284432171e-05,
      "loss": 0.4132,
      "step": 665
    },
    {
      "epoch": 0.6244175209692451,
      "grad_norm": 0.4372278275475368,
      "learning_rate": 4.39937866758716e-05,
      "loss": 0.4118,
      "step": 670
    },
    {
      "epoch": 0.6290773532152842,
      "grad_norm": 0.41420214852536175,
      "learning_rate": 4.3907490507421476e-05,
      "loss": 0.4204,
      "step": 675
    },
    {
      "epoch": 0.6337371854613234,
      "grad_norm": 0.4227796669483225,
      "learning_rate": 4.382119433897135e-05,
      "loss": 0.4233,
      "step": 680
    },
    {
      "epoch": 0.6383970177073626,
      "grad_norm": 0.42862178932562206,
      "learning_rate": 4.373489817052123e-05,
      "loss": 0.4329,
      "step": 685
    },
    {
      "epoch": 0.6430568499534017,
      "grad_norm": 0.4382533539684448,
      "learning_rate": 4.364860200207111e-05,
      "loss": 0.4151,
      "step": 690
    },
    {
      "epoch": 0.6477166821994408,
      "grad_norm": 0.35948786609504135,
      "learning_rate": 4.356230583362099e-05,
      "loss": 0.4202,
      "step": 695
    },
    {
      "epoch": 0.65237651444548,
      "grad_norm": 0.35194142984278265,
      "learning_rate": 4.347600966517087e-05,
      "loss": 0.4288,
      "step": 700
    },
    {
      "epoch": 0.6570363466915191,
      "grad_norm": 0.4108441539595895,
      "learning_rate": 4.3389713496720745e-05,
      "loss": 0.4223,
      "step": 705
    },
    {
      "epoch": 0.6616961789375583,
      "grad_norm": 0.48807209838383736,
      "learning_rate": 4.330341732827063e-05,
      "loss": 0.4229,
      "step": 710
    },
    {
      "epoch": 0.6663560111835974,
      "grad_norm": 0.4177025446331477,
      "learning_rate": 4.321712115982051e-05,
      "loss": 0.4114,
      "step": 715
    },
    {
      "epoch": 0.6710158434296365,
      "grad_norm": 0.3755176399576921,
      "learning_rate": 4.3130824991370387e-05,
      "loss": 0.4217,
      "step": 720
    },
    {
      "epoch": 0.6756756756756757,
      "grad_norm": 0.36461380313474906,
      "learning_rate": 4.3044528822920265e-05,
      "loss": 0.4205,
      "step": 725
    },
    {
      "epoch": 0.6803355079217148,
      "grad_norm": 0.4251420641738401,
      "learning_rate": 4.295823265447014e-05,
      "loss": 0.4153,
      "step": 730
    },
    {
      "epoch": 0.684995340167754,
      "grad_norm": 0.4003093585866478,
      "learning_rate": 4.287193648602002e-05,
      "loss": 0.4193,
      "step": 735
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.34876369977589905,
      "learning_rate": 4.27856403175699e-05,
      "loss": 0.4129,
      "step": 740
    },
    {
      "epoch": 0.6943150046598322,
      "grad_norm": 0.3707569691409881,
      "learning_rate": 4.269934414911978e-05,
      "loss": 0.4087,
      "step": 745
    },
    {
      "epoch": 0.6989748369058714,
      "grad_norm": 0.4316626941569482,
      "learning_rate": 4.261304798066966e-05,
      "loss": 0.4135,
      "step": 750
    },
    {
      "epoch": 0.7036346691519105,
      "grad_norm": 0.39303618105943594,
      "learning_rate": 4.252675181221954e-05,
      "loss": 0.4159,
      "step": 755
    },
    {
      "epoch": 0.7082945013979497,
      "grad_norm": 0.4267487540956833,
      "learning_rate": 4.244045564376942e-05,
      "loss": 0.419,
      "step": 760
    },
    {
      "epoch": 0.7129543336439889,
      "grad_norm": 0.3664167792787496,
      "learning_rate": 4.23541594753193e-05,
      "loss": 0.403,
      "step": 765
    },
    {
      "epoch": 0.7176141658900279,
      "grad_norm": 0.3577500899585447,
      "learning_rate": 4.2267863306869176e-05,
      "loss": 0.4122,
      "step": 770
    },
    {
      "epoch": 0.7222739981360671,
      "grad_norm": 0.39507959012374977,
      "learning_rate": 4.2181567138419054e-05,
      "loss": 0.4249,
      "step": 775
    },
    {
      "epoch": 0.7269338303821062,
      "grad_norm": 0.3688889563306457,
      "learning_rate": 4.209527096996894e-05,
      "loss": 0.4094,
      "step": 780
    },
    {
      "epoch": 0.7315936626281454,
      "grad_norm": 0.3846331698540081,
      "learning_rate": 4.200897480151881e-05,
      "loss": 0.4167,
      "step": 785
    },
    {
      "epoch": 0.7362534948741846,
      "grad_norm": 0.43990146629985694,
      "learning_rate": 4.1922678633068695e-05,
      "loss": 0.42,
      "step": 790
    },
    {
      "epoch": 0.7409133271202236,
      "grad_norm": 0.4264573932403777,
      "learning_rate": 4.1836382464618573e-05,
      "loss": 0.4142,
      "step": 795
    },
    {
      "epoch": 0.7455731593662628,
      "grad_norm": 0.43741863957364285,
      "learning_rate": 4.175008629616845e-05,
      "loss": 0.4124,
      "step": 800
    },
    {
      "epoch": 0.750232991612302,
      "grad_norm": 0.38911299384649556,
      "learning_rate": 4.166379012771833e-05,
      "loss": 0.4122,
      "step": 805
    },
    {
      "epoch": 0.7548928238583411,
      "grad_norm": 0.34798977131970454,
      "learning_rate": 4.157749395926821e-05,
      "loss": 0.4304,
      "step": 810
    },
    {
      "epoch": 0.7595526561043803,
      "grad_norm": 0.360973732965553,
      "learning_rate": 4.1491197790818086e-05,
      "loss": 0.4048,
      "step": 815
    },
    {
      "epoch": 0.7642124883504194,
      "grad_norm": 0.5342748114831669,
      "learning_rate": 4.140490162236797e-05,
      "loss": 0.4202,
      "step": 820
    },
    {
      "epoch": 0.7688723205964585,
      "grad_norm": 0.4164036109796091,
      "learning_rate": 4.131860545391785e-05,
      "loss": 0.4124,
      "step": 825
    },
    {
      "epoch": 0.7735321528424977,
      "grad_norm": 0.3661344541633312,
      "learning_rate": 4.123230928546773e-05,
      "loss": 0.4071,
      "step": 830
    },
    {
      "epoch": 0.7781919850885368,
      "grad_norm": 0.3754811730264276,
      "learning_rate": 4.1146013117017606e-05,
      "loss": 0.4142,
      "step": 835
    },
    {
      "epoch": 0.782851817334576,
      "grad_norm": 0.419200409043472,
      "learning_rate": 4.1059716948567484e-05,
      "loss": 0.4173,
      "step": 840
    },
    {
      "epoch": 0.7875116495806151,
      "grad_norm": 0.43893714450759036,
      "learning_rate": 4.097342078011737e-05,
      "loss": 0.4253,
      "step": 845
    },
    {
      "epoch": 0.7921714818266542,
      "grad_norm": 0.41322614598162755,
      "learning_rate": 4.088712461166724e-05,
      "loss": 0.4151,
      "step": 850
    },
    {
      "epoch": 0.7968313140726934,
      "grad_norm": 0.5117391056019034,
      "learning_rate": 4.080082844321712e-05,
      "loss": 0.417,
      "step": 855
    },
    {
      "epoch": 0.8014911463187325,
      "grad_norm": 0.39482750255468374,
      "learning_rate": 4.0714532274767004e-05,
      "loss": 0.4239,
      "step": 860
    },
    {
      "epoch": 0.8061509785647717,
      "grad_norm": 0.35016978679527583,
      "learning_rate": 4.062823610631688e-05,
      "loss": 0.405,
      "step": 865
    },
    {
      "epoch": 0.8108108108108109,
      "grad_norm": 0.32094903787924467,
      "learning_rate": 4.054193993786676e-05,
      "loss": 0.4194,
      "step": 870
    },
    {
      "epoch": 0.8154706430568499,
      "grad_norm": 0.36889819669973173,
      "learning_rate": 4.045564376941664e-05,
      "loss": 0.4149,
      "step": 875
    },
    {
      "epoch": 0.8201304753028891,
      "grad_norm": 0.3880605400422551,
      "learning_rate": 4.036934760096652e-05,
      "loss": 0.4059,
      "step": 880
    },
    {
      "epoch": 0.8247903075489282,
      "grad_norm": 0.377773246265045,
      "learning_rate": 4.02830514325164e-05,
      "loss": 0.42,
      "step": 885
    },
    {
      "epoch": 0.8294501397949674,
      "grad_norm": 0.38824868030619064,
      "learning_rate": 4.019675526406628e-05,
      "loss": 0.4067,
      "step": 890
    },
    {
      "epoch": 0.8341099720410066,
      "grad_norm": 0.34547566635591975,
      "learning_rate": 4.011045909561615e-05,
      "loss": 0.4261,
      "step": 895
    },
    {
      "epoch": 0.8387698042870456,
      "grad_norm": 0.3630734939739242,
      "learning_rate": 4.0024162927166037e-05,
      "loss": 0.409,
      "step": 900
    },
    {
      "epoch": 0.8434296365330848,
      "grad_norm": 0.3215149282810052,
      "learning_rate": 3.9937866758715915e-05,
      "loss": 0.4105,
      "step": 905
    },
    {
      "epoch": 0.848089468779124,
      "grad_norm": 0.5156466838457244,
      "learning_rate": 3.98515705902658e-05,
      "loss": 0.4167,
      "step": 910
    },
    {
      "epoch": 0.8527493010251631,
      "grad_norm": 0.58719622036636,
      "learning_rate": 3.976527442181567e-05,
      "loss": 0.4094,
      "step": 915
    },
    {
      "epoch": 0.8574091332712023,
      "grad_norm": 0.44663910672199086,
      "learning_rate": 3.967897825336555e-05,
      "loss": 0.4158,
      "step": 920
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 0.3593752942361397,
      "learning_rate": 3.9592682084915434e-05,
      "loss": 0.4024,
      "step": 925
    },
    {
      "epoch": 0.8667287977632805,
      "grad_norm": 0.5114184723138864,
      "learning_rate": 3.950638591646531e-05,
      "loss": 0.402,
      "step": 930
    },
    {
      "epoch": 0.8713886300093197,
      "grad_norm": 0.3926412761807418,
      "learning_rate": 3.942008974801519e-05,
      "loss": 0.4139,
      "step": 935
    },
    {
      "epoch": 0.8760484622553588,
      "grad_norm": 0.3111102224238461,
      "learning_rate": 3.933379357956507e-05,
      "loss": 0.4095,
      "step": 940
    },
    {
      "epoch": 0.880708294501398,
      "grad_norm": 0.42810427582045707,
      "learning_rate": 3.924749741111495e-05,
      "loss": 0.4117,
      "step": 945
    },
    {
      "epoch": 0.8853681267474371,
      "grad_norm": 0.48569011616159047,
      "learning_rate": 3.916120124266483e-05,
      "loss": 0.4195,
      "step": 950
    },
    {
      "epoch": 0.8900279589934762,
      "grad_norm": 0.4190117006397043,
      "learning_rate": 3.9074905074214704e-05,
      "loss": 0.4184,
      "step": 955
    },
    {
      "epoch": 0.8946877912395154,
      "grad_norm": 0.33914591782346504,
      "learning_rate": 3.898860890576458e-05,
      "loss": 0.4137,
      "step": 960
    },
    {
      "epoch": 0.8993476234855545,
      "grad_norm": 0.43035587029435407,
      "learning_rate": 3.890231273731447e-05,
      "loss": 0.4053,
      "step": 965
    },
    {
      "epoch": 0.9040074557315937,
      "grad_norm": 0.3101483588886324,
      "learning_rate": 3.8816016568864345e-05,
      "loss": 0.4031,
      "step": 970
    },
    {
      "epoch": 0.9086672879776329,
      "grad_norm": 0.3322568146796224,
      "learning_rate": 3.8729720400414224e-05,
      "loss": 0.422,
      "step": 975
    },
    {
      "epoch": 0.9133271202236719,
      "grad_norm": 0.3917813978291434,
      "learning_rate": 3.86434242319641e-05,
      "loss": 0.4091,
      "step": 980
    },
    {
      "epoch": 0.9179869524697111,
      "grad_norm": 0.4742078695343531,
      "learning_rate": 3.855712806351398e-05,
      "loss": 0.4103,
      "step": 985
    },
    {
      "epoch": 0.9226467847157502,
      "grad_norm": 0.45678911380863857,
      "learning_rate": 3.8470831895063865e-05,
      "loss": 0.4079,
      "step": 990
    },
    {
      "epoch": 0.9273066169617894,
      "grad_norm": 0.4015797894562155,
      "learning_rate": 3.838453572661374e-05,
      "loss": 0.4036,
      "step": 995
    },
    {
      "epoch": 0.9319664492078286,
      "grad_norm": 0.4393074039612925,
      "learning_rate": 3.8298239558163615e-05,
      "loss": 0.4066,
      "step": 1000
    },
    {
      "epoch": 0.9366262814538676,
      "grad_norm": 0.401959095434801,
      "learning_rate": 3.82119433897135e-05,
      "loss": 0.4113,
      "step": 1005
    },
    {
      "epoch": 0.9412861136999068,
      "grad_norm": 0.38718845936575524,
      "learning_rate": 3.812564722126338e-05,
      "loss": 0.4042,
      "step": 1010
    },
    {
      "epoch": 0.9459459459459459,
      "grad_norm": 0.40104704083420933,
      "learning_rate": 3.8039351052813256e-05,
      "loss": 0.4126,
      "step": 1015
    },
    {
      "epoch": 0.9506057781919851,
      "grad_norm": 0.4415733445950018,
      "learning_rate": 3.7953054884363134e-05,
      "loss": 0.4063,
      "step": 1020
    },
    {
      "epoch": 0.9552656104380243,
      "grad_norm": 0.37032743287501774,
      "learning_rate": 3.786675871591301e-05,
      "loss": 0.41,
      "step": 1025
    },
    {
      "epoch": 0.9599254426840633,
      "grad_norm": 0.3024002245710326,
      "learning_rate": 3.77804625474629e-05,
      "loss": 0.4151,
      "step": 1030
    },
    {
      "epoch": 0.9645852749301025,
      "grad_norm": 0.43466697925061876,
      "learning_rate": 3.7694166379012776e-05,
      "loss": 0.4074,
      "step": 1035
    },
    {
      "epoch": 0.9692451071761417,
      "grad_norm": 0.4045497692524809,
      "learning_rate": 3.7607870210562654e-05,
      "loss": 0.4051,
      "step": 1040
    },
    {
      "epoch": 0.9739049394221808,
      "grad_norm": 0.2870249098276766,
      "learning_rate": 3.752157404211253e-05,
      "loss": 0.3975,
      "step": 1045
    },
    {
      "epoch": 0.97856477166822,
      "grad_norm": 0.44417765948538723,
      "learning_rate": 3.743527787366241e-05,
      "loss": 0.4273,
      "step": 1050
    },
    {
      "epoch": 0.983224603914259,
      "grad_norm": 0.3866227301554364,
      "learning_rate": 3.734898170521229e-05,
      "loss": 0.4049,
      "step": 1055
    },
    {
      "epoch": 0.9878844361602982,
      "grad_norm": 0.3834591341930073,
      "learning_rate": 3.7262685536762174e-05,
      "loss": 0.4041,
      "step": 1060
    },
    {
      "epoch": 0.9925442684063374,
      "grad_norm": 0.3608189448212277,
      "learning_rate": 3.7176389368312045e-05,
      "loss": 0.4082,
      "step": 1065
    },
    {
      "epoch": 0.9972041006523765,
      "grad_norm": 0.4677480050799797,
      "learning_rate": 3.709009319986193e-05,
      "loss": 0.4086,
      "step": 1070
    }
  ],
  "logging_steps": 5,
  "max_steps": 3219,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.186429923093381e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}