| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0, |
| "eval_steps": 500, |
| "global_step": 1073, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0046641791044776115, |
| "grad_norm": 1.9791202613768635, |
| "learning_rate": 4.6296296296296296e-06, |
| "loss": 0.8462, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.009328358208955223, |
| "grad_norm": 1.4305500674160407, |
| "learning_rate": 9.259259259259259e-06, |
| "loss": 0.8236, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.013992537313432836, |
| "grad_norm": 1.1206530311176968, |
| "learning_rate": 1.388888888888889e-05, |
| "loss": 0.7682, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.018656716417910446, |
| "grad_norm": 0.6720846360747776, |
| "learning_rate": 1.8518518518518518e-05, |
| "loss": 0.7273, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.02332089552238806, |
| "grad_norm": 0.5048504624958363, |
| "learning_rate": 2.314814814814815e-05, |
| "loss": 0.6963, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.027985074626865673, |
| "grad_norm": 0.459675485239456, |
| "learning_rate": 2.777777777777778e-05, |
| "loss": 0.6737, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.03264925373134328, |
| "grad_norm": 0.4155808328686281, |
| "learning_rate": 3.240740740740741e-05, |
| "loss": 0.6448, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.03731343283582089, |
| "grad_norm": 0.42045736753620744, |
| "learning_rate": 3.7037037037037037e-05, |
| "loss": 0.6378, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04197761194029851, |
| "grad_norm": 0.40302027504447946, |
| "learning_rate": 4.166666666666667e-05, |
| "loss": 0.6333, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.04664179104477612, |
| "grad_norm": 0.3893242949214179, |
| "learning_rate": 4.62962962962963e-05, |
| "loss": 0.6148, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.051305970149253734, |
| "grad_norm": 0.399280608624726, |
| "learning_rate": 4.999989285883431e-05, |
| "loss": 0.6341, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.055970149253731345, |
| "grad_norm": 0.42810480822035357, |
| "learning_rate": 4.999614302517356e-05, |
| "loss": 0.6272, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.06063432835820896, |
| "grad_norm": 0.39853587993512773, |
| "learning_rate": 4.99870371535606e-05, |
| "loss": 0.6059, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.06529850746268656, |
| "grad_norm": 0.3862224380592072, |
| "learning_rate": 4.997257741198456e-05, |
| "loss": 0.635, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.06996268656716417, |
| "grad_norm": 0.4763664268940608, |
| "learning_rate": 4.9952767243121146e-05, |
| "loss": 0.6122, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.07462686567164178, |
| "grad_norm": 0.4964064315332045, |
| "learning_rate": 4.992761136351291e-05, |
| "loss": 0.6076, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.07929104477611941, |
| "grad_norm": 0.5168599776716694, |
| "learning_rate": 4.989711576244639e-05, |
| "loss": 0.6, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.08395522388059702, |
| "grad_norm": 0.41578311172819316, |
| "learning_rate": 4.986128770052603e-05, |
| "loss": 0.5894, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.08861940298507463, |
| "grad_norm": 0.4718528588737532, |
| "learning_rate": 4.9820135707945634e-05, |
| "loss": 0.5914, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.09328358208955224, |
| "grad_norm": 0.49638008772305653, |
| "learning_rate": 4.9773669582457364e-05, |
| "loss": 0.6045, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.09794776119402986, |
| "grad_norm": 0.44140653927951257, |
| "learning_rate": 4.972190038703905e-05, |
| "loss": 0.6098, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.10261194029850747, |
| "grad_norm": 0.42118133939437635, |
| "learning_rate": 4.966484044726024e-05, |
| "loss": 0.5969, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.10727611940298508, |
| "grad_norm": 0.45053532317430645, |
| "learning_rate": 4.9602503348347625e-05, |
| "loss": 0.5874, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.11194029850746269, |
| "grad_norm": 0.4793106564516387, |
| "learning_rate": 4.953490393195063e-05, |
| "loss": 0.5948, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.1166044776119403, |
| "grad_norm": 0.4880433070764506, |
| "learning_rate": 4.9462058292607735e-05, |
| "loss": 0.5921, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.12126865671641791, |
| "grad_norm": 0.4235522421922073, |
| "learning_rate": 4.938398377391461e-05, |
| "loss": 0.5849, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.1259328358208955, |
| "grad_norm": 0.399275985838528, |
| "learning_rate": 4.930069896439485e-05, |
| "loss": 0.5846, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.13059701492537312, |
| "grad_norm": 0.5344724244547288, |
| "learning_rate": 4.921222369307427e-05, |
| "loss": 0.5867, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.13526119402985073, |
| "grad_norm": 0.4368309623159553, |
| "learning_rate": 4.9118579024759854e-05, |
| "loss": 0.5891, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.13992537313432835, |
| "grad_norm": 0.471415466306611, |
| "learning_rate": 4.901978725502454e-05, |
| "loss": 0.5729, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.14458955223880596, |
| "grad_norm": 0.42978553492277366, |
| "learning_rate": 4.891587190489891e-05, |
| "loss": 0.5775, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.14925373134328357, |
| "grad_norm": 0.42206017886782643, |
| "learning_rate": 4.880685771527114e-05, |
| "loss": 0.5853, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.15391791044776118, |
| "grad_norm": 0.3845543469543206, |
| "learning_rate": 4.869277064099654e-05, |
| "loss": 0.5811, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.15858208955223882, |
| "grad_norm": 0.41035761845429175, |
| "learning_rate": 4.8573637844718e-05, |
| "loss": 0.5916, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.16324626865671643, |
| "grad_norm": 0.4026284324321276, |
| "learning_rate": 4.844948769039896e-05, |
| "loss": 0.5857, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.16791044776119404, |
| "grad_norm": 0.3918826778611494, |
| "learning_rate": 4.83203497365703e-05, |
| "loss": 0.5812, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.17257462686567165, |
| "grad_norm": 0.39449144634351974, |
| "learning_rate": 4.818625472929286e-05, |
| "loss": 0.5741, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.17723880597014927, |
| "grad_norm": 0.3478654496926465, |
| "learning_rate": 4.8047234594837143e-05, |
| "loss": 0.5677, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.18190298507462688, |
| "grad_norm": 0.42972864545531936, |
| "learning_rate": 4.7903322432082185e-05, |
| "loss": 0.568, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.1865671641791045, |
| "grad_norm": 0.36762920021108547, |
| "learning_rate": 4.775455250463507e-05, |
| "loss": 0.5678, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.1912313432835821, |
| "grad_norm": 0.41776776027606394, |
| "learning_rate": 4.760096023267322e-05, |
| "loss": 0.5815, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.1958955223880597, |
| "grad_norm": 0.3651361955796744, |
| "learning_rate": 4.744258218451135e-05, |
| "loss": 0.5732, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.20055970149253732, |
| "grad_norm": 0.507956038275049, |
| "learning_rate": 4.7279456067895e-05, |
| "loss": 0.5708, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.20522388059701493, |
| "grad_norm": 0.38678146760762727, |
| "learning_rate": 4.71116207210228e-05, |
| "loss": 0.5667, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.20988805970149255, |
| "grad_norm": 0.3746088635815425, |
| "learning_rate": 4.6939116103299655e-05, |
| "loss": 0.5594, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.21455223880597016, |
| "grad_norm": 0.4210888636990193, |
| "learning_rate": 4.676198328582288e-05, |
| "loss": 0.5684, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.21921641791044777, |
| "grad_norm": 0.37863146424275174, |
| "learning_rate": 4.6580264441603724e-05, |
| "loss": 0.5754, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.22388059701492538, |
| "grad_norm": 0.3812578385049777, |
| "learning_rate": 4.6394002835526535e-05, |
| "loss": 0.5745, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.228544776119403, |
| "grad_norm": 0.3755525320196136, |
| "learning_rate": 4.6203242814047946e-05, |
| "loss": 0.5594, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.2332089552238806, |
| "grad_norm": 0.35892802480588903, |
| "learning_rate": 4.6008029794638596e-05, |
| "loss": 0.5598, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.23787313432835822, |
| "grad_norm": 0.34690780233039625, |
| "learning_rate": 4.580841025496974e-05, |
| "loss": 0.5574, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.24253731343283583, |
| "grad_norm": 0.34148169877209095, |
| "learning_rate": 4.560443172184763e-05, |
| "loss": 0.5667, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.24720149253731344, |
| "grad_norm": 0.32369902312164034, |
| "learning_rate": 4.539614275989793e-05, |
| "loss": 0.5551, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.251865671641791, |
| "grad_norm": 0.33514891723560625, |
| "learning_rate": 4.5183592960003104e-05, |
| "loss": 0.5539, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.25652985074626866, |
| "grad_norm": 0.4073054681680018, |
| "learning_rate": 4.496683292749555e-05, |
| "loss": 0.5583, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.26119402985074625, |
| "grad_norm": 0.39594817481956696, |
| "learning_rate": 4.4745914270109055e-05, |
| "loss": 0.5616, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.2658582089552239, |
| "grad_norm": 0.3005600705660207, |
| "learning_rate": 4.4520889585691705e-05, |
| "loss": 0.5669, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.27052238805970147, |
| "grad_norm": 0.32063856721430456, |
| "learning_rate": 4.429181244968301e-05, |
| "loss": 0.551, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.2751865671641791, |
| "grad_norm": 0.29805045488753207, |
| "learning_rate": 4.4058737402358295e-05, |
| "loss": 0.5452, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.2798507462686567, |
| "grad_norm": 0.38405299320875, |
| "learning_rate": 4.38217199358434e-05, |
| "loss": 0.5612, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.28451492537313433, |
| "grad_norm": 0.33664016303790095, |
| "learning_rate": 4.3580816480902656e-05, |
| "loss": 0.5454, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.2891791044776119, |
| "grad_norm": 0.3342664338536646, |
| "learning_rate": 4.3336084393503545e-05, |
| "loss": 0.5538, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.29384328358208955, |
| "grad_norm": 0.35017572756661175, |
| "learning_rate": 4.308758194116094e-05, |
| "loss": 0.5565, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.29850746268656714, |
| "grad_norm": 0.40426731913580016, |
| "learning_rate": 4.283536828906436e-05, |
| "loss": 0.5679, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.3031716417910448, |
| "grad_norm": 0.36961111936209573, |
| "learning_rate": 4.2579503485991567e-05, |
| "loss": 0.5523, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.30783582089552236, |
| "grad_norm": 0.37687261195329114, |
| "learning_rate": 4.2320048450011684e-05, |
| "loss": 0.5591, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.3125, |
| "grad_norm": 0.4162442205962276, |
| "learning_rate": 4.205706495398143e-05, |
| "loss": 0.5547, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.31716417910447764, |
| "grad_norm": 0.34051096681409126, |
| "learning_rate": 4.179061561083777e-05, |
| "loss": 0.5406, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.3218283582089552, |
| "grad_norm": 0.3420797027133927, |
| "learning_rate": 4.1520763858690644e-05, |
| "loss": 0.5553, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.32649253731343286, |
| "grad_norm": 0.3352653830273245, |
| "learning_rate": 4.124757394571914e-05, |
| "loss": 0.5517, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.33115671641791045, |
| "grad_norm": 0.3248618140478854, |
| "learning_rate": 4.097111091487486e-05, |
| "loss": 0.5455, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.3358208955223881, |
| "grad_norm": 0.38398590733518817, |
| "learning_rate": 4.069144058839605e-05, |
| "loss": 0.5631, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.34048507462686567, |
| "grad_norm": 0.328922480165823, |
| "learning_rate": 4.040862955213615e-05, |
| "loss": 0.5571, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.3451492537313433, |
| "grad_norm": 0.3202207126152803, |
| "learning_rate": 4.012274513971061e-05, |
| "loss": 0.5592, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.3498134328358209, |
| "grad_norm": 0.3498933076665203, |
| "learning_rate": 3.9833855416465624e-05, |
| "loss": 0.5468, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.35447761194029853, |
| "grad_norm": 0.3457169453229565, |
| "learning_rate": 3.954202916327264e-05, |
| "loss": 0.5458, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.3591417910447761, |
| "grad_norm": 0.3049103435082426, |
| "learning_rate": 3.924733586015257e-05, |
| "loss": 0.5443, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.36380597014925375, |
| "grad_norm": 0.30856535885379005, |
| "learning_rate": 3.894984566973346e-05, |
| "loss": 0.5477, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.36847014925373134, |
| "grad_norm": 0.3344038208963223, |
| "learning_rate": 3.864962942054572e-05, |
| "loss": 0.5543, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.373134328358209, |
| "grad_norm": 0.32351896377380496, |
| "learning_rate": 3.834675859015876e-05, |
| "loss": 0.546, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.37779850746268656, |
| "grad_norm": 0.32892338756590944, |
| "learning_rate": 3.804130528816312e-05, |
| "loss": 0.5486, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.3824626865671642, |
| "grad_norm": 0.3199167315472929, |
| "learning_rate": 3.77333422390021e-05, |
| "loss": 0.534, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.3871268656716418, |
| "grad_norm": 0.2986495023865257, |
| "learning_rate": 3.7422942764657054e-05, |
| "loss": 0.5454, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.3917910447761194, |
| "grad_norm": 0.33847840678383695, |
| "learning_rate": 3.711018076719034e-05, |
| "loss": 0.5505, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.396455223880597, |
| "grad_norm": 0.32240507890071096, |
| "learning_rate": 3.679513071115025e-05, |
| "loss": 0.5537, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.40111940298507465, |
| "grad_norm": 0.31943339464931536, |
| "learning_rate": 3.647786760584194e-05, |
| "loss": 0.5508, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.40578358208955223, |
| "grad_norm": 0.3297548396557319, |
| "learning_rate": 3.615846698746869e-05, |
| "loss": 0.5544, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.41044776119402987, |
| "grad_norm": 0.32095008956501014, |
| "learning_rate": 3.583700490114776e-05, |
| "loss": 0.5468, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.41511194029850745, |
| "grad_norm": 0.2954412311074952, |
| "learning_rate": 3.5513557882805e-05, |
| "loss": 0.5451, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.4197761194029851, |
| "grad_norm": 0.35657724937539037, |
| "learning_rate": 3.518820294095267e-05, |
| "loss": 0.5477, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.4244402985074627, |
| "grad_norm": 0.32588100741949194, |
| "learning_rate": 3.486101753835468e-05, |
| "loss": 0.5405, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.4291044776119403, |
| "grad_norm": 0.31836225652258193, |
| "learning_rate": 3.453207957358377e-05, |
| "loss": 0.5364, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.4337686567164179, |
| "grad_norm": 0.2667312497141442, |
| "learning_rate": 3.420146736247487e-05, |
| "loss": 0.5273, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.43843283582089554, |
| "grad_norm": 0.32043847430305067, |
| "learning_rate": 3.386925961947906e-05, |
| "loss": 0.5513, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.4430970149253731, |
| "grad_norm": 0.32874167757362127, |
| "learning_rate": 3.353553543892277e-05, |
| "loss": 0.5507, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.44776119402985076, |
| "grad_norm": 0.33258043796881387, |
| "learning_rate": 3.320037427617639e-05, |
| "loss": 0.5427, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.45242537313432835, |
| "grad_norm": 0.3020542500158025, |
| "learning_rate": 3.2863855928737026e-05, |
| "loss": 0.5375, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.457089552238806, |
| "grad_norm": 0.2999989976107512, |
| "learning_rate": 3.252606051722972e-05, |
| "loss": 0.552, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.46175373134328357, |
| "grad_norm": 0.3135899658936416, |
| "learning_rate": 3.218706846633183e-05, |
| "loss": 0.5361, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.4664179104477612, |
| "grad_norm": 0.31783999641909394, |
| "learning_rate": 3.1846960485624886e-05, |
| "loss": 0.5321, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.4710820895522388, |
| "grad_norm": 0.3140003079271038, |
| "learning_rate": 3.150581755037877e-05, |
| "loss": 0.5444, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.47574626865671643, |
| "grad_norm": 0.2851153733261266, |
| "learning_rate": 3.1163720882272516e-05, |
| "loss": 0.5391, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.480410447761194, |
| "grad_norm": 0.28512732601606067, |
| "learning_rate": 3.08207519300565e-05, |
| "loss": 0.5379, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.48507462686567165, |
| "grad_norm": 0.2823127624665095, |
| "learning_rate": 3.047699235016056e-05, |
| "loss": 0.5294, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.48973880597014924, |
| "grad_norm": 0.32198571530210485, |
| "learning_rate": 3.0132523987252658e-05, |
| "loss": 0.5327, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.4944029850746269, |
| "grad_norm": 0.287820704906656, |
| "learning_rate": 2.9787428854752736e-05, |
| "loss": 0.5229, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.49906716417910446, |
| "grad_norm": 0.2972454504515111, |
| "learning_rate": 2.9441789115306402e-05, |
| "loss": 0.5444, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.503731343283582, |
| "grad_norm": 0.29514050211964044, |
| "learning_rate": 2.9095687061223058e-05, |
| "loss": 0.5405, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.5083955223880597, |
| "grad_norm": 0.3223030195906274, |
| "learning_rate": 2.874920509488319e-05, |
| "loss": 0.5436, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.5130597014925373, |
| "grad_norm": 0.331237041377847, |
| "learning_rate": 2.8402425709119435e-05, |
| "loss": 0.5386, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.5177238805970149, |
| "grad_norm": 0.28692038439269246, |
| "learning_rate": 2.8055431467576106e-05, |
| "loss": 0.5366, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.5223880597014925, |
| "grad_norm": 0.30635958523804346, |
| "learning_rate": 2.7708304985051868e-05, |
| "loss": 0.54, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.5270522388059702, |
| "grad_norm": 0.31895459933403253, |
| "learning_rate": 2.7361128907830253e-05, |
| "loss": 0.5309, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.5317164179104478, |
| "grad_norm": 0.28189697192197444, |
| "learning_rate": 2.7013985894002623e-05, |
| "loss": 0.5214, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.5363805970149254, |
| "grad_norm": 0.28815634636872006, |
| "learning_rate": 2.6666958593788405e-05, |
| "loss": 0.5364, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.5410447761194029, |
| "grad_norm": 0.29779272601870393, |
| "learning_rate": 2.6320129629857093e-05, |
| "loss": 0.5409, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.5457089552238806, |
| "grad_norm": 0.2736771548372716, |
| "learning_rate": 2.597358157765692e-05, |
| "loss": 0.5287, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.5503731343283582, |
| "grad_norm": 0.2610666235288935, |
| "learning_rate": 2.56273969457547e-05, |
| "loss": 0.5261, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.5550373134328358, |
| "grad_norm": 0.30003927928430624, |
| "learning_rate": 2.528165815619162e-05, |
| "loss": 0.5361, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.5597014925373134, |
| "grad_norm": 0.27184633082761, |
| "learning_rate": 2.4936447524859625e-05, |
| "loss": 0.5272, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.5643656716417911, |
| "grad_norm": 0.2673745453598303, |
| "learning_rate": 2.459184724190308e-05, |
| "loss": 0.5195, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.5690298507462687, |
| "grad_norm": 0.30502816322055604, |
| "learning_rate": 2.4247939352150386e-05, |
| "loss": 0.5339, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.5736940298507462, |
| "grad_norm": 0.2915347636882713, |
| "learning_rate": 2.390480573558012e-05, |
| "loss": 0.5404, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.5783582089552238, |
| "grad_norm": 0.3009151953810016, |
| "learning_rate": 2.3562528087826573e-05, |
| "loss": 0.5251, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.5830223880597015, |
| "grad_norm": 0.273216079030479, |
| "learning_rate": 2.3221187900729003e-05, |
| "loss": 0.5289, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.5876865671641791, |
| "grad_norm": 0.2814635050429347, |
| "learning_rate": 2.2880866442929544e-05, |
| "loss": 0.5304, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.5923507462686567, |
| "grad_norm": 0.2671887715565127, |
| "learning_rate": 2.254164474052416e-05, |
| "loss": 0.5336, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.5970149253731343, |
| "grad_norm": 0.2813730919512758, |
| "learning_rate": 2.2203603557771447e-05, |
| "loss": 0.5179, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.601679104477612, |
| "grad_norm": 0.27596121024924675, |
| "learning_rate": 2.186682337786365e-05, |
| "loss": 0.53, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.6063432835820896, |
| "grad_norm": 0.2799192543432165, |
| "learning_rate": 2.153138438376473e-05, |
| "loss": 0.5177, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.6110074626865671, |
| "grad_norm": 0.2885695286696909, |
| "learning_rate": 2.119736643911979e-05, |
| "loss": 0.5266, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.6156716417910447, |
| "grad_norm": 0.2744442740400871, |
| "learning_rate": 2.0864849069240645e-05, |
| "loss": 0.5304, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.6203358208955224, |
| "grad_norm": 0.26479139181117806, |
| "learning_rate": 2.0533911442171805e-05, |
| "loss": 0.5265, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.625, |
| "grad_norm": 0.29307727200532646, |
| "learning_rate": 2.0204632349841667e-05, |
| "loss": 0.5237, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.6296641791044776, |
| "grad_norm": 0.26780149637777595, |
| "learning_rate": 1.9877090189303182e-05, |
| "loss": 0.531, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.6343283582089553, |
| "grad_norm": 0.2724139697039071, |
| "learning_rate": 1.9551362944068462e-05, |
| "loss": 0.5237, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.6389925373134329, |
| "grad_norm": 0.2603052625446588, |
| "learning_rate": 1.922752816554204e-05, |
| "loss": 0.5234, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.6436567164179104, |
| "grad_norm": 0.26133005941810766, |
| "learning_rate": 1.890566295455678e-05, |
| "loss": 0.5174, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.648320895522388, |
| "grad_norm": 0.24984442702607385, |
| "learning_rate": 1.858584394301728e-05, |
| "loss": 0.5215, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.6529850746268657, |
| "grad_norm": 0.28057878918319706, |
| "learning_rate": 1.8268147275654707e-05, |
| "loss": 0.5386, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.6576492537313433, |
| "grad_norm": 0.2481915917558959, |
| "learning_rate": 1.7952648591897858e-05, |
| "loss": 0.5212, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.6623134328358209, |
| "grad_norm": 0.2908975085954642, |
| "learning_rate": 1.7639423007864252e-05, |
| "loss": 0.5132, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.6669776119402985, |
| "grad_norm": 0.24930505384787335, |
| "learning_rate": 1.7328545098476106e-05, |
| "loss": 0.5167, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.6716417910447762, |
| "grad_norm": 0.26823214875899426, |
| "learning_rate": 1.702008887970491e-05, |
| "loss": 0.5191, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.6763059701492538, |
| "grad_norm": 0.2613619981319557, |
| "learning_rate": 1.671412779094926e-05, |
| "loss": 0.5229, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.6809701492537313, |
| "grad_norm": 0.2524866364696518, |
| "learning_rate": 1.6410734677549872e-05, |
| "loss": 0.5191, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.6856343283582089, |
| "grad_norm": 0.2754948559566439, |
| "learning_rate": 1.6109981773446036e-05, |
| "loss": 0.5204, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.6902985074626866, |
| "grad_norm": 0.2473197406930695, |
| "learning_rate": 1.58119406839777e-05, |
| "loss": 0.5188, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.6949626865671642, |
| "grad_norm": 0.24213175335701867, |
| "learning_rate": 1.5516682368837133e-05, |
| "loss": 0.5142, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.6996268656716418, |
| "grad_norm": 0.2760399820439403, |
| "learning_rate": 1.5224277125174388e-05, |
| "loss": 0.5307, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.7042910447761194, |
| "grad_norm": 0.265171148488907, |
| "learning_rate": 1.4934794570860416e-05, |
| "loss": 0.5314, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.7089552238805971, |
| "grad_norm": 0.2636748634412273, |
| "learning_rate": 1.464830362791204e-05, |
| "loss": 0.5178, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.7136194029850746, |
| "grad_norm": 0.26114188558324736, |
| "learning_rate": 1.4364872506082425e-05, |
| "loss": 0.5255, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.7182835820895522, |
| "grad_norm": 0.27463450094575076, |
| "learning_rate": 1.4084568686621314e-05, |
| "loss": 0.5286, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.7229477611940298, |
| "grad_norm": 0.2627758464521469, |
| "learning_rate": 1.3807458906208546e-05, |
| "loss": 0.5355, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.7276119402985075, |
| "grad_norm": 0.2535387674795409, |
| "learning_rate": 1.3533609141065008e-05, |
| "loss": 0.5148, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.7322761194029851, |
| "grad_norm": 0.25143264464640164, |
| "learning_rate": 1.326308459124447e-05, |
| "loss": 0.518, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.7369402985074627, |
| "grad_norm": 0.24678458633586137, |
| "learning_rate": 1.299594966511038e-05, |
| "loss": 0.5265, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.7416044776119403, |
| "grad_norm": 0.25397199889705413, |
| "learning_rate": 1.2732267964001033e-05, |
| "loss": 0.5147, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.746268656716418, |
| "grad_norm": 0.2612625440498218, |
| "learning_rate": 1.2472102267086904e-05, |
| "loss": 0.5212, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.7509328358208955, |
| "grad_norm": 0.24164108913385374, |
| "learning_rate": 1.2215514516423813e-05, |
| "loss": 0.5352, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.7555970149253731, |
| "grad_norm": 0.2560594323046853, |
| "learning_rate": 1.1962565802205255e-05, |
| "loss": 0.5138, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.7602611940298507, |
| "grad_norm": 0.24857174780101957, |
| "learning_rate": 1.1713316348217673e-05, |
| "loss": 0.5187, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.7649253731343284, |
| "grad_norm": 0.2522325967641256, |
| "learning_rate": 1.1467825497501954e-05, |
| "loss": 0.5033, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.769589552238806, |
| "grad_norm": 0.23930223016635088, |
| "learning_rate": 1.1226151698224597e-05, |
| "loss": 0.5243, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.7742537313432836, |
| "grad_norm": 0.24668012695988362, |
| "learning_rate": 1.0988352489762006e-05, |
| "loss": 0.5283, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.7789179104477612, |
| "grad_norm": 0.2440582920115209, |
| "learning_rate": 1.0754484489001085e-05, |
| "loss": 0.5185, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.7835820895522388, |
| "grad_norm": 0.2518319679837789, |
| "learning_rate": 1.052460337685951e-05, |
| "loss": 0.5174, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.7882462686567164, |
| "grad_norm": 0.24479017106886222, |
| "learning_rate": 1.0298763885028839e-05, |
| "loss": 0.5135, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.792910447761194, |
| "grad_norm": 0.23990431396616274, |
| "learning_rate": 1.0077019782943584e-05, |
| "loss": 0.5192, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.7975746268656716, |
| "grad_norm": 0.22863144922943815, |
| "learning_rate": 9.859423864979441e-06, |
| "loss": 0.5076, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.8022388059701493, |
| "grad_norm": 0.2526688437058162, |
| "learning_rate": 9.646027937883622e-06, |
| "loss": 0.5266, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.8069029850746269, |
| "grad_norm": 0.2571652401412218, |
| "learning_rate": 9.436882808440334e-06, |
| "loss": 0.5279, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.8115671641791045, |
| "grad_norm": 0.23297383232884672, |
| "learning_rate": 9.232038271374377e-06, |
| "loss": 0.51, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.816231343283582, |
| "grad_norm": 0.23792626827153376, |
| "learning_rate": 9.031543097495638e-06, |
| "loss": 0.5145, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.8208955223880597, |
| "grad_norm": 0.259491381893308, |
| "learning_rate": 8.835445022087426e-06, |
| "loss": 0.5125, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.8255597014925373, |
| "grad_norm": 0.2544129442607262, |
| "learning_rate": 8.6437907335413e-06, |
| "loss": 0.5225, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.8302238805970149, |
| "grad_norm": 0.23802013309346642, |
| "learning_rate": 8.456625862241193e-06, |
| "loss": 0.5248, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.8348880597014925, |
| "grad_norm": 0.24098732939917877, |
| "learning_rate": 8.273994969699394e-06, |
| "loss": 0.5188, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.8395522388059702, |
| "grad_norm": 0.22509509146201456, |
| "learning_rate": 8.095941537947057e-06, |
| "loss": 0.5218, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.8434296365330848, |
| "grad_norm": 0.25503644802838177, |
| "learning_rate": 7.95115281652163e-06, |
| "loss": 0.4928, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.848089468779124, |
| "grad_norm": 0.25554829568809356, |
| "learning_rate": 7.781763120363474e-06, |
| "loss": 0.4939, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.8527493010251631, |
| "grad_norm": 0.24785150988406393, |
| "learning_rate": 7.6170588694362915e-06, |
| "loss": 0.4962, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.8574091332712023, |
| "grad_norm": 0.2345884436747603, |
| "learning_rate": 7.4570792007456745e-06, |
| "loss": 0.5042, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.8620689655172413, |
| "grad_norm": 0.26221430699973647, |
| "learning_rate": 7.3018621286425035e-06, |
| "loss": 0.4985, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.8667287977632805, |
| "grad_norm": 0.23632019810695779, |
| "learning_rate": 7.151444535790017e-06, |
| "loss": 0.4879, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.8713886300093197, |
| "grad_norm": 0.23685269779407658, |
| "learning_rate": 7.005862164399716e-06, |
| "loss": 0.4952, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.8760484622553588, |
| "grad_norm": 0.2263847108294601, |
| "learning_rate": 6.865149607738324e-06, |
| "loss": 0.4842, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.880708294501398, |
| "grad_norm": 0.2493209183511488, |
| "learning_rate": 6.7293403019077394e-06, |
| "loss": 0.4875, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.8853681267474371, |
| "grad_norm": 0.24952354183015024, |
| "learning_rate": 6.598466517899961e-06, |
| "loss": 0.5012, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.8900279589934762, |
| "grad_norm": 0.24015418129527374, |
| "learning_rate": 6.472559353928814e-06, |
| "loss": 0.497, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.8946877912395154, |
| "grad_norm": 0.2406838118135352, |
| "learning_rate": 6.35164872804046e-06, |
| "loss": 0.4904, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.8993476234855545, |
| "grad_norm": 0.23781577622705932, |
| "learning_rate": 6.235763371004234e-06, |
| "loss": 0.4853, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.9040074557315937, |
| "grad_norm": 0.24384534616887826, |
| "learning_rate": 6.124930819485644e-06, |
| "loss": 0.5014, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.9086672879776329, |
| "grad_norm": 0.24824064166520088, |
| "learning_rate": 6.0191774095031244e-06, |
| "loss": 0.4794, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.9133271202236719, |
| "grad_norm": 0.2397496796949414, |
| "learning_rate": 5.9185282701700745e-06, |
| "loss": 0.4995, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.9179869524697111, |
| "grad_norm": 0.23793615513302432, |
| "learning_rate": 5.823007317723664e-06, |
| "loss": 0.4909, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.9226467847157502, |
| "grad_norm": 0.2417670178260445, |
| "learning_rate": 5.732637249841873e-06, |
| "loss": 0.4824, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.9273066169617894, |
| "grad_norm": 0.23712042301378178, |
| "learning_rate": 5.647439540250082e-06, |
| "loss": 0.4876, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.9319664492078286, |
| "grad_norm": 0.23577490401974563, |
| "learning_rate": 5.567434433618465e-06, |
| "loss": 0.4848, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.9366262814538676, |
| "grad_norm": 0.239146845460939, |
| "learning_rate": 5.492640940751462e-06, |
| "loss": 0.4797, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.9412861136999068, |
| "grad_norm": 0.2542109123530308, |
| "learning_rate": 5.423076834070447e-06, |
| "loss": 0.4787, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.9459459459459459, |
| "grad_norm": 0.23090507379059155, |
| "learning_rate": 5.358758643390628e-06, |
| "loss": 0.5016, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.9506057781919851, |
| "grad_norm": 0.23532827969201903, |
| "learning_rate": 5.299701651993246e-06, |
| "loss": 0.5013, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.9552656104380243, |
| "grad_norm": 0.23516814839669387, |
| "learning_rate": 5.245919892993957e-06, |
| "loss": 0.4826, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.9599254426840633, |
| "grad_norm": 0.23105280417402752, |
| "learning_rate": 5.197426146008291e-06, |
| "loss": 0.4931, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.9645852749301025, |
| "grad_norm": 0.2663559537808514, |
| "learning_rate": 5.1542319341149565e-06, |
| "loss": 0.4899, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.9692451071761417, |
| "grad_norm": 0.23949167751897144, |
| "learning_rate": 5.1163475211177235e-06, |
| "loss": 0.4988, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.9739049394221808, |
| "grad_norm": 0.24000618926829137, |
| "learning_rate": 5.083781909106557e-06, |
| "loss": 0.4828, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.97856477166822, |
| "grad_norm": 0.24152911669491808, |
| "learning_rate": 5.056542836318518e-06, |
| "loss": 0.498, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.983224603914259, |
| "grad_norm": 0.22134585139948434, |
| "learning_rate": 5.034636775299023e-06, |
| "loss": 0.4894, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.9878844361602982, |
| "grad_norm": 0.23386321836027193, |
| "learning_rate": 5.018068931363828e-06, |
| "loss": 0.4874, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.9925442684063374, |
| "grad_norm": 0.23532118958350487, |
| "learning_rate": 5.006843241362149e-06, |
| "loss": 0.4972, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.9972041006523765, |
| "grad_norm": 0.22265388724532062, |
| "learning_rate": 5.000962372741178e-06, |
| "loss": 0.498, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.0, |
| "step": 1073, |
| "total_flos": 489077052801024.0, |
| "train_loss": 0.07929676676729741, |
| "train_runtime": 3102.6198, |
| "train_samples_per_second": 11.059, |
| "train_steps_per_second": 0.346 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 1073, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 100, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 489077052801024.0, |
| "train_batch_size": 16, |
| "trial_name": null, |
| "trial_params": null |
| } |