{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.995670995670996,
  "eval_steps": 500,
  "global_step": 519,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005772005772005772,
      "grad_norm": 6.591821989460518,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 0.8904,
      "step": 1
    },
    {
      "epoch": 0.011544011544011544,
      "grad_norm": 6.701083163841649,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 0.9046,
      "step": 2
    },
    {
      "epoch": 0.017316017316017316,
      "grad_norm": 6.536145770386053,
      "learning_rate": 4.615384615384616e-06,
      "loss": 0.8942,
      "step": 3
    },
    {
      "epoch": 0.023088023088023088,
      "grad_norm": 5.047333842456431,
      "learning_rate": 6.153846153846155e-06,
      "loss": 0.8572,
      "step": 4
    },
    {
      "epoch": 0.02886002886002886,
      "grad_norm": 2.468734509084157,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.793,
      "step": 5
    },
    {
      "epoch": 0.03463203463203463,
      "grad_norm": 2.2050657522791153,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.7611,
      "step": 6
    },
    {
      "epoch": 0.04040404040404041,
      "grad_norm": 4.090781002458348,
      "learning_rate": 1.076923076923077e-05,
      "loss": 0.7895,
      "step": 7
    },
    {
      "epoch": 0.046176046176046176,
      "grad_norm": 4.080399864777092,
      "learning_rate": 1.230769230769231e-05,
      "loss": 0.7438,
      "step": 8
    },
    {
      "epoch": 0.05194805194805195,
      "grad_norm": 4.592810986419584,
      "learning_rate": 1.3846153846153847e-05,
      "loss": 0.7438,
      "step": 9
    },
    {
      "epoch": 0.05772005772005772,
      "grad_norm": 2.9887502837956124,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 0.7188,
      "step": 10
    },
    {
      "epoch": 0.06349206349206349,
      "grad_norm": 2.0334736259374444,
      "learning_rate": 1.6923076923076924e-05,
      "loss": 0.6873,
      "step": 11
    },
    {
      "epoch": 0.06926406926406926,
      "grad_norm": 2.2441138864875905,
      "learning_rate": 1.8461538461538465e-05,
      "loss": 0.6738,
      "step": 12
    },
    {
      "epoch": 0.07503607503607504,
      "grad_norm": 1.309205359684577,
      "learning_rate": 2e-05,
      "loss": 0.6549,
      "step": 13
    },
    {
      "epoch": 0.08080808080808081,
      "grad_norm": 1.6968217490459554,
      "learning_rate": 2.153846153846154e-05,
      "loss": 0.6572,
      "step": 14
    },
    {
      "epoch": 0.08658008658008658,
      "grad_norm": 1.3166881257009861,
      "learning_rate": 2.3076923076923076e-05,
      "loss": 0.6267,
      "step": 15
    },
    {
      "epoch": 0.09235209235209235,
      "grad_norm": 1.058656168608237,
      "learning_rate": 2.461538461538462e-05,
      "loss": 0.6148,
      "step": 16
    },
    {
      "epoch": 0.09812409812409813,
      "grad_norm": 0.9684271054203041,
      "learning_rate": 2.6153846153846157e-05,
      "loss": 0.6063,
      "step": 17
    },
    {
      "epoch": 0.1038961038961039,
      "grad_norm": 0.9909925970261345,
      "learning_rate": 2.7692307692307694e-05,
      "loss": 0.6029,
      "step": 18
    },
    {
      "epoch": 0.10966810966810966,
      "grad_norm": 0.9842365088549295,
      "learning_rate": 2.923076923076923e-05,
      "loss": 0.6049,
      "step": 19
    },
    {
      "epoch": 0.11544011544011544,
      "grad_norm": 1.059427338447146,
      "learning_rate": 3.0769230769230774e-05,
      "loss": 0.5893,
      "step": 20
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 0.9044592617829416,
      "learning_rate": 3.230769230769231e-05,
      "loss": 0.5789,
      "step": 21
    },
    {
      "epoch": 0.12698412698412698,
      "grad_norm": 0.9839678076732425,
      "learning_rate": 3.384615384615385e-05,
      "loss": 0.5653,
      "step": 22
    },
    {
      "epoch": 0.13275613275613277,
      "grad_norm": 1.3019517260461804,
      "learning_rate": 3.538461538461539e-05,
      "loss": 0.5784,
      "step": 23
    },
    {
      "epoch": 0.13852813852813853,
      "grad_norm": 1.1863713203167559,
      "learning_rate": 3.692307692307693e-05,
      "loss": 0.5657,
      "step": 24
    },
    {
      "epoch": 0.1443001443001443,
      "grad_norm": 1.0984531627337475,
      "learning_rate": 3.846153846153846e-05,
      "loss": 0.5628,
      "step": 25
    },
    {
      "epoch": 0.15007215007215008,
      "grad_norm": 1.361248597465037,
      "learning_rate": 4e-05,
      "loss": 0.5589,
      "step": 26
    },
    {
      "epoch": 0.15584415584415584,
      "grad_norm": 0.9699622745779369,
      "learning_rate": 4.1538461538461544e-05,
      "loss": 0.5587,
      "step": 27
    },
    {
      "epoch": 0.16161616161616163,
      "grad_norm": 1.6250628750726255,
      "learning_rate": 4.307692307692308e-05,
      "loss": 0.5533,
      "step": 28
    },
    {
      "epoch": 0.1673881673881674,
      "grad_norm": 0.8965839274541476,
      "learning_rate": 4.461538461538462e-05,
      "loss": 0.5532,
      "step": 29
    },
    {
      "epoch": 0.17316017316017315,
      "grad_norm": 2.2172027740776237,
      "learning_rate": 4.615384615384615e-05,
      "loss": 0.5516,
      "step": 30
    },
    {
      "epoch": 0.17893217893217894,
      "grad_norm": 1.4206471782118077,
      "learning_rate": 4.76923076923077e-05,
      "loss": 0.5443,
      "step": 31
    },
    {
      "epoch": 0.1847041847041847,
      "grad_norm": 1.953518854356112,
      "learning_rate": 4.923076923076924e-05,
      "loss": 0.5457,
      "step": 32
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 1.7035803319314056,
      "learning_rate": 5.076923076923077e-05,
      "loss": 0.5452,
      "step": 33
    },
    {
      "epoch": 0.19624819624819625,
      "grad_norm": 1.3778506162803026,
      "learning_rate": 5.230769230769231e-05,
      "loss": 0.5386,
      "step": 34
    },
    {
      "epoch": 0.20202020202020202,
      "grad_norm": 2.2007791090752273,
      "learning_rate": 5.3846153846153853e-05,
      "loss": 0.5326,
      "step": 35
    },
    {
      "epoch": 0.2077922077922078,
      "grad_norm": 1.3929576225978784,
      "learning_rate": 5.538461538461539e-05,
      "loss": 0.5445,
      "step": 36
    },
    {
      "epoch": 0.21356421356421357,
      "grad_norm": 2.5179570562309475,
      "learning_rate": 5.692307692307693e-05,
      "loss": 0.5395,
      "step": 37
    },
    {
      "epoch": 0.21933621933621933,
      "grad_norm": 1.958838600657758,
      "learning_rate": 5.846153846153846e-05,
      "loss": 0.5288,
      "step": 38
    },
    {
      "epoch": 0.22510822510822512,
      "grad_norm": 2.2628335323973,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.5204,
      "step": 39
    },
    {
      "epoch": 0.23088023088023088,
      "grad_norm": 1.834622802154548,
      "learning_rate": 6.153846153846155e-05,
      "loss": 0.5261,
      "step": 40
    },
    {
      "epoch": 0.23665223665223664,
      "grad_norm": 2.0831656414023443,
      "learning_rate": 6.307692307692308e-05,
      "loss": 0.5316,
      "step": 41
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 1.6649570041240862,
      "learning_rate": 6.461538461538462e-05,
      "loss": 0.5298,
      "step": 42
    },
    {
      "epoch": 0.2481962481962482,
      "grad_norm": 1.8210200420604277,
      "learning_rate": 6.615384615384616e-05,
      "loss": 0.5289,
      "step": 43
    },
    {
      "epoch": 0.25396825396825395,
      "grad_norm": 1.232981446929873,
      "learning_rate": 6.76923076923077e-05,
      "loss": 0.5155,
      "step": 44
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 1.7708070269877163,
      "learning_rate": 6.923076923076924e-05,
      "loss": 0.5311,
      "step": 45
    },
    {
      "epoch": 0.26551226551226553,
      "grad_norm": 1.2424713252981978,
      "learning_rate": 7.076923076923078e-05,
      "loss": 0.5244,
      "step": 46
    },
    {
      "epoch": 0.2712842712842713,
      "grad_norm": 1.917850479797542,
      "learning_rate": 7.230769230769232e-05,
      "loss": 0.5229,
      "step": 47
    },
    {
      "epoch": 0.27705627705627706,
      "grad_norm": 1.4820579510034553,
      "learning_rate": 7.384615384615386e-05,
      "loss": 0.5222,
      "step": 48
    },
    {
      "epoch": 0.2828282828282828,
      "grad_norm": 1.3751360045514713,
      "learning_rate": 7.538461538461539e-05,
      "loss": 0.5127,
      "step": 49
    },
    {
      "epoch": 0.2886002886002886,
      "grad_norm": 2.4703954526048792,
      "learning_rate": 7.692307692307693e-05,
      "loss": 0.5358,
      "step": 50
    },
    {
      "epoch": 0.2943722943722944,
      "grad_norm": 1.1276824883035388,
      "learning_rate": 7.846153846153847e-05,
      "loss": 0.5202,
      "step": 51
    },
    {
      "epoch": 0.30014430014430016,
      "grad_norm": 1.9518381050342926,
      "learning_rate": 8e-05,
      "loss": 0.5231,
      "step": 52
    },
    {
      "epoch": 0.3059163059163059,
      "grad_norm": 1.6826277240274867,
      "learning_rate": 7.999909490463248e-05,
      "loss": 0.5266,
      "step": 53
    },
    {
      "epoch": 0.3116883116883117,
      "grad_norm": 1.738831578332151,
      "learning_rate": 7.999637965948977e-05,
      "loss": 0.5092,
      "step": 54
    },
    {
      "epoch": 0.31746031746031744,
      "grad_norm": 1.4323939653994584,
      "learning_rate": 7.999185438744968e-05,
      "loss": 0.5095,
      "step": 55
    },
    {
      "epoch": 0.32323232323232326,
      "grad_norm": 1.3664334433498815,
      "learning_rate": 7.998551929330234e-05,
      "loss": 0.5114,
      "step": 56
    },
    {
      "epoch": 0.329004329004329,
      "grad_norm": 1.1542720966817734,
      "learning_rate": 7.997737466374096e-05,
      "loss": 0.5091,
      "step": 57
    },
    {
      "epoch": 0.3347763347763348,
      "grad_norm": 1.807835646103163,
      "learning_rate": 7.99674208673489e-05,
      "loss": 0.502,
      "step": 58
    },
    {
      "epoch": 0.34054834054834054,
      "grad_norm": 1.4816405608909524,
      "learning_rate": 7.995565835458286e-05,
      "loss": 0.5007,
      "step": 59
    },
    {
      "epoch": 0.3463203463203463,
      "grad_norm": 2.072657075856209,
      "learning_rate": 7.994208765775267e-05,
      "loss": 0.5142,
      "step": 60
    },
    {
      "epoch": 0.35209235209235207,
      "grad_norm": 1.3835777391967627,
      "learning_rate": 7.992670939099704e-05,
      "loss": 0.5039,
      "step": 61
    },
    {
      "epoch": 0.3578643578643579,
      "grad_norm": 1.315993786091659,
      "learning_rate": 7.99095242502559e-05,
      "loss": 0.5018,
      "step": 62
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 1.350155329631968,
      "learning_rate": 7.989053301323881e-05,
      "loss": 0.4986,
      "step": 63
    },
    {
      "epoch": 0.3694083694083694,
      "grad_norm": 1.5446010928098293,
      "learning_rate": 7.986973653938977e-05,
      "loss": 0.4983,
      "step": 64
    },
    {
      "epoch": 0.37518037518037517,
      "grad_norm": 1.04496723786831,
      "learning_rate": 7.984713576984842e-05,
      "loss": 0.509,
      "step": 65
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 1.8739311708739077,
      "learning_rate": 7.982273172740734e-05,
      "loss": 0.5126,
      "step": 66
    },
    {
      "epoch": 0.38672438672438675,
      "grad_norm": 1.3652541433364316,
      "learning_rate": 7.979652551646583e-05,
      "loss": 0.5,
      "step": 67
    },
    {
      "epoch": 0.3924963924963925,
      "grad_norm": 1.9130220059566032,
      "learning_rate": 7.976851832297987e-05,
      "loss": 0.5132,
      "step": 68
    },
    {
      "epoch": 0.39826839826839827,
      "grad_norm": 1.1729154565908322,
      "learning_rate": 7.973871141440853e-05,
      "loss": 0.5052,
      "step": 69
    },
    {
      "epoch": 0.40404040404040403,
      "grad_norm": 1.7055313247779371,
      "learning_rate": 7.970710613965657e-05,
      "loss": 0.5078,
      "step": 70
    },
    {
      "epoch": 0.4098124098124098,
      "grad_norm": 1.3935437350999336,
      "learning_rate": 7.967370392901334e-05,
      "loss": 0.5023,
      "step": 71
    },
    {
      "epoch": 0.4155844155844156,
      "grad_norm": 245.851096759516,
      "learning_rate": 7.963850629408817e-05,
      "loss": 7.192,
      "step": 72
    },
    {
      "epoch": 0.4213564213564214,
      "grad_norm": 3.5843288436560656,
      "learning_rate": 7.960151482774188e-05,
      "loss": 0.5501,
      "step": 73
    },
    {
      "epoch": 0.42712842712842713,
      "grad_norm": 1.8159995585862374,
      "learning_rate": 7.95627312040147e-05,
      "loss": 0.5197,
      "step": 74
    },
    {
      "epoch": 0.4329004329004329,
      "grad_norm": 1.7593611646948464,
      "learning_rate": 7.952215717805055e-05,
      "loss": 0.5043,
      "step": 75
    },
    {
      "epoch": 0.43867243867243866,
      "grad_norm": 2.554507256709062,
      "learning_rate": 7.947979458601756e-05,
      "loss": 0.5184,
      "step": 76
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 1.7433980613956555,
      "learning_rate": 7.943564534502503e-05,
      "loss": 0.5184,
      "step": 77
    },
    {
      "epoch": 0.45021645021645024,
      "grad_norm": 0.9438409442305278,
      "learning_rate": 7.938971145303666e-05,
      "loss": 0.5054,
      "step": 78
    },
    {
      "epoch": 0.455988455988456,
      "grad_norm": 1.1198133821796519,
      "learning_rate": 7.934199498878005e-05,
      "loss": 0.4974,
      "step": 79
    },
    {
      "epoch": 0.46176046176046176,
      "grad_norm": 1.2641541874296718,
      "learning_rate": 7.929249811165274e-05,
      "loss": 0.4979,
      "step": 80
    },
    {
      "epoch": 0.4675324675324675,
      "grad_norm": 4.739955304345111,
      "learning_rate": 7.924122306162448e-05,
      "loss": 0.5227,
      "step": 81
    },
    {
      "epoch": 0.4733044733044733,
      "grad_norm": 3.777145023653992,
      "learning_rate": 7.918817215913574e-05,
      "loss": 0.5384,
      "step": 82
    },
    {
      "epoch": 0.4790764790764791,
      "grad_norm": 1.897498471819791,
      "learning_rate": 7.913334780499284e-05,
      "loss": 0.5193,
      "step": 83
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 1.9166167580469424,
      "learning_rate": 7.907675248025926e-05,
      "loss": 0.5178,
      "step": 84
    },
    {
      "epoch": 0.4906204906204906,
      "grad_norm": 111.71899143551498,
      "learning_rate": 7.901838874614325e-05,
      "loss": 0.565,
      "step": 85
    },
    {
      "epoch": 0.4963924963924964,
      "grad_norm": 12.497569359247084,
      "learning_rate": 7.895825924388214e-05,
      "loss": 0.574,
      "step": 86
    },
    {
      "epoch": 0.5021645021645021,
      "grad_norm": 2.309806827904168,
      "learning_rate": 7.889636669462257e-05,
      "loss": 0.5636,
      "step": 87
    },
    {
      "epoch": 0.5079365079365079,
      "grad_norm": 1.3442519653064227,
      "learning_rate": 7.883271389929755e-05,
      "loss": 0.5266,
      "step": 88
    },
    {
      "epoch": 0.5137085137085137,
      "grad_norm": 1.5607960630672684,
      "learning_rate": 7.87673037384996e-05,
      "loss": 0.5151,
      "step": 89
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 1.2317264148870677,
      "learning_rate": 7.870013917235039e-05,
      "loss": 0.5138,
      "step": 90
    },
    {
      "epoch": 0.5252525252525253,
      "grad_norm": 1.2174210025618513,
      "learning_rate": 7.86312232403668e-05,
      "loss": 0.5084,
      "step": 91
    },
    {
      "epoch": 0.5310245310245311,
      "grad_norm": 0.9844891141990263,
      "learning_rate": 7.856055906132337e-05,
      "loss": 0.5013,
      "step": 92
    },
    {
      "epoch": 0.5367965367965368,
      "grad_norm": 1.6249846234942358,
      "learning_rate": 7.848814983311114e-05,
      "loss": 0.4875,
      "step": 93
    },
    {
      "epoch": 0.5425685425685426,
      "grad_norm": 1.110842702049686,
      "learning_rate": 7.841399883259298e-05,
      "loss": 0.5072,
      "step": 94
    },
    {
      "epoch": 0.5483405483405484,
      "grad_norm": 1.2409317588868078,
      "learning_rate": 7.833810941545525e-05,
      "loss": 0.4981,
      "step": 95
    },
    {
      "epoch": 0.5541125541125541,
      "grad_norm": 0.9216863812635485,
      "learning_rate": 7.826048501605592e-05,
      "loss": 0.4976,
      "step": 96
    },
    {
      "epoch": 0.5598845598845599,
      "grad_norm": 1.1689690422801344,
      "learning_rate": 7.818112914726922e-05,
      "loss": 0.4939,
      "step": 97
    },
    {
      "epoch": 0.5656565656565656,
      "grad_norm": 1.3912487511676008,
      "learning_rate": 7.81000454003266e-05,
      "loss": 0.4973,
      "step": 98
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 1.5119375553594363,
      "learning_rate": 7.801723744465427e-05,
      "loss": 0.4877,
      "step": 99
    },
    {
      "epoch": 0.5772005772005772,
      "grad_norm": 1.1451986843068807,
      "learning_rate": 7.793270902770707e-05,
      "loss": 0.4847,
      "step": 100
    },
    {
      "epoch": 0.5829725829725829,
      "grad_norm": 0.8253514693262355,
      "learning_rate": 7.784646397479892e-05,
      "loss": 0.4789,
      "step": 101
    },
    {
      "epoch": 0.5887445887445888,
      "grad_norm": 1.0501024034599673,
      "learning_rate": 7.775850618892973e-05,
      "loss": 0.4826,
      "step": 102
    },
    {
      "epoch": 0.5945165945165946,
      "grad_norm": 1.6363531551977177,
      "learning_rate": 7.766883965060871e-05,
      "loss": 0.4801,
      "step": 103
    },
    {
      "epoch": 0.6002886002886003,
      "grad_norm": 0.9154587109673673,
      "learning_rate": 7.757746841767431e-05,
      "loss": 0.4871,
      "step": 104
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 1.5192770236375652,
      "learning_rate": 7.748439662511049e-05,
      "loss": 0.4835,
      "step": 105
    },
    {
      "epoch": 0.6118326118326118,
      "grad_norm": 0.9122272724371713,
      "learning_rate": 7.738962848485967e-05,
      "loss": 0.4768,
      "step": 106
    },
    {
      "epoch": 0.6176046176046176,
      "grad_norm": 1.2673986949755514,
      "learning_rate": 7.729316828563207e-05,
      "loss": 0.4824,
      "step": 107
    },
    {
      "epoch": 0.6233766233766234,
      "grad_norm": 1.6724258053369745,
      "learning_rate": 7.71950203927117e-05,
      "loss": 0.4826,
      "step": 108
    },
    {
      "epoch": 0.6291486291486291,
      "grad_norm": 0.8735759562195867,
      "learning_rate": 7.709518924775869e-05,
      "loss": 0.4903,
      "step": 109
    },
    {
      "epoch": 0.6349206349206349,
      "grad_norm": 1.9881565552085574,
      "learning_rate": 7.69936793686084e-05,
      "loss": 0.4827,
      "step": 110
    },
    {
      "epoch": 0.6406926406926406,
      "grad_norm": 1.1525310802346136,
      "learning_rate": 7.689049534906688e-05,
      "loss": 0.4839,
      "step": 111
    },
    {
      "epoch": 0.6464646464646465,
      "grad_norm": 1.6737242634010223,
      "learning_rate": 7.678564185870306e-05,
      "loss": 0.4905,
      "step": 112
    },
    {
      "epoch": 0.6522366522366523,
      "grad_norm": 1.4430692554643452,
      "learning_rate": 7.667912364263734e-05,
      "loss": 0.4935,
      "step": 113
    },
    {
      "epoch": 0.658008658008658,
      "grad_norm": 1.0555012525229592,
      "learning_rate": 7.657094552132692e-05,
      "loss": 0.4753,
      "step": 114
    },
    {
      "epoch": 0.6637806637806638,
      "grad_norm": 1.2792144841892175,
      "learning_rate": 7.646111239034762e-05,
      "loss": 0.4847,
      "step": 115
    },
    {
      "epoch": 0.6695526695526696,
      "grad_norm": 0.8673391499660823,
      "learning_rate": 7.634962922017234e-05,
      "loss": 0.4812,
      "step": 116
    },
    {
      "epoch": 0.6753246753246753,
      "grad_norm": 1.450882719103234,
      "learning_rate": 7.623650105594616e-05,
      "loss": 0.4789,
      "step": 117
    },
    {
      "epoch": 0.6810966810966811,
      "grad_norm": 1.1189212432889974,
      "learning_rate": 7.612173301725791e-05,
      "loss": 0.4782,
      "step": 118
    },
    {
      "epoch": 0.6868686868686869,
      "grad_norm": 1.2500032375967438,
      "learning_rate": 7.600533029790859e-05,
      "loss": 0.4766,
      "step": 119
    },
    {
      "epoch": 0.6926406926406926,
      "grad_norm": 0.9200876953926456,
      "learning_rate": 7.588729816567634e-05,
      "loss": 0.4742,
      "step": 120
    },
    {
      "epoch": 0.6984126984126984,
      "grad_norm": 1.1553568867731325,
      "learning_rate": 7.576764196207793e-05,
      "loss": 0.4715,
      "step": 121
    },
    {
      "epoch": 0.7041847041847041,
      "grad_norm": 0.8934619182669754,
      "learning_rate": 7.564636710212717e-05,
      "loss": 0.4727,
      "step": 122
    },
    {
      "epoch": 0.70995670995671,
      "grad_norm": 1.0904046636453852,
      "learning_rate": 7.552347907408973e-05,
      "loss": 0.4679,
      "step": 123
    },
    {
      "epoch": 0.7157287157287158,
      "grad_norm": 1.3764671359522456,
      "learning_rate": 7.539898343923487e-05,
      "loss": 0.4634,
      "step": 124
    },
    {
      "epoch": 0.7215007215007215,
      "grad_norm": 1.171521245459155,
      "learning_rate": 7.527288583158371e-05,
      "loss": 0.4685,
      "step": 125
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.7696078257367441,
      "learning_rate": 7.514519195765428e-05,
      "loss": 0.4673,
      "step": 126
    },
    {
      "epoch": 0.733044733044733,
      "grad_norm": 1.1883468674609674,
      "learning_rate": 7.501590759620324e-05,
      "loss": 0.4725,
      "step": 127
    },
    {
      "epoch": 0.7388167388167388,
      "grad_norm": 0.9291536438970882,
      "learning_rate": 7.488503859796446e-05,
      "loss": 0.4621,
      "step": 128
    },
    {
      "epoch": 0.7445887445887446,
      "grad_norm": 1.0627658395705477,
      "learning_rate": 7.475259088538412e-05,
      "loss": 0.4565,
      "step": 129
    },
    {
      "epoch": 0.7503607503607503,
      "grad_norm": 1.54642074651037,
      "learning_rate": 7.461857045235278e-05,
      "loss": 0.4643,
      "step": 130
    },
    {
      "epoch": 0.7561327561327561,
      "grad_norm": 0.6810343830586086,
      "learning_rate": 7.448298336393409e-05,
      "loss": 0.4661,
      "step": 131
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 0.6372542725369494,
      "learning_rate": 7.434583575609034e-05,
      "loss": 0.4653,
      "step": 132
    },
    {
      "epoch": 0.7676767676767676,
      "grad_norm": 1.0126413662175853,
      "learning_rate": 7.420713383540476e-05,
      "loss": 0.4693,
      "step": 133
    },
    {
      "epoch": 0.7734487734487735,
      "grad_norm": 1.3737542427125524,
      "learning_rate": 7.406688387880064e-05,
      "loss": 0.4623,
      "step": 134
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.9130609282834156,
      "learning_rate": 7.392509223325727e-05,
      "loss": 0.4625,
      "step": 135
    },
    {
      "epoch": 0.784992784992785,
      "grad_norm": 1.9579393793658253,
      "learning_rate": 7.378176531552272e-05,
      "loss": 0.4695,
      "step": 136
    },
    {
      "epoch": 0.7907647907647908,
      "grad_norm": 0.8632201170827392,
      "learning_rate": 7.363690961182348e-05,
      "loss": 0.4603,
      "step": 137
    },
    {
      "epoch": 0.7965367965367965,
      "grad_norm": 2.814682600145235,
      "learning_rate": 7.349053167757087e-05,
      "loss": 0.4729,
      "step": 138
    },
    {
      "epoch": 0.8023088023088023,
      "grad_norm": 2.2606064260809635,
      "learning_rate": 7.334263813706438e-05,
      "loss": 0.4901,
      "step": 139
    },
    {
      "epoch": 0.8080808080808081,
      "grad_norm": 1.7214477605860958,
      "learning_rate": 7.319323568319195e-05,
      "loss": 0.4643,
      "step": 140
    },
    {
      "epoch": 0.8138528138528138,
      "grad_norm": 1.7622962795748711,
      "learning_rate": 7.304233107712699e-05,
      "loss": 0.4732,
      "step": 141
    },
    {
      "epoch": 0.8196248196248196,
      "grad_norm": 1.5507278602835506,
      "learning_rate": 7.288993114802251e-05,
      "loss": 0.4805,
      "step": 142
    },
    {
      "epoch": 0.8253968253968254,
      "grad_norm": 1.0461957792765249,
      "learning_rate": 7.273604279270203e-05,
      "loss": 0.4575,
      "step": 143
    },
    {
      "epoch": 0.8311688311688312,
      "grad_norm": 1.4147827443269227,
      "learning_rate": 7.25806729753474e-05,
      "loss": 0.461,
      "step": 144
    },
    {
      "epoch": 0.836940836940837,
      "grad_norm": 0.8903624685904831,
      "learning_rate": 7.242382872718371e-05,
      "loss": 0.4587,
      "step": 145
    },
    {
      "epoch": 0.8427128427128427,
      "grad_norm": 1.534917997797476,
      "learning_rate": 7.226551714616111e-05,
      "loss": 0.4647,
      "step": 146
    },
    {
      "epoch": 0.8484848484848485,
      "grad_norm": 0.857263805930584,
      "learning_rate": 7.210574539663351e-05,
      "loss": 0.4595,
      "step": 147
    },
    {
      "epoch": 0.8542568542568543,
      "grad_norm": 1.4926745213805535,
      "learning_rate": 7.194452070903443e-05,
      "loss": 0.4592,
      "step": 148
    },
    {
      "epoch": 0.86002886002886,
      "grad_norm": 0.9521702084080935,
      "learning_rate": 7.178185037954977e-05,
      "loss": 0.4556,
      "step": 149
    },
    {
      "epoch": 0.8658008658008658,
      "grad_norm": 1.5854019697451536,
      "learning_rate": 7.16177417697876e-05,
      "loss": 0.461,
      "step": 150
    },
    {
      "epoch": 0.8715728715728716,
      "grad_norm": 1.0922004596100605,
      "learning_rate": 7.145220230644507e-05,
      "loss": 0.4597,
      "step": 151
    },
    {
      "epoch": 0.8773448773448773,
      "grad_norm": 1.369601810507737,
      "learning_rate": 7.128523948097222e-05,
      "loss": 0.4626,
      "step": 152
    },
    {
      "epoch": 0.8831168831168831,
      "grad_norm": 1.2602330173138279,
      "learning_rate": 7.111686084923306e-05,
      "loss": 0.4599,
      "step": 153
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.194998174450651,
      "learning_rate": 7.094707403116356e-05,
      "loss": 0.4595,
      "step": 154
    },
    {
      "epoch": 0.8946608946608947,
      "grad_norm": 0.9745804896707817,
      "learning_rate": 7.077588671042686e-05,
      "loss": 0.4544,
      "step": 155
    },
    {
      "epoch": 0.9004329004329005,
      "grad_norm": 1.0884187236190401,
      "learning_rate": 7.06033066340655e-05,
      "loss": 0.4435,
      "step": 156
    },
    {
      "epoch": 0.9062049062049062,
      "grad_norm": 0.8051773982771812,
      "learning_rate": 7.042934161215085e-05,
      "loss": 0.4608,
      "step": 157
    },
    {
      "epoch": 0.911976911976912,
      "grad_norm": 0.9378153819388926,
      "learning_rate": 7.02539995174297e-05,
      "loss": 0.4583,
      "step": 158
    },
    {
      "epoch": 0.9177489177489178,
      "grad_norm": 0.6989550093651844,
      "learning_rate": 7.007728828496793e-05,
      "loss": 0.4524,
      "step": 159
    },
    {
      "epoch": 0.9235209235209235,
      "grad_norm": 0.8390345572841903,
      "learning_rate": 6.989921591179145e-05,
      "loss": 0.4519,
      "step": 160
    },
    {
      "epoch": 0.9292929292929293,
      "grad_norm": 0.6441569982951852,
      "learning_rate": 6.97197904565242e-05,
      "loss": 0.4504,
      "step": 161
    },
    {
      "epoch": 0.935064935064935,
      "grad_norm": 0.9443858881471812,
      "learning_rate": 6.953902003902368e-05,
      "loss": 0.4535,
      "step": 162
    },
    {
      "epoch": 0.9408369408369408,
      "grad_norm": 0.798028444293174,
      "learning_rate": 6.935691284001322e-05,
      "loss": 0.4495,
      "step": 163
    },
    {
      "epoch": 0.9466089466089466,
      "grad_norm": 0.8082815856339027,
      "learning_rate": 6.917347710071193e-05,
      "loss": 0.4466,
      "step": 164
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 1.1854187052850362,
      "learning_rate": 6.898872112246172e-05,
      "loss": 0.4458,
      "step": 165
    },
    {
      "epoch": 0.9581529581529582,
      "grad_norm": 1.4406146951543184,
      "learning_rate": 6.880265326635159e-05,
      "loss": 0.4537,
      "step": 166
    },
    {
      "epoch": 0.963924963924964,
      "grad_norm": 0.5540098885616462,
      "learning_rate": 6.861528195283924e-05,
      "loss": 0.4392,
      "step": 167
    },
    {
      "epoch": 0.9696969696969697,
      "grad_norm": 1.4158419971388627,
      "learning_rate": 6.842661566137011e-05,
      "loss": 0.4453,
      "step": 168
    },
    {
      "epoch": 0.9754689754689755,
      "grad_norm": 0.9692734629530391,
      "learning_rate": 6.82366629299935e-05,
      "loss": 0.4534,
      "step": 169
    },
    {
      "epoch": 0.9812409812409812,
      "grad_norm": 1.0422752052381763,
      "learning_rate": 6.804543235497625e-05,
      "loss": 0.4427,
      "step": 170
    },
    {
      "epoch": 0.987012987012987,
      "grad_norm": 1.0057520882099593,
      "learning_rate": 6.785293259041376e-05,
      "loss": 0.4388,
      "step": 171
    },
    {
      "epoch": 0.9927849927849928,
      "grad_norm": 0.5628295830650956,
      "learning_rate": 6.76591723478383e-05,
      "loss": 0.4407,
      "step": 172
    },
    {
      "epoch": 0.9985569985569985,
      "grad_norm": 0.6701556147889078,
      "learning_rate": 6.746416039582474e-05,
      "loss": 0.4422,
      "step": 173
    },
    {
      "epoch": 1.0043290043290043,
      "grad_norm": 1.1221935027853214,
      "learning_rate": 6.726790555959383e-05,
      "loss": 0.7579,
      "step": 174
    },
    {
      "epoch": 1.0101010101010102,
      "grad_norm": 1.5081319057253486,
      "learning_rate": 6.707041672061271e-05,
      "loss": 0.4294,
      "step": 175
    },
    {
      "epoch": 1.0158730158730158,
      "grad_norm": 0.5264438763222767,
      "learning_rate": 6.687170281619302e-05,
      "loss": 0.4276,
      "step": 176
    },
    {
      "epoch": 1.0216450216450217,
      "grad_norm": 1.2609191944611602,
      "learning_rate": 6.667177283908654e-05,
      "loss": 0.4262,
      "step": 177
    },
    {
      "epoch": 1.0274170274170273,
      "grad_norm": 0.8861386082370325,
      "learning_rate": 6.647063583707802e-05,
      "loss": 0.4242,
      "step": 178
    },
    {
      "epoch": 1.0331890331890332,
      "grad_norm": 1.1725958779338623,
      "learning_rate": 6.62683009125759e-05,
      "loss": 0.4237,
      "step": 179
    },
    {
      "epoch": 1.0389610389610389,
      "grad_norm": 0.9929961855942879,
      "learning_rate": 6.606477722220036e-05,
      "loss": 0.4195,
      "step": 180
    },
    {
      "epoch": 1.0447330447330447,
      "grad_norm": 0.9200828863123336,
      "learning_rate": 6.586007397636885e-05,
      "loss": 0.418,
      "step": 181
    },
    {
      "epoch": 1.0505050505050506,
      "grad_norm": 0.9908709522642545,
      "learning_rate": 6.565420043887931e-05,
      "loss": 0.4257,
      "step": 182
    },
    {
      "epoch": 1.0562770562770563,
      "grad_norm": 0.9523415844130142,
      "learning_rate": 6.544716592649105e-05,
      "loss": 0.4236,
      "step": 183
    },
    {
      "epoch": 1.0620490620490621,
      "grad_norm": 0.7301914835828962,
      "learning_rate": 6.523897980850294e-05,
      "loss": 0.4232,
      "step": 184
    },
    {
      "epoch": 1.0678210678210678,
      "grad_norm": 0.6165866879818018,
      "learning_rate": 6.502965150632955e-05,
      "loss": 0.4259,
      "step": 185
    },
    {
      "epoch": 1.0735930735930737,
      "grad_norm": 0.8435043150433259,
      "learning_rate": 6.48191904930747e-05,
      "loss": 0.4258,
      "step": 186
    },
    {
      "epoch": 1.0793650793650793,
      "grad_norm": 1.0653357822717338,
      "learning_rate": 6.460760629310277e-05,
      "loss": 0.421,
      "step": 187
    },
    {
      "epoch": 1.0851370851370852,
      "grad_norm": 0.896889134293885,
      "learning_rate": 6.439490848160778e-05,
      "loss": 0.4185,
      "step": 188
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.445405759766472,
      "learning_rate": 6.418110668417987e-05,
      "loss": 0.4285,
      "step": 189
    },
    {
      "epoch": 1.0966810966810967,
      "grad_norm": 1.091734393127673,
      "learning_rate": 6.396621057636989e-05,
      "loss": 0.4179,
      "step": 190
    },
    {
      "epoch": 1.1024531024531024,
      "grad_norm": 0.8153012892445496,
      "learning_rate": 6.375022988325141e-05,
      "loss": 0.4189,
      "step": 191
    },
    {
      "epoch": 1.1082251082251082,
      "grad_norm": 0.47339196574510367,
      "learning_rate": 6.353317437898067e-05,
      "loss": 0.4219,
      "step": 192
    },
    {
      "epoch": 1.113997113997114,
      "grad_norm": 0.7949439049041327,
      "learning_rate": 6.331505388635425e-05,
      "loss": 0.4209,
      "step": 193
    },
    {
      "epoch": 1.1197691197691197,
      "grad_norm": 0.6343684319179256,
      "learning_rate": 6.309587827636452e-05,
      "loss": 0.416,
      "step": 194
    },
    {
      "epoch": 1.1255411255411256,
      "grad_norm": 0.4991129395667874,
      "learning_rate": 6.287565746775295e-05,
      "loss": 0.4173,
      "step": 195
    },
    {
      "epoch": 1.1313131313131313,
      "grad_norm": 0.8121276548673446,
      "learning_rate": 6.265440142656122e-05,
      "loss": 0.4202,
      "step": 196
    },
    {
      "epoch": 1.1370851370851371,
      "grad_norm": 0.6074410669253114,
      "learning_rate": 6.243212016568022e-05,
      "loss": 0.4131,
      "step": 197
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.44314411552468735,
      "learning_rate": 6.220882374439692e-05,
      "loss": 0.4163,
      "step": 198
    },
    {
      "epoch": 1.1486291486291487,
      "grad_norm": 0.729137709710534,
      "learning_rate": 6.198452226793918e-05,
      "loss": 0.4107,
      "step": 199
    },
    {
      "epoch": 1.1544011544011543,
      "grad_norm": 0.5568871901766357,
      "learning_rate": 6.175922588701831e-05,
      "loss": 0.4154,
      "step": 200
    },
    {
      "epoch": 1.1601731601731602,
      "grad_norm": 0.4394583517605951,
      "learning_rate": 6.153294479736988e-05,
      "loss": 0.4173,
      "step": 201
    },
    {
      "epoch": 1.1659451659451658,
      "grad_norm": 0.5781221410704105,
      "learning_rate": 6.130568923929218e-05,
      "loss": 0.4185,
      "step": 202
    },
    {
      "epoch": 1.1717171717171717,
      "grad_norm": 0.4445795099202651,
      "learning_rate": 6.107746949718286e-05,
      "loss": 0.4159,
      "step": 203
    },
    {
      "epoch": 1.1774891774891776,
      "grad_norm": 0.29486481158921624,
      "learning_rate": 6.084829589907348e-05,
      "loss": 0.4108,
      "step": 204
    },
    {
      "epoch": 1.1832611832611832,
      "grad_norm": 0.4361187764956284,
      "learning_rate": 6.061817881616215e-05,
      "loss": 0.4091,
      "step": 205
    },
    {
      "epoch": 1.189033189033189,
      "grad_norm": 0.3800065537217578,
      "learning_rate": 6.038712866234415e-05,
      "loss": 0.4122,
      "step": 206
    },
    {
      "epoch": 1.1948051948051948,
      "grad_norm": 0.3347247020532211,
      "learning_rate": 6.015515589374066e-05,
      "loss": 0.4157,
      "step": 207
    },
    {
      "epoch": 1.2005772005772006,
      "grad_norm": 0.5905935850780871,
      "learning_rate": 5.992227100822562e-05,
      "loss": 0.4151,
      "step": 208
    },
    {
      "epoch": 1.2063492063492063,
      "grad_norm": 0.6199555792896543,
      "learning_rate": 5.968848454495058e-05,
      "loss": 0.4114,
      "step": 209
    },
    {
      "epoch": 1.2121212121212122,
      "grad_norm": 0.4743837488073826,
      "learning_rate": 5.945380708386776e-05,
      "loss": 0.4111,
      "step": 210
    },
    {
      "epoch": 1.2178932178932178,
      "grad_norm": 0.5588814047789137,
      "learning_rate": 5.921824924525132e-05,
      "loss": 0.4199,
      "step": 211
    },
    {
      "epoch": 1.2236652236652237,
      "grad_norm": 0.7590505057556534,
      "learning_rate": 5.898182168921669e-05,
      "loss": 0.413,
      "step": 212
    },
    {
      "epoch": 1.2294372294372296,
      "grad_norm": 0.9817733675700191,
      "learning_rate": 5.874453511523815e-05,
      "loss": 0.4055,
      "step": 213
    },
    {
      "epoch": 1.2352092352092352,
      "grad_norm": 1.1447333189993498,
      "learning_rate": 5.8506400261664645e-05,
      "loss": 0.4181,
      "step": 214
    },
    {
      "epoch": 1.240981240981241,
      "grad_norm": 0.643085872578358,
      "learning_rate": 5.826742790523382e-05,
      "loss": 0.4178,
      "step": 215
    },
    {
      "epoch": 1.2467532467532467,
      "grad_norm": 0.6054604348523384,
      "learning_rate": 5.80276288605843e-05,
      "loss": 0.4069,
      "step": 216
    },
    {
      "epoch": 1.2525252525252526,
      "grad_norm": 1.182426250184434,
      "learning_rate": 5.7787013979766334e-05,
      "loss": 0.4185,
      "step": 217
    },
    {
      "epoch": 1.2582972582972582,
      "grad_norm": 0.8047691901798822,
      "learning_rate": 5.75455941517506e-05,
      "loss": 0.4118,
      "step": 218
    },
    {
      "epoch": 1.2640692640692641,
      "grad_norm": 0.2930520495485002,
      "learning_rate": 5.7303380301935495e-05,
      "loss": 0.4095,
      "step": 219
    },
    {
      "epoch": 1.2698412698412698,
      "grad_norm": 0.5888144741471648,
      "learning_rate": 5.70603833916527e-05,
      "loss": 0.4093,
      "step": 220
    },
    {
      "epoch": 1.2756132756132756,
      "grad_norm": 0.6886762220666399,
      "learning_rate": 5.68166144176711e-05,
      "loss": 0.4044,
      "step": 221
    },
    {
      "epoch": 1.2813852813852815,
      "grad_norm": 0.6308802163826126,
      "learning_rate": 5.657208441169916e-05,
      "loss": 0.4125,
      "step": 222
    },
    {
      "epoch": 1.2871572871572872,
      "grad_norm": 0.42058805295199864,
      "learning_rate": 5.6326804439885645e-05,
      "loss": 0.4101,
      "step": 223
    },
    {
      "epoch": 1.2929292929292928,
      "grad_norm": 0.3592378091851368,
      "learning_rate": 5.608078560231888e-05,
      "loss": 0.4041,
      "step": 224
    },
    {
      "epoch": 1.2987012987012987,
      "grad_norm": 0.43029512729600117,
      "learning_rate": 5.583403903252437e-05,
      "loss": 0.4121,
      "step": 225
    },
    {
      "epoch": 1.3044733044733046,
      "grad_norm": 0.3889055430906675,
      "learning_rate": 5.558657589696098e-05,
      "loss": 0.4059,
      "step": 226
    },
    {
      "epoch": 1.3102453102453102,
      "grad_norm": 0.3440494840478949,
      "learning_rate": 5.533840739451559e-05,
      "loss": 0.4034,
      "step": 227
    },
    {
      "epoch": 1.316017316017316,
      "grad_norm": 0.2571934157256294,
      "learning_rate": 5.5089544755996294e-05,
      "loss": 0.41,
      "step": 228
    },
    {
      "epoch": 1.3217893217893217,
      "grad_norm": 0.3824710170652386,
      "learning_rate": 5.483999924362417e-05,
      "loss": 0.406,
      "step": 229
    },
    {
      "epoch": 1.3275613275613276,
      "grad_norm": 0.41843958170342843,
      "learning_rate": 5.458978215052357e-05,
      "loss": 0.4046,
      "step": 230
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.3498561520298379,
      "learning_rate": 5.433890480021107e-05,
      "loss": 0.4036,
      "step": 231
    },
    {
      "epoch": 1.3391053391053391,
      "grad_norm": 0.42164963063157984,
      "learning_rate": 5.408737854608307e-05,
      "loss": 0.4027,
      "step": 232
    },
    {
      "epoch": 1.3448773448773448,
      "grad_norm": 0.48195135845030257,
      "learning_rate": 5.3835214770901945e-05,
      "loss": 0.4018,
      "step": 233
    },
    {
      "epoch": 1.3506493506493507,
      "grad_norm": 0.4852917535336072,
      "learning_rate": 5.358242488628092e-05,
      "loss": 0.4083,
      "step": 234
    },
    {
      "epoch": 1.3564213564213565,
      "grad_norm": 0.41225922180900215,
      "learning_rate": 5.332902033216766e-05,
      "loss": 0.4058,
      "step": 235
    },
    {
      "epoch": 1.3621933621933622,
      "grad_norm": 0.3010982188590577,
      "learning_rate": 5.3075012576326607e-05,
      "loss": 0.4051,
      "step": 236
    },
    {
      "epoch": 1.3679653679653678,
      "grad_norm": 0.2433157254360926,
      "learning_rate": 5.2820413113819885e-05,
      "loss": 0.3998,
      "step": 237
    },
    {
      "epoch": 1.3737373737373737,
      "grad_norm": 0.2833835472451347,
      "learning_rate": 5.256523346648721e-05,
      "loss": 0.404,
      "step": 238
    },
    {
      "epoch": 1.3795093795093796,
      "grad_norm": 0.27072179535322477,
      "learning_rate": 5.230948518242439e-05,
      "loss": 0.4019,
      "step": 239
    },
    {
      "epoch": 1.3852813852813852,
      "grad_norm": 0.24437603340900985,
      "learning_rate": 5.205317983546083e-05,
      "loss": 0.4076,
      "step": 240
    },
    {
      "epoch": 1.391053391053391,
      "grad_norm": 0.22711332037700857,
      "learning_rate": 5.179632902463562e-05,
      "loss": 0.4,
      "step": 241
    },
    {
      "epoch": 1.3968253968253967,
      "grad_norm": 0.29929194968825346,
      "learning_rate": 5.1538944373672694e-05,
      "loss": 0.4179,
      "step": 242
    },
    {
      "epoch": 1.4025974025974026,
      "grad_norm": 0.284448857778488,
      "learning_rate": 5.128103753045484e-05,
      "loss": 0.4097,
      "step": 243
    },
    {
      "epoch": 1.4083694083694085,
      "grad_norm": 0.32649263835005693,
      "learning_rate": 5.1022620166496486e-05,
      "loss": 0.4046,
      "step": 244
    },
    {
      "epoch": 1.4141414141414141,
      "grad_norm": 0.2601804107726681,
      "learning_rate": 5.07637039764156e-05,
      "loss": 0.4007,
      "step": 245
    },
    {
      "epoch": 1.4199134199134198,
      "grad_norm": 0.2985437577875243,
      "learning_rate": 5.05043006774044e-05,
      "loss": 0.4113,
      "step": 246
    },
    {
      "epoch": 1.4256854256854257,
      "grad_norm": 0.3479198216565732,
      "learning_rate": 5.024442200869907e-05,
      "loss": 0.4063,
      "step": 247
    },
    {
      "epoch": 1.4314574314574315,
      "grad_norm": 0.2934502700017442,
      "learning_rate": 4.9984079731048594e-05,
      "loss": 0.4044,
      "step": 248
    },
    {
      "epoch": 1.4372294372294372,
      "grad_norm": 0.284443761108607,
      "learning_rate": 4.972328562618244e-05,
      "loss": 0.4021,
      "step": 249
    },
    {
      "epoch": 1.443001443001443,
      "grad_norm": 0.2686380250003549,
      "learning_rate": 4.946205149627742e-05,
      "loss": 0.3967,
      "step": 250
    },
    {
      "epoch": 1.4487734487734487,
      "grad_norm": 0.20965782391240942,
      "learning_rate": 4.920038916342355e-05,
      "loss": 0.4055,
      "step": 251
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 0.26593807250010953,
      "learning_rate": 4.893831046908913e-05,
      "loss": 0.3919,
      "step": 252
    },
    {
      "epoch": 1.4603174603174602,
      "grad_norm": 0.3244764958086802,
      "learning_rate": 4.8675827273584764e-05,
      "loss": 0.4066,
      "step": 253
    },
    {
      "epoch": 1.466089466089466,
      "grad_norm": 0.4519123025012951,
      "learning_rate": 4.841295145552664e-05,
      "loss": 0.409,
      "step": 254
    },
    {
      "epoch": 1.4718614718614718,
      "grad_norm": 0.47539799585538184,
      "learning_rate": 4.8149694911299046e-05,
      "loss": 0.4068,
      "step": 255
    },
    {
      "epoch": 1.4776334776334776,
      "grad_norm": 0.3163497152972903,
      "learning_rate": 4.788606955451589e-05,
      "loss": 0.4008,
      "step": 256
    },
    {
      "epoch": 1.4834054834054835,
      "grad_norm": 0.2530368981149338,
      "learning_rate": 4.762208731548166e-05,
      "loss": 0.4045,
      "step": 257
    },
    {
      "epoch": 1.4891774891774892,
      "grad_norm": 0.23038203037988386,
      "learning_rate": 4.735776014065142e-05,
      "loss": 0.3988,
      "step": 258
    },
    {
      "epoch": 1.494949494949495,
      "grad_norm": 0.21695905607050836,
      "learning_rate": 4.709309999209023e-05,
      "loss": 0.3978,
      "step": 259
    },
    {
      "epoch": 1.5007215007215007,
      "grad_norm": 0.2567325295594068,
      "learning_rate": 4.682811884693185e-05,
      "loss": 0.3936,
      "step": 260
    },
    {
      "epoch": 1.5064935064935066,
      "grad_norm": 0.2692904261100886,
      "learning_rate": 4.656282869683659e-05,
      "loss": 0.3969,
      "step": 261
    },
    {
      "epoch": 1.5122655122655124,
      "grad_norm": 0.2636411848036993,
      "learning_rate": 4.629724154744876e-05,
      "loss": 0.3992,
      "step": 262
    },
    {
      "epoch": 1.518037518037518,
      "grad_norm": 0.20496250107354846,
      "learning_rate": 4.603136941785328e-05,
      "loss": 0.3947,
      "step": 263
    },
    {
      "epoch": 1.5238095238095237,
      "grad_norm": 0.24102557044701634,
      "learning_rate": 4.576522434003183e-05,
      "loss": 0.3984,
      "step": 264
    },
    {
      "epoch": 1.5295815295815296,
      "grad_norm": 0.270197841622627,
      "learning_rate": 4.549881835831821e-05,
      "loss": 0.3915,
      "step": 265
    },
    {
      "epoch": 1.5353535353535355,
      "grad_norm": 0.31636039416656614,
      "learning_rate": 4.523216352885345e-05,
      "loss": 0.3952,
      "step": 266
    },
    {
      "epoch": 1.5411255411255411,
      "grad_norm": 0.3077596806884647,
      "learning_rate": 4.496527191904009e-05,
      "loss": 0.3976,
      "step": 267
    },
    {
      "epoch": 1.5468975468975468,
      "grad_norm": 0.2675132016090662,
      "learning_rate": 4.4698155606996104e-05,
      "loss": 0.4057,
      "step": 268
    },
    {
      "epoch": 1.5526695526695526,
      "grad_norm": 0.21038804294274088,
      "learning_rate": 4.4430826681008316e-05,
      "loss": 0.4074,
      "step": 269
    },
    {
      "epoch": 1.5584415584415585,
      "grad_norm": 0.23735821005743604,
      "learning_rate": 4.416329723898536e-05,
      "loss": 0.3951,
      "step": 270
    },
    {
      "epoch": 1.5642135642135642,
      "grad_norm": 0.18061314585892582,
      "learning_rate": 4.389557938791019e-05,
      "loss": 0.394,
      "step": 271
    },
    {
      "epoch": 1.56998556998557,
      "grad_norm": 0.23191872327293342,
      "learning_rate": 4.362768524329212e-05,
      "loss": 0.3993,
      "step": 272
    },
    {
      "epoch": 1.5757575757575757,
      "grad_norm": 0.20523251504710777,
      "learning_rate": 4.335962692861862e-05,
      "loss": 0.4013,
      "step": 273
    },
    {
      "epoch": 1.5815295815295816,
      "grad_norm": 0.21213920806551073,
      "learning_rate": 4.3091416574806644e-05,
      "loss": 0.3932,
      "step": 274
    },
    {
      "epoch": 1.5873015873015874,
      "grad_norm": 0.20822870111060587,
      "learning_rate": 4.2823066319653606e-05,
      "loss": 0.4066,
      "step": 275
    },
    {
      "epoch": 1.593073593073593,
      "grad_norm": 0.2677158742340245,
      "learning_rate": 4.2554588307288166e-05,
      "loss": 0.3953,
      "step": 276
    },
    {
      "epoch": 1.5988455988455988,
      "grad_norm": 0.24447097986602137,
      "learning_rate": 4.2285994687620586e-05,
      "loss": 0.3908,
      "step": 277
    },
    {
      "epoch": 1.6046176046176046,
      "grad_norm": 0.211196522709894,
      "learning_rate": 4.20172976157929e-05,
      "loss": 0.4048,
      "step": 278
    },
    {
      "epoch": 1.6103896103896105,
      "grad_norm": 0.2035651604206692,
      "learning_rate": 4.174850925162887e-05,
      "loss": 0.3994,
      "step": 279
    },
    {
      "epoch": 1.6161616161616161,
      "grad_norm": 0.28087618115996654,
      "learning_rate": 4.1479641759083653e-05,
      "loss": 0.3954,
      "step": 280
    },
    {
      "epoch": 1.6219336219336218,
      "grad_norm": 0.2869550764537825,
      "learning_rate": 4.121070730569336e-05,
      "loss": 0.3934,
      "step": 281
    },
    {
      "epoch": 1.6277056277056277,
      "grad_norm": 0.2627243894997915,
      "learning_rate": 4.094171806202436e-05,
      "loss": 0.3929,
      "step": 282
    },
    {
      "epoch": 1.6334776334776335,
      "grad_norm": 0.18504635663976976,
      "learning_rate": 4.067268620112259e-05,
      "loss": 0.3921,
      "step": 283
    },
    {
      "epoch": 1.6392496392496394,
      "grad_norm": 0.24273499891124686,
      "learning_rate": 4.04036238979626e-05,
      "loss": 0.3863,
      "step": 284
    },
    {
      "epoch": 1.645021645021645,
      "grad_norm": 0.21825468272936616,
      "learning_rate": 4.01345433288966e-05,
      "loss": 0.3903,
      "step": 285
    },
    {
      "epoch": 1.6507936507936507,
      "grad_norm": 0.21301467745169061,
      "learning_rate": 3.9865456671103404e-05,
      "loss": 0.3941,
      "step": 286
    },
    {
      "epoch": 1.6565656565656566,
      "grad_norm": 0.24240452578983904,
      "learning_rate": 3.959637610203741e-05,
      "loss": 0.3914,
      "step": 287
    },
    {
      "epoch": 1.6623376623376624,
      "grad_norm": 0.24775047125742394,
      "learning_rate": 3.932731379887742e-05,
      "loss": 0.3865,
      "step": 288
    },
    {
      "epoch": 1.668109668109668,
      "grad_norm": 0.20875238150820688,
      "learning_rate": 3.905828193797566e-05,
      "loss": 0.3946,
      "step": 289
    },
    {
      "epoch": 1.6738816738816737,
      "grad_norm": 0.2092017685246258,
      "learning_rate": 3.878929269430667e-05,
      "loss": 0.3905,
      "step": 290
    },
    {
      "epoch": 1.6796536796536796,
      "grad_norm": 0.21363231113184575,
      "learning_rate": 3.852035824091635e-05,
      "loss": 0.3893,
      "step": 291
    },
    {
      "epoch": 1.6854256854256855,
      "grad_norm": 0.21569898511300334,
      "learning_rate": 3.8251490748371136e-05,
      "loss": 0.3942,
      "step": 292
    },
    {
      "epoch": 1.6911976911976911,
      "grad_norm": 0.2018232998507949,
      "learning_rate": 3.798270238420711e-05,
      "loss": 0.3901,
      "step": 293
    },
    {
      "epoch": 1.696969696969697,
      "grad_norm": 0.1925594320139884,
      "learning_rate": 3.771400531237942e-05,
      "loss": 0.3906,
      "step": 294
    },
    {
      "epoch": 1.7027417027417027,
      "grad_norm": 0.18974891989147144,
      "learning_rate": 3.744541169271184e-05,
      "loss": 0.3954,
      "step": 295
    },
    {
      "epoch": 1.7085137085137085,
      "grad_norm": 0.20798653934785333,
      "learning_rate": 3.7176933680346414e-05,
      "loss": 0.3943,
      "step": 296
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.22892716154764753,
      "learning_rate": 3.6908583425193376e-05,
      "loss": 0.3927,
      "step": 297
    },
    {
      "epoch": 1.72005772005772,
      "grad_norm": 0.20442563522892104,
      "learning_rate": 3.66403730713814e-05,
      "loss": 0.3899,
      "step": 298
    },
    {
      "epoch": 1.7258297258297257,
      "grad_norm": 0.2290558666884635,
      "learning_rate": 3.637231475670789e-05,
      "loss": 0.3893,
      "step": 299
    },
    {
      "epoch": 1.7316017316017316,
      "grad_norm": 0.24205910207531414,
      "learning_rate": 3.6104420612089814e-05,
      "loss": 0.3785,
      "step": 300
    },
    {
      "epoch": 1.7373737373737375,
      "grad_norm": 0.25593097184058905,
      "learning_rate": 3.583670276101464e-05,
      "loss": 0.3933,
      "step": 301
    },
    {
      "epoch": 1.743145743145743,
      "grad_norm": 0.17738260043921314,
      "learning_rate": 3.556917331899169e-05,
      "loss": 0.3972,
      "step": 302
    },
    {
      "epoch": 1.7489177489177488,
      "grad_norm": 0.22220255965346145,
      "learning_rate": 3.530184439300392e-05,
      "loss": 0.3932,
      "step": 303
    },
    {
      "epoch": 1.7546897546897546,
      "grad_norm": 0.2123289870960355,
      "learning_rate": 3.503472808095993e-05,
      "loss": 0.388,
      "step": 304
    },
    {
      "epoch": 1.7604617604617605,
      "grad_norm": 0.21684056813791056,
      "learning_rate": 3.476783647114656e-05,
      "loss": 0.3871,
      "step": 305
    },
    {
      "epoch": 1.7662337662337664,
      "grad_norm": 0.2986703879447781,
      "learning_rate": 3.4501181641681806e-05,
      "loss": 0.4022,
      "step": 306
    },
    {
      "epoch": 1.772005772005772,
      "grad_norm": 0.22910905968331996,
      "learning_rate": 3.4234775659968185e-05,
      "loss": 0.387,
      "step": 307
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.30310896794339226,
      "learning_rate": 3.396863058214671e-05,
      "loss": 0.3954,
      "step": 308
    },
    {
      "epoch": 1.7835497835497836,
      "grad_norm": 0.28241561357778666,
      "learning_rate": 3.370275845255125e-05,
      "loss": 0.3876,
      "step": 309
    },
    {
      "epoch": 1.7893217893217894,
      "grad_norm": 0.2509414521780602,
      "learning_rate": 3.343717130316344e-05,
      "loss": 0.3844,
      "step": 310
    },
    {
      "epoch": 1.795093795093795,
      "grad_norm": 0.23323065292973938,
      "learning_rate": 3.317188115306817e-05,
      "loss": 0.3869,
      "step": 311
    },
    {
      "epoch": 1.8008658008658007,
      "grad_norm": 0.26047360683879345,
      "learning_rate": 3.290690000790978e-05,
      "loss": 0.3953,
      "step": 312
    },
    {
      "epoch": 1.8066378066378066,
      "grad_norm": 0.23595979586126703,
      "learning_rate": 3.2642239859348594e-05,
      "loss": 0.3893,
      "step": 313
    },
    {
      "epoch": 1.8124098124098125,
      "grad_norm": 0.22572346653298958,
      "learning_rate": 3.237791268451835e-05,
      "loss": 0.3847,
      "step": 314
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.2098525856576488,
      "learning_rate": 3.211393044548411e-05,
      "loss": 0.4011,
      "step": 315
    },
    {
      "epoch": 1.823953823953824,
      "grad_norm": 0.2116216292884672,
      "learning_rate": 3.185030508870096e-05,
      "loss": 0.3806,
      "step": 316
    },
    {
      "epoch": 1.8297258297258296,
      "grad_norm": 0.21095945535091404,
      "learning_rate": 3.158704854447337e-05,
      "loss": 0.385,
      "step": 317
    },
    {
      "epoch": 1.8354978354978355,
      "grad_norm": 0.20877744581550697,
      "learning_rate": 3.1324172726415256e-05,
      "loss": 0.3879,
      "step": 318
    },
    {
      "epoch": 1.8412698412698414,
      "grad_norm": 0.19527613530959578,
      "learning_rate": 3.106168953091088e-05,
      "loss": 0.3892,
      "step": 319
    },
    {
      "epoch": 1.847041847041847,
      "grad_norm": 0.20996890512994054,
      "learning_rate": 3.079961083657646e-05,
      "loss": 0.3821,
      "step": 320
    },
    {
      "epoch": 1.8528138528138527,
      "grad_norm": 0.19908534919754334,
      "learning_rate": 3.0537948503722595e-05,
      "loss": 0.391,
      "step": 321
    },
    {
      "epoch": 1.8585858585858586,
      "grad_norm": 0.2069424165481144,
      "learning_rate": 3.027671437381757e-05,
      "loss": 0.3896,
      "step": 322
    },
    {
      "epoch": 1.8643578643578644,
      "grad_norm": 0.18991524333796642,
      "learning_rate": 3.0015920268951412e-05,
      "loss": 0.3927,
      "step": 323
    },
    {
      "epoch": 1.87012987012987,
      "grad_norm": 0.202639438723347,
      "learning_rate": 2.975557799130094e-05,
      "loss": 0.3837,
      "step": 324
    },
    {
      "epoch": 1.8759018759018757,
      "grad_norm": 0.17613137029703754,
      "learning_rate": 2.9495699322595615e-05,
      "loss": 0.3975,
      "step": 325
    },
    {
      "epoch": 1.8816738816738816,
      "grad_norm": 0.21155619568693387,
      "learning_rate": 2.923629602358441e-05,
      "loss": 0.387,
      "step": 326
    },
    {
      "epoch": 1.8874458874458875,
      "grad_norm": 0.1725011702032577,
      "learning_rate": 2.8977379833503524e-05,
      "loss": 0.391,
      "step": 327
    },
    {
      "epoch": 1.8932178932178934,
      "grad_norm": 0.22393946086021008,
      "learning_rate": 2.871896246954518e-05,
      "loss": 0.3924,
      "step": 328
    },
    {
      "epoch": 1.898989898989899,
      "grad_norm": 0.19876543898525306,
      "learning_rate": 2.8461055626327313e-05,
      "loss": 0.3861,
      "step": 329
    },
    {
      "epoch": 1.9047619047619047,
      "grad_norm": 0.2245112603739307,
      "learning_rate": 2.8203670975364395e-05,
      "loss": 0.3923,
      "step": 330
    },
    {
      "epoch": 1.9105339105339105,
      "grad_norm": 0.16602895720578406,
      "learning_rate": 2.7946820164539182e-05,
      "loss": 0.3891,
      "step": 331
    },
    {
      "epoch": 1.9163059163059164,
      "grad_norm": 0.21870332861694683,
      "learning_rate": 2.7690514817575615e-05,
      "loss": 0.3924,
      "step": 332
    },
    {
      "epoch": 1.922077922077922,
      "grad_norm": 0.17060774656580643,
      "learning_rate": 2.7434766533512806e-05,
      "loss": 0.3737,
      "step": 333
    },
    {
      "epoch": 1.9278499278499277,
      "grad_norm": 0.21300688928118505,
      "learning_rate": 2.7179586886180128e-05,
      "loss": 0.3948,
      "step": 334
    },
    {
      "epoch": 1.9336219336219336,
      "grad_norm": 0.175788452853784,
      "learning_rate": 2.69249874236734e-05,
      "loss": 0.3866,
      "step": 335
    },
    {
      "epoch": 1.9393939393939394,
      "grad_norm": 0.18460721760511187,
      "learning_rate": 2.667097966783234e-05,
      "loss": 0.3862,
      "step": 336
    },
    {
      "epoch": 1.9451659451659453,
      "grad_norm": 0.17444022188650732,
      "learning_rate": 2.6417575113719087e-05,
      "loss": 0.3802,
      "step": 337
    },
    {
      "epoch": 1.950937950937951,
      "grad_norm": 0.1484989021731224,
      "learning_rate": 2.616478522909807e-05,
      "loss": 0.392,
      "step": 338
    },
    {
      "epoch": 1.9567099567099566,
      "grad_norm": 0.16268258501003877,
      "learning_rate": 2.5912621453916944e-05,
      "loss": 0.3885,
      "step": 339
    },
    {
      "epoch": 1.9624819624819625,
      "grad_norm": 0.16171751920146682,
      "learning_rate": 2.566109519978894e-05,
      "loss": 0.3808,
      "step": 340
    },
    {
      "epoch": 1.9682539682539684,
      "grad_norm": 0.16382873859878613,
      "learning_rate": 2.5410217849476447e-05,
      "loss": 0.3752,
      "step": 341
    },
    {
      "epoch": 1.974025974025974,
      "grad_norm": 0.17482617424613112,
      "learning_rate": 2.516000075637584e-05,
      "loss": 0.3763,
      "step": 342
    },
    {
      "epoch": 1.9797979797979797,
      "grad_norm": 0.1702896815624782,
      "learning_rate": 2.4910455244003702e-05,
      "loss": 0.3814,
      "step": 343
    },
    {
      "epoch": 1.9855699855699855,
      "grad_norm": 0.16803526970105556,
      "learning_rate": 2.4661592605484422e-05,
      "loss": 0.3886,
      "step": 344
    },
    {
      "epoch": 1.9913419913419914,
      "grad_norm": 0.17404030012234012,
      "learning_rate": 2.4413424103039035e-05,
      "loss": 0.3856,
      "step": 345
    },
    {
      "epoch": 1.997113997113997,
      "grad_norm": 0.16281429029619196,
      "learning_rate": 2.416596096747564e-05,
      "loss": 0.3812,
      "step": 346
    },
    {
      "epoch": 2.0028860028860027,
      "grad_norm": 0.34376210625654163,
      "learning_rate": 2.3919214397681137e-05,
      "loss": 0.6364,
      "step": 347
    },
    {
      "epoch": 2.0086580086580086,
      "grad_norm": 0.25772199023989556,
      "learning_rate": 2.3673195560114365e-05,
      "loss": 0.3528,
      "step": 348
    },
    {
      "epoch": 2.0144300144300145,
      "grad_norm": 0.276112829230668,
      "learning_rate": 2.3427915588300846e-05,
      "loss": 0.3535,
      "step": 349
    },
    {
      "epoch": 2.0202020202020203,
      "grad_norm": 0.31992574049691747,
      "learning_rate": 2.31833855823289e-05,
      "loss": 0.3491,
      "step": 350
    },
    {
      "epoch": 2.0259740259740258,
      "grad_norm": 0.2608841192334598,
      "learning_rate": 2.2939616608347316e-05,
      "loss": 0.3554,
      "step": 351
    },
    {
      "epoch": 2.0317460317460316,
      "grad_norm": 0.2653214329011091,
      "learning_rate": 2.2696619698064515e-05,
      "loss": 0.3493,
      "step": 352
    },
    {
      "epoch": 2.0375180375180375,
      "grad_norm": 0.2472838788658597,
      "learning_rate": 2.245440584824941e-05,
      "loss": 0.347,
      "step": 353
    },
    {
      "epoch": 2.0432900432900434,
      "grad_norm": 0.26371573358746464,
      "learning_rate": 2.2212986020233683e-05,
      "loss": 0.3493,
      "step": 354
    },
    {
      "epoch": 2.0490620490620493,
      "grad_norm": 0.20878293083270738,
      "learning_rate": 2.1972371139415705e-05,
      "loss": 0.3553,
      "step": 355
    },
    {
      "epoch": 2.0548340548340547,
      "grad_norm": 0.23273233249130984,
      "learning_rate": 2.1732572094766188e-05,
      "loss": 0.3532,
      "step": 356
    },
    {
      "epoch": 2.0606060606060606,
      "grad_norm": 0.23527543008138327,
      "learning_rate": 2.1493599738335358e-05,
      "loss": 0.3553,
      "step": 357
    },
    {
      "epoch": 2.0663780663780664,
      "grad_norm": 0.20970708352117728,
      "learning_rate": 2.1255464884761863e-05,
      "loss": 0.3574,
      "step": 358
    },
    {
      "epoch": 2.0721500721500723,
      "grad_norm": 0.2083525361419124,
      "learning_rate": 2.1018178310783322e-05,
      "loss": 0.3467,
      "step": 359
    },
    {
      "epoch": 2.0779220779220777,
      "grad_norm": 0.20295702822985864,
      "learning_rate": 2.07817507547487e-05,
      "loss": 0.3518,
      "step": 360
    },
    {
      "epoch": 2.0836940836940836,
      "grad_norm": 0.17820081287215864,
      "learning_rate": 2.054619291613226e-05,
      "loss": 0.3454,
      "step": 361
    },
    {
      "epoch": 2.0894660894660895,
      "grad_norm": 0.21352202172185789,
      "learning_rate": 2.031151545504944e-05,
      "loss": 0.3509,
      "step": 362
    },
    {
      "epoch": 2.0952380952380953,
      "grad_norm": 0.16570061254078286,
      "learning_rate": 2.007772899177438e-05,
      "loss": 0.3591,
      "step": 363
    },
    {
      "epoch": 2.101010101010101,
      "grad_norm": 0.17922618028941195,
      "learning_rate": 1.984484410625935e-05,
      "loss": 0.3462,
      "step": 364
    },
    {
      "epoch": 2.1067821067821066,
      "grad_norm": 0.15633838545430417,
      "learning_rate": 1.9612871337655882e-05,
      "loss": 0.3474,
      "step": 365
    },
    {
      "epoch": 2.1125541125541125,
      "grad_norm": 0.15747516923085514,
      "learning_rate": 1.938182118383787e-05,
      "loss": 0.3532,
      "step": 366
    },
    {
      "epoch": 2.1183261183261184,
      "grad_norm": 0.15811521645472146,
      "learning_rate": 1.9151704100926536e-05,
      "loss": 0.348,
      "step": 367
    },
    {
      "epoch": 2.1240981240981243,
      "grad_norm": 0.14644722471559174,
      "learning_rate": 1.892253050281715e-05,
      "loss": 0.349,
      "step": 368
    },
    {
      "epoch": 2.1298701298701297,
      "grad_norm": 0.1548058583190097,
      "learning_rate": 1.8694310760707824e-05,
      "loss": 0.346,
      "step": 369
    },
    {
      "epoch": 2.1356421356421356,
      "grad_norm": 0.13350674475881225,
      "learning_rate": 1.8467055202630137e-05,
      "loss": 0.3472,
      "step": 370
| }, |
| { |
| "epoch": 2.1414141414141414, |
| "grad_norm": 0.1390929667594732, |
| "learning_rate": 1.8240774112981702e-05, |
| "loss": 0.3482, |
| "step": 371 |
| }, |
| { |
| "epoch": 2.1471861471861473, |
| "grad_norm": 0.13904631097945175, |
| "learning_rate": 1.8015477732060853e-05, |
| "loss": 0.3461, |
| "step": 372 |
| }, |
| { |
| "epoch": 2.1529581529581527, |
| "grad_norm": 0.1388842546531878, |
| "learning_rate": 1.779117625560309e-05, |
| "loss": 0.3534, |
| "step": 373 |
| }, |
| { |
| "epoch": 2.1587301587301586, |
| "grad_norm": 0.1329557989872869, |
| "learning_rate": 1.7567879834319793e-05, |
| "loss": 0.3541, |
| "step": 374 |
| }, |
| { |
| "epoch": 2.1645021645021645, |
| "grad_norm": 0.13708881477559062, |
| "learning_rate": 1.734559857343878e-05, |
| "loss": 0.3512, |
| "step": 375 |
| }, |
| { |
| "epoch": 2.1702741702741704, |
| "grad_norm": 0.13046281719011577, |
| "learning_rate": 1.7124342532247057e-05, |
| "loss": 0.3453, |
| "step": 376 |
| }, |
| { |
| "epoch": 2.1760461760461762, |
| "grad_norm": 0.14597316841401312, |
| "learning_rate": 1.6904121723635477e-05, |
| "loss": 0.3513, |
| "step": 377 |
| }, |
| { |
| "epoch": 2.1818181818181817, |
| "grad_norm": 0.15098750108014802, |
| "learning_rate": 1.6684946113645746e-05, |
| "loss": 0.3406, |
| "step": 378 |
| }, |
| { |
| "epoch": 2.1875901875901875, |
| "grad_norm": 0.1341433244109453, |
| "learning_rate": 1.6466825621019337e-05, |
| "loss": 0.3469, |
| "step": 379 |
| }, |
| { |
| "epoch": 2.1933621933621934, |
| "grad_norm": 0.14986397828347373, |
| "learning_rate": 1.6249770116748604e-05, |
| "loss": 0.3485, |
| "step": 380 |
| }, |
| { |
| "epoch": 2.1991341991341993, |
| "grad_norm": 0.13918404166032933, |
| "learning_rate": 1.6033789423630134e-05, |
| "loss": 0.3499, |
| "step": 381 |
| }, |
| { |
| "epoch": 2.2049062049062047, |
| "grad_norm": 0.12359065581631808, |
| "learning_rate": 1.5818893315820144e-05, |
| "loss": 0.3467, |
| "step": 382 |
| }, |
| { |
| "epoch": 2.2106782106782106, |
| "grad_norm": 0.1513201157889788, |
| "learning_rate": 1.5605091518392234e-05, |
| "loss": 0.3431, |
| "step": 383 |
| }, |
| { |
| "epoch": 2.2164502164502164, |
| "grad_norm": 0.12485709624581334, |
| "learning_rate": 1.5392393706897224e-05, |
| "loss": 0.3483, |
| "step": 384 |
| }, |
| { |
| "epoch": 2.2222222222222223, |
| "grad_norm": 0.13602588524238346, |
| "learning_rate": 1.5180809506925306e-05, |
| "loss": 0.3532, |
| "step": 385 |
| }, |
| { |
| "epoch": 2.227994227994228, |
| "grad_norm": 0.14562695629291877, |
| "learning_rate": 1.4970348493670454e-05, |
| "loss": 0.3483, |
| "step": 386 |
| }, |
| { |
| "epoch": 2.2337662337662336, |
| "grad_norm": 0.11828582123138484, |
| "learning_rate": 1.476102019149707e-05, |
| "loss": 0.3516, |
| "step": 387 |
| }, |
| { |
| "epoch": 2.2395382395382395, |
| "grad_norm": 0.12945968901614865, |
| "learning_rate": 1.4552834073508958e-05, |
| "loss": 0.3475, |
| "step": 388 |
| }, |
| { |
| "epoch": 2.2453102453102454, |
| "grad_norm": 0.1170563090280043, |
| "learning_rate": 1.434579956112069e-05, |
| "loss": 0.3453, |
| "step": 389 |
| }, |
| { |
| "epoch": 2.2510822510822512, |
| "grad_norm": 0.1287506811756111, |
| "learning_rate": 1.4139926023631168e-05, |
| "loss": 0.3405, |
| "step": 390 |
| }, |
| { |
| "epoch": 2.2568542568542567, |
| "grad_norm": 0.11142792038606325, |
| "learning_rate": 1.3935222777799652e-05, |
| "loss": 0.3467, |
| "step": 391 |
| }, |
| { |
| "epoch": 2.2626262626262625, |
| "grad_norm": 0.1274164884676621, |
| "learning_rate": 1.3731699087424115e-05, |
| "loss": 0.3493, |
| "step": 392 |
| }, |
| { |
| "epoch": 2.2683982683982684, |
| "grad_norm": 0.11590691394723747, |
| "learning_rate": 1.3529364162922004e-05, |
| "loss": 0.3441, |
| "step": 393 |
| }, |
| { |
| "epoch": 2.2741702741702743, |
| "grad_norm": 0.13145419286429477, |
| "learning_rate": 1.3328227160913478e-05, |
| "loss": 0.3432, |
| "step": 394 |
| }, |
| { |
| "epoch": 2.27994227994228, |
| "grad_norm": 0.10726667067770969, |
| "learning_rate": 1.3128297183806976e-05, |
| "loss": 0.3454, |
| "step": 395 |
| }, |
| { |
| "epoch": 2.2857142857142856, |
| "grad_norm": 0.1144655280850949, |
| "learning_rate": 1.2929583279387296e-05, |
| "loss": 0.3437, |
| "step": 396 |
| }, |
| { |
| "epoch": 2.2914862914862915, |
| "grad_norm": 0.11276212616579144, |
| "learning_rate": 1.2732094440406177e-05, |
| "loss": 0.352, |
| "step": 397 |
| }, |
| { |
| "epoch": 2.2972582972582973, |
| "grad_norm": 0.11880977448596589, |
| "learning_rate": 1.2535839604175255e-05, |
| "loss": 0.3486, |
| "step": 398 |
| }, |
| { |
| "epoch": 2.303030303030303, |
| "grad_norm": 0.10962217727818492, |
| "learning_rate": 1.2340827652161713e-05, |
| "loss": 0.3452, |
| "step": 399 |
| }, |
| { |
| "epoch": 2.3088023088023086, |
| "grad_norm": 0.11274713005381089, |
| "learning_rate": 1.2147067409586243e-05, |
| "loss": 0.348, |
| "step": 400 |
| }, |
| { |
| "epoch": 2.3145743145743145, |
| "grad_norm": 0.11693801240672108, |
| "learning_rate": 1.1954567645023759e-05, |
| "loss": 0.3535, |
| "step": 401 |
| }, |
| { |
| "epoch": 2.3203463203463204, |
| "grad_norm": 0.12532346971561012, |
| "learning_rate": 1.1763337070006515e-05, |
| "loss": 0.3495, |
| "step": 402 |
| }, |
| { |
| "epoch": 2.3261183261183263, |
| "grad_norm": 0.10893628624682762, |
| "learning_rate": 1.1573384338629908e-05, |
| "loss": 0.3518, |
| "step": 403 |
| }, |
| { |
| "epoch": 2.3318903318903317, |
| "grad_norm": 0.10920863452315147, |
| "learning_rate": 1.138471804716076e-05, |
| "loss": 0.349, |
| "step": 404 |
| }, |
| { |
| "epoch": 2.3376623376623376, |
| "grad_norm": 0.12529050366314204, |
| "learning_rate": 1.1197346733648424e-05, |
| "loss": 0.3472, |
| "step": 405 |
| }, |
| { |
| "epoch": 2.3434343434343434, |
| "grad_norm": 0.10496748545263929, |
| "learning_rate": 1.1011278877538292e-05, |
| "loss": 0.3517, |
| "step": 406 |
| }, |
| { |
| "epoch": 2.3492063492063493, |
| "grad_norm": 0.11719451671854277, |
| "learning_rate": 1.0826522899288077e-05, |
| "loss": 0.3447, |
| "step": 407 |
| }, |
| { |
| "epoch": 2.354978354978355, |
| "grad_norm": 0.10969963263744975, |
| "learning_rate": 1.0643087159986796e-05, |
| "loss": 0.3497, |
| "step": 408 |
| }, |
| { |
| "epoch": 2.3607503607503606, |
| "grad_norm": 0.10835055548857855, |
| "learning_rate": 1.046097996097633e-05, |
| "loss": 0.3399, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.3665223665223665, |
| "grad_norm": 0.10687762232224175, |
| "learning_rate": 1.0280209543475799e-05, |
| "loss": 0.3469, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.3722943722943723, |
| "grad_norm": 0.10737382891175314, |
| "learning_rate": 1.0100784088208569e-05, |
| "loss": 0.3498, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.378066378066378, |
| "grad_norm": 0.11196662282124958, |
| "learning_rate": 9.922711715032069e-06, |
| "loss": 0.3479, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.3838383838383836, |
| "grad_norm": 0.10249258673290118, |
| "learning_rate": 9.746000482570306e-06, |
| "loss": 0.3507, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.3896103896103895, |
| "grad_norm": 0.10682683103804237, |
| "learning_rate": 9.570658387849163e-06, |
| "loss": 0.3396, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.3953823953823954, |
| "grad_norm": 0.11318251935991214, |
| "learning_rate": 9.396693365934517e-06, |
| "loss": 0.3409, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.4011544011544013, |
| "grad_norm": 0.1019527978859083, |
| "learning_rate": 9.22411328957315e-06, |
| "loss": 0.3476, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.4069264069264067, |
| "grad_norm": 0.10749236145185602, |
| "learning_rate": 9.052925968836442e-06, |
| "loss": 0.343, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.4126984126984126, |
| "grad_norm": 0.0986457791725115, |
| "learning_rate": 8.883139150766941e-06, |
| "loss": 0.3457, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.4184704184704184, |
| "grad_norm": 0.10213111766143514, |
| "learning_rate": 8.714760519027789e-06, |
| "loss": 0.3491, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.4242424242424243, |
| "grad_norm": 0.10746607521482601, |
| "learning_rate": 8.547797693554946e-06, |
| "loss": 0.3486, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.43001443001443, |
| "grad_norm": 0.10441851840468046, |
| "learning_rate": 8.3822582302124e-06, |
| "loss": 0.3456, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.4357864357864356, |
| "grad_norm": 0.10831025638581726, |
| "learning_rate": 8.218149620450235e-06, |
| "loss": 0.3411, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.4415584415584415, |
| "grad_norm": 0.10028072114634466, |
| "learning_rate": 8.055479290965569e-06, |
| "loss": 0.3414, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.4473304473304474, |
| "grad_norm": 0.10675866856021239, |
| "learning_rate": 7.894254603366497e-06, |
| "loss": 0.3382, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.4531024531024532, |
| "grad_norm": 0.10790874855042172, |
| "learning_rate": 7.734482853838892e-06, |
| "loss": 0.35, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.458874458874459, |
| "grad_norm": 0.10662476000191072, |
| "learning_rate": 7.576171272816286e-06, |
| "loss": 0.3414, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.4646464646464645, |
| "grad_norm": 0.10947969228036052, |
| "learning_rate": 7.4193270246526135e-06, |
| "loss": 0.3412, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.4704184704184704, |
| "grad_norm": 0.10274131450450585, |
| "learning_rate": 7.263957207297978e-06, |
| "loss": 0.3433, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.4761904761904763, |
| "grad_norm": 0.10154033582796251, |
| "learning_rate": 7.1100688519774876e-06, |
| "loss": 0.3356, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.481962481962482, |
| "grad_norm": 0.10986813684307772, |
| "learning_rate": 6.957668922873031e-06, |
| "loss": 0.3526, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.4877344877344876, |
| "grad_norm": 0.11268326800105738, |
| "learning_rate": 6.8067643168080766e-06, |
| "loss": 0.3488, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.4935064935064934, |
| "grad_norm": 0.09840769427350186, |
| "learning_rate": 6.6573618629356274e-06, |
| "loss": 0.3452, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.4992784992784993, |
| "grad_norm": 0.11269175164918041, |
| "learning_rate": 6.509468322429131e-06, |
| "loss": 0.347, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.505050505050505, |
| "grad_norm": 0.10999136780654757, |
| "learning_rate": 6.36309038817652e-06, |
| "loss": 0.3455, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.5108225108225106, |
| "grad_norm": 0.09840209750950327, |
| "learning_rate": 6.218234684477278e-06, |
| "loss": 0.3439, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.5165945165945165, |
| "grad_norm": 0.09447419631866041, |
| "learning_rate": 6.07490776674275e-06, |
| "loss": 0.3378, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.5223665223665224, |
| "grad_norm": 0.09706328476200818, |
| "learning_rate": 5.933116121199378e-06, |
| "loss": 0.3452, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.5281385281385282, |
| "grad_norm": 0.10231049024580399, |
| "learning_rate": 5.792866164595245e-06, |
| "loss": 0.3368, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.533910533910534, |
| "grad_norm": 0.09978034629381005, |
| "learning_rate": 5.654164243909659e-06, |
| "loss": 0.3396, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.5396825396825395, |
| "grad_norm": 0.10413041011105151, |
| "learning_rate": 5.517016636065915e-06, |
| "loss": 0.3432, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.5454545454545454, |
| "grad_norm": 0.0967954090668441, |
| "learning_rate": 5.381429547647239e-06, |
| "loss": 0.3499, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.5512265512265513, |
| "grad_norm": 0.10109502518725756, |
| "learning_rate": 5.247409114615898e-06, |
| "loss": 0.3407, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.556998556998557, |
| "grad_norm": 0.09490435025341039, |
| "learning_rate": 5.114961402035552e-06, |
| "loss": 0.3424, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.562770562770563, |
| "grad_norm": 0.10229915180017767, |
| "learning_rate": 4.984092403796763e-06, |
| "loss": 0.3413, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.5685425685425685, |
| "grad_norm": 0.09805972167431506, |
| "learning_rate": 4.854808042345736e-06, |
| "loss": 0.3373, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.5743145743145743, |
| "grad_norm": 0.09581515115465772, |
| "learning_rate": 4.7271141684163e-06, |
| "loss": 0.3486, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.58008658008658, |
| "grad_norm": 0.09484063048204838, |
| "learning_rate": 4.601016560765144e-06, |
| "loss": 0.3419, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.5858585858585856, |
| "grad_norm": 0.09536057522665425, |
| "learning_rate": 4.476520925910284e-06, |
| "loss": 0.3366, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.5916305916305915, |
| "grad_norm": 0.09987541239632843, |
| "learning_rate": 4.353632897872846e-06, |
| "loss": 0.3425, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.5974025974025974, |
| "grad_norm": 0.09963505235706265, |
| "learning_rate": 4.2323580379220755e-06, |
| "loss": 0.3484, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.6031746031746033, |
| "grad_norm": 0.09253983593459067, |
| "learning_rate": 4.112701834323672e-06, |
| "loss": 0.3429, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.608946608946609, |
| "grad_norm": 0.09199990563523672, |
| "learning_rate": 3.994669702091418e-06, |
| "loss": 0.3351, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.6147186147186146, |
| "grad_norm": 0.09874324626738527, |
| "learning_rate": 3.878266982742109e-06, |
| "loss": 0.3464, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.6204906204906204, |
| "grad_norm": 0.10055349590155505, |
| "learning_rate": 3.7634989440538603e-06, |
| "loss": 0.3493, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.6262626262626263, |
| "grad_norm": 0.09935024527889372, |
| "learning_rate": 3.650370779827661e-06, |
| "loss": 0.3407, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.632034632034632, |
| "grad_norm": 0.09534517733096698, |
| "learning_rate": 3.538887609652388e-06, |
| "loss": 0.3378, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.637806637806638, |
| "grad_norm": 0.09350817574484052, |
| "learning_rate": 3.4290544786730996e-06, |
| "loss": 0.3414, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.6435786435786435, |
| "grad_norm": 0.10205262104677329, |
| "learning_rate": 3.3208763573626766e-06, |
| "loss": 0.3423, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.6493506493506493, |
| "grad_norm": 0.09409112126481023, |
| "learning_rate": 3.214358141296949e-06, |
| "loss": 0.3454, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.655122655122655, |
| "grad_norm": 0.0917867834220046, |
| "learning_rate": 3.1095046509331196e-06, |
| "loss": 0.3406, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.6608946608946606, |
| "grad_norm": 0.09278650920624583, |
| "learning_rate": 3.006320631391613e-06, |
| "loss": 0.3476, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.0897517799081907, |
| "learning_rate": 2.904810752241316e-06, |
| "loss": 0.3351, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.6724386724386724, |
| "grad_norm": 0.08778981943962322, |
| "learning_rate": 2.804979607288312e-06, |
| "loss": 0.3364, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.6782106782106783, |
| "grad_norm": 0.09570343221719087, |
| "learning_rate": 2.706831714367932e-06, |
| "loss": 0.351, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.683982683982684, |
| "grad_norm": 0.09311012426822064, |
| "learning_rate": 2.6103715151403463e-06, |
| "loss": 0.3432, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.6897546897546896, |
| "grad_norm": 0.09350413954762375, |
| "learning_rate": 2.515603374889519e-06, |
| "loss": 0.3444, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.6955266955266954, |
| "grad_norm": 0.08867208161846349, |
| "learning_rate": 2.4225315823257e-06, |
| "loss": 0.3396, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.7012987012987013, |
| "grad_norm": 0.09924016061963505, |
| "learning_rate": 2.3311603493912927e-06, |
| "loss": 0.3452, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.707070707070707, |
| "grad_norm": 0.09992707634020694, |
| "learning_rate": 2.241493811070279e-06, |
| "loss": 0.3468, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.712842712842713, |
| "grad_norm": 0.09281248365094172, |
| "learning_rate": 2.1535360252010885e-06, |
| "loss": 0.3399, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.7186147186147185, |
| "grad_norm": 0.08481302767106144, |
| "learning_rate": 2.0672909722929412e-06, |
| "loss": 0.3356, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.7243867243867244, |
| "grad_norm": 0.0891639961832306, |
| "learning_rate": 1.982762555345734e-06, |
| "loss": 0.3458, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.7301587301587302, |
| "grad_norm": 0.0892742474664926, |
| "learning_rate": 1.8999545996734037e-06, |
| "loss": 0.3379, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.7359307359307357, |
| "grad_norm": 0.09452191488114642, |
| "learning_rate": 1.8188708527307942e-06, |
| "loss": 0.3419, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.741702741702742, |
| "grad_norm": 0.08990569611001864, |
| "learning_rate": 1.7395149839440994e-06, |
| "loss": 0.3531, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.7474747474747474, |
| "grad_norm": 0.09350546944764407, |
| "learning_rate": 1.6618905845447697e-06, |
| "loss": 0.339, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.7532467532467533, |
| "grad_norm": 0.08986097068862921, |
| "learning_rate": 1.5860011674070275e-06, |
| "loss": 0.3501, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.759018759018759, |
| "grad_norm": 0.09079040328351605, |
| "learning_rate": 1.5118501668888664e-06, |
| "loss": 0.3398, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.7647907647907646, |
| "grad_norm": 0.11261442419130217, |
| "learning_rate": 1.4394409386766461e-06, |
| "loss": 0.3463, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.7705627705627704, |
| "grad_norm": 0.08862585291124896, |
| "learning_rate": 1.3687767596332058e-06, |
| "loss": 0.3353, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.7763347763347763, |
| "grad_norm": 0.08868031485746894, |
| "learning_rate": 1.2998608276496128e-06, |
| "loss": 0.3454, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.782106782106782, |
| "grad_norm": 0.08746810675950548, |
| "learning_rate": 1.232696261500399e-06, |
| "loss": 0.3379, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.787878787878788, |
| "grad_norm": 0.08636658234472545, |
| "learning_rate": 1.1672861007024472e-06, |
| "loss": 0.3445, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.7936507936507935, |
| "grad_norm": 0.08321719005621234, |
| "learning_rate": 1.1036333053774429e-06, |
| "loss": 0.3381, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.7994227994227994, |
| "grad_norm": 0.08872817366905046, |
| "learning_rate": 1.0417407561178839e-06, |
| "loss": 0.346, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.8051948051948052, |
| "grad_norm": 0.08920218308026576, |
| "learning_rate": 9.816112538567578e-07, |
| "loss": 0.3417, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.810966810966811, |
| "grad_norm": 0.08547034523589102, |
| "learning_rate": 9.232475197407553e-07, |
| "loss": 0.3418, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.816738816738817, |
| "grad_norm": 0.08471208619998576, |
| "learning_rate": 8.666521950071538e-07, |
| "loss": 0.3361, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.8225108225108224, |
| "grad_norm": 0.08685346174962041, |
| "learning_rate": 8.118278408642655e-07, |
| "loss": 0.3417, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.8282828282828283, |
| "grad_norm": 0.09031115384378957, |
| "learning_rate": 7.587769383755383e-07, |
| "loss": 0.3476, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.834054834054834, |
| "grad_norm": 0.08344504301313972, |
| "learning_rate": 7.07501888347264e-07, |
| "loss": 0.3498, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.8398268398268396, |
| "grad_norm": 0.08370353552469191, |
| "learning_rate": 6.580050112199709e-07, |
| "loss": 0.3358, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.8455988455988455, |
| "grad_norm": 0.08633408464372305, |
| "learning_rate": 6.10288546963358e-07, |
| "loss": 0.3427, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.8513708513708513, |
| "grad_norm": 0.08347643597997188, |
| "learning_rate": 5.643546549749656e-07, |
| "loss": 0.3361, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.857142857142857, |
| "grad_norm": 0.08240574474272953, |
| "learning_rate": 5.202054139824419e-07, |
| "loss": 0.3416, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.862914862914863, |
| "grad_norm": 0.08206414021141857, |
| "learning_rate": 4.778428219494613e-07, |
| "loss": 0.3431, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.8686868686868685, |
| "grad_norm": 0.08094241936651854, |
| "learning_rate": 4.372687959853039e-07, |
| "loss": 0.3372, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.8744588744588744, |
| "grad_norm": 0.08376193752213902, |
| "learning_rate": 3.984851722581251e-07, |
| "loss": 0.3337, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.8802308802308803, |
| "grad_norm": 0.0833016035487455, |
| "learning_rate": 3.614937059118351e-07, |
| "loss": 0.3453, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.886002886002886, |
| "grad_norm": 0.08536696753078746, |
| "learning_rate": 3.262960709866692e-07, |
| "loss": 0.3392, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.891774891774892, |
| "grad_norm": 0.08538387172597513, |
| "learning_rate": 2.9289386034344835e-07, |
| "loss": 0.3441, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.8975468975468974, |
| "grad_norm": 0.08180946091642226, |
| "learning_rate": 2.6128858559147705e-07, |
| "loss": 0.3376, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.9033189033189033, |
| "grad_norm": 0.08190246281621259, |
| "learning_rate": 2.3148167702014e-07, |
| "loss": 0.3393, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.909090909090909, |
| "grad_norm": 0.0829720124973207, |
| "learning_rate": 2.0347448353418508e-07, |
| "loss": 0.34, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.9148629148629146, |
| "grad_norm": 0.08167916104889678, |
| "learning_rate": 1.7726827259266554e-07, |
| "loss": 0.336, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.9206349206349205, |
| "grad_norm": 0.08161907646122096, |
| "learning_rate": 1.5286423015158591e-07, |
| "loss": 0.3353, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.9264069264069263, |
| "grad_norm": 0.08545137866828025, |
| "learning_rate": 1.302634606102382e-07, |
| "loss": 0.3412, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.932178932178932, |
| "grad_norm": 0.08380300828431841, |
| "learning_rate": 1.0946698676120637e-07, |
| "loss": 0.3451, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.937950937950938, |
| "grad_norm": 0.08344201696465267, |
| "learning_rate": 9.047574974410555e-08, |
| "loss": 0.3426, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.9437229437229435, |
| "grad_norm": 0.08265827123035746, |
| "learning_rate": 7.32906090029628e-08, |
| "loss": 0.3424, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.9494949494949494, |
| "grad_norm": 0.08102461126707187, |
| "learning_rate": 5.791234224734155e-08, |
| "loss": 0.3378, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.9552669552669553, |
| "grad_norm": 0.08230714376381316, |
| "learning_rate": 4.434164541714747e-08, |
| "loss": 0.344, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.961038961038961, |
| "grad_norm": 0.08355558229737302, |
| "learning_rate": 3.2579132651111566e-08, |
| "loss": 0.3362, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.966810966810967, |
| "grad_norm": 0.08254140174865716, |
| "learning_rate": 2.2625336259038955e-08, |
| "loss": 0.3418, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.9725829725829724, |
| "grad_norm": 0.08243153799305375, |
| "learning_rate": 1.4480706697668213e-08, |
| "loss": 0.3437, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.9783549783549783, |
| "grad_norm": 0.08625002296494041, |
| "learning_rate": 8.145612550327642e-09, |
| "loss": 0.3529, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.984126984126984, |
| "grad_norm": 0.08225559535813236, |
| "learning_rate": 3.620340510233078e-09, |
| "loss": 0.3441, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.98989898989899, |
| "grad_norm": 0.08106046733970695, |
| "learning_rate": 9.050953675293627e-10, |
| "loss": 0.3343, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.995670995670996, |
| "grad_norm": 0.08184675524305934, |
| "learning_rate": 0.0, |
| "loss": 0.3398, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.995670995670996, |
| "step": 519, |
| "total_flos": 1.0686069936733815e+19, |
| "train_loss": 0.4367897748258073, |
| "train_runtime": 27788.7927, |
| "train_samples_per_second": 9.571, |
| "train_steps_per_second": 0.019 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 519, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.0686069936733815e+19, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
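
The listing above is the complete Trainer state; the trailing `log_history` entry (step 519) is the aggregate run summary rather than a per-step log: 519 optimizer steps over roughly 3 epochs, a mean training loss of 0.4368, and about 27,789 s (roughly 7.7 h) of wall-clock training time. As a minimal sketch of how such a state file can be consumed downstream (assuming it is saved under the Trainer's default filename `trainer_state.json`; the key names match the entries above, but the script itself is illustrative and not part of the original run):

```python
# Minimal sketch: read a Hugging Face trainer_state.json and separate the
# per-step loss curve from the final run summary. The filename is an
# assumption (the Trainer's default); adjust the path for your checkpoint.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry a "loss" key; the final summary entry carries
# "train_loss"/"train_runtime" instead, so it is filtered out here.
per_step = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in per_step]
losses = [e["loss"] for e in per_step]

summary = state["log_history"][-1]
print(f"steps logged: {len(per_step)} of {state['global_step']}")
print(f"mean train loss: {summary['train_loss']:.4f}")
print(f"runtime: {summary['train_runtime'] / 3600:.1f} h")
```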