{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 12.30379746835443,
  "eval_steps": 500,
  "global_step": 117,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 5.895437431587112,
      "learning_rate": 8.333333333333333e-07,
      "loss": 0.8025,
      "step": 1
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 6.55011655692263,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.8798,
      "step": 2
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 6.328385102296379,
      "learning_rate": 2.5e-06,
      "loss": 0.8668,
      "step": 3
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 5.617604720367703,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.8131,
      "step": 4
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 4.572290590133521,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.7931,
      "step": 5
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 2.817935556711524,
      "learning_rate": 5e-06,
      "loss": 0.8143,
      "step": 6
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 2.343065328802187,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.7461,
      "step": 7
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 4.030076959055218,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.7837,
      "step": 8
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 4.116892800507118,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.7347,
      "step": 9
    },
    {
      "epoch": 1.0506329113924051,
      "grad_norm": 6.010861822444835,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.0788,
      "step": 10
    },
    {
      "epoch": 1.1518987341772151,
      "grad_norm": 4.193504321170482,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.7081,
      "step": 11
    },
    {
      "epoch": 1.2531645569620253,
      "grad_norm": 4.348430450002865,
      "learning_rate": 1e-05,
      "loss": 0.7532,
      "step": 12
    },
    {
      "epoch": 1.3544303797468356,
      "grad_norm": 2.5333157696365998,
      "learning_rate": 9.997762161417517e-06,
      "loss": 0.6739,
      "step": 13
    },
    {
      "epoch": 1.4556962025316456,
      "grad_norm": 1.8353886386232434,
      "learning_rate": 9.991050648838676e-06,
      "loss": 0.6103,
      "step": 14
    },
    {
      "epoch": 1.5569620253164556,
      "grad_norm": 2.372649173717505,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.6793,
      "step": 15
    },
    {
      "epoch": 1.6582278481012658,
      "grad_norm": 2.1879699813546067,
      "learning_rate": 9.964234631709188e-06,
      "loss": 0.5917,
      "step": 16
    },
    {
      "epoch": 1.759493670886076,
      "grad_norm": 1.2387967220243845,
      "learning_rate": 9.944154131125643e-06,
      "loss": 0.5119,
      "step": 17
    },
    {
      "epoch": 1.8607594936708862,
      "grad_norm": 1.3155704193489053,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.7505,
      "step": 18
    },
    {
      "epoch": 1.9620253164556962,
      "grad_norm": 1.9861419098496649,
      "learning_rate": 9.890738003669029e-06,
      "loss": 0.8391,
      "step": 19
    },
    {
      "epoch": 2.1012658227848102,
      "grad_norm": 1.2872463511211474,
      "learning_rate": 9.857450191464337e-06,
      "loss": 0.5918,
      "step": 20
    },
    {
      "epoch": 2.2025316455696204,
      "grad_norm": 1.0115181200061254,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.6163,
      "step": 21
    },
    {
      "epoch": 2.3037974683544302,
      "grad_norm": 0.9102784864819136,
      "learning_rate": 9.777864028930705e-06,
      "loss": 0.537,
      "step": 22
    },
    {
      "epoch": 2.4050632911392404,
      "grad_norm": 0.7723417034750111,
      "learning_rate": 9.731636918995821e-06,
      "loss": 0.5367,
      "step": 23
    },
    {
      "epoch": 2.5063291139240507,
      "grad_norm": 0.8783764489730859,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.5329,
      "step": 24
    },
    {
      "epoch": 2.607594936708861,
      "grad_norm": 0.8782262047190333,
      "learning_rate": 9.626521502369984e-06,
      "loss": 0.5452,
      "step": 25
    },
    {
      "epoch": 2.708860759493671,
      "grad_norm": 0.7389522197168812,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.4942,
      "step": 26
    },
    {
      "epoch": 2.810126582278481,
      "grad_norm": 0.8121085712863032,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.4927,
      "step": 27
    },
    {
      "epoch": 2.911392405063291,
      "grad_norm": 0.7618741438884619,
      "learning_rate": 9.437928945022772e-06,
      "loss": 0.5312,
      "step": 28
    },
    {
      "epoch": 3.050632911392405,
      "grad_norm": 1.2718116583571646,
      "learning_rate": 9.36704100308565e-06,
      "loss": 0.7125,
      "step": 29
    },
    {
      "epoch": 3.151898734177215,
      "grad_norm": 0.6500980617895746,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.4407,
      "step": 30
    },
    {
      "epoch": 3.2531645569620253,
      "grad_norm": 0.8261449456501613,
      "learning_rate": 9.213604793270196e-06,
      "loss": 0.5216,
      "step": 31
    },
    {
      "epoch": 3.3544303797468356,
      "grad_norm": 0.7149726839502812,
      "learning_rate": 9.131193871579975e-06,
      "loss": 0.4257,
      "step": 32
    },
    {
      "epoch": 3.4556962025316453,
      "grad_norm": 0.7287392642072873,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.4487,
      "step": 33
    },
    {
      "epoch": 3.5569620253164556,
      "grad_norm": 0.6926625472743289,
      "learning_rate": 8.955355173281709e-06,
      "loss": 0.4269,
      "step": 34
    },
    {
      "epoch": 3.6582278481012658,
      "grad_norm": 0.7430355525518108,
      "learning_rate": 8.862084796122998e-06,
      "loss": 0.4594,
      "step": 35
    },
    {
      "epoch": 3.759493670886076,
      "grad_norm": 0.6951675618053621,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.4411,
      "step": 36
    },
    {
      "epoch": 3.8607594936708862,
      "grad_norm": 0.8131115512274725,
      "learning_rate": 8.665259359149132e-06,
      "loss": 0.4851,
      "step": 37
    },
    {
      "epoch": 3.962025316455696,
      "grad_norm": 0.9746912551804889,
      "learning_rate": 8.561880484756726e-06,
      "loss": 0.6522,
      "step": 38
    },
    {
      "epoch": 4.10126582278481,
      "grad_norm": 0.6135819521316205,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.3605,
      "step": 39
    },
    {
      "epoch": 4.2025316455696204,
      "grad_norm": 0.771617070195418,
      "learning_rate": 8.345653031794292e-06,
      "loss": 0.3884,
      "step": 40
    },
    {
      "epoch": 4.30379746835443,
      "grad_norm": 0.7403480974516429,
      "learning_rate": 8.232998006078998e-06,
      "loss": 0.3899,
      "step": 41
    },
    {
      "epoch": 4.405063291139241,
      "grad_norm": 0.6487685272560921,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.3628,
      "step": 42
    },
    {
      "epoch": 4.506329113924051,
      "grad_norm": 0.6814856632845298,
      "learning_rate": 7.99910947343957e-06,
      "loss": 0.369,
      "step": 43
    },
    {
      "epoch": 4.6075949367088604,
      "grad_norm": 0.7543767184996715,
      "learning_rate": 7.87808532842837e-06,
      "loss": 0.3729,
      "step": 44
    },
    {
      "epoch": 4.708860759493671,
      "grad_norm": 0.6844089474834384,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.3336,
      "step": 45
    },
    {
      "epoch": 4.810126582278481,
      "grad_norm": 0.7901863608944344,
      "learning_rate": 7.628418849052523e-06,
      "loss": 0.371,
      "step": 46
    },
    {
      "epoch": 4.911392405063291,
      "grad_norm": 0.6753366404746309,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.3695,
      "step": 47
    },
    {
      "epoch": 5.050632911392405,
      "grad_norm": 0.9374564193170443,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.4896,
      "step": 48
    },
    {
      "epoch": 5.151898734177215,
      "grad_norm": 0.6734302320117497,
      "learning_rate": 7.236565741578163e-06,
      "loss": 0.3232,
      "step": 49
    },
    {
      "epoch": 5.253164556962025,
      "grad_norm": 0.5853443311927671,
      "learning_rate": 7.101786141547829e-06,
      "loss": 0.2417,
      "step": 50
    },
    {
      "epoch": 5.3544303797468356,
      "grad_norm": 0.8431653133600792,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.3806,
      "step": 51
    },
    {
      "epoch": 5.455696202531645,
      "grad_norm": 0.6063521346545561,
      "learning_rate": 6.8267051218319766e-06,
      "loss": 0.2492,
      "step": 52
    },
    {
      "epoch": 5.556962025316456,
      "grad_norm": 0.6633606297212277,
      "learning_rate": 6.686649936914151e-06,
      "loss": 0.3119,
      "step": 53
    },
    {
      "epoch": 5.658227848101266,
      "grad_norm": 0.6688638883108692,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.2968,
      "step": 54
    },
    {
      "epoch": 5.759493670886076,
      "grad_norm": 0.6424424076730402,
      "learning_rate": 6.402136946530014e-06,
      "loss": 0.2951,
      "step": 55
    },
    {
      "epoch": 5.860759493670886,
      "grad_norm": 0.5924677605310528,
      "learning_rate": 6.257933818722544e-06,
      "loss": 0.2551,
      "step": 56
    },
    {
      "epoch": 5.962025316455696,
      "grad_norm": 0.9869794559950298,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.4196,
      "step": 57
    },
    {
      "epoch": 6.10126582278481,
      "grad_norm": 0.6499484367851245,
      "learning_rate": 5.9662795889777666e-06,
      "loss": 0.2624,
      "step": 58
    },
    {
      "epoch": 6.2025316455696204,
      "grad_norm": 0.6415770304364082,
      "learning_rate": 5.819089557075689e-06,
      "loss": 0.2329,
      "step": 59
    },
    {
      "epoch": 6.30379746835443,
      "grad_norm": 0.8307170104602447,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.2311,
      "step": 60
    },
    {
      "epoch": 6.405063291139241,
      "grad_norm": 0.7447130507587343,
      "learning_rate": 5.522642316338268e-06,
      "loss": 0.2186,
      "step": 61
    },
    {
      "epoch": 6.506329113924051,
      "grad_norm": 0.5778382545568016,
      "learning_rate": 5.373650467932122e-06,
      "loss": 0.2146,
      "step": 62
    },
    {
      "epoch": 6.6075949367088604,
      "grad_norm": 0.7052886016028709,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.2163,
      "step": 63
    },
    {
      "epoch": 6.708860759493671,
      "grad_norm": 0.7154994863489477,
      "learning_rate": 5.074797035076319e-06,
      "loss": 0.2283,
      "step": 64
    },
    {
      "epoch": 6.810126582278481,
      "grad_norm": 0.643992126658529,
      "learning_rate": 4.9252029649236835e-06,
      "loss": 0.221,
      "step": 65
    },
    {
      "epoch": 6.911392405063291,
      "grad_norm": 0.6724133134564504,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.2085,
      "step": 66
    },
    {
      "epoch": 7.050632911392405,
      "grad_norm": 1.160211427627962,
      "learning_rate": 4.626349532067879e-06,
      "loss": 0.3412,
      "step": 67
    },
    {
      "epoch": 7.151898734177215,
      "grad_norm": 0.6780483064743578,
      "learning_rate": 4.477357683661734e-06,
      "loss": 0.1891,
      "step": 68
    },
    {
      "epoch": 7.253164556962025,
      "grad_norm": 0.780535064628723,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.1885,
      "step": 69
    },
    {
      "epoch": 7.3544303797468356,
      "grad_norm": 0.5480518449729264,
      "learning_rate": 4.180910442924312e-06,
      "loss": 0.1405,
      "step": 70
    },
    {
      "epoch": 7.455696202531645,
      "grad_norm": 0.8075066487029843,
      "learning_rate": 4.033720411022235e-06,
      "loss": 0.1995,
      "step": 71
    },
    {
      "epoch": 7.556962025316456,
      "grad_norm": 0.956446937600229,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.1708,
      "step": 72
    },
    {
      "epoch": 7.658227848101266,
      "grad_norm": 0.510378350157666,
      "learning_rate": 3.7420661812774577e-06,
      "loss": 0.131,
      "step": 73
    },
    {
      "epoch": 7.759493670886076,
      "grad_norm": 0.656821227766225,
      "learning_rate": 3.5978630534699873e-06,
      "loss": 0.1576,
      "step": 74
    },
    {
      "epoch": 7.860759493670886,
      "grad_norm": 0.7136751454302225,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.1818,
      "step": 75
    },
    {
      "epoch": 7.962025316455696,
      "grad_norm": 1.1209877010915406,
      "learning_rate": 3.3133500630858507e-06,
      "loss": 0.2423,
      "step": 76
    },
    {
      "epoch": 8.10126582278481,
      "grad_norm": 0.6899721837091559,
      "learning_rate": 3.173294878168025e-06,
      "loss": 0.1412,
      "step": 77
    },
    {
      "epoch": 8.20253164556962,
      "grad_norm": 0.5290128885245698,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.1172,
      "step": 78
    },
    {
      "epoch": 8.30379746835443,
      "grad_norm": 0.9518167488224024,
      "learning_rate": 2.8982138584521734e-06,
      "loss": 0.1349,
      "step": 79
    },
    {
      "epoch": 8.405063291139241,
      "grad_norm": 0.7551500813610078,
      "learning_rate": 2.7634342584218364e-06,
      "loss": 0.1489,
      "step": 80
    },
    {
      "epoch": 8.50632911392405,
      "grad_norm": 0.5985052847049391,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.1203,
      "step": 81
    },
    {
      "epoch": 8.60759493670886,
      "grad_norm": 0.5751197656242599,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.1245,
      "step": 82
    },
    {
      "epoch": 8.708860759493671,
      "grad_norm": 0.6383274566722233,
      "learning_rate": 2.371581150947476e-06,
      "loss": 0.1496,
      "step": 83
    },
    {
      "epoch": 8.810126582278482,
      "grad_norm": 0.5723593804098357,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.1352,
      "step": 84
    },
    {
      "epoch": 8.91139240506329,
      "grad_norm": 0.6677978416090448,
      "learning_rate": 2.1219146715716332e-06,
      "loss": 0.1453,
      "step": 85
    },
    {
      "epoch": 9.050632911392405,
      "grad_norm": 0.8960387547513665,
      "learning_rate": 2.0008905265604316e-06,
      "loss": 0.1754,
      "step": 86
    },
    {
      "epoch": 9.151898734177216,
      "grad_norm": 0.6722939232433549,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.1084,
      "step": 87
    },
    {
      "epoch": 9.253164556962025,
      "grad_norm": 0.4394195937877518,
      "learning_rate": 1.7670019939210025e-06,
      "loss": 0.0788,
      "step": 88
    },
    {
      "epoch": 9.354430379746836,
      "grad_norm": 0.5305089443300481,
      "learning_rate": 1.6543469682057105e-06,
      "loss": 0.1111,
      "step": 89
    },
    {
      "epoch": 9.455696202531646,
      "grad_norm": 0.6465935413812988,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.1283,
      "step": 90
    },
    {
      "epoch": 9.556962025316455,
      "grad_norm": 0.6819902452141822,
      "learning_rate": 1.438119515243277e-06,
      "loss": 0.0843,
      "step": 91
    },
    {
      "epoch": 9.658227848101266,
      "grad_norm": 0.6373388779124766,
      "learning_rate": 1.3347406408508695e-06,
      "loss": 0.1159,
      "step": 92
    },
    {
      "epoch": 9.759493670886076,
      "grad_norm": 0.5123439890548894,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.0894,
      "step": 93
    },
    {
      "epoch": 9.860759493670885,
      "grad_norm": 0.5860372625400543,
      "learning_rate": 1.137915203877003e-06,
      "loss": 0.1411,
      "step": 94
    },
    {
      "epoch": 9.962025316455696,
      "grad_norm": 0.7835229137826922,
      "learning_rate": 1.044644826718295e-06,
      "loss": 0.1371,
      "step": 95
    },
    {
      "epoch": 10.10126582278481,
      "grad_norm": 0.635322382492641,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.0946,
      "step": 96
    },
    {
      "epoch": 10.20253164556962,
      "grad_norm": 0.5697842838240275,
      "learning_rate": 8.688061284200266e-07,
      "loss": 0.0856,
      "step": 97
    },
    {
      "epoch": 10.30379746835443,
      "grad_norm": 0.5900259121677638,
      "learning_rate": 7.863952067298042e-07,
      "loss": 0.1062,
      "step": 98
    },
    {
      "epoch": 10.405063291139241,
      "grad_norm": 0.5655488810994538,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.1031,
      "step": 99
    },
    {
      "epoch": 10.50632911392405,
      "grad_norm": 0.44428290243971363,
      "learning_rate": 6.329589969143518e-07,
      "loss": 0.0913,
      "step": 100
    },
    {
      "epoch": 10.60759493670886,
      "grad_norm": 0.4300060717644238,
      "learning_rate": 5.620710549772295e-07,
      "loss": 0.0987,
      "step": 101
    },
    {
      "epoch": 10.708860759493671,
      "grad_norm": 0.48971617650201743,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.0811,
      "step": 102
    },
    {
      "epoch": 10.810126582278482,
      "grad_norm": 0.4687521218771687,
      "learning_rate": 4.322727117869951e-07,
      "loss": 0.0858,
      "step": 103
    },
    {
      "epoch": 10.91139240506329,
      "grad_norm": 0.5160899848695946,
      "learning_rate": 3.734784976300165e-07,
      "loss": 0.0838,
      "step": 104
    },
    {
      "epoch": 11.050632911392405,
      "grad_norm": 0.9201523755739287,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.1569,
      "step": 105
    },
    {
      "epoch": 11.151898734177216,
      "grad_norm": 0.4215647228430539,
      "learning_rate": 2.6836308100417874e-07,
      "loss": 0.0927,
      "step": 106
    },
    {
      "epoch": 11.253164556962025,
      "grad_norm": 0.4270383042089781,
      "learning_rate": 2.2213597106929608e-07,
      "loss": 0.0738,
      "step": 107
    },
    {
      "epoch": 11.354430379746836,
      "grad_norm": 0.4144363930867899,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.0712,
      "step": 108
    },
    {
      "epoch": 11.455696202531646,
      "grad_norm": 0.3748170921055616,
      "learning_rate": 1.4254980853566248e-07,
      "loss": 0.0759,
      "step": 109
    },
    {
      "epoch": 11.556962025316455,
      "grad_norm": 0.4568040505231358,
      "learning_rate": 1.0926199633097156e-07,
      "loss": 0.0889,
      "step": 110
    },
    {
      "epoch": 11.658227848101266,
      "grad_norm": 0.3600441942152769,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.0651,
      "step": 111
    },
    {
      "epoch": 11.759493670886076,
      "grad_norm": 0.4451833224686696,
      "learning_rate": 5.584586887435739e-08,
      "loss": 0.0981,
      "step": 112
    },
    {
      "epoch": 11.860759493670885,
      "grad_norm": 0.42144624506197254,
      "learning_rate": 3.576536829081323e-08,
      "loss": 0.0997,
      "step": 113
    },
    {
      "epoch": 11.962025316455696,
      "grad_norm": 0.8927133255766454,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.1527,
      "step": 114
    },
    {
      "epoch": 12.10126582278481,
      "grad_norm": 0.38943910408029747,
      "learning_rate": 8.949351161324227e-09,
      "loss": 0.0872,
      "step": 115
    },
    {
      "epoch": 12.20253164556962,
      "grad_norm": 0.40225895597156897,
      "learning_rate": 2.237838582483387e-09,
      "loss": 0.0769,
      "step": 116
    },
    {
      "epoch": 12.30379746835443,
      "grad_norm": 0.3875069928114505,
      "learning_rate": 0.0,
      "loss": 0.1017,
      "step": 117
    },
    {
      "epoch": 12.30379746835443,
      "step": 117,
      "total_flos": 8.19186747005993e+16,
      "train_loss": 0.33621527120853084,
      "train_runtime": 3901.6974,
      "train_samples_per_second": 1.053,
      "train_steps_per_second": 0.03
    }
  ],
  "logging_steps": 1,
  "max_steps": 117,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 13,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.19186747005993e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|
|