{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 508,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.019714144898965006,
      "grad_norm": 2.673164129257202,
      "learning_rate": 1.3114754098360657e-06,
      "loss": 3.8588,
      "step": 5
    },
    {
      "epoch": 0.03942828979793001,
      "grad_norm": 2.886237621307373,
      "learning_rate": 2.9508196721311478e-06,
      "loss": 3.8163,
      "step": 10
    },
    {
      "epoch": 0.05914243469689502,
      "grad_norm": 2.4650022983551025,
      "learning_rate": 4.59016393442623e-06,
      "loss": 3.7443,
      "step": 15
    },
    {
      "epoch": 0.07885657959586002,
      "grad_norm": 2.4647622108459473,
      "learning_rate": 6.229508196721312e-06,
      "loss": 3.7573,
      "step": 20
    },
    {
      "epoch": 0.09857072449482504,
      "grad_norm": 2.5846645832061768,
      "learning_rate": 7.868852459016394e-06,
      "loss": 3.6941,
      "step": 25
    },
    {
      "epoch": 0.11828486939379004,
      "grad_norm": 2.3263237476348877,
      "learning_rate": 9.508196721311476e-06,
      "loss": 3.5506,
      "step": 30
    },
    {
      "epoch": 0.13799901429275505,
      "grad_norm": 2.181896448135376,
      "learning_rate": 1.1147540983606558e-05,
      "loss": 3.4485,
      "step": 35
    },
    {
      "epoch": 0.15771315919172005,
      "grad_norm": 2.374994993209839,
      "learning_rate": 1.2786885245901639e-05,
      "loss": 3.269,
      "step": 40
    },
    {
      "epoch": 0.17742730409068508,
      "grad_norm": 2.852327585220337,
      "learning_rate": 1.4426229508196722e-05,
      "loss": 3.1914,
      "step": 45
    },
    {
      "epoch": 0.19714144898965008,
      "grad_norm": 2.855822801589966,
      "learning_rate": 1.6065573770491805e-05,
      "loss": 2.9482,
      "step": 50
    },
    {
      "epoch": 0.21685559388861508,
      "grad_norm": 3.2098145484924316,
      "learning_rate": 1.7704918032786887e-05,
      "loss": 2.7581,
      "step": 55
    },
    {
      "epoch": 0.23656973878758009,
      "grad_norm": 2.033719062805176,
      "learning_rate": 1.934426229508197e-05,
      "loss": 2.5988,
      "step": 60
    },
    {
      "epoch": 0.2562838836865451,
      "grad_norm": 2.0503222942352295,
      "learning_rate": 2.098360655737705e-05,
      "loss": 2.6184,
      "step": 65
    },
    {
      "epoch": 0.2759980285855101,
      "grad_norm": 1.858931303024292,
      "learning_rate": 2.262295081967213e-05,
      "loss": 2.5111,
      "step": 70
    },
    {
      "epoch": 0.2957121734844751,
      "grad_norm": 1.6557573080062866,
      "learning_rate": 2.4262295081967215e-05,
      "loss": 2.4524,
      "step": 75
    },
    {
      "epoch": 0.3154263183834401,
      "grad_norm": 1.706704020500183,
      "learning_rate": 2.5901639344262297e-05,
      "loss": 2.4317,
      "step": 80
    },
    {
      "epoch": 0.3351404632824051,
      "grad_norm": 1.4746320247650146,
      "learning_rate": 2.754098360655738e-05,
      "loss": 2.3915,
      "step": 85
    },
    {
      "epoch": 0.35485460818137016,
      "grad_norm": 1.7562377452850342,
      "learning_rate": 2.9180327868852458e-05,
      "loss": 2.3791,
      "step": 90
    },
    {
      "epoch": 0.37456875308033516,
      "grad_norm": 1.7075871229171753,
      "learning_rate": 3.0819672131147544e-05,
      "loss": 2.3717,
      "step": 95
    },
    {
      "epoch": 0.39428289797930016,
      "grad_norm": 1.5588061809539795,
      "learning_rate": 3.245901639344263e-05,
      "loss": 2.3924,
      "step": 100
    },
    {
      "epoch": 0.41399704287826516,
      "grad_norm": 1.5635536909103394,
      "learning_rate": 3.409836065573771e-05,
      "loss": 2.3031,
      "step": 105
    },
    {
      "epoch": 0.43371118777723017,
      "grad_norm": 1.7249213457107544,
      "learning_rate": 3.5737704918032786e-05,
      "loss": 2.3186,
      "step": 110
    },
    {
      "epoch": 0.45342533267619517,
      "grad_norm": 1.868545651435852,
      "learning_rate": 3.737704918032787e-05,
      "loss": 2.1715,
      "step": 115
    },
    {
      "epoch": 0.47313947757516017,
      "grad_norm": 1.9135463237762451,
      "learning_rate": 3.901639344262295e-05,
      "loss": 2.2488,
      "step": 120
    },
    {
      "epoch": 0.4928536224741252,
      "grad_norm": 1.895492434501648,
      "learning_rate": 4.0655737704918036e-05,
      "loss": 2.2387,
      "step": 125
    },
    {
      "epoch": 0.5125677673730902,
      "grad_norm": 1.767385721206665,
      "learning_rate": 4.229508196721312e-05,
      "loss": 2.2541,
      "step": 130
    },
    {
      "epoch": 0.5322819122720552,
      "grad_norm": 2.0554378032684326,
      "learning_rate": 4.3934426229508194e-05,
      "loss": 2.1672,
      "step": 135
    },
    {
      "epoch": 0.5519960571710202,
      "grad_norm": 2.3369956016540527,
      "learning_rate": 4.557377049180328e-05,
      "loss": 2.223,
      "step": 140
    },
    {
      "epoch": 0.5717102020699852,
      "grad_norm": 2.0532915592193604,
      "learning_rate": 4.7213114754098365e-05,
      "loss": 2.1007,
      "step": 145
    },
    {
      "epoch": 0.5914243469689502,
      "grad_norm": 2.2023046016693115,
      "learning_rate": 4.885245901639344e-05,
      "loss": 2.1027,
      "step": 150
    },
    {
      "epoch": 0.6111384918679152,
      "grad_norm": 2.194356918334961,
      "learning_rate": 5.049180327868853e-05,
      "loss": 2.1188,
      "step": 155
    },
    {
      "epoch": 0.6308526367668802,
      "grad_norm": 2.6036267280578613,
      "learning_rate": 5.213114754098361e-05,
      "loss": 1.9685,
      "step": 160
    },
    {
      "epoch": 0.6505667816658453,
      "grad_norm": 2.6643617153167725,
      "learning_rate": 5.3770491803278686e-05,
      "loss": 2.0843,
      "step": 165
    },
    {
      "epoch": 0.6702809265648102,
      "grad_norm": 2.3738605976104736,
      "learning_rate": 5.540983606557377e-05,
      "loss": 2.0489,
      "step": 170
    },
    {
      "epoch": 0.6899950714637753,
      "grad_norm": 2.43137526512146,
      "learning_rate": 5.704918032786886e-05,
      "loss": 1.976,
      "step": 175
    },
    {
      "epoch": 0.7097092163627403,
      "grad_norm": 3.0450685024261475,
      "learning_rate": 5.868852459016394e-05,
      "loss": 1.8117,
      "step": 180
    },
    {
      "epoch": 0.7294233612617053,
      "grad_norm": 2.9214789867401123,
      "learning_rate": 6.032786885245902e-05,
      "loss": 1.8229,
      "step": 185
    },
    {
      "epoch": 0.7491375061606703,
      "grad_norm": 2.7570457458496094,
      "learning_rate": 6.19672131147541e-05,
      "loss": 1.8662,
      "step": 190
    },
    {
      "epoch": 0.7688516510596353,
      "grad_norm": 4.113077640533447,
      "learning_rate": 6.360655737704918e-05,
      "loss": 1.7928,
      "step": 195
    },
    {
      "epoch": 0.7885657959586003,
      "grad_norm": 3.127991199493408,
      "learning_rate": 6.524590163934427e-05,
      "loss": 1.8187,
      "step": 200
    },
    {
      "epoch": 0.8082799408575653,
      "grad_norm": 3.4384043216705322,
      "learning_rate": 6.688524590163935e-05,
      "loss": 1.7548,
      "step": 205
    },
    {
      "epoch": 0.8279940857565303,
      "grad_norm": 3.2650253772735596,
      "learning_rate": 6.852459016393443e-05,
      "loss": 1.8635,
      "step": 210
    },
    {
      "epoch": 0.8477082306554953,
      "grad_norm": 3.676208019256592,
      "learning_rate": 7.016393442622952e-05,
      "loss": 1.6462,
      "step": 215
    },
    {
      "epoch": 0.8674223755544603,
      "grad_norm": 3.6363656520843506,
      "learning_rate": 7.180327868852459e-05,
      "loss": 1.6826,
      "step": 220
    },
    {
      "epoch": 0.8871365204534253,
      "grad_norm": 3.487661123275757,
      "learning_rate": 7.344262295081968e-05,
      "loss": 1.7096,
      "step": 225
    },
    {
      "epoch": 0.9068506653523903,
      "grad_norm": 4.129843235015869,
      "learning_rate": 7.508196721311476e-05,
      "loss": 1.6489,
      "step": 230
    },
    {
      "epoch": 0.9265648102513554,
      "grad_norm": 3.7981042861938477,
      "learning_rate": 7.672131147540984e-05,
      "loss": 1.5388,
      "step": 235
    },
    {
      "epoch": 0.9462789551503203,
      "grad_norm": 4.129542827606201,
      "learning_rate": 7.836065573770493e-05,
      "loss": 1.5883,
      "step": 240
    },
    {
      "epoch": 0.9659931000492854,
      "grad_norm": 3.7202744483947754,
      "learning_rate": 8e-05,
      "loss": 1.5327,
      "step": 245
    },
    {
      "epoch": 0.9857072449482503,
      "grad_norm": 5.224013805389404,
      "learning_rate": 8.163934426229509e-05,
      "loss": 1.5075,
      "step": 250
    },
    {
      "epoch": 1.003942828979793,
      "grad_norm": 4.3526787757873535,
      "learning_rate": 8.327868852459016e-05,
      "loss": 1.4976,
      "step": 255
    },
    {
      "epoch": 1.023656973878758,
      "grad_norm": 5.239654541015625,
      "learning_rate": 8.491803278688524e-05,
      "loss": 1.3648,
      "step": 260
    },
    {
      "epoch": 1.043371118777723,
      "grad_norm": 4.757369518280029,
      "learning_rate": 8.655737704918033e-05,
      "loss": 1.3312,
      "step": 265
    },
    {
      "epoch": 1.063085263676688,
      "grad_norm": 4.107004642486572,
      "learning_rate": 8.819672131147541e-05,
      "loss": 1.3081,
      "step": 270
    },
    {
      "epoch": 1.082799408575653,
      "grad_norm": 3.8866994380950928,
      "learning_rate": 8.98360655737705e-05,
      "loss": 1.3473,
      "step": 275
    },
    {
      "epoch": 1.102513553474618,
      "grad_norm": 4.225423812866211,
      "learning_rate": 9.147540983606557e-05,
      "loss": 1.3429,
      "step": 280
    },
    {
      "epoch": 1.122227698373583,
      "grad_norm": 4.068089485168457,
      "learning_rate": 9.311475409836066e-05,
      "loss": 1.3851,
      "step": 285
    },
    {
      "epoch": 1.141941843272548,
      "grad_norm": 4.4080986976623535,
      "learning_rate": 9.475409836065574e-05,
      "loss": 1.1531,
      "step": 290
    },
    {
      "epoch": 1.1616559881715132,
      "grad_norm": 3.994210720062256,
      "learning_rate": 9.639344262295082e-05,
      "loss": 1.1968,
      "step": 295
    },
    {
      "epoch": 1.181370133070478,
      "grad_norm": 4.020788669586182,
      "learning_rate": 9.803278688524591e-05,
      "loss": 1.1876,
      "step": 300
    },
    {
      "epoch": 1.201084277969443,
      "grad_norm": 5.0161848068237305,
      "learning_rate": 9.967213114754099e-05,
      "loss": 1.135,
      "step": 305
    },
    {
      "epoch": 1.2207984228684081,
      "grad_norm": 4.258621692657471,
      "learning_rate": 9.999947530447293e-05,
      "loss": 1.0355,
      "step": 310
    },
    {
      "epoch": 1.2405125677673732,
      "grad_norm": 4.326329708099365,
      "learning_rate": 9.99973437477677e-05,
      "loss": 1.0635,
      "step": 315
    },
    {
      "epoch": 1.2602267126663382,
      "grad_norm": 3.8772778511047363,
      "learning_rate": 9.999357260626116e-05,
      "loss": 1.1091,
      "step": 320
    },
    {
      "epoch": 1.279940857565303,
      "grad_norm": 4.675949573516846,
      "learning_rate": 9.998816200362199e-05,
      "loss": 1.0763,
      "step": 325
    },
    {
      "epoch": 1.2996550024642681,
      "grad_norm": 4.871826171875,
      "learning_rate": 9.998111211728248e-05,
      "loss": 1.016,
      "step": 330
    },
    {
      "epoch": 1.3193691473632332,
      "grad_norm": 4.208927154541016,
      "learning_rate": 9.99724231784326e-05,
      "loss": 0.9334,
      "step": 335
    },
    {
      "epoch": 1.339083292262198,
      "grad_norm": 3.859124183654785,
      "learning_rate": 9.996209547201258e-05,
      "loss": 0.9612,
      "step": 340
    },
    {
      "epoch": 1.358797437161163,
      "grad_norm": 4.167286396026611,
      "learning_rate": 9.99501293367034e-05,
      "loss": 1.071,
      "step": 345
    },
    {
      "epoch": 1.3785115820601281,
      "grad_norm": 4.229668617248535,
      "learning_rate": 9.993652516491579e-05,
      "loss": 0.9526,
      "step": 350
    },
    {
      "epoch": 1.3982257269590932,
      "grad_norm": 6.326845645904541,
      "learning_rate": 9.992128340277729e-05,
      "loss": 0.8992,
      "step": 355
    },
    {
      "epoch": 1.4179398718580583,
      "grad_norm": 4.119451522827148,
      "learning_rate": 9.990440455011769e-05,
      "loss": 0.9638,
      "step": 360
    },
    {
      "epoch": 1.437654016757023,
      "grad_norm": 4.288192272186279,
      "learning_rate": 9.988588916045263e-05,
      "loss": 0.849,
      "step": 365
    },
    {
      "epoch": 1.4573681616559881,
      "grad_norm": 3.985097885131836,
      "learning_rate": 9.986573784096536e-05,
      "loss": 0.8638,
      "step": 370
    },
    {
      "epoch": 1.4770823065549532,
      "grad_norm": 3.398552894592285,
      "learning_rate": 9.984395125248695e-05,
      "loss": 0.8869,
      "step": 375
    },
    {
      "epoch": 1.4967964514539183,
      "grad_norm": 5.5323486328125,
      "learning_rate": 9.982053010947455e-05,
      "loss": 0.8194,
      "step": 380
    },
    {
      "epoch": 1.5165105963528833,
      "grad_norm": 4.199021816253662,
      "learning_rate": 9.979547517998796e-05,
      "loss": 0.7118,
      "step": 385
    },
    {
      "epoch": 1.5362247412518482,
      "grad_norm": 4.152290344238281,
      "learning_rate": 9.976878728566443e-05,
      "loss": 0.8509,
      "step": 390
    },
    {
      "epoch": 1.5559388861508132,
      "grad_norm": 3.820864200592041,
      "learning_rate": 9.974046730169183e-05,
      "loss": 0.768,
      "step": 395
    },
    {
      "epoch": 1.5756530310497783,
      "grad_norm": 3.8033151626586914,
      "learning_rate": 9.971051615677974e-05,
      "loss": 0.7861,
      "step": 400
    },
    {
      "epoch": 1.595367175948743,
      "grad_norm": 4.548523902893066,
      "learning_rate": 9.967893483312922e-05,
      "loss": 0.8347,
      "step": 405
    },
    {
      "epoch": 1.6150813208477084,
      "grad_norm": 3.6572163105010986,
      "learning_rate": 9.964572436640045e-05,
      "loss": 0.7201,
      "step": 410
    },
    {
      "epoch": 1.6347954657466732,
      "grad_norm": 3.980424404144287,
      "learning_rate": 9.961088584567882e-05,
      "loss": 0.7655,
      "step": 415
    },
    {
      "epoch": 1.6545096106456383,
      "grad_norm": 3.2318334579467773,
      "learning_rate": 9.95744204134392e-05,
      "loss": 0.8285,
      "step": 420
    },
    {
      "epoch": 1.6742237555446033,
      "grad_norm": 4.426093578338623,
      "learning_rate": 9.953632926550847e-05,
      "loss": 0.6971,
      "step": 425
    },
    {
      "epoch": 1.6939379004435682,
      "grad_norm": 3.0529074668884277,
      "learning_rate": 9.949661365102637e-05,
      "loss": 0.7098,
      "step": 430
    },
    {
      "epoch": 1.7136520453425332,
      "grad_norm": 3.948803424835205,
      "learning_rate": 9.945527487240442e-05,
      "loss": 0.7815,
      "step": 435
    },
    {
      "epoch": 1.7333661902414983,
      "grad_norm": 3.2549209594726562,
      "learning_rate": 9.941231428528332e-05,
      "loss": 0.7385,
      "step": 440
    },
    {
      "epoch": 1.7530803351404631,
      "grad_norm": 3.8000028133392334,
      "learning_rate": 9.93677332984884e-05,
      "loss": 0.7357,
      "step": 445
    },
    {
      "epoch": 1.7727944800394284,
      "grad_norm": 3.5841259956359863,
      "learning_rate": 9.932153337398354e-05,
      "loss": 0.6641,
      "step": 450
    },
    {
      "epoch": 1.7925086249383932,
      "grad_norm": 4.315596580505371,
      "learning_rate": 9.927371602682305e-05,
      "loss": 0.6193,
      "step": 455
    },
    {
      "epoch": 1.8122227698373583,
      "grad_norm": 3.2834675312042236,
      "learning_rate": 9.92242828251022e-05,
      "loss": 0.5768,
      "step": 460
    },
    {
      "epoch": 1.8319369147363234,
      "grad_norm": 3.127601385116577,
      "learning_rate": 9.917323538990561e-05,
      "loss": 0.5912,
      "step": 465
    },
    {
      "epoch": 1.8516510596352882,
      "grad_norm": 3.2609682083129883,
      "learning_rate": 9.912057539525419e-05,
      "loss": 0.6093,
      "step": 470
    },
    {
      "epoch": 1.8713652045342535,
      "grad_norm": 3.4914708137512207,
      "learning_rate": 9.906630456805024e-05,
      "loss": 0.5924,
      "step": 475
    },
    {
      "epoch": 1.8910793494332183,
      "grad_norm": 3.5819380283355713,
      "learning_rate": 9.901042468802074e-05,
      "loss": 0.6426,
      "step": 480
    },
    {
      "epoch": 1.9107934943321834,
      "grad_norm": 3.237508773803711,
      "learning_rate": 9.89529375876591e-05,
      "loss": 0.5933,
      "step": 485
    },
    {
      "epoch": 1.9305076392311484,
      "grad_norm": 3.832315444946289,
      "learning_rate": 9.889384515216501e-05,
      "loss": 0.5767,
      "step": 490
    },
    {
      "epoch": 1.9502217841301133,
      "grad_norm": 3.097031593322754,
      "learning_rate": 9.883314931938258e-05,
      "loss": 0.5225,
      "step": 495
    },
    {
      "epoch": 1.9699359290290783,
      "grad_norm": 3.5058581829071045,
      "learning_rate": 9.877085207973684e-05,
      "loss": 0.5591,
      "step": 500
    },
    {
      "epoch": 1.9896500739280434,
      "grad_norm": 3.0875792503356934,
      "learning_rate": 9.870695547616851e-05,
      "loss": 0.537,
      "step": 505
    }
  ],
  "logging_steps": 5,
  "max_steps": 3048,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 12,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 7972217130786816.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}