{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.39564787339268054,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003956478733926805,
      "grad_norm": 23.302045822143555,
      "learning_rate": 0.0,
      "loss": 16.2961,
      "step": 1
    },
    {
      "epoch": 0.00791295746785361,
      "grad_norm": 27.37419319152832,
      "learning_rate": 4e-05,
      "loss": 18.4332,
      "step": 2
    },
    {
      "epoch": 0.011869436201780416,
      "grad_norm": 27.051715850830078,
      "learning_rate": 8e-05,
      "loss": 18.1618,
      "step": 3
    },
    {
      "epoch": 0.01582591493570722,
      "grad_norm": 25.241186141967773,
      "learning_rate": 0.00012,
      "loss": 15.8617,
      "step": 4
    },
    {
      "epoch": 0.019782393669634024,
      "grad_norm": 19.574819564819336,
      "learning_rate": 0.00016,
      "loss": 12.3903,
      "step": 5
    },
    {
      "epoch": 0.02373887240356083,
      "grad_norm": 14.805221557617188,
      "learning_rate": 0.0002,
      "loss": 11.0495,
      "step": 6
    },
    {
      "epoch": 0.027695351137487636,
      "grad_norm": 8.009269714355469,
      "learning_rate": 0.00019789473684210526,
      "loss": 8.2391,
      "step": 7
    },
    {
      "epoch": 0.03165182987141444,
      "grad_norm": 6.785006523132324,
      "learning_rate": 0.00019578947368421054,
      "loss": 7.9713,
      "step": 8
    },
    {
      "epoch": 0.03560830860534125,
      "grad_norm": 6.116010665893555,
      "learning_rate": 0.0001936842105263158,
      "loss": 7.2536,
      "step": 9
    },
    {
      "epoch": 0.03956478733926805,
      "grad_norm": 7.095190048217773,
      "learning_rate": 0.00019157894736842104,
      "loss": 7.2029,
      "step": 10
    },
    {
      "epoch": 0.043521266073194856,
      "grad_norm": 6.16122579574585,
      "learning_rate": 0.00018947368421052632,
      "loss": 6.6535,
      "step": 11
    },
    {
      "epoch": 0.04747774480712166,
      "grad_norm": 5.9373626708984375,
      "learning_rate": 0.0001873684210526316,
      "loss": 6.6532,
      "step": 12
    },
    {
      "epoch": 0.051434223541048464,
      "grad_norm": 3.409684896469116,
      "learning_rate": 0.00018526315789473685,
      "loss": 6.1402,
      "step": 13
    },
    {
      "epoch": 0.05539070227497527,
      "grad_norm": 2.748903512954712,
      "learning_rate": 0.0001831578947368421,
      "loss": 5.544,
      "step": 14
    },
    {
      "epoch": 0.05934718100890208,
      "grad_norm": 2.9191884994506836,
      "learning_rate": 0.00018105263157894739,
      "loss": 6.5131,
      "step": 15
    },
    {
      "epoch": 0.06330365974282888,
      "grad_norm": 2.935926675796509,
      "learning_rate": 0.00017894736842105264,
      "loss": 5.0912,
      "step": 16
    },
    {
      "epoch": 0.06726013847675569,
      "grad_norm": 2.6484129428863525,
      "learning_rate": 0.0001768421052631579,
      "loss": 5.4763,
      "step": 17
    },
    {
      "epoch": 0.0712166172106825,
      "grad_norm": 2.2135961055755615,
      "learning_rate": 0.00017473684210526317,
      "loss": 5.1443,
      "step": 18
    },
    {
      "epoch": 0.0751730959446093,
      "grad_norm": 2.2263529300689697,
      "learning_rate": 0.00017263157894736842,
      "loss": 5.6504,
      "step": 19
    },
    {
      "epoch": 0.0791295746785361,
      "grad_norm": 2.599684476852417,
      "learning_rate": 0.0001705263157894737,
      "loss": 4.7292,
      "step": 20
    },
    {
      "epoch": 0.0830860534124629,
      "grad_norm": 2.9603826999664307,
      "learning_rate": 0.00016842105263157895,
      "loss": 5.4304,
      "step": 21
    },
    {
      "epoch": 0.08704253214638971,
      "grad_norm": 2.178344249725342,
      "learning_rate": 0.00016631578947368423,
      "loss": 6.4425,
      "step": 22
    },
    {
      "epoch": 0.09099901088031652,
      "grad_norm": 2.872128963470459,
      "learning_rate": 0.00016421052631578948,
      "loss": 5.4385,
      "step": 23
    },
    {
      "epoch": 0.09495548961424333,
      "grad_norm": 2.738940715789795,
      "learning_rate": 0.00016210526315789473,
      "loss": 5.4402,
      "step": 24
    },
    {
      "epoch": 0.09891196834817013,
      "grad_norm": 3.214587688446045,
      "learning_rate": 0.00016,
      "loss": 4.7338,
      "step": 25
    },
    {
      "epoch": 0.10286844708209693,
      "grad_norm": 2.9738221168518066,
      "learning_rate": 0.00015789473684210527,
      "loss": 5.4748,
      "step": 26
    },
    {
      "epoch": 0.10682492581602374,
      "grad_norm": 3.050741195678711,
      "learning_rate": 0.00015578947368421052,
      "loss": 5.1201,
      "step": 27
    },
    {
      "epoch": 0.11078140454995054,
      "grad_norm": 3.187633991241455,
      "learning_rate": 0.0001536842105263158,
      "loss": 5.3061,
      "step": 28
    },
    {
      "epoch": 0.11473788328387735,
      "grad_norm": 2.965158700942993,
      "learning_rate": 0.00015157894736842108,
      "loss": 4.9141,
      "step": 29
    },
    {
      "epoch": 0.11869436201780416,
      "grad_norm": 2.6440789699554443,
      "learning_rate": 0.00014947368421052633,
      "loss": 4.4023,
      "step": 30
    },
    {
      "epoch": 0.12265084075173097,
      "grad_norm": 4.033472061157227,
      "learning_rate": 0.00014736842105263158,
      "loss": 7.4582,
      "step": 31
    },
    {
      "epoch": 0.12660731948565776,
      "grad_norm": 3.3675754070281982,
      "learning_rate": 0.00014526315789473686,
      "loss": 4.6167,
      "step": 32
    },
    {
      "epoch": 0.13056379821958458,
      "grad_norm": 3.0074822902679443,
      "learning_rate": 0.0001431578947368421,
      "loss": 4.8099,
      "step": 33
    },
    {
      "epoch": 0.13452027695351138,
      "grad_norm": 2.872875690460205,
      "learning_rate": 0.00014105263157894736,
      "loss": 4.8146,
      "step": 34
    },
    {
      "epoch": 0.13847675568743817,
      "grad_norm": 3.0648305416107178,
      "learning_rate": 0.00013894736842105264,
      "loss": 5.9663,
      "step": 35
    },
    {
      "epoch": 0.142433234421365,
      "grad_norm": 3.0596470832824707,
      "learning_rate": 0.0001368421052631579,
      "loss": 4.8893,
      "step": 36
    },
    {
      "epoch": 0.14638971315529178,
      "grad_norm": 3.099954843521118,
      "learning_rate": 0.00013473684210526317,
      "loss": 5.9515,
      "step": 37
    },
    {
      "epoch": 0.1503461918892186,
      "grad_norm": 2.4886155128479004,
      "learning_rate": 0.00013263157894736842,
      "loss": 4.1192,
      "step": 38
    },
    {
      "epoch": 0.1543026706231454,
      "grad_norm": 3.0190577507019043,
      "learning_rate": 0.0001305263157894737,
      "loss": 6.2374,
      "step": 39
    },
    {
      "epoch": 0.1582591493570722,
      "grad_norm": 3.001729726791382,
      "learning_rate": 0.00012842105263157895,
      "loss": 5.051,
      "step": 40
    },
    {
      "epoch": 0.16221562809099901,
      "grad_norm": 3.3806393146514893,
      "learning_rate": 0.0001263157894736842,
      "loss": 5.2736,
      "step": 41
    },
    {
      "epoch": 0.1661721068249258,
      "grad_norm": 3.381579637527466,
      "learning_rate": 0.00012421052631578949,
      "loss": 5.5759,
      "step": 42
    },
    {
      "epoch": 0.17012858555885263,
      "grad_norm": 3.010310411453247,
      "learning_rate": 0.00012210526315789474,
      "loss": 5.106,
      "step": 43
    },
    {
      "epoch": 0.17408506429277942,
      "grad_norm": 2.5623176097869873,
      "learning_rate": 0.00012,
      "loss": 5.5447,
      "step": 44
    },
    {
      "epoch": 0.17804154302670624,
      "grad_norm": 3.0234758853912354,
      "learning_rate": 0.00011789473684210525,
      "loss": 4.3962,
      "step": 45
    },
    {
      "epoch": 0.18199802176063304,
      "grad_norm": 2.936678886413574,
      "learning_rate": 0.00011578947368421053,
      "loss": 5.5828,
      "step": 46
    },
    {
      "epoch": 0.18595450049455983,
      "grad_norm": 3.5103631019592285,
      "learning_rate": 0.0001136842105263158,
      "loss": 4.3974,
      "step": 47
    },
    {
      "epoch": 0.18991097922848665,
      "grad_norm": 3.447932720184326,
      "learning_rate": 0.00011157894736842105,
      "loss": 5.7814,
      "step": 48
    },
    {
      "epoch": 0.19386745796241345,
      "grad_norm": 2.6578755378723145,
      "learning_rate": 0.00010947368421052633,
      "loss": 4.732,
      "step": 49
    },
    {
      "epoch": 0.19782393669634027,
      "grad_norm": 2.9903833866119385,
      "learning_rate": 0.00010736842105263158,
      "loss": 4.3962,
      "step": 50
    },
    {
      "epoch": 0.20178041543026706,
      "grad_norm": 2.790576696395874,
      "learning_rate": 0.00010526315789473685,
      "loss": 4.8403,
      "step": 51
    },
    {
      "epoch": 0.20573689416419386,
      "grad_norm": 3.302182912826538,
      "learning_rate": 0.00010315789473684211,
      "loss": 4.607,
      "step": 52
    },
    {
      "epoch": 0.20969337289812068,
      "grad_norm": 3.1099631786346436,
      "learning_rate": 0.00010105263157894738,
      "loss": 4.9049,
      "step": 53
    },
    {
      "epoch": 0.21364985163204747,
      "grad_norm": 3.343353509902954,
      "learning_rate": 9.894736842105263e-05,
      "loss": 5.6211,
      "step": 54
    },
    {
      "epoch": 0.2176063303659743,
      "grad_norm": 4.264265060424805,
      "learning_rate": 9.68421052631579e-05,
      "loss": 6.1449,
      "step": 55
    },
    {
      "epoch": 0.2215628090999011,
      "grad_norm": 2.6139657497406006,
      "learning_rate": 9.473684210526316e-05,
      "loss": 5.3725,
      "step": 56
    },
    {
      "epoch": 0.22551928783382788,
      "grad_norm": 3.1093761920928955,
      "learning_rate": 9.263157894736843e-05,
      "loss": 5.158,
      "step": 57
    },
    {
      "epoch": 0.2294757665677547,
      "grad_norm": 2.8728318214416504,
      "learning_rate": 9.052631578947369e-05,
      "loss": 5.2214,
      "step": 58
    },
    {
      "epoch": 0.2334322453016815,
      "grad_norm": 3.1342532634735107,
      "learning_rate": 8.842105263157894e-05,
      "loss": 6.7797,
      "step": 59
    },
    {
      "epoch": 0.23738872403560832,
      "grad_norm": 2.9147534370422363,
      "learning_rate": 8.631578947368421e-05,
      "loss": 4.9591,
      "step": 60
    },
    {
      "epoch": 0.2413452027695351,
      "grad_norm": 2.71091365814209,
      "learning_rate": 8.421052631578948e-05,
      "loss": 5.5919,
      "step": 61
    },
    {
      "epoch": 0.24530168150346193,
      "grad_norm": 4.9715681076049805,
      "learning_rate": 8.210526315789474e-05,
      "loss": 7.9325,
      "step": 62
    },
    {
      "epoch": 0.24925816023738873,
      "grad_norm": 3.243007183074951,
      "learning_rate": 8e-05,
      "loss": 4.9809,
      "step": 63
    },
    {
      "epoch": 0.2532146389713155,
      "grad_norm": 2.425530433654785,
      "learning_rate": 7.789473684210526e-05,
      "loss": 4.7267,
      "step": 64
    },
    {
      "epoch": 0.2571711177052423,
      "grad_norm": 2.584341526031494,
      "learning_rate": 7.578947368421054e-05,
      "loss": 5.1655,
      "step": 65
    },
    {
      "epoch": 0.26112759643916916,
      "grad_norm": 2.864600658416748,
      "learning_rate": 7.368421052631579e-05,
      "loss": 4.7094,
      "step": 66
    },
    {
      "epoch": 0.26508407517309596,
      "grad_norm": 2.7184255123138428,
      "learning_rate": 7.157894736842105e-05,
      "loss": 5.0806,
      "step": 67
    },
    {
      "epoch": 0.26904055390702275,
      "grad_norm": 3.3327622413635254,
      "learning_rate": 6.947368421052632e-05,
      "loss": 4.8204,
      "step": 68
    },
    {
      "epoch": 0.27299703264094954,
      "grad_norm": 2.6390984058380127,
      "learning_rate": 6.736842105263159e-05,
      "loss": 4.4369,
      "step": 69
    },
    {
      "epoch": 0.27695351137487634,
      "grad_norm": 2.8620619773864746,
      "learning_rate": 6.526315789473685e-05,
      "loss": 4.8152,
      "step": 70
    },
    {
      "epoch": 0.2809099901088032,
      "grad_norm": 2.886209011077881,
      "learning_rate": 6.31578947368421e-05,
      "loss": 4.5341,
      "step": 71
    },
    {
      "epoch": 0.28486646884273,
      "grad_norm": 3.60684871673584,
      "learning_rate": 6.105263157894737e-05,
      "loss": 5.147,
      "step": 72
    },
    {
      "epoch": 0.2888229475766568,
      "grad_norm": 2.6427412033081055,
      "learning_rate": 5.894736842105263e-05,
      "loss": 5.1367,
      "step": 73
    },
    {
      "epoch": 0.29277942631058357,
      "grad_norm": 3.7594962120056152,
      "learning_rate": 5.68421052631579e-05,
      "loss": 5.821,
      "step": 74
    },
    {
      "epoch": 0.29673590504451036,
      "grad_norm": 2.2990944385528564,
      "learning_rate": 5.4736842105263165e-05,
      "loss": 4.6194,
      "step": 75
    },
    {
      "epoch": 0.3006923837784372,
      "grad_norm": 2.6638574600219727,
      "learning_rate": 5.2631578947368424e-05,
      "loss": 5.8379,
      "step": 76
    },
    {
      "epoch": 0.304648862512364,
      "grad_norm": 3.256946086883545,
      "learning_rate": 5.052631578947369e-05,
      "loss": 5.1437,
      "step": 77
    },
    {
      "epoch": 0.3086053412462908,
      "grad_norm": 2.8275022506713867,
      "learning_rate": 4.842105263157895e-05,
      "loss": 5.0719,
      "step": 78
    },
    {
      "epoch": 0.3125618199802176,
      "grad_norm": 2.6694302558898926,
      "learning_rate": 4.6315789473684214e-05,
      "loss": 4.9186,
      "step": 79
    },
    {
      "epoch": 0.3165182987141444,
      "grad_norm": 3.2759788036346436,
      "learning_rate": 4.421052631578947e-05,
      "loss": 4.7457,
      "step": 80
    },
    {
      "epoch": 0.32047477744807124,
      "grad_norm": 3.3160855770111084,
      "learning_rate": 4.210526315789474e-05,
      "loss": 4.2814,
      "step": 81
    },
    {
      "epoch": 0.32443125618199803,
      "grad_norm": 2.8146486282348633,
      "learning_rate": 4e-05,
      "loss": 4.8129,
      "step": 82
    },
    {
      "epoch": 0.3283877349159248,
      "grad_norm": 2.767481565475464,
      "learning_rate": 3.789473684210527e-05,
      "loss": 5.5585,
      "step": 83
    },
    {
      "epoch": 0.3323442136498516,
      "grad_norm": 3.330084800720215,
      "learning_rate": 3.578947368421053e-05,
      "loss": 5.271,
      "step": 84
    },
    {
      "epoch": 0.3363006923837784,
      "grad_norm": 2.786287546157837,
      "learning_rate": 3.368421052631579e-05,
      "loss": 4.7267,
      "step": 85
    },
    {
      "epoch": 0.34025717111770526,
      "grad_norm": 3.373779296875,
      "learning_rate": 3.157894736842105e-05,
      "loss": 5.3886,
      "step": 86
    },
    {
      "epoch": 0.34421364985163205,
      "grad_norm": 2.731201171875,
      "learning_rate": 2.9473684210526314e-05,
      "loss": 5.1014,
      "step": 87
    },
    {
      "epoch": 0.34817012858555885,
      "grad_norm": 3.213326930999756,
      "learning_rate": 2.7368421052631583e-05,
      "loss": 5.2954,
      "step": 88
    },
    {
      "epoch": 0.35212660731948564,
      "grad_norm": 2.9587507247924805,
      "learning_rate": 2.5263157894736845e-05,
      "loss": 4.9975,
      "step": 89
    },
    {
      "epoch": 0.3560830860534125,
      "grad_norm": 2.7448644638061523,
      "learning_rate": 2.3157894736842107e-05,
      "loss": 4.8342,
      "step": 90
    },
    {
      "epoch": 0.3600395647873393,
      "grad_norm": 3.670548915863037,
      "learning_rate": 2.105263157894737e-05,
      "loss": 5.6434,
      "step": 91
    },
    {
      "epoch": 0.3639960435212661,
      "grad_norm": 3.459158182144165,
      "learning_rate": 1.8947368421052634e-05,
      "loss": 4.0876,
      "step": 92
    },
    {
      "epoch": 0.36795252225519287,
      "grad_norm": 2.871955633163452,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 5.4301,
      "step": 93
    },
    {
      "epoch": 0.37190900098911966,
      "grad_norm": 3.1002166271209717,
      "learning_rate": 1.4736842105263157e-05,
      "loss": 4.7507,
      "step": 94
    },
    {
      "epoch": 0.3758654797230465,
      "grad_norm": 2.540811061859131,
      "learning_rate": 1.2631578947368422e-05,
      "loss": 4.8875,
      "step": 95
    },
    {
      "epoch": 0.3798219584569733,
      "grad_norm": 3.154196262359619,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 5.8064,
      "step": 96
    },
    {
      "epoch": 0.3837784371909001,
      "grad_norm": 2.2360923290252686,
      "learning_rate": 8.421052631578948e-06,
      "loss": 5.1602,
      "step": 97
    },
    {
      "epoch": 0.3877349159248269,
      "grad_norm": 4.483743667602539,
      "learning_rate": 6.315789473684211e-06,
      "loss": 6.253,
      "step": 98
    },
    {
      "epoch": 0.3916913946587537,
      "grad_norm": 2.8893954753875732,
      "learning_rate": 4.210526315789474e-06,
      "loss": 4.7195,
      "step": 99
    },
    {
      "epoch": 0.39564787339268054,
      "grad_norm": 3.337634325027466,
      "learning_rate": 2.105263157894737e-06,
      "loss": 5.5186,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2480523648539904.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|