{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 254,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.019714144898965006,
      "grad_norm": 2.673164129257202,
      "learning_rate": 1.3114754098360657e-06,
      "loss": 3.8588,
      "step": 5
    },
    {
      "epoch": 0.03942828979793001,
      "grad_norm": 2.886237621307373,
      "learning_rate": 2.9508196721311478e-06,
      "loss": 3.8163,
      "step": 10
    },
    {
      "epoch": 0.05914243469689502,
      "grad_norm": 2.4650022983551025,
      "learning_rate": 4.59016393442623e-06,
      "loss": 3.7443,
      "step": 15
    },
    {
      "epoch": 0.07885657959586002,
      "grad_norm": 2.4647622108459473,
      "learning_rate": 6.229508196721312e-06,
      "loss": 3.7573,
      "step": 20
    },
    {
      "epoch": 0.09857072449482504,
      "grad_norm": 2.5846645832061768,
      "learning_rate": 7.868852459016394e-06,
      "loss": 3.6941,
      "step": 25
    },
    {
      "epoch": 0.11828486939379004,
      "grad_norm": 2.3263237476348877,
      "learning_rate": 9.508196721311476e-06,
      "loss": 3.5506,
      "step": 30
    },
    {
      "epoch": 0.13799901429275505,
      "grad_norm": 2.181896448135376,
      "learning_rate": 1.1147540983606558e-05,
      "loss": 3.4485,
      "step": 35
    },
    {
      "epoch": 0.15771315919172005,
      "grad_norm": 2.374994993209839,
      "learning_rate": 1.2786885245901639e-05,
      "loss": 3.269,
      "step": 40
    },
    {
      "epoch": 0.17742730409068508,
      "grad_norm": 2.852327585220337,
      "learning_rate": 1.4426229508196722e-05,
      "loss": 3.1914,
      "step": 45
    },
    {
      "epoch": 0.19714144898965008,
      "grad_norm": 2.855822801589966,
      "learning_rate": 1.6065573770491805e-05,
      "loss": 2.9482,
      "step": 50
    },
    {
      "epoch": 0.21685559388861508,
      "grad_norm": 3.2098145484924316,
      "learning_rate": 1.7704918032786887e-05,
      "loss": 2.7581,
      "step": 55
    },
    {
      "epoch": 0.23656973878758009,
      "grad_norm": 2.033719062805176,
      "learning_rate": 1.934426229508197e-05,
      "loss": 2.5988,
      "step": 60
    },
    {
      "epoch": 0.2562838836865451,
      "grad_norm": 2.0503222942352295,
      "learning_rate": 2.098360655737705e-05,
      "loss": 2.6184,
      "step": 65
    },
    {
      "epoch": 0.2759980285855101,
      "grad_norm": 1.858931303024292,
      "learning_rate": 2.262295081967213e-05,
      "loss": 2.5111,
      "step": 70
    },
    {
      "epoch": 0.2957121734844751,
      "grad_norm": 1.6557573080062866,
      "learning_rate": 2.4262295081967215e-05,
      "loss": 2.4524,
      "step": 75
    },
    {
      "epoch": 0.3154263183834401,
      "grad_norm": 1.706704020500183,
      "learning_rate": 2.5901639344262297e-05,
      "loss": 2.4317,
      "step": 80
    },
    {
      "epoch": 0.3351404632824051,
      "grad_norm": 1.4746320247650146,
      "learning_rate": 2.754098360655738e-05,
      "loss": 2.3915,
      "step": 85
    },
    {
      "epoch": 0.35485460818137016,
      "grad_norm": 1.7562377452850342,
      "learning_rate": 2.9180327868852458e-05,
      "loss": 2.3791,
      "step": 90
    },
    {
      "epoch": 0.37456875308033516,
      "grad_norm": 1.7075871229171753,
      "learning_rate": 3.0819672131147544e-05,
      "loss": 2.3717,
      "step": 95
    },
    {
      "epoch": 0.39428289797930016,
      "grad_norm": 1.5588061809539795,
      "learning_rate": 3.245901639344263e-05,
      "loss": 2.3924,
      "step": 100
    },
    {
      "epoch": 0.41399704287826516,
      "grad_norm": 1.5635536909103394,
      "learning_rate": 3.409836065573771e-05,
      "loss": 2.3031,
      "step": 105
    },
    {
      "epoch": 0.43371118777723017,
      "grad_norm": 1.7249213457107544,
      "learning_rate": 3.5737704918032786e-05,
      "loss": 2.3186,
      "step": 110
    },
    {
      "epoch": 0.45342533267619517,
      "grad_norm": 1.868545651435852,
      "learning_rate": 3.737704918032787e-05,
      "loss": 2.1715,
      "step": 115
    },
    {
      "epoch": 0.47313947757516017,
      "grad_norm": 1.9135463237762451,
      "learning_rate": 3.901639344262295e-05,
      "loss": 2.2488,
      "step": 120
    },
    {
      "epoch": 0.4928536224741252,
      "grad_norm": 1.895492434501648,
      "learning_rate": 4.0655737704918036e-05,
      "loss": 2.2387,
      "step": 125
    },
    {
      "epoch": 0.5125677673730902,
      "grad_norm": 1.767385721206665,
      "learning_rate": 4.229508196721312e-05,
      "loss": 2.2541,
      "step": 130
    },
    {
      "epoch": 0.5322819122720552,
      "grad_norm": 2.0554378032684326,
      "learning_rate": 4.3934426229508194e-05,
      "loss": 2.1672,
      "step": 135
    },
    {
      "epoch": 0.5519960571710202,
      "grad_norm": 2.3369956016540527,
      "learning_rate": 4.557377049180328e-05,
      "loss": 2.223,
      "step": 140
    },
    {
      "epoch": 0.5717102020699852,
      "grad_norm": 2.0532915592193604,
      "learning_rate": 4.7213114754098365e-05,
      "loss": 2.1007,
      "step": 145
    },
    {
      "epoch": 0.5914243469689502,
      "grad_norm": 2.2023046016693115,
      "learning_rate": 4.885245901639344e-05,
      "loss": 2.1027,
      "step": 150
    },
    {
      "epoch": 0.6111384918679152,
      "grad_norm": 2.194356918334961,
      "learning_rate": 5.049180327868853e-05,
      "loss": 2.1188,
      "step": 155
    },
    {
      "epoch": 0.6308526367668802,
      "grad_norm": 2.6036267280578613,
      "learning_rate": 5.213114754098361e-05,
      "loss": 1.9685,
      "step": 160
    },
    {
      "epoch": 0.6505667816658453,
      "grad_norm": 2.6643617153167725,
      "learning_rate": 5.3770491803278686e-05,
      "loss": 2.0843,
      "step": 165
    },
    {
      "epoch": 0.6702809265648102,
      "grad_norm": 2.3738605976104736,
      "learning_rate": 5.540983606557377e-05,
      "loss": 2.0489,
      "step": 170
    },
    {
      "epoch": 0.6899950714637753,
      "grad_norm": 2.43137526512146,
      "learning_rate": 5.704918032786886e-05,
      "loss": 1.976,
      "step": 175
    },
    {
      "epoch": 0.7097092163627403,
      "grad_norm": 3.0450685024261475,
      "learning_rate": 5.868852459016394e-05,
      "loss": 1.8117,
      "step": 180
    },
    {
      "epoch": 0.7294233612617053,
      "grad_norm": 2.9214789867401123,
      "learning_rate": 6.032786885245902e-05,
      "loss": 1.8229,
      "step": 185
    },
    {
      "epoch": 0.7491375061606703,
      "grad_norm": 2.7570457458496094,
      "learning_rate": 6.19672131147541e-05,
      "loss": 1.8662,
      "step": 190
    },
    {
      "epoch": 0.7688516510596353,
      "grad_norm": 4.113077640533447,
      "learning_rate": 6.360655737704918e-05,
      "loss": 1.7928,
      "step": 195
    },
    {
      "epoch": 0.7885657959586003,
      "grad_norm": 3.127991199493408,
      "learning_rate": 6.524590163934427e-05,
      "loss": 1.8187,
      "step": 200
    },
    {
      "epoch": 0.8082799408575653,
      "grad_norm": 3.4384043216705322,
      "learning_rate": 6.688524590163935e-05,
      "loss": 1.7548,
      "step": 205
    },
    {
      "epoch": 0.8279940857565303,
      "grad_norm": 3.2650253772735596,
      "learning_rate": 6.852459016393443e-05,
      "loss": 1.8635,
      "step": 210
    },
    {
      "epoch": 0.8477082306554953,
      "grad_norm": 3.676208019256592,
      "learning_rate": 7.016393442622952e-05,
      "loss": 1.6462,
      "step": 215
    },
    {
      "epoch": 0.8674223755544603,
      "grad_norm": 3.6363656520843506,
      "learning_rate": 7.180327868852459e-05,
      "loss": 1.6826,
      "step": 220
    },
    {
      "epoch": 0.8871365204534253,
      "grad_norm": 3.487661123275757,
      "learning_rate": 7.344262295081968e-05,
      "loss": 1.7096,
      "step": 225
    },
    {
      "epoch": 0.9068506653523903,
      "grad_norm": 4.129843235015869,
      "learning_rate": 7.508196721311476e-05,
      "loss": 1.6489,
      "step": 230
    },
    {
      "epoch": 0.9265648102513554,
      "grad_norm": 3.7981042861938477,
      "learning_rate": 7.672131147540984e-05,
      "loss": 1.5388,
      "step": 235
    },
    {
      "epoch": 0.9462789551503203,
      "grad_norm": 4.129542827606201,
      "learning_rate": 7.836065573770493e-05,
      "loss": 1.5883,
      "step": 240
    },
    {
      "epoch": 0.9659931000492854,
      "grad_norm": 3.7202744483947754,
      "learning_rate": 8e-05,
      "loss": 1.5327,
      "step": 245
    },
    {
      "epoch": 0.9857072449482503,
      "grad_norm": 5.224013805389404,
      "learning_rate": 8.163934426229509e-05,
      "loss": 1.5075,
      "step": 250
    }
  ],
  "logging_steps": 5,
  "max_steps": 3048,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 12,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3986108565393408.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}