{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.1,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "grad_norm": 1.3495701551437378,
      "learning_rate": 1.8e-06,
      "loss": 1.1757,
      "step": 10
    },
    {
      "grad_norm": 0.7409749031066895,
      "learning_rate": 3.8e-06,
      "loss": 1.1588,
      "step": 20
    },
    {
      "grad_norm": 0.5010238885879517,
      "learning_rate": 5.8e-06,
      "loss": 1.118,
      "step": 30
    },
    {
      "grad_norm": 0.38745883107185364,
      "learning_rate": 7.8e-06,
      "loss": 1.071,
      "step": 40
    },
    {
      "grad_norm": 0.2737957239151001,
      "learning_rate": 9.800000000000001e-06,
      "loss": 1.0505,
      "step": 50
    },
    {
      "grad_norm": 0.17909298837184906,
      "learning_rate": 1.18e-05,
      "loss": 1.0511,
      "step": 60
    },
    {
      "grad_norm": 0.15505068004131317,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 1.0386,
      "step": 70
    },
    {
      "grad_norm": 0.19511660933494568,
      "learning_rate": 1.58e-05,
      "loss": 1.0291,
      "step": 80
    },
    {
      "grad_norm": 0.17100006341934204,
      "learning_rate": 1.78e-05,
      "loss": 1.0237,
      "step": 90
    },
    {
      "grad_norm": 0.1620642989873886,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 1.0225,
      "step": 100
    },
    {
      "grad_norm": 0.1624162495136261,
      "learning_rate": 2.18e-05,
      "loss": 1.0158,
      "step": 110
    },
    {
      "grad_norm": 0.6077130436897278,
      "learning_rate": 2.38e-05,
      "loss": 0.9931,
      "step": 120
    },
    {
      "grad_norm": 0.43021947145462036,
      "learning_rate": 2.58e-05,
      "loss": 0.9595,
      "step": 130
    },
    {
      "grad_norm": 0.5613481402397156,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.9041,
      "step": 140
    },
    {
      "grad_norm": 0.6526139378547668,
      "learning_rate": 2.98e-05,
      "loss": 0.8396,
      "step": 150
    },
    {
      "grad_norm": 0.7447936534881592,
      "learning_rate": 3.18e-05,
      "loss": 0.7885,
      "step": 160
    },
    {
      "grad_norm": 0.7026267051696777,
      "learning_rate": 3.38e-05,
      "loss": 0.7439,
      "step": 170
    },
    {
      "grad_norm": 0.7374712824821472,
      "learning_rate": 3.58e-05,
      "loss": 0.6911,
      "step": 180
    },
    {
      "grad_norm": 0.7992081642150879,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.6503,
      "step": 190
    },
    {
      "grad_norm": 1.095856785774231,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.6026,
      "step": 200
    },
    {
      "grad_norm": 1.0275039672851562,
      "learning_rate": 4.18e-05,
      "loss": 0.5638,
      "step": 210
    },
    {
      "grad_norm": 0.8001906871795654,
      "learning_rate": 4.38e-05,
      "loss": 0.5221,
      "step": 220
    },
    {
      "grad_norm": 1.0533819198608398,
      "learning_rate": 4.58e-05,
      "loss": 0.4761,
      "step": 230
    },
    {
      "grad_norm": 0.9747286438941956,
      "learning_rate": 4.78e-05,
      "loss": 0.4439,
      "step": 240
    },
    {
      "grad_norm": 0.8441274762153625,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.412,
      "step": 250
    },
    {
      "grad_norm": 1.0516191720962524,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.3905,
      "step": 260
    },
    {
      "grad_norm": 1.1816160678863525,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.3687,
      "step": 270
    },
    {
      "grad_norm": 1.1618976593017578,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.3411,
      "step": 280
    },
    {
      "grad_norm": 1.151973843574524,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.3124,
      "step": 290
    },
    {
      "grad_norm": 1.1495264768600464,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.2993,
      "step": 300
    },
    {
      "grad_norm": 1.0407155752182007,
      "learning_rate": 6.18e-05,
      "loss": 0.2866,
      "step": 310
    },
    {
      "grad_norm": 0.9749088883399963,
      "learning_rate": 6.38e-05,
      "loss": 0.2567,
      "step": 320
    },
    {
      "grad_norm": 1.688358187675476,
      "learning_rate": 6.58e-05,
      "loss": 0.2514,
      "step": 330
    },
    {
      "grad_norm": 1.1689329147338867,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.2367,
      "step": 340
    },
    {
      "grad_norm": 1.1307363510131836,
      "learning_rate": 6.98e-05,
      "loss": 0.2104,
      "step": 350
    },
    {
      "grad_norm": 1.1584901809692383,
      "learning_rate": 7.18e-05,
      "loss": 0.2008,
      "step": 360
    },
    {
      "grad_norm": 1.0391298532485962,
      "learning_rate": 7.38e-05,
      "loss": 0.1792,
      "step": 370
    },
    {
      "grad_norm": 0.9876890182495117,
      "learning_rate": 7.58e-05,
      "loss": 0.1607,
      "step": 380
    },
    {
      "grad_norm": 0.9350799322128296,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.1535,
      "step": 390
    },
    {
      "grad_norm": 1.1527447700500488,
      "learning_rate": 7.98e-05,
      "loss": 0.1479,
      "step": 400
    },
    {
      "grad_norm": 1.156333327293396,
      "learning_rate": 8.18e-05,
      "loss": 0.1359,
      "step": 410
    },
    {
      "grad_norm": 0.9062078595161438,
      "learning_rate": 8.38e-05,
      "loss": 0.1159,
      "step": 420
    },
    {
      "grad_norm": 1.0753194093704224,
      "learning_rate": 8.58e-05,
      "loss": 0.1107,
      "step": 430
    },
    {
      "grad_norm": 1.0954129695892334,
      "learning_rate": 8.78e-05,
      "loss": 0.0974,
      "step": 440
    },
    {
      "grad_norm": 0.8616589903831482,
      "learning_rate": 8.98e-05,
      "loss": 0.0986,
      "step": 450
    },
    {
      "grad_norm": 1.0874602794647217,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0956,
      "step": 460
    },
    {
      "grad_norm": 0.8882458209991455,
      "learning_rate": 9.38e-05,
      "loss": 0.0896,
      "step": 470
    },
    {
      "grad_norm": 1.0427426099777222,
      "learning_rate": 9.58e-05,
      "loss": 0.087,
      "step": 480
    },
    {
      "grad_norm": 1.2230194807052612,
      "learning_rate": 9.78e-05,
      "loss": 0.0768,
      "step": 490
    },
    {
      "grad_norm": 0.847732663154602,
      "learning_rate": 9.98e-05,
      "loss": 0.0704,
      "step": 500
    },
    {
      "grad_norm": 0.9105225801467896,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0626,
      "step": 510
    },
    {
      "grad_norm": 1.1461974382400513,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0607,
      "step": 520
    },
    {
      "grad_norm": 0.9256789684295654,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0596,
      "step": 530
    },
    {
      "grad_norm": 0.929280161857605,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0615,
      "step": 540
    },
    {
      "grad_norm": 0.8615525364875793,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0506,
      "step": 550
    },
    {
      "grad_norm": 0.6592100262641907,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.057,
      "step": 560
    },
    {
      "grad_norm": 1.0078480243682861,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0622,
      "step": 570
    },
    {
      "grad_norm": 0.9626633524894714,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0583,
      "step": 580
    },
    {
      "grad_norm": 0.8204993009567261,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0549,
      "step": 590
    },
    {
      "grad_norm": 1.0736879110336304,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.0596,
      "step": 600
    },
    {
      "grad_norm": 0.5788105726242065,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0537,
      "step": 610
    },
    {
      "grad_norm": 0.9862315654754639,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.0523,
      "step": 620
    },
    {
      "grad_norm": 0.8640528917312622,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.052,
      "step": 630
    },
    {
      "grad_norm": 0.7985650897026062,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.0519,
      "step": 640
    },
    {
      "grad_norm": 0.6485480070114136,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.053,
      "step": 650
    },
    {
      "grad_norm": 0.944926381111145,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0512,
      "step": 660
    },
    {
      "grad_norm": 0.9629592299461365,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.0499,
      "step": 670
    },
    {
      "grad_norm": 0.8933172821998596,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0454,
      "step": 680
    },
    {
      "grad_norm": 0.8176608681678772,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.042,
      "step": 690
    },
    {
      "grad_norm": 0.8295159935951233,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.044,
      "step": 700
    },
    {
      "grad_norm": 1.0152912139892578,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0453,
      "step": 710
    },
    {
      "grad_norm": 0.9435839056968689,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0473,
      "step": 720
    },
    {
      "grad_norm": 0.846592903137207,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0475,
      "step": 730
    },
    {
      "grad_norm": 0.7264214754104614,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0498,
      "step": 740
    },
    {
      "grad_norm": 0.7152071595191956,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0493,
      "step": 750
    },
    {
      "grad_norm": 0.7937959432601929,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0445,
      "step": 760
    },
    {
      "grad_norm": 0.7876198887825012,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0486,
      "step": 770
    },
    {
      "grad_norm": 0.8420520424842834,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0465,
      "step": 780
    },
    {
      "grad_norm": 0.7807585597038269,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0415,
      "step": 790
    },
    {
      "grad_norm": 0.8610442280769348,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0437,
      "step": 800
    },
    {
      "grad_norm": 0.844108521938324,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0424,
      "step": 810
    },
    {
      "grad_norm": 0.6607022881507874,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0404,
      "step": 820
    },
    {
      "grad_norm": 0.7759642004966736,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.042,
      "step": 830
    },
    {
      "grad_norm": 0.7415374517440796,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0452,
      "step": 840
    },
    {
      "grad_norm": 0.7532877922058105,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0438,
      "step": 850
    },
    {
      "grad_norm": 0.5179683566093445,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0432,
      "step": 860
    },
    {
      "grad_norm": 0.7171632051467896,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0387,
      "step": 870
    },
    {
      "grad_norm": 0.7029691338539124,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0414,
      "step": 880
    },
    {
      "grad_norm": 0.7791739702224731,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0384,
      "step": 890
    },
    {
      "grad_norm": 0.8373865485191345,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0433,
      "step": 900
    },
    {
      "grad_norm": 0.5835863351821899,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0382,
      "step": 910
    },
    {
      "grad_norm": 0.5688396692276001,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0367,
      "step": 920
    },
    {
      "grad_norm": 0.6643704175949097,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0383,
      "step": 930
    },
    {
      "grad_norm": 0.6345815062522888,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.0387,
      "step": 940
    },
    {
      "grad_norm": 0.6888556480407715,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.0404,
      "step": 950
    },
    {
      "grad_norm": 0.6753697395324707,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0405,
      "step": 960
    },
    {
      "grad_norm": 0.7859911322593689,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0406,
      "step": 970
    },
    {
      "grad_norm": 0.6559446454048157,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0376,
      "step": 980
    },
    {
      "grad_norm": 0.7269642353057861,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0459,
      "step": 990
    },
    {
      "grad_norm": 0.632786214351654,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.0361,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}