{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 25.0,
  "eval_steps": 500,
  "global_step": 725,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 24.532774084242813,
      "learning_rate": 1.999849788616454e-05,
      "loss": 1.4207,
      "step": 5
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 15.039785697497418,
      "learning_rate": 1.9992396322115213e-05,
      "loss": 0.4531,
      "step": 10
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 11.068602275701998,
      "learning_rate": 1.9981604287632104e-05,
      "loss": 0.2539,
      "step": 15
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 10.064753194401916,
      "learning_rate": 1.996612684853896e-05,
      "loss": 0.2192,
      "step": 20
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 11.621703211076142,
      "learning_rate": 1.994597127000704e-05,
      "loss": 0.2058,
      "step": 25
    },
    {
      "epoch": 1.0344827586206897,
      "grad_norm": 11.053647982714269,
      "learning_rate": 1.9921147013144782e-05,
      "loss": 0.2045,
      "step": 30
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 12.029451451469665,
      "learning_rate": 1.9891665730556727e-05,
      "loss": 0.2097,
      "step": 35
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 10.75340602132627,
      "learning_rate": 1.9857541260873764e-05,
      "loss": 0.1976,
      "step": 40
    },
    {
      "epoch": 1.5517241379310345,
      "grad_norm": 12.305173501446632,
      "learning_rate": 1.9818789622257197e-05,
      "loss": 0.1954,
      "step": 45
    },
    {
      "epoch": 1.7241379310344827,
      "grad_norm": 11.967313030536257,
      "learning_rate": 1.977542900487977e-05,
      "loss": 0.1867,
      "step": 50
    },
    {
      "epoch": 1.896551724137931,
      "grad_norm": 12.226764504643585,
      "learning_rate": 1.9727479762387115e-05,
      "loss": 0.1967,
      "step": 55
    },
    {
      "epoch": 2.0689655172413794,
      "grad_norm": 13.346169442214025,
      "learning_rate": 1.9674964402343684e-05,
      "loss": 0.1961,
      "step": 60
    },
    {
      "epoch": 2.2413793103448274,
      "grad_norm": 11.702611356777243,
      "learning_rate": 1.9617907575667602e-05,
      "loss": 0.2044,
      "step": 65
    },
    {
      "epoch": 2.413793103448276,
      "grad_norm": 11.798162323499401,
      "learning_rate": 1.955633606505943e-05,
      "loss": 0.2012,
      "step": 70
    },
    {
      "epoch": 2.586206896551724,
      "grad_norm": 11.673995798827626,
      "learning_rate": 1.9490278772430255e-05,
      "loss": 0.1936,
      "step": 75
    },
    {
      "epoch": 2.7586206896551726,
      "grad_norm": 10.686734043411159,
      "learning_rate": 1.9419766705335025e-05,
      "loss": 0.1927,
      "step": 80
    },
    {
      "epoch": 2.9310344827586206,
      "grad_norm": 11.160586116670075,
      "learning_rate": 1.9344832962417475e-05,
      "loss": 0.1889,
      "step": 85
    },
    {
      "epoch": 3.103448275862069,
      "grad_norm": 10.769874091338295,
      "learning_rate": 1.92655127178735e-05,
      "loss": 0.1858,
      "step": 90
    },
    {
      "epoch": 3.2758620689655173,
      "grad_norm": 11.311883329593053,
      "learning_rate": 1.9181843204940232e-05,
      "loss": 0.2001,
      "step": 95
    },
    {
      "epoch": 3.4482758620689653,
      "grad_norm": 12.707306428263719,
      "learning_rate": 1.9093863698418627e-05,
      "loss": 0.1937,
      "step": 100
    },
    {
      "epoch": 3.6206896551724137,
      "grad_norm": 13.066551084589346,
      "learning_rate": 1.9001615496237714e-05,
      "loss": 0.2021,
      "step": 105
    },
    {
      "epoch": 3.793103448275862,
      "grad_norm": 11.526972333003544,
      "learning_rate": 1.890514190006918e-05,
      "loss": 0.1961,
      "step": 110
    },
    {
      "epoch": 3.9655172413793105,
      "grad_norm": 10.492067731277176,
      "learning_rate": 1.8804488195001394e-05,
      "loss": 0.2002,
      "step": 115
    },
    {
      "epoch": 4.137931034482759,
      "grad_norm": 11.0280237578772,
      "learning_rate": 1.869970162828241e-05,
      "loss": 0.201,
      "step": 120
    },
    {
      "epoch": 4.310344827586207,
      "grad_norm": 13.09901469391884,
      "learning_rate": 1.859083138714191e-05,
      "loss": 0.1916,
      "step": 125
    },
    {
      "epoch": 4.482758620689655,
      "grad_norm": 9.77218748564033,
      "learning_rate": 1.847792857570255e-05,
      "loss": 0.1819,
      "step": 130
    },
    {
      "epoch": 4.655172413793103,
      "grad_norm": 10.397235406767715,
      "learning_rate": 1.8361046190991457e-05,
      "loss": 0.1924,
      "step": 135
    },
    {
      "epoch": 4.827586206896552,
      "grad_norm": 9.046262037311052,
      "learning_rate": 1.824023909806322e-05,
      "loss": 0.1813,
      "step": 140
    },
    {
      "epoch": 5.0,
      "grad_norm": 9.373790580903188,
      "learning_rate": 1.8115564004246025e-05,
      "loss": 0.1814,
      "step": 145
    },
    {
      "epoch": 5.172413793103448,
      "grad_norm": 10.125444036531317,
      "learning_rate": 1.7987079432522997e-05,
      "loss": 0.1841,
      "step": 150
    },
    {
      "epoch": 5.344827586206897,
      "grad_norm": 12.214021854754943,
      "learning_rate": 1.7854845694061294e-05,
      "loss": 0.1827,
      "step": 155
    },
    {
      "epoch": 5.517241379310345,
      "grad_norm": 11.231831307080368,
      "learning_rate": 1.7718924859901793e-05,
      "loss": 0.1905,
      "step": 160
    },
    {
      "epoch": 5.689655172413794,
      "grad_norm": 10.970143913905503,
      "learning_rate": 1.7579380731822712e-05,
      "loss": 0.1877,
      "step": 165
    },
    {
      "epoch": 5.862068965517241,
      "grad_norm": 10.553953229075919,
      "learning_rate": 1.7436278812390788e-05,
      "loss": 0.1973,
      "step": 170
    },
    {
      "epoch": 6.0344827586206895,
      "grad_norm": 9.844282254513766,
      "learning_rate": 1.7289686274214116e-05,
      "loss": 0.1912,
      "step": 175
    },
    {
      "epoch": 6.206896551724138,
      "grad_norm": 10.48046238068855,
      "learning_rate": 1.7139671928411074e-05,
      "loss": 0.1895,
      "step": 180
    },
    {
      "epoch": 6.379310344827586,
      "grad_norm": 10.884308915720293,
      "learning_rate": 1.6986306192310086e-05,
      "loss": 0.1931,
      "step": 185
    },
    {
      "epoch": 6.551724137931035,
      "grad_norm": 10.530508782873323,
      "learning_rate": 1.6829661056395473e-05,
      "loss": 0.1845,
      "step": 190
    },
    {
      "epoch": 6.724137931034483,
      "grad_norm": 8.923866337916758,
      "learning_rate": 1.6669810050514827e-05,
      "loss": 0.1805,
      "step": 195
    },
    {
      "epoch": 6.896551724137931,
      "grad_norm": 10.099322440099796,
      "learning_rate": 1.6506828209363796e-05,
      "loss": 0.1772,
      "step": 200
    },
    {
      "epoch": 7.068965517241379,
      "grad_norm": 10.458521429491142,
      "learning_rate": 1.634079203726453e-05,
      "loss": 0.1743,
      "step": 205
    },
    {
      "epoch": 7.241379310344827,
      "grad_norm": 10.463312384855938,
      "learning_rate": 1.6171779472254206e-05,
      "loss": 0.1929,
      "step": 210
    },
    {
      "epoch": 7.413793103448276,
      "grad_norm": 10.118912810787648,
      "learning_rate": 1.599986984950065e-05,
      "loss": 0.1805,
      "step": 215
    },
    {
      "epoch": 7.586206896551724,
      "grad_norm": 9.327155921513027,
      "learning_rate": 1.5825143864062063e-05,
      "loss": 0.1727,
      "step": 220
    },
    {
      "epoch": 7.758620689655173,
      "grad_norm": 8.24932430986195,
      "learning_rate": 1.5647683533008455e-05,
      "loss": 0.1864,
      "step": 225
    },
    {
      "epoch": 7.931034482758621,
      "grad_norm": 10.172474933867472,
      "learning_rate": 1.5467572156922504e-05,
      "loss": 0.1764,
      "step": 230
    },
    {
      "epoch": 8.10344827586207,
      "grad_norm": 9.969389715106933,
      "learning_rate": 1.528489428079793e-05,
      "loss": 0.1748,
      "step": 235
    },
    {
      "epoch": 8.275862068965518,
      "grad_norm": 10.00608688194569,
      "learning_rate": 1.509973565435375e-05,
      "loss": 0.1841,
      "step": 240
    },
    {
      "epoch": 8.448275862068966,
      "grad_norm": 9.994311879836584,
      "learning_rate": 1.4912183191782995e-05,
      "loss": 0.185,
      "step": 245
    },
    {
      "epoch": 8.620689655172415,
      "grad_norm": 8.967701674753974,
      "learning_rate": 1.4722324930954885e-05,
      "loss": 0.1873,
      "step": 250
    },
    {
      "epoch": 8.793103448275861,
      "grad_norm": 8.61339221417413,
      "learning_rate": 1.453024999208946e-05,
      "loss": 0.172,
      "step": 255
    },
    {
      "epoch": 8.96551724137931,
      "grad_norm": 9.113590523548789,
      "learning_rate": 1.4336048535924223e-05,
      "loss": 0.177,
      "step": 260
    },
    {
      "epoch": 9.137931034482758,
      "grad_norm": 6.366907049562298,
      "learning_rate": 1.4139811721392325e-05,
      "loss": 0.1766,
      "step": 265
    },
    {
      "epoch": 9.310344827586206,
      "grad_norm": 8.257166469089526,
      "learning_rate": 1.3941631662832201e-05,
      "loss": 0.1738,
      "step": 270
    },
    {
      "epoch": 9.482758620689655,
      "grad_norm": 9.95233344460982,
      "learning_rate": 1.3741601386748728e-05,
      "loss": 0.1681,
      "step": 275
    },
    {
      "epoch": 9.655172413793103,
      "grad_norm": 9.655306228806518,
      "learning_rate": 1.3539814788146235e-05,
      "loss": 0.1866,
      "step": 280
    },
    {
      "epoch": 9.827586206896552,
      "grad_norm": 8.289931367954654,
      "learning_rate": 1.3336366586453783e-05,
      "loss": 0.1913,
      "step": 285
    },
    {
      "epoch": 10.0,
      "grad_norm": 8.252904158335944,
      "learning_rate": 1.313135228106353e-05,
      "loss": 0.1829,
      "step": 290
    },
    {
      "epoch": 10.172413793103448,
      "grad_norm": 10.550290488102121,
      "learning_rate": 1.292486810650289e-05,
      "loss": 0.178,
      "step": 295
    },
    {
      "epoch": 10.344827586206897,
      "grad_norm": 8.345722020395192,
      "learning_rate": 1.2717010987261716e-05,
      "loss": 0.1738,
      "step": 300
    },
    {
      "epoch": 10.517241379310345,
      "grad_norm": 8.134962325536728,
      "learning_rate": 1.250787849229552e-05,
      "loss": 0.1754,
      "step": 305
    },
    {
      "epoch": 10.689655172413794,
      "grad_norm": 8.815840186959745,
      "learning_rate": 1.229756878922624e-05,
      "loss": 0.1761,
      "step": 310
    },
    {
      "epoch": 10.862068965517242,
      "grad_norm": 8.690893129618297,
      "learning_rate": 1.2086180598261956e-05,
      "loss": 0.1723,
      "step": 315
    },
    {
      "epoch": 11.03448275862069,
      "grad_norm": 8.231717795365554,
      "learning_rate": 1.187381314585725e-05,
      "loss": 0.1683,
      "step": 320
    },
    {
      "epoch": 11.206896551724139,
      "grad_norm": 8.935068264024606,
      "learning_rate": 1.1660566118135894e-05,
      "loss": 0.1777,
      "step": 325
    },
    {
      "epoch": 11.379310344827585,
      "grad_norm": 8.235723206312889,
      "learning_rate": 1.1446539614097814e-05,
      "loss": 0.1744,
      "step": 330
    },
    {
      "epoch": 11.551724137931034,
      "grad_norm": 9.472032012397955,
      "learning_rate": 1.123183409863219e-05,
      "loss": 0.1786,
      "step": 335
    },
    {
      "epoch": 11.724137931034482,
      "grad_norm": 7.458462811267308,
      "learning_rate": 1.1016550355358872e-05,
      "loss": 0.1782,
      "step": 340
    },
    {
      "epoch": 11.89655172413793,
      "grad_norm": 8.818348138750482,
      "learning_rate": 1.0800789439320128e-05,
      "loss": 0.1699,
      "step": 345
    },
    {
      "epoch": 12.068965517241379,
      "grad_norm": 7.046157313918523,
      "learning_rate": 1.0584652629545011e-05,
      "loss": 0.167,
      "step": 350
    },
    {
      "epoch": 12.241379310344827,
      "grad_norm": 7.5773668951999555,
      "learning_rate": 1.036824138150859e-05,
      "loss": 0.1724,
      "step": 355
    },
    {
      "epoch": 12.413793103448276,
      "grad_norm": 8.439620308050772,
      "learning_rate": 1.0151657279508335e-05,
      "loss": 0.1679,
      "step": 360
    },
    {
      "epoch": 12.586206896551724,
      "grad_norm": 7.182234665957953,
      "learning_rate": 9.93500198898006e-06,
      "loss": 0.1683,
      "step": 365
    },
    {
      "epoch": 12.758620689655173,
      "grad_norm": 7.45841562952522,
      "learning_rate": 9.718377208775744e-06,
      "loss": 0.1679,
      "step": 370
    },
    {
      "epoch": 12.931034482758621,
      "grad_norm": 9.05065663213446,
      "learning_rate": 9.50188462342571e-06,
      "loss": 0.1679,
      "step": 375
    },
    {
      "epoch": 13.10344827586207,
      "grad_norm": 8.609074622939076,
      "learning_rate": 9.285625855407485e-06,
      "loss": 0.1742,
      "step": 380
    },
    {
      "epoch": 13.275862068965518,
      "grad_norm": 8.2061328299682,
      "learning_rate": 9.069702417443821e-06,
      "loss": 0.1732,
      "step": 385
    },
    {
      "epoch": 13.448275862068966,
      "grad_norm": 7.900839577431847,
      "learning_rate": 8.854215664852207e-06,
      "loss": 0.171,
      "step": 390
    },
    {
      "epoch": 13.620689655172415,
      "grad_norm": 9.118846376646786,
      "learning_rate": 8.63926674796829e-06,
      "loss": 0.1655,
      "step": 395
    },
    {
      "epoch": 13.793103448275861,
      "grad_norm": 7.4554871018559945,
      "learning_rate": 8.424956564665508e-06,
      "loss": 0.1676,
      "step": 400
    },
    {
      "epoch": 13.96551724137931,
      "grad_norm": 7.834220281291942,
      "learning_rate": 8.211385712993219e-06,
      "loss": 0.1683,
      "step": 405
    },
    {
      "epoch": 14.137931034482758,
      "grad_norm": 7.057089973676238,
      "learning_rate": 7.998654443955586e-06,
      "loss": 0.1721,
      "step": 410
    },
    {
      "epoch": 14.310344827586206,
      "grad_norm": 7.010954237310117,
      "learning_rate": 7.786862614453356e-06,
      "loss": 0.164,
      "step": 415
    },
    {
      "epoch": 14.482758620689655,
      "grad_norm": 8.54121625744651,
      "learning_rate": 7.5761096404106335e-06,
      "loss": 0.1632,
      "step": 420
    },
    {
      "epoch": 14.655172413793103,
      "grad_norm": 7.32836259078595,
      "learning_rate": 7.366494450108659e-06,
      "loss": 0.1653,
      "step": 425
    },
    {
      "epoch": 14.827586206896552,
      "grad_norm": 6.161874927822607,
      "learning_rate": 7.158115437748467e-06,
      "loss": 0.1718,
      "step": 430
    },
    {
      "epoch": 15.0,
      "grad_norm": 5.960687988714388,
      "learning_rate": 6.951070417264278e-06,
      "loss": 0.1706,
      "step": 435
    },
    {
      "epoch": 15.172413793103448,
      "grad_norm": 6.367501795811549,
      "learning_rate": 6.745456576409227e-06,
      "loss": 0.1611,
      "step": 440
    },
    {
      "epoch": 15.344827586206897,
      "grad_norm": 5.06309133432644,
      "learning_rate": 6.541370431135073e-06,
      "loss": 0.1734,
      "step": 445
    },
    {
      "epoch": 15.517241379310345,
      "grad_norm": 6.239307590864818,
      "learning_rate": 6.338907780287198e-06,
      "loss": 0.1753,
      "step": 450
    },
    {
      "epoch": 15.689655172413794,
      "grad_norm": 7.09367607101717,
      "learning_rate": 6.138163660636285e-06,
      "loss": 0.1718,
      "step": 455
    },
    {
      "epoch": 15.862068965517242,
      "grad_norm": 7.637051833850522,
      "learning_rate": 5.939232302267646e-06,
      "loss": 0.1613,
      "step": 460
    },
    {
      "epoch": 16.03448275862069,
      "grad_norm": 6.919836610970123,
      "learning_rate": 5.742207084349274e-06,
      "loss": 0.1667,
      "step": 465
    },
    {
      "epoch": 16.20689655172414,
      "grad_norm": 6.615501006073225,
      "learning_rate": 5.547180491299278e-06,
      "loss": 0.1653,
      "step": 470
    },
    {
      "epoch": 16.379310344827587,
      "grad_norm": 6.3443598347944725,
      "learning_rate": 5.35424406937333e-06,
      "loss": 0.1691,
      "step": 475
    },
    {
      "epoch": 16.551724137931036,
      "grad_norm": 6.545596807021073,
      "learning_rate": 5.163488383692499e-06,
      "loss": 0.167,
      "step": 480
    },
    {
      "epoch": 16.724137931034484,
      "grad_norm": 5.794359907918518,
      "learning_rate": 4.975002975731613e-06,
      "loss": 0.1636,
      "step": 485
    },
    {
      "epoch": 16.896551724137932,
      "grad_norm": 5.30655880304234,
      "learning_rate": 4.78887632128814e-06,
      "loss": 0.1732,
      "step": 490
    },
    {
      "epoch": 17.06896551724138,
      "grad_norm": 5.564470023655747,
      "learning_rate": 4.6051957889513e-06,
      "loss": 0.1705,
      "step": 495
    },
    {
      "epoch": 17.24137931034483,
      "grad_norm": 5.79246441022243,
      "learning_rate": 4.42404759909091e-06,
      "loss": 0.1739,
      "step": 500
    },
    {
      "epoch": 17.413793103448278,
      "grad_norm": 4.968884353779036,
      "learning_rate": 4.2455167833851804e-06,
      "loss": 0.1684,
      "step": 505
    },
    {
      "epoch": 17.586206896551722,
      "grad_norm": 5.820327270915885,
      "learning_rate": 4.069687144906532e-06,
      "loss": 0.1679,
      "step": 510
    },
    {
      "epoch": 17.75862068965517,
      "grad_norm": 4.874594537233877,
      "learning_rate": 3.896641218784081e-06,
      "loss": 0.1671,
      "step": 515
    },
    {
      "epoch": 17.93103448275862,
      "grad_norm": 4.322080959732308,
      "learning_rate": 3.7264602334613385e-06,
      "loss": 0.1615,
      "step": 520
    },
    {
      "epoch": 18.103448275862068,
      "grad_norm": 3.394741423143991,
      "learning_rate": 3.5592240725672476e-06,
      "loss": 0.163,
      "step": 525
    },
    {
      "epoch": 18.275862068965516,
      "grad_norm": 2.8899715660579917,
      "learning_rate": 3.395011237418494e-06,
      "loss": 0.1643,
      "step": 530
    },
    {
      "epoch": 18.448275862068964,
      "grad_norm": 2.66333363991394,
      "learning_rate": 3.2338988101706727e-06,
      "loss": 0.1598,
      "step": 535
    },
    {
      "epoch": 18.620689655172413,
      "grad_norm": 3.0652989937592787,
      "learning_rate": 3.075962417635634e-06,
      "loss": 0.1616,
      "step": 540
    },
    {
      "epoch": 18.79310344827586,
      "grad_norm": 3.8408972623600373,
      "learning_rate": 2.9212761957819347e-06,
      "loss": 0.1673,
      "step": 545
    },
    {
      "epoch": 18.96551724137931,
      "grad_norm": 3.031323684366623,
      "learning_rate": 2.769912754935146e-06,
      "loss": 0.1588,
      "step": 550
    },
    {
      "epoch": 19.137931034482758,
      "grad_norm": 2.4766730582592027,
      "learning_rate": 2.6219431456942536e-06,
      "loss": 0.1626,
      "step": 555
    },
    {
      "epoch": 19.310344827586206,
      "grad_norm": 3.347536737141708,
      "learning_rate": 2.4774368255802483e-06,
      "loss": 0.1607,
      "step": 560
    },
    {
      "epoch": 19.482758620689655,
      "grad_norm": 2.75609785484982,
      "learning_rate": 2.3364616264324725e-06,
      "loss": 0.1664,
      "step": 565
    },
    {
      "epoch": 19.655172413793103,
      "grad_norm": 3.7333685782452335,
      "learning_rate": 2.199083722568095e-06,
      "loss": 0.1634,
      "step": 570
    },
    {
      "epoch": 19.82758620689655,
      "grad_norm": 3.008810658711964,
      "learning_rate": 2.065367599719621e-06,
      "loss": 0.1686,
      "step": 575
    },
    {
      "epoch": 20.0,
      "grad_norm": 3.4023386470750996,
      "learning_rate": 1.93537602476504e-06,
      "loss": 0.1692,
      "step": 580
    },
    {
      "epoch": 20.17241379310345,
      "grad_norm": 2.9763118671554314,
      "learning_rate": 1.809170016264794e-06,
      "loss": 0.1768,
      "step": 585
    },
    {
      "epoch": 20.344827586206897,
      "grad_norm": 2.7215403943473957,
      "learning_rate": 1.6868088158194351e-06,
      "loss": 0.1677,
      "step": 590
    },
    {
      "epoch": 20.517241379310345,
      "grad_norm": 2.48039880730862,
      "learning_rate": 1.5683498602613689e-06,
      "loss": 0.1638,
      "step": 595
    },
    {
      "epoch": 20.689655172413794,
      "grad_norm": 1.8624071329024212,
      "learning_rate": 1.4538487546937951e-06,
      "loss": 0.1687,
      "step": 600
    },
    {
      "epoch": 20.862068965517242,
      "grad_norm": 2.871924413048312,
      "learning_rate": 1.3433592463894375e-06,
      "loss": 0.1617,
      "step": 605
    },
    {
      "epoch": 21.03448275862069,
      "grad_norm": 2.7255286515937667,
      "learning_rate": 1.2369331995613664e-06,
      "loss": 0.1622,
      "step": 610
    },
    {
      "epoch": 21.20689655172414,
      "grad_norm": 2.642540001372744,
      "learning_rate": 1.1346205710177304e-06,
      "loss": 0.1608,
      "step": 615
    },
    {
      "epoch": 21.379310344827587,
      "grad_norm": 2.975135375218524,
      "learning_rate": 1.0364693867118425e-06,
      "loss": 0.1644,
      "step": 620
    },
    {
      "epoch": 21.551724137931036,
      "grad_norm": 2.0876137084099784,
      "learning_rate": 9.425257191985859e-07,
      "loss": 0.1639,
      "step": 625
    },
    {
      "epoch": 21.724137931034484,
      "grad_norm": 1.387475835397412,
      "learning_rate": 8.528336660077974e-07,
      "loss": 0.1677,
      "step": 630
    },
    {
      "epoch": 21.896551724137932,
      "grad_norm": 2.869659930472805,
      "learning_rate": 7.674353289446946e-07,
      "loss": 0.1621,
      "step": 635
    },
    {
      "epoch": 22.06896551724138,
      "grad_norm": 3.0026307802656196,
      "learning_rate": 6.863707943271325e-07,
      "loss": 0.1655,
      "step": 640
    },
    {
      "epoch": 22.24137931034483,
      "grad_norm": 1.8278436522298838,
      "learning_rate": 6.096781141689223e-07,
      "loss": 0.163,
      "step": 645
    },
    {
      "epoch": 22.413793103448278,
      "grad_norm": 2.0229424538116145,
      "learning_rate": 5.373932883180655e-07,
      "loss": 0.1665,
      "step": 650
    },
    {
      "epoch": 22.586206896551722,
      "grad_norm": 2.1750833091387043,
      "learning_rate": 4.695502475582814e-07,
      "loss": 0.1611,
      "step": 655
    },
    {
      "epoch": 22.75862068965517,
      "grad_norm": 1.948795256385187,
      "learning_rate": 4.0618083768176996e-07,
      "loss": 0.1619,
      "step": 660
    },
    {
      "epoch": 22.93103448275862,
      "grad_norm": 1.5695625649630254,
      "learning_rate": 3.4731480454065823e-07,
      "loss": 0.1621,
      "step": 665
    },
    {
      "epoch": 23.103448275862068,
      "grad_norm": 1.5429369658213046,
      "learning_rate": 2.9297978008419604e-07,
      "loss": 0.1687,
      "step": 670
    },
    {
      "epoch": 23.275862068965516,
      "grad_norm": 2.0587233544995325,
      "learning_rate": 2.4320126938819023e-07,
      "loss": 0.1573,
      "step": 675
    },
    {
      "epoch": 23.448275862068964,
      "grad_norm": 1.6851058427704997,
      "learning_rate": 1.9800263868283708e-07,
      "loss": 0.1651,
      "step": 680
    },
    {
      "epoch": 23.620689655172413,
      "grad_norm": 1.2703207998322799,
      "learning_rate": 1.5740510438451374e-07,
      "loss": 0.1646,
      "step": 685
    },
    {
      "epoch": 23.79310344827586,
      "grad_norm": 1.6546893474906568,
      "learning_rate": 1.214277231367078e-07,
      "loss": 0.1672,
      "step": 690
    },
    {
      "epoch": 23.96551724137931,
      "grad_norm": 1.4962982730142882,
      "learning_rate": 9.008738286475748e-08,
      "loss": 0.1621,
      "step": 695
    },
    {
      "epoch": 24.137931034482758,
      "grad_norm": 1.5704585386814611,
      "learning_rate": 6.339879484858924e-08,
      "loss": 0.1675,
      "step": 700
    },
    {
      "epoch": 24.310344827586206,
      "grad_norm": 1.367562782086171,
      "learning_rate": 4.137448681718392e-08,
      "loss": 0.1637,
      "step": 705
    },
    {
      "epoch": 24.482758620689655,
      "grad_norm": 1.3662424883362485,
      "learning_rate": 2.4024797068017414e-08,
      "loss": 0.1625,
      "step": 710
    },
    {
      "epoch": 24.655172413793103,
      "grad_norm": 1.7154032971232247,
      "learning_rate": 1.135786961421248e-08,
      "loss": 0.1692,
      "step": 715
    },
    {
      "epoch": 24.82758620689655,
      "grad_norm": 0.9411353813301526,
      "learning_rate": 3.3796503617167244e-09,
      "loss": 0.1681,
      "step": 720
    },
    {
      "epoch": 25.0,
      "grad_norm": 1.2181390829757093,
      "learning_rate": 9.388431826629074e-11,
      "loss": 0.1725,
      "step": 725
    }
  ],
  "logging_steps": 5,
  "max_steps": 725,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 25,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 50378664312832.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|