| { |
| "best_global_step": 7400, |
| "best_metric": 0.02321171946823597, |
| "best_model_checkpoint": "./t5-med-ner/checkpoint-7400", |
| "epoch": 5.0, |
| "eval_steps": 500, |
| "global_step": 7400, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.006756756756756757, |
| "grad_norm": 16.129451751708984, |
| "learning_rate": 4.993918918918919e-05, |
| "loss": 15.3156, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.013513513513513514, |
| "grad_norm": 18.336261749267578, |
| "learning_rate": 4.987162162162162e-05, |
| "loss": 12.4855, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.02027027027027027, |
| "grad_norm": 12.824548721313477, |
| "learning_rate": 4.980405405405406e-05, |
| "loss": 9.9164, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.02702702702702703, |
| "grad_norm": 12.065224647521973, |
| "learning_rate": 4.973648648648649e-05, |
| "loss": 8.696, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.033783783783783786, |
| "grad_norm": 11.090178489685059, |
| "learning_rate": 4.966891891891892e-05, |
| "loss": 7.5063, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.04054054054054054, |
| "grad_norm": 15.085448265075684, |
| "learning_rate": 4.9601351351351355e-05, |
| "loss": 6.2429, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.0472972972972973, |
| "grad_norm": 13.112349510192871, |
| "learning_rate": 4.953378378378379e-05, |
| "loss": 5.2701, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.05405405405405406, |
| "grad_norm": 18.061439514160156, |
| "learning_rate": 4.946621621621622e-05, |
| "loss": 4.3682, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.060810810810810814, |
| "grad_norm": 10.440735816955566, |
| "learning_rate": 4.939864864864865e-05, |
| "loss": 3.6778, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.06756756756756757, |
| "grad_norm": 11.351167678833008, |
| "learning_rate": 4.9331081081081084e-05, |
| "loss": 3.2569, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.07432432432432433, |
| "grad_norm": 12.01860237121582, |
| "learning_rate": 4.926351351351352e-05, |
| "loss": 2.8868, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.08108108108108109, |
| "grad_norm": 12.096404075622559, |
| "learning_rate": 4.919594594594595e-05, |
| "loss": 2.4611, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.08783783783783784, |
| "grad_norm": 72.67072296142578, |
| "learning_rate": 4.912837837837838e-05, |
| "loss": 2.0179, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.0945945945945946, |
| "grad_norm": 10.841756820678711, |
| "learning_rate": 4.9060810810810813e-05, |
| "loss": 1.6158, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.10135135135135136, |
| "grad_norm": 8.544488906860352, |
| "learning_rate": 4.8993243243243246e-05, |
| "loss": 1.3312, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.10810810810810811, |
| "grad_norm": 13.064513206481934, |
| "learning_rate": 4.892567567567568e-05, |
| "loss": 1.0426, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.11486486486486487, |
| "grad_norm": 7.233090877532959, |
| "learning_rate": 4.885810810810811e-05, |
| "loss": 0.8803, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.12162162162162163, |
| "grad_norm": 2.897141456604004, |
| "learning_rate": 4.879054054054054e-05, |
| "loss": 0.7543, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.12837837837837837, |
| "grad_norm": 1.864625096321106, |
| "learning_rate": 4.8722972972972975e-05, |
| "loss": 0.6558, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.13513513513513514, |
| "grad_norm": 1.1638946533203125, |
| "learning_rate": 4.865540540540541e-05, |
| "loss": 0.5462, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.14189189189189189, |
| "grad_norm": 1.4550234079360962, |
| "learning_rate": 4.858783783783784e-05, |
| "loss": 0.4295, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.14864864864864866, |
| "grad_norm": 1.272861361503601, |
| "learning_rate": 4.852027027027027e-05, |
| "loss": 0.4206, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.1554054054054054, |
| "grad_norm": 1.448193907737732, |
| "learning_rate": 4.8452702702702704e-05, |
| "loss": 0.3532, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.16216216216216217, |
| "grad_norm": 0.8406581878662109, |
| "learning_rate": 4.838513513513514e-05, |
| "loss": 0.3442, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.16891891891891891, |
| "grad_norm": 1.0974594354629517, |
| "learning_rate": 4.831756756756757e-05, |
| "loss": 0.3011, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.17567567567567569, |
| "grad_norm": 10.254583358764648, |
| "learning_rate": 4.825e-05, |
| "loss": 0.2916, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.18243243243243243, |
| "grad_norm": 1.023024320602417, |
| "learning_rate": 4.818243243243243e-05, |
| "loss": 0.301, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.1891891891891892, |
| "grad_norm": 0.8870915770530701, |
| "learning_rate": 4.8114864864864865e-05, |
| "loss": 0.2768, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.19594594594594594, |
| "grad_norm": 1.4651856422424316, |
| "learning_rate": 4.80472972972973e-05, |
| "loss": 0.2543, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.20270270270270271, |
| "grad_norm": 0.8050569891929626, |
| "learning_rate": 4.7979729729729736e-05, |
| "loss": 0.2513, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.20945945945945946, |
| "grad_norm": 0.7741556763648987, |
| "learning_rate": 4.791216216216217e-05, |
| "loss": 0.2343, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.21621621621621623, |
| "grad_norm": 0.7951841354370117, |
| "learning_rate": 4.78445945945946e-05, |
| "loss": 0.2058, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.22297297297297297, |
| "grad_norm": 4.641398906707764, |
| "learning_rate": 4.7777027027027026e-05, |
| "loss": 0.2249, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.22972972972972974, |
| "grad_norm": 0.795923113822937, |
| "learning_rate": 4.770945945945946e-05, |
| "loss": 0.1895, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.23648648648648649, |
| "grad_norm": 1.0727757215499878, |
| "learning_rate": 4.764189189189189e-05, |
| "loss": 0.1918, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.24324324324324326, |
| "grad_norm": 3.4822981357574463, |
| "learning_rate": 4.757432432432432e-05, |
| "loss": 0.2068, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.25, |
| "grad_norm": 0.637052059173584, |
| "learning_rate": 4.750675675675676e-05, |
| "loss": 0.1923, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.25675675675675674, |
| "grad_norm": 0.893659770488739, |
| "learning_rate": 4.7439189189189194e-05, |
| "loss": 0.176, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.2635135135135135, |
| "grad_norm": 0.8992719650268555, |
| "learning_rate": 4.7371621621621627e-05, |
| "loss": 0.185, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.2702702702702703, |
| "grad_norm": 1.35341477394104, |
| "learning_rate": 4.730405405405406e-05, |
| "loss": 0.2073, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.27702702702702703, |
| "grad_norm": 0.8321266770362854, |
| "learning_rate": 4.7236486486486484e-05, |
| "loss": 0.1682, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.28378378378378377, |
| "grad_norm": 0.7957965731620789, |
| "learning_rate": 4.7168918918918917e-05, |
| "loss": 0.1377, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.2905405405405405, |
| "grad_norm": 1.0086628198623657, |
| "learning_rate": 4.7101351351351356e-05, |
| "loss": 0.1468, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.2972972972972973, |
| "grad_norm": 1.8587403297424316, |
| "learning_rate": 4.703378378378379e-05, |
| "loss": 0.1559, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.30405405405405406, |
| "grad_norm": 0.9978078603744507, |
| "learning_rate": 4.696621621621622e-05, |
| "loss": 0.1582, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.3108108108108108, |
| "grad_norm": 0.7275466918945312, |
| "learning_rate": 4.689864864864865e-05, |
| "loss": 0.1515, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.31756756756756754, |
| "grad_norm": 0.6467192769050598, |
| "learning_rate": 4.6831081081081085e-05, |
| "loss": 0.1469, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.32432432432432434, |
| "grad_norm": 0.5906888246536255, |
| "learning_rate": 4.676351351351352e-05, |
| "loss": 0.1566, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.3310810810810811, |
| "grad_norm": 0.6035364866256714, |
| "learning_rate": 4.669594594594595e-05, |
| "loss": 0.1376, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.33783783783783783, |
| "grad_norm": 0.7139237523078918, |
| "learning_rate": 4.662837837837838e-05, |
| "loss": 0.1404, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.34459459459459457, |
| "grad_norm": 0.818231999874115, |
| "learning_rate": 4.6560810810810814e-05, |
| "loss": 0.1406, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.35135135135135137, |
| "grad_norm": 0.4550321102142334, |
| "learning_rate": 4.6493243243243246e-05, |
| "loss": 0.1283, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.3581081081081081, |
| "grad_norm": 0.7775529623031616, |
| "learning_rate": 4.642567567567568e-05, |
| "loss": 0.1203, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.36486486486486486, |
| "grad_norm": 0.6731743812561035, |
| "learning_rate": 4.635810810810811e-05, |
| "loss": 0.1187, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.3716216216216216, |
| "grad_norm": 0.5695653557777405, |
| "learning_rate": 4.629054054054054e-05, |
| "loss": 0.1206, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.3783783783783784, |
| "grad_norm": 1.0783650875091553, |
| "learning_rate": 4.6222972972972975e-05, |
| "loss": 0.1233, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.38513513513513514, |
| "grad_norm": 0.4626968801021576, |
| "learning_rate": 4.615540540540541e-05, |
| "loss": 0.1104, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.3918918918918919, |
| "grad_norm": 0.6568089723587036, |
| "learning_rate": 4.608783783783784e-05, |
| "loss": 0.1509, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.39864864864864863, |
| "grad_norm": 0.5651082396507263, |
| "learning_rate": 4.602027027027027e-05, |
| "loss": 0.1167, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.40540540540540543, |
| "grad_norm": 0.8560924530029297, |
| "learning_rate": 4.5952702702702704e-05, |
| "loss": 0.1245, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.41216216216216217, |
| "grad_norm": 0.8468086123466492, |
| "learning_rate": 4.5885135135135136e-05, |
| "loss": 0.1198, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.4189189189189189, |
| "grad_norm": 8.868446350097656, |
| "learning_rate": 4.581756756756757e-05, |
| "loss": 0.1022, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.42567567567567566, |
| "grad_norm": 0.7043650150299072, |
| "learning_rate": 4.575e-05, |
| "loss": 0.1227, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.43243243243243246, |
| "grad_norm": 0.6890026926994324, |
| "learning_rate": 4.568243243243244e-05, |
| "loss": 0.1158, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.4391891891891892, |
| "grad_norm": 0.48028209805488586, |
| "learning_rate": 4.5614864864864865e-05, |
| "loss": 0.0918, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.44594594594594594, |
| "grad_norm": 0.5559847354888916, |
| "learning_rate": 4.55472972972973e-05, |
| "loss": 0.0968, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.4527027027027027, |
| "grad_norm": 2.659057855606079, |
| "learning_rate": 4.547972972972973e-05, |
| "loss": 0.1308, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.4594594594594595, |
| "grad_norm": 0.3449746370315552, |
| "learning_rate": 4.541216216216216e-05, |
| "loss": 0.0965, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.46621621621621623, |
| "grad_norm": 0.4225215017795563, |
| "learning_rate": 4.5344594594594594e-05, |
| "loss": 0.0989, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.47297297297297297, |
| "grad_norm": 0.5477343201637268, |
| "learning_rate": 4.527702702702703e-05, |
| "loss": 0.1088, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.4797297297297297, |
| "grad_norm": 0.552731990814209, |
| "learning_rate": 4.5209459459459466e-05, |
| "loss": 0.0935, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.4864864864864865, |
| "grad_norm": 0.4366927444934845, |
| "learning_rate": 4.51418918918919e-05, |
| "loss": 0.101, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.49324324324324326, |
| "grad_norm": 0.49442631006240845, |
| "learning_rate": 4.507432432432432e-05, |
| "loss": 0.1172, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.43195462226867676, |
| "learning_rate": 4.5006756756756756e-05, |
| "loss": 0.1102, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.5067567567567568, |
| "grad_norm": 0.9043081998825073, |
| "learning_rate": 4.493918918918919e-05, |
| "loss": 0.1347, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.5135135135135135, |
| "grad_norm": 0.5600760579109192, |
| "learning_rate": 4.487162162162162e-05, |
| "loss": 0.0974, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.5202702702702703, |
| "grad_norm": 0.7427008152008057, |
| "learning_rate": 4.480405405405406e-05, |
| "loss": 0.0772, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.527027027027027, |
| "grad_norm": 1.1261482238769531, |
| "learning_rate": 4.473648648648649e-05, |
| "loss": 0.104, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.5337837837837838, |
| "grad_norm": 0.8397054672241211, |
| "learning_rate": 4.4668918918918924e-05, |
| "loss": 0.1021, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.5405405405405406, |
| "grad_norm": 0.4203931391239166, |
| "learning_rate": 4.4601351351351356e-05, |
| "loss": 0.1103, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.5472972972972973, |
| "grad_norm": 0.6614381670951843, |
| "learning_rate": 4.453378378378378e-05, |
| "loss": 0.1014, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.5540540540540541, |
| "grad_norm": 0.3424564301967621, |
| "learning_rate": 4.4466216216216214e-05, |
| "loss": 0.0834, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.5608108108108109, |
| "grad_norm": 0.6263936758041382, |
| "learning_rate": 4.439864864864865e-05, |
| "loss": 0.0942, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.5675675675675675, |
| "grad_norm": 0.5943464636802673, |
| "learning_rate": 4.4331081081081085e-05, |
| "loss": 0.085, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.5743243243243243, |
| "grad_norm": 0.4014379680156708, |
| "learning_rate": 4.426351351351352e-05, |
| "loss": 0.0879, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.581081081081081, |
| "grad_norm": 0.5546994209289551, |
| "learning_rate": 4.419594594594595e-05, |
| "loss": 0.0751, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.5878378378378378, |
| "grad_norm": 0.5836130380630493, |
| "learning_rate": 4.412837837837838e-05, |
| "loss": 0.1219, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.5945945945945946, |
| "grad_norm": 0.6041303873062134, |
| "learning_rate": 4.4060810810810814e-05, |
| "loss": 0.0982, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.6013513513513513, |
| "grad_norm": 1.5622942447662354, |
| "learning_rate": 4.3993243243243246e-05, |
| "loss": 0.0923, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.6081081081081081, |
| "grad_norm": 0.4068175256252289, |
| "learning_rate": 4.392567567567568e-05, |
| "loss": 0.075, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.6148648648648649, |
| "grad_norm": 0.4515470862388611, |
| "learning_rate": 4.385810810810811e-05, |
| "loss": 0.0845, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.6216216216216216, |
| "grad_norm": 0.500089168548584, |
| "learning_rate": 4.379054054054054e-05, |
| "loss": 0.0939, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.6283783783783784, |
| "grad_norm": 0.8668215274810791, |
| "learning_rate": 4.3722972972972975e-05, |
| "loss": 0.094, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.6351351351351351, |
| "grad_norm": 0.5503737926483154, |
| "learning_rate": 4.365540540540541e-05, |
| "loss": 0.0936, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.6418918918918919, |
| "grad_norm": 0.6960216760635376, |
| "learning_rate": 4.358783783783784e-05, |
| "loss": 0.0847, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.6486486486486487, |
| "grad_norm": 0.5142303705215454, |
| "learning_rate": 4.352027027027027e-05, |
| "loss": 0.0906, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.6554054054054054, |
| "grad_norm": 0.775428056716919, |
| "learning_rate": 4.3452702702702704e-05, |
| "loss": 0.0881, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.6621621621621622, |
| "grad_norm": 0.43797916173934937, |
| "learning_rate": 4.3385135135135136e-05, |
| "loss": 0.0898, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.668918918918919, |
| "grad_norm": 0.743198812007904, |
| "learning_rate": 4.331756756756757e-05, |
| "loss": 0.0751, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.6756756756756757, |
| "grad_norm": 1.0289562940597534, |
| "learning_rate": 4.325e-05, |
| "loss": 0.1113, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.6824324324324325, |
| "grad_norm": 0.5582059621810913, |
| "learning_rate": 4.318243243243243e-05, |
| "loss": 0.0817, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.6891891891891891, |
| "grad_norm": 0.42011767625808716, |
| "learning_rate": 4.3114864864864865e-05, |
| "loss": 0.0843, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.6959459459459459, |
| "grad_norm": 0.6274346709251404, |
| "learning_rate": 4.30472972972973e-05, |
| "loss": 0.0869, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.7027027027027027, |
| "grad_norm": 0.3823902904987335, |
| "learning_rate": 4.297972972972974e-05, |
| "loss": 0.0739, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.7094594594594594, |
| "grad_norm": 0.3813062608242035, |
| "learning_rate": 4.291216216216216e-05, |
| "loss": 0.0972, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.7162162162162162, |
| "grad_norm": 0.5466370582580566, |
| "learning_rate": 4.2844594594594594e-05, |
| "loss": 0.0955, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.722972972972973, |
| "grad_norm": 0.3452337980270386, |
| "learning_rate": 4.277702702702703e-05, |
| "loss": 0.0755, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.7297297297297297, |
| "grad_norm": 0.7460477948188782, |
| "learning_rate": 4.270945945945946e-05, |
| "loss": 0.0776, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.7364864864864865, |
| "grad_norm": 0.6906008124351501, |
| "learning_rate": 4.264189189189189e-05, |
| "loss": 0.0749, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.7432432432432432, |
| "grad_norm": 0.5057837963104248, |
| "learning_rate": 4.257432432432433e-05, |
| "loss": 0.0701, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.6320849061012268, |
| "learning_rate": 4.250675675675676e-05, |
| "loss": 0.0785, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.7567567567567568, |
| "grad_norm": 1.015070915222168, |
| "learning_rate": 4.2439189189189195e-05, |
| "loss": 0.0727, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.7635135135135135, |
| "grad_norm": 0.5887625217437744, |
| "learning_rate": 4.237162162162162e-05, |
| "loss": 0.0666, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.7702702702702703, |
| "grad_norm": 0.5994888544082642, |
| "learning_rate": 4.230405405405405e-05, |
| "loss": 0.071, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.777027027027027, |
| "grad_norm": 0.6155841946601868, |
| "learning_rate": 4.2236486486486485e-05, |
| "loss": 0.0735, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.7837837837837838, |
| "grad_norm": 0.8194956183433533, |
| "learning_rate": 4.2168918918918924e-05, |
| "loss": 0.0699, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.7905405405405406, |
| "grad_norm": 0.3856644034385681, |
| "learning_rate": 4.2101351351351356e-05, |
| "loss": 0.089, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.7972972972972973, |
| "grad_norm": 0.7099316120147705, |
| "learning_rate": 4.203378378378379e-05, |
| "loss": 0.0572, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.8040540540540541, |
| "grad_norm": 0.4837072789669037, |
| "learning_rate": 4.196621621621622e-05, |
| "loss": 0.0598, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.8108108108108109, |
| "grad_norm": 1.0372607707977295, |
| "learning_rate": 4.189864864864865e-05, |
| "loss": 0.0896, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.8175675675675675, |
| "grad_norm": 0.36857545375823975, |
| "learning_rate": 4.183108108108108e-05, |
| "loss": 0.0766, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.8243243243243243, |
| "grad_norm": 0.6140791177749634, |
| "learning_rate": 4.176351351351351e-05, |
| "loss": 0.0651, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.831081081081081, |
| "grad_norm": 0.4973204433917999, |
| "learning_rate": 4.169594594594595e-05, |
| "loss": 0.0771, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.8378378378378378, |
| "grad_norm": 0.8992588520050049, |
| "learning_rate": 4.162837837837838e-05, |
| "loss": 0.0803, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.8445945945945946, |
| "grad_norm": 0.4852398633956909, |
| "learning_rate": 4.1560810810810814e-05, |
| "loss": 0.0864, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.8513513513513513, |
| "grad_norm": 0.7607585787773132, |
| "learning_rate": 4.1493243243243246e-05, |
| "loss": 0.0726, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.8581081081081081, |
| "grad_norm": 0.7050380706787109, |
| "learning_rate": 4.142567567567568e-05, |
| "loss": 0.0736, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.8648648648648649, |
| "grad_norm": 0.6202526688575745, |
| "learning_rate": 4.135810810810811e-05, |
| "loss": 0.0632, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.8716216216216216, |
| "grad_norm": 1.4994258880615234, |
| "learning_rate": 4.129054054054054e-05, |
| "loss": 0.0745, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.8783783783783784, |
| "grad_norm": 0.4370647668838501, |
| "learning_rate": 4.1222972972972975e-05, |
| "loss": 0.0782, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.8851351351351351, |
| "grad_norm": 0.28213265538215637, |
| "learning_rate": 4.115540540540541e-05, |
| "loss": 0.0735, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.8918918918918919, |
| "grad_norm": 0.31794193387031555, |
| "learning_rate": 4.108783783783784e-05, |
| "loss": 0.0543, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.8986486486486487, |
| "grad_norm": 0.5807836055755615, |
| "learning_rate": 4.102027027027027e-05, |
| "loss": 0.0688, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.9054054054054054, |
| "grad_norm": 0.5014552474021912, |
| "learning_rate": 4.0952702702702704e-05, |
| "loss": 0.0659, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.9121621621621622, |
| "grad_norm": 0.3839617371559143, |
| "learning_rate": 4.088513513513514e-05, |
| "loss": 0.0655, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.918918918918919, |
| "grad_norm": 0.6149204969406128, |
| "learning_rate": 4.081756756756757e-05, |
| "loss": 0.0693, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.9256756756756757, |
| "grad_norm": 0.49168112874031067, |
| "learning_rate": 4.075e-05, |
| "loss": 0.06, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.9324324324324325, |
| "grad_norm": 0.6225050687789917, |
| "learning_rate": 4.0682432432432433e-05, |
| "loss": 0.0762, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.9391891891891891, |
| "grad_norm": 0.5382749438285828, |
| "learning_rate": 4.0614864864864866e-05, |
| "loss": 0.0695, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.9459459459459459, |
| "grad_norm": 0.8806264400482178, |
| "learning_rate": 4.05472972972973e-05, |
| "loss": 0.0847, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.9527027027027027, |
| "grad_norm": 0.6600827574729919, |
| "learning_rate": 4.047972972972973e-05, |
| "loss": 0.072, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.9594594594594594, |
| "grad_norm": 0.2923494279384613, |
| "learning_rate": 4.041216216216216e-05, |
| "loss": 0.0647, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.9662162162162162, |
| "grad_norm": 0.40617331862449646, |
| "learning_rate": 4.0344594594594595e-05, |
| "loss": 0.058, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.972972972972973, |
| "grad_norm": 0.5751014947891235, |
| "learning_rate": 4.0277027027027034e-05, |
| "loss": 0.0685, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.9797297297297297, |
| "grad_norm": 0.4234154224395752, |
| "learning_rate": 4.020945945945946e-05, |
| "loss": 0.0793, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.9864864864864865, |
| "grad_norm": 0.6290509700775146, |
| "learning_rate": 4.014189189189189e-05, |
| "loss": 0.0705, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.9932432432432432, |
| "grad_norm": 0.5429525375366211, |
| "learning_rate": 4.0074324324324324e-05, |
| "loss": 0.0694, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.3089545965194702, |
| "learning_rate": 4.0006756756756756e-05, |
| "loss": 0.0579, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.0, |
| "eval_loss": 0.03719908744096756, |
| "eval_runtime": 267.8762, |
| "eval_samples_per_second": 3.901, |
| "eval_steps_per_second": 0.978, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.0067567567567568, |
| "grad_norm": 0.429411381483078, |
| "learning_rate": 3.993918918918919e-05, |
| "loss": 0.0615, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.0135135135135136, |
| "grad_norm": 0.4686361253261566, |
| "learning_rate": 3.987162162162163e-05, |
| "loss": 0.0774, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.0202702702702702, |
| "grad_norm": 0.38840433955192566, |
| "learning_rate": 3.980405405405406e-05, |
| "loss": 0.0499, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.027027027027027, |
| "grad_norm": 0.6909990906715393, |
| "learning_rate": 3.973648648648649e-05, |
| "loss": 0.0698, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.0337837837837838, |
| "grad_norm": 1.1144709587097168, |
| "learning_rate": 3.966891891891892e-05, |
| "loss": 0.0735, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.0405405405405406, |
| "grad_norm": 0.37668439745903015, |
| "learning_rate": 3.960135135135135e-05, |
| "loss": 0.0529, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.0472972972972974, |
| "grad_norm": 0.5073342323303223, |
| "learning_rate": 3.953378378378378e-05, |
| "loss": 0.0667, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.054054054054054, |
| "grad_norm": 0.4015592038631439, |
| "learning_rate": 3.946621621621622e-05, |
| "loss": 0.0625, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.0608108108108107, |
| "grad_norm": 0.7579006552696228, |
| "learning_rate": 3.939864864864865e-05, |
| "loss": 0.06, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.0675675675675675, |
| "grad_norm": 0.7990680932998657, |
| "learning_rate": 3.9331081081081085e-05, |
| "loss": 0.0593, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.0743243243243243, |
| "grad_norm": 0.6557222604751587, |
| "learning_rate": 3.926351351351352e-05, |
| "loss": 0.0726, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.0810810810810811, |
| "grad_norm": 0.5203543305397034, |
| "learning_rate": 3.919594594594595e-05, |
| "loss": 0.0918, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.087837837837838, |
| "grad_norm": 0.5405670404434204, |
| "learning_rate": 3.9128378378378375e-05, |
| "loss": 0.0712, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.0945945945945945, |
| "grad_norm": 0.36374953389167786, |
| "learning_rate": 3.906081081081081e-05, |
| "loss": 0.0608, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.1013513513513513, |
| "grad_norm": 0.3525446653366089, |
| "learning_rate": 3.8993243243243247e-05, |
| "loss": 0.051, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.1081081081081081, |
| "grad_norm": 0.38730859756469727, |
| "learning_rate": 3.892567567567568e-05, |
| "loss": 0.0652, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.114864864864865, |
| "grad_norm": 0.3817211389541626, |
| "learning_rate": 3.885810810810811e-05, |
| "loss": 0.0702, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.1216216216216217, |
| "grad_norm": 0.29431015253067017, |
| "learning_rate": 3.879054054054054e-05, |
| "loss": 0.0607, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.1283783783783783, |
| "grad_norm": 0.5619912147521973, |
| "learning_rate": 3.8722972972972976e-05, |
| "loss": 0.0564, |
| "step": 1670 |
| }, |
| { |
| "epoch": 1.135135135135135, |
| "grad_norm": 0.5539348125457764, |
| "learning_rate": 3.865540540540541e-05, |
| "loss": 0.0683, |
| "step": 1680 |
| }, |
| { |
| "epoch": 1.1418918918918919, |
| "grad_norm": 0.7165750861167908, |
| "learning_rate": 3.858783783783784e-05, |
| "loss": 0.0701, |
| "step": 1690 |
| }, |
| { |
| "epoch": 1.1486486486486487, |
| "grad_norm": 0.3067423403263092, |
| "learning_rate": 3.852027027027027e-05, |
| "loss": 0.0594, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.1554054054054055, |
| "grad_norm": 0.33841240406036377, |
| "learning_rate": 3.8452702702702705e-05, |
| "loss": 0.0446, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.1621621621621623, |
| "grad_norm": 0.3610815405845642, |
| "learning_rate": 3.838513513513514e-05, |
| "loss": 0.0607, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.1689189189189189, |
| "grad_norm": 0.5924748778343201, |
| "learning_rate": 3.831756756756757e-05, |
| "loss": 0.0641, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.1756756756756757, |
| "grad_norm": 0.39871370792388916, |
| "learning_rate": 3.825e-05, |
| "loss": 0.0558, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.1824324324324325, |
| "grad_norm": 0.5780407190322876, |
| "learning_rate": 3.8182432432432434e-05, |
| "loss": 0.0603, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.1891891891891893, |
| "grad_norm": 0.6301029920578003, |
| "learning_rate": 3.8114864864864866e-05, |
| "loss": 0.0553, |
| "step": 1760 |
| }, |
| { |
| "epoch": 1.195945945945946, |
| "grad_norm": 0.4417901039123535, |
| "learning_rate": 3.8047297297297305e-05, |
| "loss": 0.0706, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.2027027027027026, |
| "grad_norm": 0.5784780979156494, |
| "learning_rate": 3.797972972972973e-05, |
| "loss": 0.0552, |
| "step": 1780 |
| }, |
| { |
| "epoch": 1.2094594594594594, |
| "grad_norm": 0.3300984799861908, |
| "learning_rate": 3.791216216216216e-05, |
| "loss": 0.0539, |
| "step": 1790 |
| }, |
| { |
| "epoch": 1.2162162162162162, |
| "grad_norm": 0.28047096729278564, |
| "learning_rate": 3.7844594594594595e-05, |
| "loss": 0.0599, |
| "step": 1800 |
| }, |
| { |
| "epoch": 1.222972972972973, |
| "grad_norm": 0.3282848000526428, |
| "learning_rate": 3.777702702702703e-05, |
| "loss": 0.0757, |
| "step": 1810 |
| }, |
| { |
| "epoch": 1.2297297297297298, |
| "grad_norm": 0.5447771549224854, |
| "learning_rate": 3.770945945945946e-05, |
| "loss": 0.054, |
| "step": 1820 |
| }, |
| { |
| "epoch": 1.2364864864864864, |
| "grad_norm": 0.38858866691589355, |
| "learning_rate": 3.764189189189189e-05, |
| "loss": 0.0568, |
| "step": 1830 |
| }, |
| { |
| "epoch": 1.2432432432432432, |
| "grad_norm": 0.553166925907135, |
| "learning_rate": 3.757432432432433e-05, |
| "loss": 0.0417, |
| "step": 1840 |
| }, |
| { |
| "epoch": 1.25, |
| "grad_norm": 0.3401406705379486, |
| "learning_rate": 3.750675675675676e-05, |
| "loss": 0.081, |
| "step": 1850 |
| }, |
| { |
| "epoch": 1.2567567567567568, |
| "grad_norm": 0.43225792050361633, |
| "learning_rate": 3.743918918918919e-05, |
| "loss": 0.0487, |
| "step": 1860 |
| }, |
| { |
| "epoch": 1.2635135135135136, |
| "grad_norm": 0.33819809556007385, |
| "learning_rate": 3.737162162162162e-05, |
| "loss": 0.0567, |
| "step": 1870 |
| }, |
| { |
| "epoch": 1.2702702702702702, |
| "grad_norm": 0.3383878469467163, |
| "learning_rate": 3.730405405405405e-05, |
| "loss": 0.0474, |
| "step": 1880 |
| }, |
| { |
| "epoch": 1.277027027027027, |
| "grad_norm": 0.5455739498138428, |
| "learning_rate": 3.7236486486486485e-05, |
| "loss": 0.0508, |
| "step": 1890 |
| }, |
| { |
| "epoch": 1.2837837837837838, |
| "grad_norm": 0.6346222758293152, |
| "learning_rate": 3.7168918918918924e-05, |
| "loss": 0.0531, |
| "step": 1900 |
| }, |
| { |
| "epoch": 1.2905405405405406, |
| "grad_norm": 0.3384900689125061, |
| "learning_rate": 3.7101351351351357e-05, |
| "loss": 0.0604, |
| "step": 1910 |
| }, |
| { |
| "epoch": 1.2972972972972974, |
| "grad_norm": 0.44982558488845825, |
| "learning_rate": 3.703378378378379e-05, |
| "loss": 0.0663, |
| "step": 1920 |
| }, |
| { |
| "epoch": 1.304054054054054, |
| "grad_norm": 0.49966755509376526, |
| "learning_rate": 3.696621621621622e-05, |
| "loss": 0.0506, |
| "step": 1930 |
| }, |
| { |
| "epoch": 1.3108108108108107, |
| "grad_norm": 0.38622167706489563, |
| "learning_rate": 3.6898648648648646e-05, |
| "loss": 0.053, |
| "step": 1940 |
| }, |
| { |
| "epoch": 1.3175675675675675, |
| "grad_norm": 0.37733376026153564, |
| "learning_rate": 3.683108108108108e-05, |
| "loss": 0.0548, |
| "step": 1950 |
| }, |
| { |
| "epoch": 1.3243243243243243, |
| "grad_norm": 0.525426983833313, |
| "learning_rate": 3.676351351351352e-05, |
| "loss": 0.0621, |
| "step": 1960 |
| }, |
| { |
| "epoch": 1.3310810810810811, |
| "grad_norm": 0.6133748292922974, |
| "learning_rate": 3.669594594594595e-05, |
| "loss": 0.0556, |
| "step": 1970 |
| }, |
| { |
| "epoch": 1.3378378378378377, |
| "grad_norm": 0.5005325078964233, |
| "learning_rate": 3.662837837837838e-05, |
| "loss": 0.0633, |
| "step": 1980 |
| }, |
| { |
| "epoch": 1.3445945945945945, |
| "grad_norm": 0.45884352922439575, |
| "learning_rate": 3.6560810810810815e-05, |
| "loss": 0.0584, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.3513513513513513, |
| "grad_norm": 0.3934214115142822, |
| "learning_rate": 3.649324324324325e-05, |
| "loss": 0.0522, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.3581081081081081, |
| "grad_norm": 0.3990820050239563, |
| "learning_rate": 3.642567567567568e-05, |
| "loss": 0.0596, |
| "step": 2010 |
| }, |
| { |
| "epoch": 1.364864864864865, |
| "grad_norm": 0.43622463941574097, |
| "learning_rate": 3.6358108108108105e-05, |
| "loss": 0.069, |
| "step": 2020 |
| }, |
| { |
| "epoch": 1.3716216216216215, |
| "grad_norm": 0.4469757378101349, |
| "learning_rate": 3.6290540540540544e-05, |
| "loss": 0.0591, |
| "step": 2030 |
| }, |
| { |
| "epoch": 1.3783783783783785, |
| "grad_norm": 0.3687591552734375, |
| "learning_rate": 3.6222972972972976e-05, |
| "loss": 0.0517, |
| "step": 2040 |
| }, |
| { |
| "epoch": 1.385135135135135, |
| "grad_norm": 0.24021026492118835, |
| "learning_rate": 3.615540540540541e-05, |
| "loss": 0.0429, |
| "step": 2050 |
| }, |
| { |
| "epoch": 1.3918918918918919, |
| "grad_norm": 0.38636326789855957, |
| "learning_rate": 3.608783783783784e-05, |
| "loss": 0.0481, |
| "step": 2060 |
| }, |
| { |
| "epoch": 1.3986486486486487, |
| "grad_norm": 0.7737833857536316, |
| "learning_rate": 3.602027027027027e-05, |
| "loss": 0.0482, |
| "step": 2070 |
| }, |
| { |
| "epoch": 1.4054054054054055, |
| "grad_norm": 0.5627581477165222, |
| "learning_rate": 3.5952702702702705e-05, |
| "loss": 0.0627, |
| "step": 2080 |
| }, |
| { |
| "epoch": 1.4121621621621623, |
| "grad_norm": 0.33393439650535583, |
| "learning_rate": 3.588513513513514e-05, |
| "loss": 0.0556, |
| "step": 2090 |
| }, |
| { |
| "epoch": 1.4189189189189189, |
| "grad_norm": 0.47681981325149536, |
| "learning_rate": 3.581756756756757e-05, |
| "loss": 0.0621, |
| "step": 2100 |
| }, |
| { |
| "epoch": 1.4256756756756757, |
| "grad_norm": 0.4113961160182953, |
| "learning_rate": 3.575e-05, |
| "loss": 0.0559, |
| "step": 2110 |
| }, |
| { |
| "epoch": 1.4324324324324325, |
| "grad_norm": 0.3725287616252899, |
| "learning_rate": 3.5682432432432434e-05, |
| "loss": 0.0487, |
| "step": 2120 |
| }, |
| { |
| "epoch": 1.4391891891891893, |
| "grad_norm": 0.36960911750793457, |
| "learning_rate": 3.5614864864864866e-05, |
| "loss": 0.0496, |
| "step": 2130 |
| }, |
| { |
| "epoch": 1.445945945945946, |
| "grad_norm": 0.3826832175254822, |
| "learning_rate": 3.55472972972973e-05, |
| "loss": 0.0499, |
| "step": 2140 |
| }, |
| { |
| "epoch": 1.4527027027027026, |
| "grad_norm": 0.5639827847480774, |
| "learning_rate": 3.547972972972973e-05, |
| "loss": 0.0647, |
| "step": 2150 |
| }, |
| { |
| "epoch": 1.4594594594594594, |
| "grad_norm": 0.7879467606544495, |
| "learning_rate": 3.541216216216216e-05, |
| "loss": 0.0631, |
| "step": 2160 |
| }, |
| { |
| "epoch": 1.4662162162162162, |
| "grad_norm": 0.27783432602882385, |
| "learning_rate": 3.53445945945946e-05, |
| "loss": 0.0534, |
| "step": 2170 |
| }, |
| { |
| "epoch": 1.472972972972973, |
| "grad_norm": 0.4450521171092987, |
| "learning_rate": 3.527702702702703e-05, |
| "loss": 0.0578, |
| "step": 2180 |
| }, |
| { |
| "epoch": 1.4797297297297298, |
| "grad_norm": 0.27280521392822266, |
| "learning_rate": 3.520945945945946e-05, |
| "loss": 0.0508, |
| "step": 2190 |
| }, |
| { |
| "epoch": 1.4864864864864864, |
| "grad_norm": 0.2709004878997803, |
| "learning_rate": 3.514189189189189e-05, |
| "loss": 0.0466, |
| "step": 2200 |
| }, |
| { |
| "epoch": 1.4932432432432432, |
| "grad_norm": 0.3932722806930542, |
| "learning_rate": 3.5074324324324324e-05, |
| "loss": 0.0531, |
| "step": 2210 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.4892805516719818, |
| "learning_rate": 3.5006756756756756e-05, |
| "loss": 0.0457, |
| "step": 2220 |
| }, |
| { |
| "epoch": 1.5067567567567568, |
| "grad_norm": 0.5069631934165955, |
| "learning_rate": 3.493918918918919e-05, |
| "loss": 0.0593, |
| "step": 2230 |
| }, |
| { |
| "epoch": 1.5135135135135136, |
| "grad_norm": 0.4157819449901581, |
| "learning_rate": 3.487162162162163e-05, |
| "loss": 0.0562, |
| "step": 2240 |
| }, |
| { |
| "epoch": 1.5202702702702702, |
| "grad_norm": 0.31272444128990173, |
| "learning_rate": 3.480405405405406e-05, |
| "loss": 0.0493, |
| "step": 2250 |
| }, |
| { |
| "epoch": 1.527027027027027, |
| "grad_norm": 0.5022543668746948, |
| "learning_rate": 3.4736486486486485e-05, |
| "loss": 0.0518, |
| "step": 2260 |
| }, |
| { |
| "epoch": 1.5337837837837838, |
| "grad_norm": 0.6875999569892883, |
| "learning_rate": 3.466891891891892e-05, |
| "loss": 0.0403, |
| "step": 2270 |
| }, |
| { |
| "epoch": 1.5405405405405406, |
| "grad_norm": 0.4183546006679535, |
| "learning_rate": 3.460135135135135e-05, |
| "loss": 0.0505, |
| "step": 2280 |
| }, |
| { |
| "epoch": 1.5472972972972974, |
| "grad_norm": 0.4362141788005829, |
| "learning_rate": 3.453378378378378e-05, |
| "loss": 0.0584, |
| "step": 2290 |
| }, |
| { |
| "epoch": 1.554054054054054, |
| "grad_norm": 0.5941928029060364, |
| "learning_rate": 3.446621621621622e-05, |
| "loss": 0.069, |
| "step": 2300 |
| }, |
| { |
| "epoch": 1.560810810810811, |
| "grad_norm": 0.9973124265670776, |
| "learning_rate": 3.4398648648648653e-05, |
| "loss": 0.0676, |
| "step": 2310 |
| }, |
| { |
| "epoch": 1.5675675675675675, |
| "grad_norm": 0.40244096517562866, |
| "learning_rate": 3.4331081081081086e-05, |
| "loss": 0.061, |
| "step": 2320 |
| }, |
| { |
| "epoch": 1.5743243243243243, |
| "grad_norm": 0.6185030937194824, |
| "learning_rate": 3.426351351351352e-05, |
| "loss": 0.0512, |
| "step": 2330 |
| }, |
| { |
| "epoch": 1.5810810810810811, |
| "grad_norm": 0.41134995222091675, |
| "learning_rate": 3.4195945945945943e-05, |
| "loss": 0.0557, |
| "step": 2340 |
| }, |
| { |
| "epoch": 1.5878378378378377, |
| "grad_norm": 0.34551721811294556, |
| "learning_rate": 3.4128378378378376e-05, |
| "loss": 0.0396, |
| "step": 2350 |
| }, |
| { |
| "epoch": 1.5945945945945947, |
| "grad_norm": 0.6904975175857544, |
| "learning_rate": 3.4060810810810815e-05, |
| "loss": 0.0503, |
| "step": 2360 |
| }, |
| { |
| "epoch": 1.6013513513513513, |
| "grad_norm": 0.35074079036712646, |
| "learning_rate": 3.399324324324325e-05, |
| "loss": 0.047, |
| "step": 2370 |
| }, |
| { |
| "epoch": 1.6081081081081081, |
| "grad_norm": 0.4111129641532898, |
| "learning_rate": 3.392567567567568e-05, |
| "loss": 0.0485, |
| "step": 2380 |
| }, |
| { |
| "epoch": 1.614864864864865, |
| "grad_norm": 0.597917914390564, |
| "learning_rate": 3.385810810810811e-05, |
| "loss": 0.0473, |
| "step": 2390 |
| }, |
| { |
| "epoch": 1.6216216216216215, |
| "grad_norm": 0.3226557970046997, |
| "learning_rate": 3.3790540540540544e-05, |
| "loss": 0.0438, |
| "step": 2400 |
| }, |
| { |
| "epoch": 1.6283783783783785, |
| "grad_norm": 0.31680727005004883, |
| "learning_rate": 3.3722972972972976e-05, |
| "loss": 0.0442, |
| "step": 2410 |
| }, |
| { |
| "epoch": 1.635135135135135, |
| "grad_norm": 0.5353063941001892, |
| "learning_rate": 3.36554054054054e-05, |
| "loss": 0.0566, |
| "step": 2420 |
| }, |
| { |
| "epoch": 1.6418918918918919, |
| "grad_norm": 0.29757750034332275, |
| "learning_rate": 3.358783783783784e-05, |
| "loss": 0.0372, |
| "step": 2430 |
| }, |
| { |
| "epoch": 1.6486486486486487, |
| "grad_norm": 0.32976236939430237, |
| "learning_rate": 3.352027027027027e-05, |
| "loss": 0.0499, |
| "step": 2440 |
| }, |
| { |
| "epoch": 1.6554054054054053, |
| "grad_norm": 0.45679962635040283, |
| "learning_rate": 3.3452702702702705e-05, |
| "loss": 0.0494, |
| "step": 2450 |
| }, |
| { |
| "epoch": 1.6621621621621623, |
| "grad_norm": 0.37637847661972046, |
| "learning_rate": 3.338513513513514e-05, |
| "loss": 0.0511, |
| "step": 2460 |
| }, |
| { |
| "epoch": 1.6689189189189189, |
| "grad_norm": 0.24155497550964355, |
| "learning_rate": 3.331756756756757e-05, |
| "loss": 0.0488, |
| "step": 2470 |
| }, |
| { |
| "epoch": 1.6756756756756757, |
| "grad_norm": 0.24483831226825714, |
| "learning_rate": 3.325e-05, |
| "loss": 0.0621, |
| "step": 2480 |
| }, |
| { |
| "epoch": 1.6824324324324325, |
| "grad_norm": 0.3654753863811493, |
| "learning_rate": 3.3182432432432434e-05, |
| "loss": 0.0478, |
| "step": 2490 |
| }, |
| { |
| "epoch": 1.689189189189189, |
| "grad_norm": 0.8125627040863037, |
| "learning_rate": 3.3114864864864866e-05, |
| "loss": 0.061, |
| "step": 2500 |
| }, |
| { |
| "epoch": 1.695945945945946, |
| "grad_norm": 0.2318248152732849, |
| "learning_rate": 3.30472972972973e-05, |
| "loss": 0.0485, |
| "step": 2510 |
| }, |
| { |
| "epoch": 1.7027027027027026, |
| "grad_norm": 0.5518670082092285, |
| "learning_rate": 3.297972972972973e-05, |
| "loss": 0.0535, |
| "step": 2520 |
| }, |
| { |
| "epoch": 1.7094594594594594, |
| "grad_norm": 0.3407860994338989, |
| "learning_rate": 3.291216216216216e-05, |
| "loss": 0.0423, |
| "step": 2530 |
| }, |
| { |
| "epoch": 1.7162162162162162, |
| "grad_norm": 0.17148450016975403, |
| "learning_rate": 3.2844594594594595e-05, |
| "loss": 0.0419, |
| "step": 2540 |
| }, |
| { |
| "epoch": 1.722972972972973, |
| "grad_norm": 0.34441882371902466, |
| "learning_rate": 3.277702702702703e-05, |
| "loss": 0.0443, |
| "step": 2550 |
| }, |
| { |
| "epoch": 1.7297297297297298, |
| "grad_norm": 0.516750693321228, |
| "learning_rate": 3.270945945945946e-05, |
| "loss": 0.0643, |
| "step": 2560 |
| }, |
| { |
| "epoch": 1.7364864864864864, |
| "grad_norm": 0.5338383316993713, |
| "learning_rate": 3.26418918918919e-05, |
| "loss": 0.0937, |
| "step": 2570 |
| }, |
| { |
| "epoch": 1.7432432432432432, |
| "grad_norm": 0.44282281398773193, |
| "learning_rate": 3.2574324324324324e-05, |
| "loss": 0.0434, |
| "step": 2580 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.20837600529193878, |
| "learning_rate": 3.250675675675676e-05, |
| "loss": 0.0518, |
| "step": 2590 |
| }, |
| { |
| "epoch": 1.7567567567567568, |
| "grad_norm": 0.28318697214126587, |
| "learning_rate": 3.243918918918919e-05, |
| "loss": 0.0521, |
| "step": 2600 |
| }, |
| { |
| "epoch": 1.7635135135135136, |
| "grad_norm": 0.3730570077896118, |
| "learning_rate": 3.237162162162162e-05, |
| "loss": 0.0557, |
| "step": 2610 |
| }, |
| { |
| "epoch": 1.7702702702702702, |
| "grad_norm": 0.3967922329902649, |
| "learning_rate": 3.2304054054054053e-05, |
| "loss": 0.0645, |
| "step": 2620 |
| }, |
| { |
| "epoch": 1.777027027027027, |
| "grad_norm": 0.43898531794548035, |
| "learning_rate": 3.223648648648649e-05, |
| "loss": 0.0524, |
| "step": 2630 |
| }, |
| { |
| "epoch": 1.7837837837837838, |
| "grad_norm": 0.13809113204479218, |
| "learning_rate": 3.2168918918918925e-05, |
| "loss": 0.0416, |
| "step": 2640 |
| }, |
| { |
| "epoch": 1.7905405405405406, |
| "grad_norm": 0.24318750202655792, |
| "learning_rate": 3.210135135135136e-05, |
| "loss": 0.0543, |
| "step": 2650 |
| }, |
| { |
| "epoch": 1.7972972972972974, |
| "grad_norm": 0.3348706066608429, |
| "learning_rate": 3.203378378378378e-05, |
| "loss": 0.0364, |
| "step": 2660 |
| }, |
| { |
| "epoch": 1.804054054054054, |
| "grad_norm": 0.29943642020225525, |
| "learning_rate": 3.1966216216216215e-05, |
| "loss": 0.0586, |
| "step": 2670 |
| }, |
| { |
| "epoch": 1.810810810810811, |
| "grad_norm": 0.40303003787994385, |
| "learning_rate": 3.189864864864865e-05, |
| "loss": 0.0574, |
| "step": 2680 |
| }, |
| { |
| "epoch": 1.8175675675675675, |
| "grad_norm": 29.34443473815918, |
| "learning_rate": 3.183108108108108e-05, |
| "loss": 0.0561, |
| "step": 2690 |
| }, |
| { |
| "epoch": 1.8243243243243243, |
| "grad_norm": 0.5370333790779114, |
| "learning_rate": 3.176351351351352e-05, |
| "loss": 0.0378, |
| "step": 2700 |
| }, |
| { |
| "epoch": 1.8310810810810811, |
| "grad_norm": 0.30435851216316223, |
| "learning_rate": 3.169594594594595e-05, |
| "loss": 0.0485, |
| "step": 2710 |
| }, |
| { |
| "epoch": 1.8378378378378377, |
| "grad_norm": 0.3103540539741516, |
| "learning_rate": 3.162837837837838e-05, |
| "loss": 0.0467, |
| "step": 2720 |
| }, |
| { |
| "epoch": 1.8445945945945947, |
| "grad_norm": 0.2695455551147461, |
| "learning_rate": 3.1560810810810815e-05, |
| "loss": 0.0437, |
| "step": 2730 |
| }, |
| { |
| "epoch": 1.8513513513513513, |
| "grad_norm": 0.19030825793743134, |
| "learning_rate": 3.149324324324324e-05, |
| "loss": 0.0349, |
| "step": 2740 |
| }, |
| { |
| "epoch": 1.8581081081081081, |
| "grad_norm": 0.49268749356269836, |
| "learning_rate": 3.142567567567567e-05, |
| "loss": 0.0425, |
| "step": 2750 |
| }, |
| { |
| "epoch": 1.864864864864865, |
| "grad_norm": 0.22729387879371643, |
| "learning_rate": 3.135810810810811e-05, |
| "loss": 0.0374, |
| "step": 2760 |
| }, |
| { |
| "epoch": 1.8716216216216215, |
| "grad_norm": 0.4349585473537445, |
| "learning_rate": 3.1290540540540544e-05, |
| "loss": 0.0554, |
| "step": 2770 |
| }, |
| { |
| "epoch": 1.8783783783783785, |
| "grad_norm": 0.6437013149261475, |
| "learning_rate": 3.1222972972972976e-05, |
| "loss": 0.0479, |
| "step": 2780 |
| }, |
| { |
| "epoch": 1.885135135135135, |
| "grad_norm": 0.377573698759079, |
| "learning_rate": 3.115540540540541e-05, |
| "loss": 0.0434, |
| "step": 2790 |
| }, |
| { |
| "epoch": 1.8918918918918919, |
| "grad_norm": 0.4241003692150116, |
| "learning_rate": 3.108783783783784e-05, |
| "loss": 0.0523, |
| "step": 2800 |
| }, |
| { |
| "epoch": 1.8986486486486487, |
| "grad_norm": 0.5633030533790588, |
| "learning_rate": 3.102027027027027e-05, |
| "loss": 0.0496, |
| "step": 2810 |
| }, |
| { |
| "epoch": 1.9054054054054053, |
| "grad_norm": 0.4770616292953491, |
| "learning_rate": 3.09527027027027e-05, |
| "loss": 0.0411, |
| "step": 2820 |
| }, |
| { |
| "epoch": 1.9121621621621623, |
| "grad_norm": 2.8804574012756348, |
| "learning_rate": 3.088513513513514e-05, |
| "loss": 0.0462, |
| "step": 2830 |
| }, |
| { |
| "epoch": 1.9189189189189189, |
| "grad_norm": 0.4047267436981201, |
| "learning_rate": 3.081756756756757e-05, |
| "loss": 0.0407, |
| "step": 2840 |
| }, |
| { |
| "epoch": 1.9256756756756757, |
| "grad_norm": 0.20034578442573547, |
| "learning_rate": 3.075e-05, |
| "loss": 0.0553, |
| "step": 2850 |
| }, |
| { |
| "epoch": 1.9324324324324325, |
| "grad_norm": 0.5225114226341248, |
| "learning_rate": 3.0682432432432434e-05, |
| "loss": 0.0451, |
| "step": 2860 |
| }, |
| { |
| "epoch": 1.939189189189189, |
| "grad_norm": 0.5536220669746399, |
| "learning_rate": 3.0614864864864867e-05, |
| "loss": 0.0499, |
| "step": 2870 |
| }, |
| { |
| "epoch": 1.945945945945946, |
| "grad_norm": 0.29939669370651245, |
| "learning_rate": 3.05472972972973e-05, |
| "loss": 0.039, |
| "step": 2880 |
| }, |
| { |
| "epoch": 1.9527027027027026, |
| "grad_norm": 0.40010547637939453, |
| "learning_rate": 3.0479729729729734e-05, |
| "loss": 0.049, |
| "step": 2890 |
| }, |
| { |
| "epoch": 1.9594594594594594, |
| "grad_norm": 0.24146144092082977, |
| "learning_rate": 3.041216216216216e-05, |
| "loss": 0.0555, |
| "step": 2900 |
| }, |
| { |
| "epoch": 1.9662162162162162, |
| "grad_norm": 0.3182704746723175, |
| "learning_rate": 3.0344594594594596e-05, |
| "loss": 0.0425, |
| "step": 2910 |
| }, |
| { |
| "epoch": 1.972972972972973, |
| "grad_norm": 0.5397040247917175, |
| "learning_rate": 3.0277027027027028e-05, |
| "loss": 0.0479, |
| "step": 2920 |
| }, |
| { |
| "epoch": 1.9797297297297298, |
| "grad_norm": 0.31318461894989014, |
| "learning_rate": 3.020945945945946e-05, |
| "loss": 0.0528, |
| "step": 2930 |
| }, |
| { |
| "epoch": 1.9864864864864864, |
| "grad_norm": 0.2979433238506317, |
| "learning_rate": 3.0141891891891892e-05, |
| "loss": 0.0396, |
| "step": 2940 |
| }, |
| { |
| "epoch": 1.9932432432432432, |
| "grad_norm": 0.329349160194397, |
| "learning_rate": 3.0074324324324328e-05, |
| "loss": 0.0644, |
| "step": 2950 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.2042832374572754, |
| "learning_rate": 3.000675675675676e-05, |
| "loss": 0.0446, |
| "step": 2960 |
| }, |
| { |
| "epoch": 2.0, |
| "eval_loss": 0.02900381200015545, |
| "eval_runtime": 232.4586, |
| "eval_samples_per_second": 4.495, |
| "eval_steps_per_second": 1.127, |
| "step": 2960 |
| }, |
| { |
| "epoch": 2.0067567567567566, |
| "grad_norm": 0.41488581895828247, |
| "learning_rate": 2.9939189189189193e-05, |
| "loss": 0.0478, |
| "step": 2970 |
| }, |
| { |
| "epoch": 2.0135135135135136, |
| "grad_norm": 0.46764683723449707, |
| "learning_rate": 2.987162162162162e-05, |
| "loss": 0.0565, |
| "step": 2980 |
| }, |
| { |
| "epoch": 2.02027027027027, |
| "grad_norm": 0.2321554720401764, |
| "learning_rate": 2.9804054054054054e-05, |
| "loss": 0.0286, |
| "step": 2990 |
| }, |
| { |
| "epoch": 2.027027027027027, |
| "grad_norm": 0.10871370881795883, |
| "learning_rate": 2.9736486486486486e-05, |
| "loss": 0.0461, |
| "step": 3000 |
| }, |
| { |
| "epoch": 2.0337837837837838, |
| "grad_norm": 0.42073723673820496, |
| "learning_rate": 2.966891891891892e-05, |
| "loss": 0.0396, |
| "step": 3010 |
| }, |
| { |
| "epoch": 2.0405405405405403, |
| "grad_norm": 0.684948205947876, |
| "learning_rate": 2.9601351351351354e-05, |
| "loss": 0.0789, |
| "step": 3020 |
| }, |
| { |
| "epoch": 2.0472972972972974, |
| "grad_norm": 2.56610107421875, |
| "learning_rate": 2.9533783783783786e-05, |
| "loss": 0.0561, |
| "step": 3030 |
| }, |
| { |
| "epoch": 2.054054054054054, |
| "grad_norm": 0.28919854760169983, |
| "learning_rate": 2.9466216216216218e-05, |
| "loss": 0.0486, |
| "step": 3040 |
| }, |
| { |
| "epoch": 2.060810810810811, |
| "grad_norm": 0.28242143988609314, |
| "learning_rate": 2.9398648648648654e-05, |
| "loss": 0.0511, |
| "step": 3050 |
| }, |
| { |
| "epoch": 2.0675675675675675, |
| "grad_norm": 0.5599525570869446, |
| "learning_rate": 2.933108108108108e-05, |
| "loss": 0.0501, |
| "step": 3060 |
| }, |
| { |
| "epoch": 2.074324324324324, |
| "grad_norm": 0.314981609582901, |
| "learning_rate": 2.926351351351351e-05, |
| "loss": 0.036, |
| "step": 3070 |
| }, |
| { |
| "epoch": 2.081081081081081, |
| "grad_norm": 0.38009947538375854, |
| "learning_rate": 2.9195945945945947e-05, |
| "loss": 0.0449, |
| "step": 3080 |
| }, |
| { |
| "epoch": 2.0878378378378377, |
| "grad_norm": 0.46030741930007935, |
| "learning_rate": 2.912837837837838e-05, |
| "loss": 0.0485, |
| "step": 3090 |
| }, |
| { |
| "epoch": 2.0945945945945947, |
| "grad_norm": 0.2718926966190338, |
| "learning_rate": 2.9060810810810812e-05, |
| "loss": 0.0429, |
| "step": 3100 |
| }, |
| { |
| "epoch": 2.1013513513513513, |
| "grad_norm": 0.41588521003723145, |
| "learning_rate": 2.8993243243243244e-05, |
| "loss": 0.0492, |
| "step": 3110 |
| }, |
| { |
| "epoch": 2.108108108108108, |
| "grad_norm": 0.503211259841919, |
| "learning_rate": 2.892567567567568e-05, |
| "loss": 0.0399, |
| "step": 3120 |
| }, |
| { |
| "epoch": 2.114864864864865, |
| "grad_norm": 0.39988529682159424, |
| "learning_rate": 2.8858108108108112e-05, |
| "loss": 0.0528, |
| "step": 3130 |
| }, |
| { |
| "epoch": 2.1216216216216215, |
| "grad_norm": 0.2753487229347229, |
| "learning_rate": 2.879054054054054e-05, |
| "loss": 0.0554, |
| "step": 3140 |
| }, |
| { |
| "epoch": 2.1283783783783785, |
| "grad_norm": 0.5936213135719299, |
| "learning_rate": 2.8722972972972973e-05, |
| "loss": 0.0486, |
| "step": 3150 |
| }, |
| { |
| "epoch": 2.135135135135135, |
| "grad_norm": 4.782855033874512, |
| "learning_rate": 2.8655405405405405e-05, |
| "loss": 0.0544, |
| "step": 3160 |
| }, |
| { |
| "epoch": 2.141891891891892, |
| "grad_norm": 0.9043190479278564, |
| "learning_rate": 2.8587837837837838e-05, |
| "loss": 0.0422, |
| "step": 3170 |
| }, |
| { |
| "epoch": 2.1486486486486487, |
| "grad_norm": 0.28013184666633606, |
| "learning_rate": 2.8520270270270273e-05, |
| "loss": 0.0385, |
| "step": 3180 |
| }, |
| { |
| "epoch": 2.1554054054054053, |
| "grad_norm": 0.5391634106636047, |
| "learning_rate": 2.8452702702702706e-05, |
| "loss": 0.0432, |
| "step": 3190 |
| }, |
| { |
| "epoch": 2.1621621621621623, |
| "grad_norm": 0.28761211037635803, |
| "learning_rate": 2.8385135135135138e-05, |
| "loss": 0.045, |
| "step": 3200 |
| }, |
| { |
| "epoch": 2.168918918918919, |
| "grad_norm": 0.43844741582870483, |
| "learning_rate": 2.831756756756757e-05, |
| "loss": 0.051, |
| "step": 3210 |
| }, |
| { |
| "epoch": 2.175675675675676, |
| "grad_norm": 0.9262340664863586, |
| "learning_rate": 2.825e-05, |
| "loss": 0.0618, |
| "step": 3220 |
| }, |
| { |
| "epoch": 2.1824324324324325, |
| "grad_norm": 0.2561081349849701, |
| "learning_rate": 2.818243243243243e-05, |
| "loss": 0.0514, |
| "step": 3230 |
| }, |
| { |
| "epoch": 2.189189189189189, |
| "grad_norm": 0.25118082761764526, |
| "learning_rate": 2.8114864864864863e-05, |
| "loss": 0.0487, |
| "step": 3240 |
| }, |
| { |
| "epoch": 2.195945945945946, |
| "grad_norm": 0.2230692356824875, |
| "learning_rate": 2.80472972972973e-05, |
| "loss": 0.0324, |
| "step": 3250 |
| }, |
| { |
| "epoch": 2.2027027027027026, |
| "grad_norm": 0.27720406651496887, |
| "learning_rate": 2.797972972972973e-05, |
| "loss": 0.0473, |
| "step": 3260 |
| }, |
| { |
| "epoch": 2.2094594594594597, |
| "grad_norm": 0.4395008087158203, |
| "learning_rate": 2.7912162162162164e-05, |
| "loss": 0.0451, |
| "step": 3270 |
| }, |
| { |
| "epoch": 2.2162162162162162, |
| "grad_norm": 0.34843841195106506, |
| "learning_rate": 2.7844594594594596e-05, |
| "loss": 0.0433, |
| "step": 3280 |
| }, |
| { |
| "epoch": 2.222972972972973, |
| "grad_norm": 0.3215220868587494, |
| "learning_rate": 2.777702702702703e-05, |
| "loss": 0.0423, |
| "step": 3290 |
| }, |
| { |
| "epoch": 2.22972972972973, |
| "grad_norm": 0.44971078634262085, |
| "learning_rate": 2.7709459459459457e-05, |
| "loss": 0.0474, |
| "step": 3300 |
| }, |
| { |
| "epoch": 2.2364864864864864, |
| "grad_norm": 0.4757413864135742, |
| "learning_rate": 2.7641891891891893e-05, |
| "loss": 0.0416, |
| "step": 3310 |
| }, |
| { |
| "epoch": 2.2432432432432434, |
| "grad_norm": 0.5553408861160278, |
| "learning_rate": 2.7574324324324325e-05, |
| "loss": 0.0491, |
| "step": 3320 |
| }, |
| { |
| "epoch": 2.25, |
| "grad_norm": 0.3604501187801361, |
| "learning_rate": 2.7506756756756757e-05, |
| "loss": 0.041, |
| "step": 3330 |
| }, |
| { |
| "epoch": 2.2567567567567566, |
| "grad_norm": 0.23111610114574432, |
| "learning_rate": 2.743918918918919e-05, |
| "loss": 0.0437, |
| "step": 3340 |
| }, |
| { |
| "epoch": 2.2635135135135136, |
| "grad_norm": 0.288889080286026, |
| "learning_rate": 2.7371621621621625e-05, |
| "loss": 0.0375, |
| "step": 3350 |
| }, |
| { |
| "epoch": 2.27027027027027, |
| "grad_norm": 0.5279484987258911, |
| "learning_rate": 2.7304054054054057e-05, |
| "loss": 0.04, |
| "step": 3360 |
| }, |
| { |
| "epoch": 2.277027027027027, |
| "grad_norm": 0.259581983089447, |
| "learning_rate": 2.723648648648649e-05, |
| "loss": 0.0337, |
| "step": 3370 |
| }, |
| { |
| "epoch": 2.2837837837837838, |
| "grad_norm": 0.37831178307533264, |
| "learning_rate": 2.716891891891892e-05, |
| "loss": 0.0427, |
| "step": 3380 |
| }, |
| { |
| "epoch": 2.2905405405405403, |
| "grad_norm": 0.3985559940338135, |
| "learning_rate": 2.710135135135135e-05, |
| "loss": 0.0394, |
| "step": 3390 |
| }, |
| { |
| "epoch": 2.2972972972972974, |
| "grad_norm": 0.32648199796676636, |
| "learning_rate": 2.7033783783783783e-05, |
| "loss": 0.035, |
| "step": 3400 |
| }, |
| { |
| "epoch": 2.304054054054054, |
| "grad_norm": 0.20723649859428406, |
| "learning_rate": 2.696621621621622e-05, |
| "loss": 0.046, |
| "step": 3410 |
| }, |
| { |
| "epoch": 2.310810810810811, |
| "grad_norm": 0.5824639201164246, |
| "learning_rate": 2.689864864864865e-05, |
| "loss": 0.0445, |
| "step": 3420 |
| }, |
| { |
| "epoch": 2.3175675675675675, |
| "grad_norm": 0.3186579644680023, |
| "learning_rate": 2.6831081081081083e-05, |
| "loss": 0.0475, |
| "step": 3430 |
| }, |
| { |
| "epoch": 2.3243243243243246, |
| "grad_norm": 0.40933048725128174, |
| "learning_rate": 2.6763513513513515e-05, |
| "loss": 0.0587, |
| "step": 3440 |
| }, |
| { |
| "epoch": 2.331081081081081, |
| "grad_norm": 0.48833781480789185, |
| "learning_rate": 2.669594594594595e-05, |
| "loss": 0.0481, |
| "step": 3450 |
| }, |
| { |
| "epoch": 2.3378378378378377, |
| "grad_norm": 0.2573770582675934, |
| "learning_rate": 2.6628378378378376e-05, |
| "loss": 0.0453, |
| "step": 3460 |
| }, |
| { |
| "epoch": 2.3445945945945947, |
| "grad_norm": 0.2869914174079895, |
| "learning_rate": 2.656081081081081e-05, |
| "loss": 0.0407, |
| "step": 3470 |
| }, |
| { |
| "epoch": 2.3513513513513513, |
| "grad_norm": 0.35150277614593506, |
| "learning_rate": 2.6493243243243244e-05, |
| "loss": 0.0326, |
| "step": 3480 |
| }, |
| { |
| "epoch": 2.358108108108108, |
| "grad_norm": 0.3215469419956207, |
| "learning_rate": 2.6425675675675677e-05, |
| "loss": 0.0612, |
| "step": 3490 |
| }, |
| { |
| "epoch": 2.364864864864865, |
| "grad_norm": 0.17056582868099213, |
| "learning_rate": 2.635810810810811e-05, |
| "loss": 0.0521, |
| "step": 3500 |
| }, |
| { |
| "epoch": 2.3716216216216215, |
| "grad_norm": 0.24536234140396118, |
| "learning_rate": 2.629054054054054e-05, |
| "loss": 0.0522, |
| "step": 3510 |
| }, |
| { |
| "epoch": 2.3783783783783785, |
| "grad_norm": 0.49538880586624146, |
| "learning_rate": 2.6222972972972977e-05, |
| "loss": 0.0328, |
| "step": 3520 |
| }, |
| { |
| "epoch": 2.385135135135135, |
| "grad_norm": 0.3088550567626953, |
| "learning_rate": 2.615540540540541e-05, |
| "loss": 0.0673, |
| "step": 3530 |
| }, |
| { |
| "epoch": 2.391891891891892, |
| "grad_norm": 0.29182663559913635, |
| "learning_rate": 2.608783783783784e-05, |
| "loss": 0.0359, |
| "step": 3540 |
| }, |
| { |
| "epoch": 2.3986486486486487, |
| "grad_norm": 0.3735537528991699, |
| "learning_rate": 2.602027027027027e-05, |
| "loss": 0.0396, |
| "step": 3550 |
| }, |
| { |
| "epoch": 2.4054054054054053, |
| "grad_norm": 0.9158993363380432, |
| "learning_rate": 2.5952702702702702e-05, |
| "loss": 0.0419, |
| "step": 3560 |
| }, |
| { |
| "epoch": 2.4121621621621623, |
| "grad_norm": 0.47085681557655334, |
| "learning_rate": 2.5885135135135135e-05, |
| "loss": 0.0474, |
| "step": 3570 |
| }, |
| { |
| "epoch": 2.418918918918919, |
| "grad_norm": 0.5768277049064636, |
| "learning_rate": 2.581756756756757e-05, |
| "loss": 0.0516, |
| "step": 3580 |
| }, |
| { |
| "epoch": 2.4256756756756754, |
| "grad_norm": 0.21016757190227509, |
| "learning_rate": 2.5750000000000002e-05, |
| "loss": 0.0513, |
| "step": 3590 |
| }, |
| { |
| "epoch": 2.4324324324324325, |
| "grad_norm": 0.5895877480506897, |
| "learning_rate": 2.5682432432432435e-05, |
| "loss": 0.0424, |
| "step": 3600 |
| }, |
| { |
| "epoch": 2.439189189189189, |
| "grad_norm": 0.37401655316352844, |
| "learning_rate": 2.5614864864864867e-05, |
| "loss": 0.0467, |
| "step": 3610 |
| }, |
| { |
| "epoch": 2.445945945945946, |
| "grad_norm": 0.2630750238895416, |
| "learning_rate": 2.5547297297297303e-05, |
| "loss": 0.0525, |
| "step": 3620 |
| }, |
| { |
| "epoch": 2.4527027027027026, |
| "grad_norm": 0.25770536065101624, |
| "learning_rate": 2.5479729729729728e-05, |
| "loss": 0.041, |
| "step": 3630 |
| }, |
| { |
| "epoch": 2.4594594594594597, |
| "grad_norm": 0.5026077032089233, |
| "learning_rate": 2.541216216216216e-05, |
| "loss": 0.0497, |
| "step": 3640 |
| }, |
| { |
| "epoch": 2.4662162162162162, |
| "grad_norm": 0.3355426788330078, |
| "learning_rate": 2.5344594594594596e-05, |
| "loss": 0.0338, |
| "step": 3650 |
| }, |
| { |
| "epoch": 2.472972972972973, |
| "grad_norm": 0.1499488651752472, |
| "learning_rate": 2.5277027027027028e-05, |
| "loss": 0.0517, |
| "step": 3660 |
| }, |
| { |
| "epoch": 2.47972972972973, |
| "grad_norm": 0.5145780444145203, |
| "learning_rate": 2.520945945945946e-05, |
| "loss": 0.0565, |
| "step": 3670 |
| }, |
| { |
| "epoch": 2.4864864864864864, |
| "grad_norm": 0.503230631351471, |
| "learning_rate": 2.5141891891891893e-05, |
| "loss": 0.053, |
| "step": 3680 |
| }, |
| { |
| "epoch": 2.4932432432432434, |
| "grad_norm": 0.3355911374092102, |
| "learning_rate": 2.507432432432433e-05, |
| "loss": 0.0375, |
| "step": 3690 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.17536339163780212, |
| "learning_rate": 2.500675675675676e-05, |
| "loss": 0.0367, |
| "step": 3700 |
| }, |
| { |
| "epoch": 2.506756756756757, |
| "grad_norm": 0.4582735598087311, |
| "learning_rate": 2.493918918918919e-05, |
| "loss": 0.0433, |
| "step": 3710 |
| }, |
| { |
| "epoch": 2.5135135135135136, |
| "grad_norm": 0.2658029794692993, |
| "learning_rate": 2.4871621621621625e-05, |
| "loss": 0.0446, |
| "step": 3720 |
| }, |
| { |
| "epoch": 2.52027027027027, |
| "grad_norm": 0.22042320668697357, |
| "learning_rate": 2.4804054054054054e-05, |
| "loss": 0.0409, |
| "step": 3730 |
| }, |
| { |
| "epoch": 2.527027027027027, |
| "grad_norm": 0.42648428678512573, |
| "learning_rate": 2.4736486486486486e-05, |
| "loss": 0.0503, |
| "step": 3740 |
| }, |
| { |
| "epoch": 2.5337837837837838, |
| "grad_norm": 0.49982187151908875, |
| "learning_rate": 2.4668918918918922e-05, |
| "loss": 0.0401, |
| "step": 3750 |
| }, |
| { |
| "epoch": 2.5405405405405403, |
| "grad_norm": 0.3434715270996094, |
| "learning_rate": 2.4601351351351354e-05, |
| "loss": 0.0383, |
| "step": 3760 |
| }, |
| { |
| "epoch": 2.5472972972972974, |
| "grad_norm": 0.2308432161808014, |
| "learning_rate": 2.4533783783783783e-05, |
| "loss": 0.0399, |
| "step": 3770 |
| }, |
| { |
| "epoch": 2.554054054054054, |
| "grad_norm": 0.5601356029510498, |
| "learning_rate": 2.446621621621622e-05, |
| "loss": 0.0377, |
| "step": 3780 |
| }, |
| { |
| "epoch": 2.560810810810811, |
| "grad_norm": 0.31023895740509033, |
| "learning_rate": 2.439864864864865e-05, |
| "loss": 0.054, |
| "step": 3790 |
| }, |
| { |
| "epoch": 2.5675675675675675, |
| "grad_norm": 0.39844200015068054, |
| "learning_rate": 2.4331081081081083e-05, |
| "loss": 0.0409, |
| "step": 3800 |
| }, |
| { |
| "epoch": 2.5743243243243246, |
| "grad_norm": 0.44077757000923157, |
| "learning_rate": 2.4263513513513515e-05, |
| "loss": 0.0404, |
| "step": 3810 |
| }, |
| { |
| "epoch": 2.581081081081081, |
| "grad_norm": 0.3423317074775696, |
| "learning_rate": 2.4195945945945948e-05, |
| "loss": 0.0403, |
| "step": 3820 |
| }, |
| { |
| "epoch": 2.5878378378378377, |
| "grad_norm": 0.25777336955070496, |
| "learning_rate": 2.412837837837838e-05, |
| "loss": 0.0343, |
| "step": 3830 |
| }, |
| { |
| "epoch": 2.5945945945945947, |
| "grad_norm": 0.4902068078517914, |
| "learning_rate": 2.4060810810810812e-05, |
| "loss": 0.044, |
| "step": 3840 |
| }, |
| { |
| "epoch": 2.6013513513513513, |
| "grad_norm": 0.27901652455329895, |
| "learning_rate": 2.3993243243243245e-05, |
| "loss": 0.0516, |
| "step": 3850 |
| }, |
| { |
| "epoch": 2.608108108108108, |
| "grad_norm": 0.2561982274055481, |
| "learning_rate": 2.3925675675675677e-05, |
| "loss": 0.032, |
| "step": 3860 |
| }, |
| { |
| "epoch": 2.614864864864865, |
| "grad_norm": 0.6690441370010376, |
| "learning_rate": 2.385810810810811e-05, |
| "loss": 0.0438, |
| "step": 3870 |
| }, |
| { |
| "epoch": 2.6216216216216215, |
| "grad_norm": 0.8403464555740356, |
| "learning_rate": 2.379054054054054e-05, |
| "loss": 0.0524, |
| "step": 3880 |
| }, |
| { |
| "epoch": 2.6283783783783785, |
| "grad_norm": 0.24565739929676056, |
| "learning_rate": 2.3722972972972974e-05, |
| "loss": 0.0305, |
| "step": 3890 |
| }, |
| { |
| "epoch": 2.635135135135135, |
| "grad_norm": 0.39929670095443726, |
| "learning_rate": 2.3655405405405406e-05, |
| "loss": 0.0382, |
| "step": 3900 |
| }, |
| { |
| "epoch": 2.641891891891892, |
| "grad_norm": 0.48382842540740967, |
| "learning_rate": 2.3587837837837838e-05, |
| "loss": 0.0498, |
| "step": 3910 |
| }, |
| { |
| "epoch": 2.6486486486486487, |
| "grad_norm": 0.27263057231903076, |
| "learning_rate": 2.3520270270270274e-05, |
| "loss": 0.0337, |
| "step": 3920 |
| }, |
| { |
| "epoch": 2.6554054054054053, |
| "grad_norm": 0.29816973209381104, |
| "learning_rate": 2.3452702702702703e-05, |
| "loss": 0.0322, |
| "step": 3930 |
| }, |
| { |
| "epoch": 2.6621621621621623, |
| "grad_norm": 0.35650044679641724, |
| "learning_rate": 2.3385135135135135e-05, |
| "loss": 0.0408, |
| "step": 3940 |
| }, |
| { |
| "epoch": 2.668918918918919, |
| "grad_norm": 0.22148212790489197, |
| "learning_rate": 2.331756756756757e-05, |
| "loss": 0.0506, |
| "step": 3950 |
| }, |
| { |
| "epoch": 2.6756756756756754, |
| "grad_norm": 0.330817312002182, |
| "learning_rate": 2.3250000000000003e-05, |
| "loss": 0.0458, |
| "step": 3960 |
| }, |
| { |
| "epoch": 2.6824324324324325, |
| "grad_norm": 0.3396255373954773, |
| "learning_rate": 2.318243243243243e-05, |
| "loss": 0.0349, |
| "step": 3970 |
| }, |
| { |
| "epoch": 2.689189189189189, |
| "grad_norm": 0.27983909845352173, |
| "learning_rate": 2.3114864864864867e-05, |
| "loss": 0.0555, |
| "step": 3980 |
| }, |
| { |
| "epoch": 2.695945945945946, |
| "grad_norm": 0.21532095968723297, |
| "learning_rate": 2.30472972972973e-05, |
| "loss": 0.0414, |
| "step": 3990 |
| }, |
| { |
| "epoch": 2.7027027027027026, |
| "grad_norm": 0.4602966904640198, |
| "learning_rate": 2.2979729729729732e-05, |
| "loss": 0.0416, |
| "step": 4000 |
| }, |
| { |
| "epoch": 2.7094594594594597, |
| "grad_norm": 0.37232860922813416, |
| "learning_rate": 2.2912162162162164e-05, |
| "loss": 0.0376, |
| "step": 4010 |
| }, |
| { |
| "epoch": 2.7162162162162162, |
| "grad_norm": 0.27139970660209656, |
| "learning_rate": 2.2844594594594596e-05, |
| "loss": 0.0457, |
| "step": 4020 |
| }, |
| { |
| "epoch": 2.722972972972973, |
| "grad_norm": 0.3222922682762146, |
| "learning_rate": 2.277702702702703e-05, |
| "loss": 0.0433, |
| "step": 4030 |
| }, |
| { |
| "epoch": 2.72972972972973, |
| "grad_norm": 0.5624486207962036, |
| "learning_rate": 2.270945945945946e-05, |
| "loss": 0.0417, |
| "step": 4040 |
| }, |
| { |
| "epoch": 2.7364864864864864, |
| "grad_norm": 0.7472530603408813, |
| "learning_rate": 2.2641891891891893e-05, |
| "loss": 0.0394, |
| "step": 4050 |
| }, |
| { |
| "epoch": 2.743243243243243, |
| "grad_norm": 0.38589775562286377, |
| "learning_rate": 2.2574324324324325e-05, |
| "loss": 0.0316, |
| "step": 4060 |
| }, |
| { |
| "epoch": 2.75, |
| "grad_norm": 0.34411191940307617, |
| "learning_rate": 2.2506756756756758e-05, |
| "loss": 0.0361, |
| "step": 4070 |
| }, |
| { |
| "epoch": 2.756756756756757, |
| "grad_norm": 0.7759385108947754, |
| "learning_rate": 2.243918918918919e-05, |
| "loss": 0.0385, |
| "step": 4080 |
| }, |
| { |
| "epoch": 2.7635135135135136, |
| "grad_norm": 0.25599023699760437, |
| "learning_rate": 2.2371621621621622e-05, |
| "loss": 0.042, |
| "step": 4090 |
| }, |
| { |
| "epoch": 2.77027027027027, |
| "grad_norm": 0.44792553782463074, |
| "learning_rate": 2.2304054054054054e-05, |
| "loss": 0.0431, |
| "step": 4100 |
| }, |
| { |
| "epoch": 2.777027027027027, |
| "grad_norm": 0.32037609815597534, |
| "learning_rate": 2.2236486486486487e-05, |
| "loss": 0.0354, |
| "step": 4110 |
| }, |
| { |
| "epoch": 2.7837837837837838, |
| "grad_norm": 0.3700566291809082, |
| "learning_rate": 2.2168918918918922e-05, |
| "loss": 0.0371, |
| "step": 4120 |
| }, |
| { |
| "epoch": 2.7905405405405403, |
| "grad_norm": 0.3106760084629059, |
| "learning_rate": 2.210135135135135e-05, |
| "loss": 0.0294, |
| "step": 4130 |
| }, |
| { |
| "epoch": 2.7972972972972974, |
| "grad_norm": 0.29399368166923523, |
| "learning_rate": 2.2033783783783783e-05, |
| "loss": 0.045, |
| "step": 4140 |
| }, |
| { |
| "epoch": 2.804054054054054, |
| "grad_norm": 0.2866055965423584, |
| "learning_rate": 2.196621621621622e-05, |
| "loss": 0.0351, |
| "step": 4150 |
| }, |
| { |
| "epoch": 2.810810810810811, |
| "grad_norm": 0.18367105722427368, |
| "learning_rate": 2.189864864864865e-05, |
| "loss": 0.035, |
| "step": 4160 |
| }, |
| { |
| "epoch": 2.8175675675675675, |
| "grad_norm": 0.2915230393409729, |
| "learning_rate": 2.183108108108108e-05, |
| "loss": 0.0362, |
| "step": 4170 |
| }, |
| { |
| "epoch": 2.8243243243243246, |
| "grad_norm": 0.4409267008304596, |
| "learning_rate": 2.1763513513513516e-05, |
| "loss": 0.0536, |
| "step": 4180 |
| }, |
| { |
| "epoch": 2.831081081081081, |
| "grad_norm": 0.3458040952682495, |
| "learning_rate": 2.1695945945945948e-05, |
| "loss": 0.0356, |
| "step": 4190 |
| }, |
| { |
| "epoch": 2.8378378378378377, |
| "grad_norm": 0.2696792781352997, |
| "learning_rate": 2.162837837837838e-05, |
| "loss": 0.0448, |
| "step": 4200 |
| }, |
| { |
| "epoch": 2.8445945945945947, |
| "grad_norm": 0.3584458827972412, |
| "learning_rate": 2.1560810810810812e-05, |
| "loss": 0.0424, |
| "step": 4210 |
| }, |
| { |
| "epoch": 2.8513513513513513, |
| "grad_norm": 0.23099841177463531, |
| "learning_rate": 2.1493243243243245e-05, |
| "loss": 0.0378, |
| "step": 4220 |
| }, |
| { |
| "epoch": 2.858108108108108, |
| "grad_norm": 0.7650930285453796, |
| "learning_rate": 2.1425675675675677e-05, |
| "loss": 0.044, |
| "step": 4230 |
| }, |
| { |
| "epoch": 2.864864864864865, |
| "grad_norm": 0.49164295196533203, |
| "learning_rate": 2.135810810810811e-05, |
| "loss": 0.0535, |
| "step": 4240 |
| }, |
| { |
| "epoch": 2.8716216216216215, |
| "grad_norm": 0.4650588631629944, |
| "learning_rate": 2.129054054054054e-05, |
| "loss": 0.0358, |
| "step": 4250 |
| }, |
| { |
| "epoch": 2.8783783783783785, |
| "grad_norm": 0.4031845033168793, |
| "learning_rate": 2.1222972972972974e-05, |
| "loss": 0.0372, |
| "step": 4260 |
| }, |
| { |
| "epoch": 2.885135135135135, |
| "grad_norm": 0.2923171818256378, |
| "learning_rate": 2.1155405405405406e-05, |
| "loss": 0.0428, |
| "step": 4270 |
| }, |
| { |
| "epoch": 2.891891891891892, |
| "grad_norm": 0.16636265814304352, |
| "learning_rate": 2.1087837837837838e-05, |
| "loss": 0.0487, |
| "step": 4280 |
| }, |
| { |
| "epoch": 2.8986486486486487, |
| "grad_norm": 0.7599406838417053, |
| "learning_rate": 2.102027027027027e-05, |
| "loss": 0.0401, |
| "step": 4290 |
| }, |
| { |
| "epoch": 2.9054054054054053, |
| "grad_norm": 0.25301793217658997, |
| "learning_rate": 2.0952702702702703e-05, |
| "loss": 0.0338, |
| "step": 4300 |
| }, |
| { |
| "epoch": 2.9121621621621623, |
| "grad_norm": 0.1861271858215332, |
| "learning_rate": 2.0885135135135135e-05, |
| "loss": 0.0334, |
| "step": 4310 |
| }, |
| { |
| "epoch": 2.918918918918919, |
| "grad_norm": 0.25895753502845764, |
| "learning_rate": 2.081756756756757e-05, |
| "loss": 0.0311, |
| "step": 4320 |
| }, |
| { |
| "epoch": 2.9256756756756754, |
| "grad_norm": 0.42530789971351624, |
| "learning_rate": 2.075e-05, |
| "loss": 0.0398, |
| "step": 4330 |
| }, |
| { |
| "epoch": 2.9324324324324325, |
| "grad_norm": 0.463356614112854, |
| "learning_rate": 2.0682432432432432e-05, |
| "loss": 0.0366, |
| "step": 4340 |
| }, |
| { |
| "epoch": 2.939189189189189, |
| "grad_norm": 0.4635908007621765, |
| "learning_rate": 2.0614864864864867e-05, |
| "loss": 0.0457, |
| "step": 4350 |
| }, |
| { |
| "epoch": 2.945945945945946, |
| "grad_norm": 0.5640028715133667, |
| "learning_rate": 2.05472972972973e-05, |
| "loss": 0.0374, |
| "step": 4360 |
| }, |
| { |
| "epoch": 2.9527027027027026, |
| "grad_norm": 0.5728152394294739, |
| "learning_rate": 2.047972972972973e-05, |
| "loss": 0.0388, |
| "step": 4370 |
| }, |
| { |
| "epoch": 2.9594594594594597, |
| "grad_norm": 0.25366121530532837, |
| "learning_rate": 2.0412162162162164e-05, |
| "loss": 0.0491, |
| "step": 4380 |
| }, |
| { |
| "epoch": 2.9662162162162162, |
| "grad_norm": 0.415521502494812, |
| "learning_rate": 2.0344594594594596e-05, |
| "loss": 0.0365, |
| "step": 4390 |
| }, |
| { |
| "epoch": 2.972972972972973, |
| "grad_norm": 0.24194630980491638, |
| "learning_rate": 2.027702702702703e-05, |
| "loss": 0.0356, |
| "step": 4400 |
| }, |
| { |
| "epoch": 2.97972972972973, |
| "grad_norm": 0.39360618591308594, |
| "learning_rate": 2.020945945945946e-05, |
| "loss": 0.0359, |
| "step": 4410 |
| }, |
| { |
| "epoch": 2.9864864864864864, |
| "grad_norm": 0.21369539201259613, |
| "learning_rate": 2.0141891891891893e-05, |
| "loss": 0.0439, |
| "step": 4420 |
| }, |
| { |
| "epoch": 2.993243243243243, |
| "grad_norm": 0.22118353843688965, |
| "learning_rate": 2.0074324324324325e-05, |
| "loss": 0.0315, |
| "step": 4430 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.870568037033081, |
| "learning_rate": 2.0006756756756758e-05, |
| "loss": 0.04, |
| "step": 4440 |
| }, |
| { |
| "epoch": 3.0, |
| "eval_loss": 0.02539176121354103, |
| "eval_runtime": 375.2111, |
| "eval_samples_per_second": 2.785, |
| "eval_steps_per_second": 0.698, |
| "step": 4440 |
| }, |
| { |
| "epoch": 3.0067567567567566, |
| "grad_norm": 0.40895864367485046, |
| "learning_rate": 1.993918918918919e-05, |
| "loss": 0.0406, |
| "step": 4450 |
| }, |
| { |
| "epoch": 3.0135135135135136, |
| "grad_norm": 0.26677393913269043, |
| "learning_rate": 1.9871621621621622e-05, |
| "loss": 0.0296, |
| "step": 4460 |
| }, |
| { |
| "epoch": 3.02027027027027, |
| "grad_norm": 0.29075056314468384, |
| "learning_rate": 1.9804054054054054e-05, |
| "loss": 0.0343, |
| "step": 4470 |
| }, |
| { |
| "epoch": 3.027027027027027, |
| "grad_norm": 0.31686338782310486, |
| "learning_rate": 1.973648648648649e-05, |
| "loss": 0.0341, |
| "step": 4480 |
| }, |
| { |
| "epoch": 3.0337837837837838, |
| "grad_norm": 0.3891538083553314, |
| "learning_rate": 1.966891891891892e-05, |
| "loss": 0.0383, |
| "step": 4490 |
| }, |
| { |
| "epoch": 3.0405405405405403, |
| "grad_norm": 0.29185813665390015, |
| "learning_rate": 1.960135135135135e-05, |
| "loss": 0.0642, |
| "step": 4500 |
| }, |
| { |
| "epoch": 3.0472972972972974, |
| "grad_norm": 0.4038207530975342, |
| "learning_rate": 1.9533783783783784e-05, |
| "loss": 0.0353, |
| "step": 4510 |
| }, |
| { |
| "epoch": 3.054054054054054, |
| "grad_norm": 0.12403042614459991, |
| "learning_rate": 1.946621621621622e-05, |
| "loss": 0.033, |
| "step": 4520 |
| }, |
| { |
| "epoch": 3.060810810810811, |
| "grad_norm": 0.32900774478912354, |
| "learning_rate": 1.9398648648648648e-05, |
| "loss": 0.0375, |
| "step": 4530 |
| }, |
| { |
| "epoch": 3.0675675675675675, |
| "grad_norm": 0.5344650149345398, |
| "learning_rate": 1.933108108108108e-05, |
| "loss": 0.0512, |
| "step": 4540 |
| }, |
| { |
| "epoch": 3.074324324324324, |
| "grad_norm": 0.2389213591814041, |
| "learning_rate": 1.9263513513513516e-05, |
| "loss": 0.0318, |
| "step": 4550 |
| }, |
| { |
| "epoch": 3.081081081081081, |
| "grad_norm": 0.30284884572029114, |
| "learning_rate": 1.9195945945945948e-05, |
| "loss": 0.0264, |
| "step": 4560 |
| }, |
| { |
| "epoch": 3.0878378378378377, |
| "grad_norm": 0.32080718874931335, |
| "learning_rate": 1.9128378378378377e-05, |
| "loss": 0.0413, |
| "step": 4570 |
| }, |
| { |
| "epoch": 3.0945945945945947, |
| "grad_norm": 0.3493003845214844, |
| "learning_rate": 1.9060810810810813e-05, |
| "loss": 0.0407, |
| "step": 4580 |
| }, |
| { |
| "epoch": 3.1013513513513513, |
| "grad_norm": 0.17225845158100128, |
| "learning_rate": 1.8993243243243245e-05, |
| "loss": 0.028, |
| "step": 4590 |
| }, |
| { |
| "epoch": 3.108108108108108, |
| "grad_norm": 0.7874749302864075, |
| "learning_rate": 1.8925675675675677e-05, |
| "loss": 0.037, |
| "step": 4600 |
| }, |
| { |
| "epoch": 3.114864864864865, |
| "grad_norm": 0.5345508456230164, |
| "learning_rate": 1.885810810810811e-05, |
| "loss": 0.0372, |
| "step": 4610 |
| }, |
| { |
| "epoch": 3.1216216216216215, |
| "grad_norm": 0.5025699734687805, |
| "learning_rate": 1.8790540540540542e-05, |
| "loss": 0.0374, |
| "step": 4620 |
| }, |
| { |
| "epoch": 3.1283783783783785, |
| "grad_norm": 0.3688373565673828, |
| "learning_rate": 1.8722972972972974e-05, |
| "loss": 0.0648, |
| "step": 4630 |
| }, |
| { |
| "epoch": 3.135135135135135, |
| "grad_norm": 0.29858043789863586, |
| "learning_rate": 1.8655405405405406e-05, |
| "loss": 0.0375, |
| "step": 4640 |
| }, |
| { |
| "epoch": 3.141891891891892, |
| "grad_norm": 0.5293399691581726, |
| "learning_rate": 1.858783783783784e-05, |
| "loss": 0.0411, |
| "step": 4650 |
| }, |
| { |
| "epoch": 3.1486486486486487, |
| "grad_norm": 0.3941536545753479, |
| "learning_rate": 1.852027027027027e-05, |
| "loss": 0.0379, |
| "step": 4660 |
| }, |
| { |
| "epoch": 3.1554054054054053, |
| "grad_norm": 0.2352730631828308, |
| "learning_rate": 1.8452702702702703e-05, |
| "loss": 0.0453, |
| "step": 4670 |
| }, |
| { |
| "epoch": 3.1621621621621623, |
| "grad_norm": 0.418706476688385, |
| "learning_rate": 1.838513513513514e-05, |
| "loss": 0.0363, |
| "step": 4680 |
| }, |
| { |
| "epoch": 3.168918918918919, |
| "grad_norm": 0.29120874404907227, |
| "learning_rate": 1.8317567567567568e-05, |
| "loss": 0.0305, |
| "step": 4690 |
| }, |
| { |
| "epoch": 3.175675675675676, |
| "grad_norm": 0.22650307416915894, |
| "learning_rate": 1.825e-05, |
| "loss": 0.0324, |
| "step": 4700 |
| }, |
| { |
| "epoch": 3.1824324324324325, |
| "grad_norm": 0.26235389709472656, |
| "learning_rate": 1.8182432432432432e-05, |
| "loss": 0.0455, |
| "step": 4710 |
| }, |
| { |
| "epoch": 3.189189189189189, |
| "grad_norm": 0.1589524745941162, |
| "learning_rate": 1.8114864864864868e-05, |
| "loss": 0.0413, |
| "step": 4720 |
| }, |
| { |
| "epoch": 3.195945945945946, |
| "grad_norm": 0.43323051929473877, |
| "learning_rate": 1.8047297297297297e-05, |
| "loss": 0.0482, |
| "step": 4730 |
| }, |
| { |
| "epoch": 3.2027027027027026, |
| "grad_norm": 0.14209793508052826, |
| "learning_rate": 1.797972972972973e-05, |
| "loss": 0.0304, |
| "step": 4740 |
| }, |
| { |
| "epoch": 3.2094594594594597, |
| "grad_norm": 0.601881742477417, |
| "learning_rate": 1.7912162162162164e-05, |
| "loss": 0.0445, |
| "step": 4750 |
| }, |
| { |
| "epoch": 3.2162162162162162, |
| "grad_norm": 0.36841222643852234, |
| "learning_rate": 1.7844594594594597e-05, |
| "loss": 0.0409, |
| "step": 4760 |
| }, |
| { |
| "epoch": 3.222972972972973, |
| "grad_norm": 0.6512994170188904, |
| "learning_rate": 1.7777027027027026e-05, |
| "loss": 0.0325, |
| "step": 4770 |
| }, |
| { |
| "epoch": 3.22972972972973, |
| "grad_norm": 0.27603307366371155, |
| "learning_rate": 1.770945945945946e-05, |
| "loss": 0.0398, |
| "step": 4780 |
| }, |
| { |
| "epoch": 3.2364864864864864, |
| "grad_norm": 0.49570050835609436, |
| "learning_rate": 1.7641891891891893e-05, |
| "loss": 0.049, |
| "step": 4790 |
| }, |
| { |
| "epoch": 3.2432432432432434, |
| "grad_norm": 0.453434020280838, |
| "learning_rate": 1.7574324324324326e-05, |
| "loss": 0.0397, |
| "step": 4800 |
| }, |
| { |
| "epoch": 3.25, |
| "grad_norm": 0.7412545084953308, |
| "learning_rate": 1.7506756756756758e-05, |
| "loss": 0.0369, |
| "step": 4810 |
| }, |
| { |
| "epoch": 3.2567567567567566, |
| "grad_norm": 0.13515311479568481, |
| "learning_rate": 1.743918918918919e-05, |
| "loss": 0.0329, |
| "step": 4820 |
| }, |
| { |
| "epoch": 3.2635135135135136, |
| "grad_norm": 0.26775646209716797, |
| "learning_rate": 1.7371621621621622e-05, |
| "loss": 0.0321, |
| "step": 4830 |
| }, |
| { |
| "epoch": 3.27027027027027, |
| "grad_norm": 0.4981396496295929, |
| "learning_rate": 1.7304054054054055e-05, |
| "loss": 0.0443, |
| "step": 4840 |
| }, |
| { |
| "epoch": 3.277027027027027, |
| "grad_norm": 0.33926641941070557, |
| "learning_rate": 1.7236486486486487e-05, |
| "loss": 0.0548, |
| "step": 4850 |
| }, |
| { |
| "epoch": 3.2837837837837838, |
| "grad_norm": 0.23301415145397186, |
| "learning_rate": 1.716891891891892e-05, |
| "loss": 0.0425, |
| "step": 4860 |
| }, |
| { |
| "epoch": 3.2905405405405403, |
| "grad_norm": 0.2623809576034546, |
| "learning_rate": 1.710135135135135e-05, |
| "loss": 0.0319, |
| "step": 4870 |
| }, |
| { |
| "epoch": 3.2972972972972974, |
| "grad_norm": 0.33261558413505554, |
| "learning_rate": 1.7033783783783787e-05, |
| "loss": 0.0268, |
| "step": 4880 |
| }, |
| { |
| "epoch": 3.304054054054054, |
| "grad_norm": 0.24392476677894592, |
| "learning_rate": 1.6966216216216216e-05, |
| "loss": 0.0399, |
| "step": 4890 |
| }, |
| { |
| "epoch": 3.310810810810811, |
| "grad_norm": 0.5819364190101624, |
| "learning_rate": 1.6898648648648648e-05, |
| "loss": 0.0471, |
| "step": 4900 |
| }, |
| { |
| "epoch": 3.3175675675675675, |
| "grad_norm": 0.5962876677513123, |
| "learning_rate": 1.683108108108108e-05, |
| "loss": 0.0438, |
| "step": 4910 |
| }, |
| { |
| "epoch": 3.3243243243243246, |
| "grad_norm": 0.226814866065979, |
| "learning_rate": 1.6763513513513516e-05, |
| "loss": 0.029, |
| "step": 4920 |
| }, |
| { |
| "epoch": 3.331081081081081, |
| "grad_norm": 0.5372828841209412, |
| "learning_rate": 1.6695945945945945e-05, |
| "loss": 0.0413, |
| "step": 4930 |
| }, |
| { |
| "epoch": 3.3378378378378377, |
| "grad_norm": 0.11127971112728119, |
| "learning_rate": 1.6628378378378377e-05, |
| "loss": 0.0561, |
| "step": 4940 |
| }, |
| { |
| "epoch": 3.3445945945945947, |
| "grad_norm": 0.24800080060958862, |
| "learning_rate": 1.6560810810810813e-05, |
| "loss": 0.0315, |
| "step": 4950 |
| }, |
| { |
| "epoch": 3.3513513513513513, |
| "grad_norm": 0.624832272529602, |
| "learning_rate": 1.6493243243243245e-05, |
| "loss": 0.0344, |
| "step": 4960 |
| }, |
| { |
| "epoch": 3.358108108108108, |
| "grad_norm": 0.37135404348373413, |
| "learning_rate": 1.6425675675675674e-05, |
| "loss": 0.0389, |
| "step": 4970 |
| }, |
| { |
| "epoch": 3.364864864864865, |
| "grad_norm": 0.5197146534919739, |
| "learning_rate": 1.635810810810811e-05, |
| "loss": 0.0359, |
| "step": 4980 |
| }, |
| { |
| "epoch": 3.3716216216216215, |
| "grad_norm": 0.34491223096847534, |
| "learning_rate": 1.6290540540540542e-05, |
| "loss": 0.0319, |
| "step": 4990 |
| }, |
| { |
| "epoch": 3.3783783783783785, |
| "grad_norm": 0.3033651113510132, |
| "learning_rate": 1.6222972972972974e-05, |
| "loss": 0.0584, |
| "step": 5000 |
| }, |
| { |
| "epoch": 3.385135135135135, |
| "grad_norm": 0.3707546591758728, |
| "learning_rate": 1.6155405405405406e-05, |
| "loss": 0.0361, |
| "step": 5010 |
| }, |
| { |
| "epoch": 3.391891891891892, |
| "grad_norm": 0.32084357738494873, |
| "learning_rate": 1.608783783783784e-05, |
| "loss": 0.0447, |
| "step": 5020 |
| }, |
| { |
| "epoch": 3.3986486486486487, |
| "grad_norm": 0.24217896163463593, |
| "learning_rate": 1.602027027027027e-05, |
| "loss": 0.0351, |
| "step": 5030 |
| }, |
| { |
| "epoch": 3.4054054054054053, |
| "grad_norm": 0.24498729407787323, |
| "learning_rate": 1.5952702702702703e-05, |
| "loss": 0.0511, |
| "step": 5040 |
| }, |
| { |
| "epoch": 3.4121621621621623, |
| "grad_norm": 0.3093695342540741, |
| "learning_rate": 1.5885135135135135e-05, |
| "loss": 0.0358, |
| "step": 5050 |
| }, |
| { |
| "epoch": 3.418918918918919, |
| "grad_norm": 0.23047859966754913, |
| "learning_rate": 1.5817567567567568e-05, |
| "loss": 0.035, |
| "step": 5060 |
| }, |
| { |
| "epoch": 3.4256756756756754, |
| "grad_norm": 0.2470131516456604, |
| "learning_rate": 1.575e-05, |
| "loss": 0.036, |
| "step": 5070 |
| }, |
| { |
| "epoch": 3.4324324324324325, |
| "grad_norm": 0.22567608952522278, |
| "learning_rate": 1.5682432432432436e-05, |
| "loss": 0.042, |
| "step": 5080 |
| }, |
| { |
| "epoch": 3.439189189189189, |
| "grad_norm": 0.4833430051803589, |
| "learning_rate": 1.5614864864864864e-05, |
| "loss": 0.0328, |
| "step": 5090 |
| }, |
| { |
| "epoch": 3.445945945945946, |
| "grad_norm": 0.2073460817337036, |
| "learning_rate": 1.5547297297297297e-05, |
| "loss": 0.0324, |
| "step": 5100 |
| }, |
| { |
| "epoch": 3.4527027027027026, |
| "grad_norm": 0.4520673453807831, |
| "learning_rate": 1.547972972972973e-05, |
| "loss": 0.0425, |
| "step": 5110 |
| }, |
| { |
| "epoch": 3.4594594594594597, |
| "grad_norm": 0.387984961271286, |
| "learning_rate": 1.5412162162162165e-05, |
| "loss": 0.0377, |
| "step": 5120 |
| }, |
| { |
| "epoch": 3.4662162162162162, |
| "grad_norm": 0.1993335336446762, |
| "learning_rate": 1.5344594594594594e-05, |
| "loss": 0.0418, |
| "step": 5130 |
| }, |
| { |
| "epoch": 3.472972972972973, |
| "grad_norm": 0.17682744562625885, |
| "learning_rate": 1.5277027027027026e-05, |
| "loss": 0.0458, |
| "step": 5140 |
| }, |
| { |
| "epoch": 3.47972972972973, |
| "grad_norm": 0.2657202184200287, |
| "learning_rate": 1.520945945945946e-05, |
| "loss": 0.0377, |
| "step": 5150 |
| }, |
| { |
| "epoch": 3.4864864864864864, |
| "grad_norm": 0.21221712231636047, |
| "learning_rate": 1.5141891891891894e-05, |
| "loss": 0.0418, |
| "step": 5160 |
| }, |
| { |
| "epoch": 3.4932432432432434, |
| "grad_norm": 0.2672747075557709, |
| "learning_rate": 1.5074324324324324e-05, |
| "loss": 0.0336, |
| "step": 5170 |
| }, |
| { |
| "epoch": 3.5, |
| "grad_norm": 0.22585569322109222, |
| "learning_rate": 1.5006756756756756e-05, |
| "loss": 0.0353, |
| "step": 5180 |
| }, |
| { |
| "epoch": 3.506756756756757, |
| "grad_norm": 0.4884142279624939, |
| "learning_rate": 1.493918918918919e-05, |
| "loss": 0.031, |
| "step": 5190 |
| }, |
| { |
| "epoch": 3.5135135135135136, |
| "grad_norm": 0.2212933450937271, |
| "learning_rate": 1.4871621621621623e-05, |
| "loss": 0.0333, |
| "step": 5200 |
| }, |
| { |
| "epoch": 3.52027027027027, |
| "grad_norm": 0.22906561195850372, |
| "learning_rate": 1.4804054054054053e-05, |
| "loss": 0.0353, |
| "step": 5210 |
| }, |
| { |
| "epoch": 3.527027027027027, |
| "grad_norm": 0.2564111053943634, |
| "learning_rate": 1.4736486486486487e-05, |
| "loss": 0.0334, |
| "step": 5220 |
| }, |
| { |
| "epoch": 3.5337837837837838, |
| "grad_norm": 0.25793004035949707, |
| "learning_rate": 1.466891891891892e-05, |
| "loss": 0.0296, |
| "step": 5230 |
| }, |
| { |
| "epoch": 3.5405405405405403, |
| "grad_norm": 0.27988219261169434, |
| "learning_rate": 1.4601351351351353e-05, |
| "loss": 0.0323, |
| "step": 5240 |
| }, |
| { |
| "epoch": 3.5472972972972974, |
| "grad_norm": 0.2663809657096863, |
| "learning_rate": 1.4533783783783784e-05, |
| "loss": 0.027, |
| "step": 5250 |
| }, |
| { |
| "epoch": 3.554054054054054, |
| "grad_norm": 0.30205437541007996, |
| "learning_rate": 1.4466216216216216e-05, |
| "loss": 0.0441, |
| "step": 5260 |
| }, |
| { |
| "epoch": 3.560810810810811, |
| "grad_norm": 0.47929883003234863, |
| "learning_rate": 1.439864864864865e-05, |
| "loss": 0.0389, |
| "step": 5270 |
| }, |
| { |
| "epoch": 3.5675675675675675, |
| "grad_norm": 0.20964276790618896, |
| "learning_rate": 1.4331081081081082e-05, |
| "loss": 0.0393, |
| "step": 5280 |
| }, |
| { |
| "epoch": 3.5743243243243246, |
| "grad_norm": 0.4340321123600006, |
| "learning_rate": 1.4263513513513513e-05, |
| "loss": 0.0411, |
| "step": 5290 |
| }, |
| { |
| "epoch": 3.581081081081081, |
| "grad_norm": 0.338561087846756, |
| "learning_rate": 1.4195945945945945e-05, |
| "loss": 0.0364, |
| "step": 5300 |
| }, |
| { |
| "epoch": 3.5878378378378377, |
| "grad_norm": 0.4030533730983734, |
| "learning_rate": 1.412837837837838e-05, |
| "loss": 0.0371, |
| "step": 5310 |
| }, |
| { |
| "epoch": 3.5945945945945947, |
| "grad_norm": 0.33323097229003906, |
| "learning_rate": 1.4060810810810811e-05, |
| "loss": 0.0457, |
| "step": 5320 |
| }, |
| { |
| "epoch": 3.6013513513513513, |
| "grad_norm": 0.22286130487918854, |
| "learning_rate": 1.3993243243243242e-05, |
| "loss": 0.0351, |
| "step": 5330 |
| }, |
| { |
| "epoch": 3.608108108108108, |
| "grad_norm": 0.16391167044639587, |
| "learning_rate": 1.3925675675675676e-05, |
| "loss": 0.0287, |
| "step": 5340 |
| }, |
| { |
| "epoch": 3.614864864864865, |
| "grad_norm": 0.2558617889881134, |
| "learning_rate": 1.3858108108108108e-05, |
| "loss": 0.0401, |
| "step": 5350 |
| }, |
| { |
| "epoch": 3.6216216216216215, |
| "grad_norm": 0.17443585395812988, |
| "learning_rate": 1.3790540540540542e-05, |
| "loss": 0.0383, |
| "step": 5360 |
| }, |
| { |
| "epoch": 3.6283783783783785, |
| "grad_norm": 0.4235523045063019, |
| "learning_rate": 1.3722972972972974e-05, |
| "loss": 0.0459, |
| "step": 5370 |
| }, |
| { |
| "epoch": 3.635135135135135, |
| "grad_norm": 0.2450476437807083, |
| "learning_rate": 1.3655405405405405e-05, |
| "loss": 0.0442, |
| "step": 5380 |
| }, |
| { |
| "epoch": 3.641891891891892, |
| "grad_norm": 0.4506691098213196, |
| "learning_rate": 1.3587837837837839e-05, |
| "loss": 0.0397, |
| "step": 5390 |
| }, |
| { |
| "epoch": 3.6486486486486487, |
| "grad_norm": 0.29354047775268555, |
| "learning_rate": 1.3520270270270271e-05, |
| "loss": 0.0364, |
| "step": 5400 |
| }, |
| { |
| "epoch": 3.6554054054054053, |
| "grad_norm": 0.27163824439048767, |
| "learning_rate": 1.3452702702702705e-05, |
| "loss": 0.0354, |
| "step": 5410 |
| }, |
| { |
| "epoch": 3.6621621621621623, |
| "grad_norm": 0.19688227772712708, |
| "learning_rate": 1.3385135135135136e-05, |
| "loss": 0.0312, |
| "step": 5420 |
| }, |
| { |
| "epoch": 3.668918918918919, |
| "grad_norm": 0.4131430983543396, |
| "learning_rate": 1.3317567567567568e-05, |
| "loss": 0.0321, |
| "step": 5430 |
| }, |
| { |
| "epoch": 3.6756756756756754, |
| "grad_norm": 0.31735169887542725, |
| "learning_rate": 1.3250000000000002e-05, |
| "loss": 0.0349, |
| "step": 5440 |
| }, |
| { |
| "epoch": 3.6824324324324325, |
| "grad_norm": 0.29376840591430664, |
| "learning_rate": 1.3182432432432434e-05, |
| "loss": 0.0337, |
| "step": 5450 |
| }, |
| { |
| "epoch": 3.689189189189189, |
| "grad_norm": 0.22491705417633057, |
| "learning_rate": 1.3114864864864865e-05, |
| "loss": 0.0422, |
| "step": 5460 |
| }, |
| { |
| "epoch": 3.695945945945946, |
| "grad_norm": 0.3273302912712097, |
| "learning_rate": 1.3047297297297299e-05, |
| "loss": 0.0334, |
| "step": 5470 |
| }, |
| { |
| "epoch": 3.7027027027027026, |
| "grad_norm": 0.2812623977661133, |
| "learning_rate": 1.2979729729729731e-05, |
| "loss": 0.0418, |
| "step": 5480 |
| }, |
| { |
| "epoch": 3.7094594594594597, |
| "grad_norm": 0.34500834345817566, |
| "learning_rate": 1.2912162162162165e-05, |
| "loss": 0.0345, |
| "step": 5490 |
| }, |
| { |
| "epoch": 3.7162162162162162, |
| "grad_norm": 0.40858641266822815, |
| "learning_rate": 1.2844594594594594e-05, |
| "loss": 0.0445, |
| "step": 5500 |
| }, |
| { |
| "epoch": 3.722972972972973, |
| "grad_norm": 0.0810331404209137, |
| "learning_rate": 1.2777027027027028e-05, |
| "loss": 0.0505, |
| "step": 5510 |
| }, |
| { |
| "epoch": 3.72972972972973, |
| "grad_norm": 0.35677656531333923, |
| "learning_rate": 1.270945945945946e-05, |
| "loss": 0.0398, |
| "step": 5520 |
| }, |
| { |
| "epoch": 3.7364864864864864, |
| "grad_norm": 0.2926190495491028, |
| "learning_rate": 1.2641891891891894e-05, |
| "loss": 0.0385, |
| "step": 5530 |
| }, |
| { |
| "epoch": 3.743243243243243, |
| "grad_norm": 0.26496851444244385, |
| "learning_rate": 1.2574324324324324e-05, |
| "loss": 0.0334, |
| "step": 5540 |
| }, |
| { |
| "epoch": 3.75, |
| "grad_norm": 0.27788522839546204, |
| "learning_rate": 1.2506756756756757e-05, |
| "loss": 0.046, |
| "step": 5550 |
| }, |
| { |
| "epoch": 3.756756756756757, |
| "grad_norm": 0.2423742562532425, |
| "learning_rate": 1.243918918918919e-05, |
| "loss": 0.0305, |
| "step": 5560 |
| }, |
| { |
| "epoch": 3.7635135135135136, |
| "grad_norm": 0.19838376343250275, |
| "learning_rate": 1.2371621621621623e-05, |
| "loss": 0.0257, |
| "step": 5570 |
| }, |
| { |
| "epoch": 3.77027027027027, |
| "grad_norm": 0.4191855490207672, |
| "learning_rate": 1.2304054054054055e-05, |
| "loss": 0.043, |
| "step": 5580 |
| }, |
| { |
| "epoch": 3.777027027027027, |
| "grad_norm": 0.19260361790657043, |
| "learning_rate": 1.2236486486486487e-05, |
| "loss": 0.0349, |
| "step": 5590 |
| }, |
| { |
| "epoch": 3.7837837837837838, |
| "grad_norm": 0.14565953612327576, |
| "learning_rate": 1.216891891891892e-05, |
| "loss": 0.0398, |
| "step": 5600 |
| }, |
| { |
| "epoch": 3.7905405405405403, |
| "grad_norm": 0.37692561745643616, |
| "learning_rate": 1.2101351351351352e-05, |
| "loss": 0.0408, |
| "step": 5610 |
| }, |
| { |
| "epoch": 3.7972972972972974, |
| "grad_norm": 0.18849320709705353, |
| "learning_rate": 1.2033783783783784e-05, |
| "loss": 0.0325, |
| "step": 5620 |
| }, |
| { |
| "epoch": 3.804054054054054, |
| "grad_norm": 0.3735746145248413, |
| "learning_rate": 1.1966216216216216e-05, |
| "loss": 0.044, |
| "step": 5630 |
| }, |
| { |
| "epoch": 3.810810810810811, |
| "grad_norm": 0.1767602264881134, |
| "learning_rate": 1.189864864864865e-05, |
| "loss": 0.0348, |
| "step": 5640 |
| }, |
| { |
| "epoch": 3.8175675675675675, |
| "grad_norm": 0.30753883719444275, |
| "learning_rate": 1.1831081081081081e-05, |
| "loss": 0.0326, |
| "step": 5650 |
| }, |
| { |
| "epoch": 3.8243243243243246, |
| "grad_norm": 0.2788727879524231, |
| "learning_rate": 1.1763513513513515e-05, |
| "loss": 0.0366, |
| "step": 5660 |
| }, |
| { |
| "epoch": 3.831081081081081, |
| "grad_norm": 0.27036169171333313, |
| "learning_rate": 1.1695945945945947e-05, |
| "loss": 0.0291, |
| "step": 5670 |
| }, |
| { |
| "epoch": 3.8378378378378377, |
| "grad_norm": 0.23913073539733887, |
| "learning_rate": 1.162837837837838e-05, |
| "loss": 0.0297, |
| "step": 5680 |
| }, |
| { |
| "epoch": 3.8445945945945947, |
| "grad_norm": 0.296947181224823, |
| "learning_rate": 1.1560810810810812e-05, |
| "loss": 0.029, |
| "step": 5690 |
| }, |
| { |
| "epoch": 3.8513513513513513, |
| "grad_norm": 0.16844850778579712, |
| "learning_rate": 1.1493243243243244e-05, |
| "loss": 0.0383, |
| "step": 5700 |
| }, |
| { |
| "epoch": 3.858108108108108, |
| "grad_norm": 0.275686115026474, |
| "learning_rate": 1.1425675675675676e-05, |
| "loss": 0.033, |
| "step": 5710 |
| }, |
| { |
| "epoch": 3.864864864864865, |
| "grad_norm": 0.19295139610767365, |
| "learning_rate": 1.1358108108108108e-05, |
| "loss": 0.0401, |
| "step": 5720 |
| }, |
| { |
| "epoch": 3.8716216216216215, |
| "grad_norm": 0.4507526159286499, |
| "learning_rate": 1.129054054054054e-05, |
| "loss": 0.0423, |
| "step": 5730 |
| }, |
| { |
| "epoch": 3.8783783783783785, |
| "grad_norm": 0.16554881632328033, |
| "learning_rate": 1.1222972972972975e-05, |
| "loss": 0.0468, |
| "step": 5740 |
| }, |
| { |
| "epoch": 3.885135135135135, |
| "grad_norm": 0.22985632717609406, |
| "learning_rate": 1.1155405405405405e-05, |
| "loss": 0.0275, |
| "step": 5750 |
| }, |
| { |
| "epoch": 3.891891891891892, |
| "grad_norm": 0.13783544301986694, |
| "learning_rate": 1.1087837837837839e-05, |
| "loss": 0.037, |
| "step": 5760 |
| }, |
| { |
| "epoch": 3.8986486486486487, |
| "grad_norm": 0.4291518032550812, |
| "learning_rate": 1.1020270270270271e-05, |
| "loss": 0.0549, |
| "step": 5770 |
| }, |
| { |
| "epoch": 3.9054054054054053, |
| "grad_norm": 0.21507710218429565, |
| "learning_rate": 1.0952702702702704e-05, |
| "loss": 0.0376, |
| "step": 5780 |
| }, |
| { |
| "epoch": 3.9121621621621623, |
| "grad_norm": 0.2827785015106201, |
| "learning_rate": 1.0885135135135136e-05, |
| "loss": 0.0366, |
| "step": 5790 |
| }, |
| { |
| "epoch": 3.918918918918919, |
| "grad_norm": 0.3014868199825287, |
| "learning_rate": 1.0817567567567568e-05, |
| "loss": 0.0289, |
| "step": 5800 |
| }, |
| { |
| "epoch": 3.9256756756756754, |
| "grad_norm": 0.14509421586990356, |
| "learning_rate": 1.075e-05, |
| "loss": 0.0293, |
| "step": 5810 |
| }, |
| { |
| "epoch": 3.9324324324324325, |
| "grad_norm": 0.20983301103115082, |
| "learning_rate": 1.0682432432432433e-05, |
| "loss": 0.0318, |
| "step": 5820 |
| }, |
| { |
| "epoch": 3.939189189189189, |
| "grad_norm": 0.1960182636976242, |
| "learning_rate": 1.0614864864864865e-05, |
| "loss": 0.0411, |
| "step": 5830 |
| }, |
| { |
| "epoch": 3.945945945945946, |
| "grad_norm": 0.41651207208633423, |
| "learning_rate": 1.0547297297297299e-05, |
| "loss": 0.0293, |
| "step": 5840 |
| }, |
| { |
| "epoch": 3.9527027027027026, |
| "grad_norm": 0.3032841980457306, |
| "learning_rate": 1.047972972972973e-05, |
| "loss": 0.0413, |
| "step": 5850 |
| }, |
| { |
| "epoch": 3.9594594594594597, |
| "grad_norm": 0.2482951283454895, |
| "learning_rate": 1.0412162162162163e-05, |
| "loss": 0.0281, |
| "step": 5860 |
| }, |
| { |
| "epoch": 3.9662162162162162, |
| "grad_norm": 0.1527014523744583, |
| "learning_rate": 1.0344594594594596e-05, |
| "loss": 0.0407, |
| "step": 5870 |
| }, |
| { |
| "epoch": 3.972972972972973, |
| "grad_norm": 0.19820819795131683, |
| "learning_rate": 1.0277027027027028e-05, |
| "loss": 0.0312, |
| "step": 5880 |
| }, |
| { |
| "epoch": 3.97972972972973, |
| "grad_norm": 0.2963590621948242, |
| "learning_rate": 1.020945945945946e-05, |
| "loss": 0.0359, |
| "step": 5890 |
| }, |
| { |
| "epoch": 3.9864864864864864, |
| "grad_norm": 0.4077160358428955, |
| "learning_rate": 1.0141891891891892e-05, |
| "loss": 0.0439, |
| "step": 5900 |
| }, |
| { |
| "epoch": 3.993243243243243, |
| "grad_norm": 0.2682552635669708, |
| "learning_rate": 1.0074324324324325e-05, |
| "loss": 0.026, |
| "step": 5910 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.41804584860801697, |
| "learning_rate": 1.0006756756756757e-05, |
| "loss": 0.0525, |
| "step": 5920 |
| }, |
| { |
| "epoch": 4.0, |
| "eval_loss": 0.02366216853260994, |
| "eval_runtime": 246.5939, |
| "eval_samples_per_second": 4.238, |
| "eval_steps_per_second": 1.062, |
| "step": 5920 |
| }, |
| { |
| "epoch": 4.006756756756757, |
| "grad_norm": 0.3444773554801941, |
| "learning_rate": 9.93918918918919e-06, |
| "loss": 0.0437, |
| "step": 5930 |
| }, |
| { |
| "epoch": 4.013513513513513, |
| "grad_norm": 0.3024560213088989, |
| "learning_rate": 9.871621621621623e-06, |
| "loss": 0.0465, |
| "step": 5940 |
| }, |
| { |
| "epoch": 4.02027027027027, |
| "grad_norm": 0.25490209460258484, |
| "learning_rate": 9.804054054054054e-06, |
| "loss": 0.0451, |
| "step": 5950 |
| }, |
| { |
| "epoch": 4.027027027027027, |
| "grad_norm": 0.23857267200946808, |
| "learning_rate": 9.736486486486488e-06, |
| "loss": 0.0338, |
| "step": 5960 |
| }, |
| { |
| "epoch": 4.033783783783784, |
| "grad_norm": 0.22253508865833282, |
| "learning_rate": 9.66891891891892e-06, |
| "loss": 0.0307, |
| "step": 5970 |
| }, |
| { |
| "epoch": 4.04054054054054, |
| "grad_norm": 0.3308720588684082, |
| "learning_rate": 9.601351351351352e-06, |
| "loss": 0.0363, |
| "step": 5980 |
| }, |
| { |
| "epoch": 4.047297297297297, |
| "grad_norm": 0.4275621473789215, |
| "learning_rate": 9.533783783783784e-06, |
| "loss": 0.0433, |
| "step": 5990 |
| }, |
| { |
| "epoch": 4.054054054054054, |
| "grad_norm": 0.2730522155761719, |
| "learning_rate": 9.466216216216217e-06, |
| "loss": 0.0395, |
| "step": 6000 |
| }, |
| { |
| "epoch": 4.0608108108108105, |
| "grad_norm": 0.5394574999809265, |
| "learning_rate": 9.398648648648649e-06, |
| "loss": 0.0308, |
| "step": 6010 |
| }, |
| { |
| "epoch": 4.0675675675675675, |
| "grad_norm": 0.4886311888694763, |
| "learning_rate": 9.331081081081081e-06, |
| "loss": 0.0304, |
| "step": 6020 |
| }, |
| { |
| "epoch": 4.074324324324325, |
| "grad_norm": 0.34788379073143005, |
| "learning_rate": 9.263513513513513e-06, |
| "loss": 0.0463, |
| "step": 6030 |
| }, |
| { |
| "epoch": 4.081081081081081, |
| "grad_norm": 0.6314213275909424, |
| "learning_rate": 9.195945945945947e-06, |
| "loss": 0.0339, |
| "step": 6040 |
| }, |
| { |
| "epoch": 4.087837837837838, |
| "grad_norm": 0.11922051012516022, |
| "learning_rate": 9.128378378378378e-06, |
| "loss": 0.0275, |
| "step": 6050 |
| }, |
| { |
| "epoch": 4.094594594594595, |
| "grad_norm": 0.321501225233078, |
| "learning_rate": 9.060810810810812e-06, |
| "loss": 0.0306, |
| "step": 6060 |
| }, |
| { |
| "epoch": 4.101351351351352, |
| "grad_norm": 0.6931231617927551, |
| "learning_rate": 8.993243243243244e-06, |
| "loss": 0.0373, |
| "step": 6070 |
| }, |
| { |
| "epoch": 4.108108108108108, |
| "grad_norm": 0.32703468203544617, |
| "learning_rate": 8.925675675675676e-06, |
| "loss": 0.0279, |
| "step": 6080 |
| }, |
| { |
| "epoch": 4.114864864864865, |
| "grad_norm": 0.5395735502243042, |
| "learning_rate": 8.858108108108109e-06, |
| "loss": 0.0384, |
| "step": 6090 |
| }, |
| { |
| "epoch": 4.121621621621622, |
| "grad_norm": 0.32894161343574524, |
| "learning_rate": 8.790540540540541e-06, |
| "loss": 0.0372, |
| "step": 6100 |
| }, |
| { |
| "epoch": 4.128378378378378, |
| "grad_norm": 0.24975889921188354, |
| "learning_rate": 8.722972972972973e-06, |
| "loss": 0.0346, |
| "step": 6110 |
| }, |
| { |
| "epoch": 4.135135135135135, |
| "grad_norm": 0.36998119950294495, |
| "learning_rate": 8.655405405405405e-06, |
| "loss": 0.0354, |
| "step": 6120 |
| }, |
| { |
| "epoch": 4.141891891891892, |
| "grad_norm": 0.4094553589820862, |
| "learning_rate": 8.587837837837838e-06, |
| "loss": 0.0367, |
| "step": 6130 |
| }, |
| { |
| "epoch": 4.148648648648648, |
| "grad_norm": 0.2307623326778412, |
| "learning_rate": 8.520270270270272e-06, |
| "loss": 0.0341, |
| "step": 6140 |
| }, |
| { |
| "epoch": 4.155405405405405, |
| "grad_norm": 0.3480986952781677, |
| "learning_rate": 8.452702702702702e-06, |
| "loss": 0.0456, |
| "step": 6150 |
| }, |
| { |
| "epoch": 4.162162162162162, |
| "grad_norm": 0.22716574370861053, |
| "learning_rate": 8.385135135135136e-06, |
| "loss": 0.0391, |
| "step": 6160 |
| }, |
| { |
| "epoch": 4.168918918918919, |
| "grad_norm": 0.4268530607223511, |
| "learning_rate": 8.317567567567568e-06, |
| "loss": 0.0425, |
| "step": 6170 |
| }, |
| { |
| "epoch": 4.175675675675675, |
| "grad_norm": 0.29732945561408997, |
| "learning_rate": 8.25e-06, |
| "loss": 0.0363, |
| "step": 6180 |
| }, |
| { |
| "epoch": 4.1824324324324325, |
| "grad_norm": 0.49700286984443665, |
| "learning_rate": 8.182432432432433e-06, |
| "loss": 0.0432, |
| "step": 6190 |
| }, |
| { |
| "epoch": 4.1891891891891895, |
| "grad_norm": 0.20685577392578125, |
| "learning_rate": 8.114864864864865e-06, |
| "loss": 0.0288, |
| "step": 6200 |
| }, |
| { |
| "epoch": 4.195945945945946, |
| "grad_norm": 0.15559187531471252, |
| "learning_rate": 8.047297297297297e-06, |
| "loss": 0.0307, |
| "step": 6210 |
| }, |
| { |
| "epoch": 4.202702702702703, |
| "grad_norm": 0.3090328872203827, |
| "learning_rate": 7.97972972972973e-06, |
| "loss": 0.0371, |
| "step": 6220 |
| }, |
| { |
| "epoch": 4.20945945945946, |
| "grad_norm": 1.0721955299377441, |
| "learning_rate": 7.912162162162162e-06, |
| "loss": 0.0471, |
| "step": 6230 |
| }, |
| { |
| "epoch": 4.216216216216216, |
| "grad_norm": 0.24389462172985077, |
| "learning_rate": 7.844594594594596e-06, |
| "loss": 0.0362, |
| "step": 6240 |
| }, |
| { |
| "epoch": 4.222972972972973, |
| "grad_norm": 0.22328566014766693, |
| "learning_rate": 7.777027027027026e-06, |
| "loss": 0.0416, |
| "step": 6250 |
| }, |
| { |
| "epoch": 4.22972972972973, |
| "grad_norm": 0.17206421494483948, |
| "learning_rate": 7.70945945945946e-06, |
| "loss": 0.0267, |
| "step": 6260 |
| }, |
| { |
| "epoch": 4.236486486486487, |
| "grad_norm": 0.2622784674167633, |
| "learning_rate": 7.641891891891893e-06, |
| "loss": 0.0317, |
| "step": 6270 |
| }, |
| { |
| "epoch": 4.243243243243243, |
| "grad_norm": 0.561418354511261, |
| "learning_rate": 7.574324324324325e-06, |
| "loss": 0.0487, |
| "step": 6280 |
| }, |
| { |
| "epoch": 4.25, |
| "grad_norm": 0.39344513416290283, |
| "learning_rate": 7.506756756756757e-06, |
| "loss": 0.0491, |
| "step": 6290 |
| }, |
| { |
| "epoch": 4.256756756756757, |
| "grad_norm": 0.24725763499736786, |
| "learning_rate": 7.43918918918919e-06, |
| "loss": 0.0302, |
| "step": 6300 |
| }, |
| { |
| "epoch": 4.263513513513513, |
| "grad_norm": 0.23941102623939514, |
| "learning_rate": 7.371621621621622e-06, |
| "loss": 0.0282, |
| "step": 6310 |
| }, |
| { |
| "epoch": 4.27027027027027, |
| "grad_norm": 0.2501400113105774, |
| "learning_rate": 7.304054054054055e-06, |
| "loss": 0.0366, |
| "step": 6320 |
| }, |
| { |
| "epoch": 4.277027027027027, |
| "grad_norm": 0.3194361627101898, |
| "learning_rate": 7.236486486486486e-06, |
| "loss": 0.0336, |
| "step": 6330 |
| }, |
| { |
| "epoch": 4.283783783783784, |
| "grad_norm": 0.5751128792762756, |
| "learning_rate": 7.168918918918919e-06, |
| "loss": 0.043, |
| "step": 6340 |
| }, |
| { |
| "epoch": 4.29054054054054, |
| "grad_norm": 0.5228712558746338, |
| "learning_rate": 7.1013513513513515e-06, |
| "loss": 0.0327, |
| "step": 6350 |
| }, |
| { |
| "epoch": 4.297297297297297, |
| "grad_norm": 0.2010771632194519, |
| "learning_rate": 7.033783783783785e-06, |
| "loss": 0.0314, |
| "step": 6360 |
| }, |
| { |
| "epoch": 4.304054054054054, |
| "grad_norm": 0.2496619075536728, |
| "learning_rate": 6.966216216216216e-06, |
| "loss": 0.0305, |
| "step": 6370 |
| }, |
| { |
| "epoch": 4.3108108108108105, |
| "grad_norm": 0.21699196100234985, |
| "learning_rate": 6.898648648648649e-06, |
| "loss": 0.0306, |
| "step": 6380 |
| }, |
| { |
| "epoch": 4.3175675675675675, |
| "grad_norm": 0.30333423614501953, |
| "learning_rate": 6.831081081081081e-06, |
| "loss": 0.0343, |
| "step": 6390 |
| }, |
| { |
| "epoch": 4.324324324324325, |
| "grad_norm": 0.39921465516090393, |
| "learning_rate": 6.7635135135135145e-06, |
| "loss": 0.0339, |
| "step": 6400 |
| }, |
| { |
| "epoch": 4.331081081081081, |
| "grad_norm": 0.5513906478881836, |
| "learning_rate": 6.695945945945946e-06, |
| "loss": 0.0311, |
| "step": 6410 |
| }, |
| { |
| "epoch": 4.337837837837838, |
| "grad_norm": 0.11431396752595901, |
| "learning_rate": 6.628378378378379e-06, |
| "loss": 0.0284, |
| "step": 6420 |
| }, |
| { |
| "epoch": 4.344594594594595, |
| "grad_norm": 0.2621834874153137, |
| "learning_rate": 6.56081081081081e-06, |
| "loss": 0.0299, |
| "step": 6430 |
| }, |
| { |
| "epoch": 4.351351351351352, |
| "grad_norm": 0.6052500605583191, |
| "learning_rate": 6.4932432432432435e-06, |
| "loss": 0.0357, |
| "step": 6440 |
| }, |
| { |
| "epoch": 4.358108108108108, |
| "grad_norm": 0.19118449091911316, |
| "learning_rate": 6.425675675675676e-06, |
| "loss": 0.0331, |
| "step": 6450 |
| }, |
| { |
| "epoch": 4.364864864864865, |
| "grad_norm": 0.1641031801700592, |
| "learning_rate": 6.358108108108109e-06, |
| "loss": 0.0409, |
| "step": 6460 |
| }, |
| { |
| "epoch": 4.371621621621622, |
| "grad_norm": 0.30924248695373535, |
| "learning_rate": 6.29054054054054e-06, |
| "loss": 0.0381, |
| "step": 6470 |
| }, |
| { |
| "epoch": 4.378378378378378, |
| "grad_norm": 0.33302533626556396, |
| "learning_rate": 6.222972972972973e-06, |
| "loss": 0.0425, |
| "step": 6480 |
| }, |
| { |
| "epoch": 4.385135135135135, |
| "grad_norm": 0.2112291157245636, |
| "learning_rate": 6.155405405405406e-06, |
| "loss": 0.035, |
| "step": 6490 |
| }, |
| { |
| "epoch": 4.391891891891892, |
| "grad_norm": 0.5250951051712036, |
| "learning_rate": 6.087837837837839e-06, |
| "loss": 0.0307, |
| "step": 6500 |
| }, |
| { |
| "epoch": 4.398648648648648, |
| "grad_norm": 0.30093303322792053, |
| "learning_rate": 6.020270270270271e-06, |
| "loss": 0.0362, |
| "step": 6510 |
| }, |
| { |
| "epoch": 4.405405405405405, |
| "grad_norm": 0.18295438587665558, |
| "learning_rate": 5.952702702702703e-06, |
| "loss": 0.0395, |
| "step": 6520 |
| }, |
| { |
| "epoch": 4.412162162162162, |
| "grad_norm": 0.2848648130893707, |
| "learning_rate": 5.8851351351351355e-06, |
| "loss": 0.027, |
| "step": 6530 |
| }, |
| { |
| "epoch": 4.418918918918919, |
| "grad_norm": 0.5116801261901855, |
| "learning_rate": 5.817567567567568e-06, |
| "loss": 0.0349, |
| "step": 6540 |
| }, |
| { |
| "epoch": 4.425675675675675, |
| "grad_norm": 0.36234357953071594, |
| "learning_rate": 5.750000000000001e-06, |
| "loss": 0.0341, |
| "step": 6550 |
| }, |
| { |
| "epoch": 4.4324324324324325, |
| "grad_norm": 0.29395073652267456, |
| "learning_rate": 5.682432432432433e-06, |
| "loss": 0.0398, |
| "step": 6560 |
| }, |
| { |
| "epoch": 4.4391891891891895, |
| "grad_norm": 0.2595879137516022, |
| "learning_rate": 5.614864864864865e-06, |
| "loss": 0.0374, |
| "step": 6570 |
| }, |
| { |
| "epoch": 4.445945945945946, |
| "grad_norm": 0.18848879635334015, |
| "learning_rate": 5.547297297297298e-06, |
| "loss": 0.0286, |
| "step": 6580 |
| }, |
| { |
| "epoch": 4.452702702702703, |
| "grad_norm": 0.34463363885879517, |
| "learning_rate": 5.47972972972973e-06, |
| "loss": 0.0275, |
| "step": 6590 |
| }, |
| { |
| "epoch": 4.45945945945946, |
| "grad_norm": 0.7043229937553406, |
| "learning_rate": 5.412162162162163e-06, |
| "loss": 0.0393, |
| "step": 6600 |
| }, |
| { |
| "epoch": 4.466216216216216, |
| "grad_norm": 0.33575713634490967, |
| "learning_rate": 5.344594594594595e-06, |
| "loss": 0.0401, |
| "step": 6610 |
| }, |
| { |
| "epoch": 4.472972972972973, |
| "grad_norm": 0.2630554437637329, |
| "learning_rate": 5.2770270270270275e-06, |
| "loss": 0.0293, |
| "step": 6620 |
| }, |
| { |
| "epoch": 4.47972972972973, |
| "grad_norm": 0.20551495254039764, |
| "learning_rate": 5.20945945945946e-06, |
| "loss": 0.0269, |
| "step": 6630 |
| }, |
| { |
| "epoch": 4.486486486486487, |
| "grad_norm": 0.13341334462165833, |
| "learning_rate": 5.141891891891892e-06, |
| "loss": 0.0398, |
| "step": 6640 |
| }, |
| { |
| "epoch": 4.493243243243243, |
| "grad_norm": 0.2936372458934784, |
| "learning_rate": 5.074324324324325e-06, |
| "loss": 0.0265, |
| "step": 6650 |
| }, |
| { |
| "epoch": 4.5, |
| "grad_norm": 0.2781808376312256, |
| "learning_rate": 5.006756756756757e-06, |
| "loss": 0.0456, |
| "step": 6660 |
| }, |
| { |
| "epoch": 4.506756756756757, |
| "grad_norm": 0.17368100583553314, |
| "learning_rate": 4.93918918918919e-06, |
| "loss": 0.0369, |
| "step": 6670 |
| }, |
| { |
| "epoch": 4.513513513513513, |
| "grad_norm": 0.31108617782592773, |
| "learning_rate": 4.871621621621622e-06, |
| "loss": 0.0527, |
| "step": 6680 |
| }, |
| { |
| "epoch": 4.52027027027027, |
| "grad_norm": 0.13732381165027618, |
| "learning_rate": 4.804054054054054e-06, |
| "loss": 0.0273, |
| "step": 6690 |
| }, |
| { |
| "epoch": 4.527027027027027, |
| "grad_norm": 0.32313647866249084, |
| "learning_rate": 4.736486486486487e-06, |
| "loss": 0.0276, |
| "step": 6700 |
| }, |
| { |
| "epoch": 4.533783783783784, |
| "grad_norm": 0.23118528723716736, |
| "learning_rate": 4.6689189189189195e-06, |
| "loss": 0.0314, |
| "step": 6710 |
| }, |
| { |
| "epoch": 4.54054054054054, |
| "grad_norm": 0.1537824273109436, |
| "learning_rate": 4.601351351351352e-06, |
| "loss": 0.0313, |
| "step": 6720 |
| }, |
| { |
| "epoch": 4.547297297297297, |
| "grad_norm": 0.21601910889148712, |
| "learning_rate": 4.533783783783784e-06, |
| "loss": 0.0311, |
| "step": 6730 |
| }, |
| { |
| "epoch": 4.554054054054054, |
| "grad_norm": 0.2066461145877838, |
| "learning_rate": 4.466216216216216e-06, |
| "loss": 0.0377, |
| "step": 6740 |
| }, |
| { |
| "epoch": 4.5608108108108105, |
| "grad_norm": 10.0748929977417, |
| "learning_rate": 4.398648648648649e-06, |
| "loss": 0.0614, |
| "step": 6750 |
| }, |
| { |
| "epoch": 4.5675675675675675, |
| "grad_norm": 0.4590441584587097, |
| "learning_rate": 4.331081081081082e-06, |
| "loss": 0.0416, |
| "step": 6760 |
| }, |
| { |
| "epoch": 4.574324324324325, |
| "grad_norm": 0.2324705421924591, |
| "learning_rate": 4.263513513513514e-06, |
| "loss": 0.0352, |
| "step": 6770 |
| }, |
| { |
| "epoch": 4.581081081081081, |
| "grad_norm": 0.16836494207382202, |
| "learning_rate": 4.195945945945946e-06, |
| "loss": 0.0269, |
| "step": 6780 |
| }, |
| { |
| "epoch": 4.587837837837838, |
| "grad_norm": 0.12347856163978577, |
| "learning_rate": 4.128378378378378e-06, |
| "loss": 0.0306, |
| "step": 6790 |
| }, |
| { |
| "epoch": 4.594594594594595, |
| "grad_norm": 0.3169582486152649, |
| "learning_rate": 4.0608108108108115e-06, |
| "loss": 0.0412, |
| "step": 6800 |
| }, |
| { |
| "epoch": 4.601351351351351, |
| "grad_norm": 0.42407071590423584, |
| "learning_rate": 3.993243243243244e-06, |
| "loss": 0.0386, |
| "step": 6810 |
| }, |
| { |
| "epoch": 4.608108108108108, |
| "grad_norm": 0.3871265649795532, |
| "learning_rate": 3.925675675675676e-06, |
| "loss": 0.0337, |
| "step": 6820 |
| }, |
| { |
| "epoch": 4.614864864864865, |
| "grad_norm": 0.38576745986938477, |
| "learning_rate": 3.858108108108108e-06, |
| "loss": 0.039, |
| "step": 6830 |
| }, |
| { |
| "epoch": 4.621621621621622, |
| "grad_norm": 0.23177988827228546, |
| "learning_rate": 3.790540540540541e-06, |
| "loss": 0.0324, |
| "step": 6840 |
| }, |
| { |
| "epoch": 4.628378378378378, |
| "grad_norm": 0.22654855251312256, |
| "learning_rate": 3.722972972972973e-06, |
| "loss": 0.0583, |
| "step": 6850 |
| }, |
| { |
| "epoch": 4.635135135135135, |
| "grad_norm": 0.4358038008213043, |
| "learning_rate": 3.655405405405406e-06, |
| "loss": 0.0376, |
| "step": 6860 |
| }, |
| { |
| "epoch": 4.641891891891892, |
| "grad_norm": 0.11105912178754807, |
| "learning_rate": 3.587837837837838e-06, |
| "loss": 0.0294, |
| "step": 6870 |
| }, |
| { |
| "epoch": 4.648648648648649, |
| "grad_norm": 0.36878806352615356, |
| "learning_rate": 3.5202702702702704e-06, |
| "loss": 0.0384, |
| "step": 6880 |
| }, |
| { |
| "epoch": 4.655405405405405, |
| "grad_norm": 0.1898292452096939, |
| "learning_rate": 3.452702702702703e-06, |
| "loss": 0.0299, |
| "step": 6890 |
| }, |
| { |
| "epoch": 4.662162162162162, |
| "grad_norm": 0.21792760491371155, |
| "learning_rate": 3.3851351351351353e-06, |
| "loss": 0.0396, |
| "step": 6900 |
| }, |
| { |
| "epoch": 4.668918918918919, |
| "grad_norm": 0.5358079075813293, |
| "learning_rate": 3.317567567567568e-06, |
| "loss": 0.0329, |
| "step": 6910 |
| }, |
| { |
| "epoch": 4.675675675675675, |
| "grad_norm": 0.19601544737815857, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.0434, |
| "step": 6920 |
| }, |
| { |
| "epoch": 4.6824324324324325, |
| "grad_norm": 0.24563315510749817, |
| "learning_rate": 3.182432432432433e-06, |
| "loss": 0.0292, |
| "step": 6930 |
| }, |
| { |
| "epoch": 4.6891891891891895, |
| "grad_norm": 0.3209024667739868, |
| "learning_rate": 3.114864864864865e-06, |
| "loss": 0.0529, |
| "step": 6940 |
| }, |
| { |
| "epoch": 4.695945945945946, |
| "grad_norm": 0.18662333488464355, |
| "learning_rate": 3.0472972972972974e-06, |
| "loss": 0.0305, |
| "step": 6950 |
| }, |
| { |
| "epoch": 4.702702702702703, |
| "grad_norm": 0.18789201974868774, |
| "learning_rate": 2.97972972972973e-06, |
| "loss": 0.0296, |
| "step": 6960 |
| }, |
| { |
| "epoch": 4.70945945945946, |
| "grad_norm": 0.18934795260429382, |
| "learning_rate": 2.9121621621621623e-06, |
| "loss": 0.0368, |
| "step": 6970 |
| }, |
| { |
| "epoch": 4.716216216216216, |
| "grad_norm": 0.2662386894226074, |
| "learning_rate": 2.844594594594595e-06, |
| "loss": 0.0335, |
| "step": 6980 |
| }, |
| { |
| "epoch": 4.722972972972973, |
| "grad_norm": 0.4388953447341919, |
| "learning_rate": 2.7770270270270273e-06, |
| "loss": 0.0316, |
| "step": 6990 |
| }, |
| { |
| "epoch": 4.72972972972973, |
| "grad_norm": 0.26200464367866516, |
| "learning_rate": 2.7094594594594595e-06, |
| "loss": 0.031, |
| "step": 7000 |
| }, |
| { |
| "epoch": 4.736486486486487, |
| "grad_norm": 0.27926555275917053, |
| "learning_rate": 2.6418918918918922e-06, |
| "loss": 0.0387, |
| "step": 7010 |
| }, |
| { |
| "epoch": 4.743243243243243, |
| "grad_norm": 0.28896600008010864, |
| "learning_rate": 2.5743243243243245e-06, |
| "loss": 0.0347, |
| "step": 7020 |
| }, |
| { |
| "epoch": 4.75, |
| "grad_norm": 0.27116715908050537, |
| "learning_rate": 2.506756756756757e-06, |
| "loss": 0.0307, |
| "step": 7030 |
| }, |
| { |
| "epoch": 4.756756756756757, |
| "grad_norm": 0.29698431491851807, |
| "learning_rate": 2.4391891891891894e-06, |
| "loss": 0.0331, |
| "step": 7040 |
| }, |
| { |
| "epoch": 4.763513513513513, |
| "grad_norm": 0.42871206998825073, |
| "learning_rate": 2.3716216216216217e-06, |
| "loss": 0.0337, |
| "step": 7050 |
| }, |
| { |
| "epoch": 4.77027027027027, |
| "grad_norm": 0.45836636424064636, |
| "learning_rate": 2.3040540540540543e-06, |
| "loss": 0.0314, |
| "step": 7060 |
| }, |
| { |
| "epoch": 4.777027027027027, |
| "grad_norm": 0.31179630756378174, |
| "learning_rate": 2.2364864864864866e-06, |
| "loss": 0.0404, |
| "step": 7070 |
| }, |
| { |
| "epoch": 4.783783783783784, |
| "grad_norm": 0.261393278837204, |
| "learning_rate": 2.1689189189189193e-06, |
| "loss": 0.0371, |
| "step": 7080 |
| }, |
| { |
| "epoch": 4.79054054054054, |
| "grad_norm": 0.5424347519874573, |
| "learning_rate": 2.1013513513513515e-06, |
| "loss": 0.0324, |
| "step": 7090 |
| }, |
| { |
| "epoch": 4.797297297297297, |
| "grad_norm": 0.44092580676078796, |
| "learning_rate": 2.0337837837837838e-06, |
| "loss": 0.0371, |
| "step": 7100 |
| }, |
| { |
| "epoch": 4.804054054054054, |
| "grad_norm": 0.2997783422470093, |
| "learning_rate": 1.9662162162162165e-06, |
| "loss": 0.038, |
| "step": 7110 |
| }, |
| { |
| "epoch": 4.8108108108108105, |
| "grad_norm": 0.270183801651001, |
| "learning_rate": 1.8986486486486487e-06, |
| "loss": 0.0353, |
| "step": 7120 |
| }, |
| { |
| "epoch": 4.8175675675675675, |
| "grad_norm": 0.5547716021537781, |
| "learning_rate": 1.8310810810810812e-06, |
| "loss": 0.0388, |
| "step": 7130 |
| }, |
| { |
| "epoch": 4.824324324324325, |
| "grad_norm": 0.3656560182571411, |
| "learning_rate": 1.7635135135135136e-06, |
| "loss": 0.0338, |
| "step": 7140 |
| }, |
| { |
| "epoch": 4.831081081081081, |
| "grad_norm": 0.17846781015396118, |
| "learning_rate": 1.6959459459459461e-06, |
| "loss": 0.0394, |
| "step": 7150 |
| }, |
| { |
| "epoch": 4.837837837837838, |
| "grad_norm": 0.4710524380207062, |
| "learning_rate": 1.6283783783783784e-06, |
| "loss": 0.0315, |
| "step": 7160 |
| }, |
| { |
| "epoch": 4.844594594594595, |
| "grad_norm": 0.3607494831085205, |
| "learning_rate": 1.5608108108108108e-06, |
| "loss": 0.0313, |
| "step": 7170 |
| }, |
| { |
| "epoch": 4.851351351351351, |
| "grad_norm": 0.3811328709125519, |
| "learning_rate": 1.4932432432432433e-06, |
| "loss": 0.0383, |
| "step": 7180 |
| }, |
| { |
| "epoch": 4.858108108108108, |
| "grad_norm": 0.23322060704231262, |
| "learning_rate": 1.4256756756756758e-06, |
| "loss": 0.0339, |
| "step": 7190 |
| }, |
| { |
| "epoch": 4.864864864864865, |
| "grad_norm": 0.2853488028049469, |
| "learning_rate": 1.3581081081081082e-06, |
| "loss": 0.0327, |
| "step": 7200 |
| }, |
| { |
| "epoch": 4.871621621621622, |
| "grad_norm": 0.3856007158756256, |
| "learning_rate": 1.2905405405405407e-06, |
| "loss": 0.0328, |
| "step": 7210 |
| }, |
| { |
| "epoch": 4.878378378378378, |
| "grad_norm": 0.3511975109577179, |
| "learning_rate": 1.222972972972973e-06, |
| "loss": 0.0525, |
| "step": 7220 |
| }, |
| { |
| "epoch": 4.885135135135135, |
| "grad_norm": 0.3545476198196411, |
| "learning_rate": 1.1554054054054054e-06, |
| "loss": 0.0379, |
| "step": 7230 |
| }, |
| { |
| "epoch": 4.891891891891892, |
| "grad_norm": 0.1747957319021225, |
| "learning_rate": 1.087837837837838e-06, |
| "loss": 0.0246, |
| "step": 7240 |
| }, |
| { |
| "epoch": 4.898648648648649, |
| "grad_norm": 0.3207055926322937, |
| "learning_rate": 1.0202702702702704e-06, |
| "loss": 0.03, |
| "step": 7250 |
| }, |
| { |
| "epoch": 4.905405405405405, |
| "grad_norm": 0.1513839215040207, |
| "learning_rate": 9.527027027027027e-07, |
| "loss": 0.0366, |
| "step": 7260 |
| }, |
| { |
| "epoch": 4.912162162162162, |
| "grad_norm": 0.5031633377075195, |
| "learning_rate": 8.851351351351352e-07, |
| "loss": 0.0289, |
| "step": 7270 |
| }, |
| { |
| "epoch": 4.918918918918919, |
| "grad_norm": 0.27607399225234985, |
| "learning_rate": 8.175675675675676e-07, |
| "loss": 0.0387, |
| "step": 7280 |
| }, |
| { |
| "epoch": 4.925675675675675, |
| "grad_norm": 0.2901621460914612, |
| "learning_rate": 7.5e-07, |
| "loss": 0.039, |
| "step": 7290 |
| }, |
| { |
| "epoch": 4.9324324324324325, |
| "grad_norm": 0.22750946879386902, |
| "learning_rate": 6.824324324324325e-07, |
| "loss": 0.0443, |
| "step": 7300 |
| }, |
| { |
| "epoch": 4.9391891891891895, |
| "grad_norm": 0.3207789361476898, |
| "learning_rate": 6.148648648648648e-07, |
| "loss": 0.0367, |
| "step": 7310 |
| }, |
| { |
| "epoch": 4.945945945945946, |
| "grad_norm": 0.28913548588752747, |
| "learning_rate": 5.472972972972973e-07, |
| "loss": 0.0281, |
| "step": 7320 |
| }, |
| { |
| "epoch": 4.952702702702703, |
| "grad_norm": 0.19479107856750488, |
| "learning_rate": 4.797297297297297e-07, |
| "loss": 0.0303, |
| "step": 7330 |
| }, |
| { |
| "epoch": 4.95945945945946, |
| "grad_norm": 0.4693133234977722, |
| "learning_rate": 4.1216216216216225e-07, |
| "loss": 0.035, |
| "step": 7340 |
| }, |
| { |
| "epoch": 4.966216216216216, |
| "grad_norm": 0.1580437868833542, |
| "learning_rate": 3.445945945945946e-07, |
| "loss": 0.0396, |
| "step": 7350 |
| }, |
| { |
| "epoch": 4.972972972972973, |
| "grad_norm": 0.2763003706932068, |
| "learning_rate": 2.77027027027027e-07, |
| "loss": 0.0332, |
| "step": 7360 |
| }, |
| { |
| "epoch": 4.97972972972973, |
| "grad_norm": 0.26670750975608826, |
| "learning_rate": 2.0945945945945946e-07, |
| "loss": 0.0388, |
| "step": 7370 |
| }, |
| { |
| "epoch": 4.986486486486487, |
| "grad_norm": 0.7256395816802979, |
| "learning_rate": 1.418918918918919e-07, |
| "loss": 0.0325, |
| "step": 7380 |
| }, |
| { |
| "epoch": 4.993243243243243, |
| "grad_norm": 0.47905421257019043, |
| "learning_rate": 7.432432432432433e-08, |
| "loss": 0.0383, |
| "step": 7390 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 0.29960232973098755, |
| "learning_rate": 6.756756756756757e-09, |
| "loss": 0.0319, |
| "step": 7400 |
| }, |
| { |
| "epoch": 5.0, |
| "eval_loss": 0.02321171946823597, |
| "eval_runtime": 241.491, |
| "eval_samples_per_second": 4.327, |
| "eval_steps_per_second": 1.085, |
| "step": 7400 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 7400, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1235384799068160.0, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |
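Since the trainer state above is plain JSON, the loss curve and the linear learning-rate decay it records can be inspected directly from `log_history`. Below is a minimal sketch of how one might do that, assuming the file sits under its conventional name `trainer_state.json` inside a checkpoint directory and that `matplotlib` is installed; the path and the output filename `training_curves.png` are illustrative, not part of the log itself.

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state. "trainer_state.json" is the conventional filename
# inside a transformers checkpoint directory; adjust the path to your run.
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes two kinds of entries: per-step training logs
# (every `logging_steps` = 10 steps, carrying "loss" and "learning_rate")
# and evaluation logs (carrying "eval_loss" instead).
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))

# Training loss spans several orders of magnitude over the run,
# so a log scale keeps both the early and late regime visible.
ax_loss.plot(steps, losses)
ax_loss.set_yscale("log")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")

# The logged learning rate falls by a constant amount per step:
# a linear decay schedule reaching ~0 at max_steps = 7400.
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("training_curves.png")

# Print the evaluation entries; in this section of the run the final
# eval at step 7400 reports eval_loss ~= 0.02321.
for e in eval_logs:
    print(f"step {e['step']}: eval_loss = {e['eval_loss']:.5f}")
```

Splitting entries by key (`"loss"` vs. `"eval_loss"`) is the simplest way to separate the two record types, since the Trainer interleaves them in a single `log_history` array rather than keeping separate streams.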