{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.989690721649485,
  "eval_steps": 500,
  "global_step": 1815,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0027491408934707906,
      "grad_norm": 6.006607135034925,
      "learning_rate": 4.395604395604396e-07,
      "loss": 1.1009,
      "step": 1
    },
    {
      "epoch": 0.005498281786941581,
      "grad_norm": 6.018597667128207,
      "learning_rate": 8.791208791208792e-07,
      "loss": 1.1005,
      "step": 2
    },
    {
      "epoch": 0.008247422680412371,
      "grad_norm": 6.073361462181881,
      "learning_rate": 1.3186813186813187e-06,
      "loss": 1.1055,
      "step": 3
    },
    {
      "epoch": 0.010996563573883162,
      "grad_norm": 5.961879860389406,
      "learning_rate": 1.7582417582417585e-06,
      "loss": 1.1099,
      "step": 4
    },
    {
      "epoch": 0.013745704467353952,
      "grad_norm": 5.617774190916827,
      "learning_rate": 2.197802197802198e-06,
      "loss": 1.081,
      "step": 5
    },
    {
      "epoch": 0.016494845360824743,
      "grad_norm": 4.531649230749474,
      "learning_rate": 2.6373626373626375e-06,
      "loss": 1.0487,
      "step": 6
    },
    {
      "epoch": 0.019243986254295534,
      "grad_norm": 4.217358609239622,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 1.0556,
      "step": 7
    },
    {
      "epoch": 0.021993127147766325,
      "grad_norm": 2.3655606096533908,
      "learning_rate": 3.516483516483517e-06,
      "loss": 1.0049,
      "step": 8
    },
    {
      "epoch": 0.024742268041237112,
      "grad_norm": 2.0449739951596695,
      "learning_rate": 3.9560439560439565e-06,
      "loss": 0.9803,
      "step": 9
    },
    {
      "epoch": 0.027491408934707903,
      "grad_norm": 2.8045358100308406,
      "learning_rate": 4.395604395604396e-06,
      "loss": 0.9589,
      "step": 10
    },
    {
      "epoch": 0.030240549828178694,
      "grad_norm": 3.662777544001548,
      "learning_rate": 4.8351648351648355e-06,
      "loss": 0.9677,
      "step": 11
    },
    {
      "epoch": 0.032989690721649485,
      "grad_norm": 3.5905401528260374,
      "learning_rate": 5.274725274725275e-06,
      "loss": 0.96,
      "step": 12
    },
    {
      "epoch": 0.03573883161512027,
      "grad_norm": 3.3209223305008964,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.9481,
      "step": 13
    },
    {
      "epoch": 0.03848797250859107,
      "grad_norm": 2.3880367341332343,
      "learning_rate": 6.153846153846155e-06,
      "loss": 0.9057,
      "step": 14
    },
    {
      "epoch": 0.041237113402061855,
      "grad_norm": 2.269345734711135,
      "learning_rate": 6.5934065934065935e-06,
      "loss": 0.9048,
      "step": 15
    },
    {
      "epoch": 0.04398625429553265,
      "grad_norm": 1.7351295951376182,
      "learning_rate": 7.032967032967034e-06,
      "loss": 0.8869,
      "step": 16
    },
    {
      "epoch": 0.04673539518900344,
      "grad_norm": 1.5080912194950085,
      "learning_rate": 7.472527472527473e-06,
      "loss": 0.8503,
      "step": 17
    },
    {
      "epoch": 0.049484536082474224,
      "grad_norm": 1.4431136736456789,
      "learning_rate": 7.912087912087913e-06,
      "loss": 0.8405,
      "step": 18
    },
    {
      "epoch": 0.05223367697594502,
      "grad_norm": 1.4712422848105347,
      "learning_rate": 8.351648351648353e-06,
      "loss": 0.8274,
      "step": 19
    },
    {
      "epoch": 0.054982817869415807,
      "grad_norm": 1.3178930536171367,
      "learning_rate": 8.791208791208792e-06,
      "loss": 0.8428,
      "step": 20
    },
    {
      "epoch": 0.0577319587628866,
      "grad_norm": 1.2703340647552794,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.8211,
      "step": 21
    },
    {
      "epoch": 0.06048109965635739,
      "grad_norm": 1.2871317510857054,
      "learning_rate": 9.670329670329671e-06,
      "loss": 0.8053,
      "step": 22
    },
    {
      "epoch": 0.06323024054982818,
      "grad_norm": 1.1237165728026322,
      "learning_rate": 1.010989010989011e-05,
      "loss": 0.8101,
      "step": 23
    },
    {
      "epoch": 0.06597938144329897,
      "grad_norm": 0.9168689840127058,
      "learning_rate": 1.054945054945055e-05,
      "loss": 0.7837,
      "step": 24
    },
    {
      "epoch": 0.06872852233676977,
      "grad_norm": 0.9355697137818322,
      "learning_rate": 1.098901098901099e-05,
      "loss": 0.79,
      "step": 25
    },
    {
      "epoch": 0.07147766323024055,
      "grad_norm": 0.8919194529855764,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 0.7927,
      "step": 26
    },
    {
      "epoch": 0.07422680412371134,
      "grad_norm": 0.8013095068191387,
      "learning_rate": 1.186813186813187e-05,
      "loss": 0.7826,
      "step": 27
    },
    {
      "epoch": 0.07697594501718213,
      "grad_norm": 0.7074089625670721,
      "learning_rate": 1.230769230769231e-05,
      "loss": 0.77,
      "step": 28
    },
    {
      "epoch": 0.07972508591065292,
      "grad_norm": 0.5939851673949501,
      "learning_rate": 1.2747252747252747e-05,
      "loss": 0.764,
      "step": 29
    },
    {
      "epoch": 0.08247422680412371,
      "grad_norm": 0.7291769706418423,
      "learning_rate": 1.3186813186813187e-05,
      "loss": 0.7667,
      "step": 30
    },
    {
      "epoch": 0.0852233676975945,
      "grad_norm": 0.5476356706171729,
      "learning_rate": 1.3626373626373627e-05,
      "loss": 0.7669,
      "step": 31
    },
    {
      "epoch": 0.0879725085910653,
      "grad_norm": 0.496258207605204,
      "learning_rate": 1.4065934065934068e-05,
      "loss": 0.7617,
      "step": 32
    },
    {
      "epoch": 0.09072164948453608,
      "grad_norm": 0.5382769338196303,
      "learning_rate": 1.4505494505494506e-05,
      "loss": 0.7554,
      "step": 33
    },
    {
      "epoch": 0.09347079037800687,
      "grad_norm": 0.4580628373124881,
      "learning_rate": 1.4945054945054947e-05,
      "loss": 0.75,
      "step": 34
    },
    {
      "epoch": 0.09621993127147767,
      "grad_norm": 0.40847997114284523,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 0.7467,
      "step": 35
    },
    {
      "epoch": 0.09896907216494845,
      "grad_norm": 0.5031600722774845,
      "learning_rate": 1.5824175824175826e-05,
      "loss": 0.7383,
      "step": 36
    },
    {
      "epoch": 0.10171821305841924,
      "grad_norm": 0.3529000118564782,
      "learning_rate": 1.6263736263736265e-05,
      "loss": 0.7524,
      "step": 37
    },
    {
      "epoch": 0.10446735395189004,
      "grad_norm": 0.40270165373486655,
      "learning_rate": 1.6703296703296707e-05,
      "loss": 0.7371,
      "step": 38
    },
    {
      "epoch": 0.10721649484536082,
      "grad_norm": 0.3743730526645748,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 0.7332,
      "step": 39
    },
    {
      "epoch": 0.10996563573883161,
      "grad_norm": 0.3802324817797124,
      "learning_rate": 1.7582417582417584e-05,
      "loss": 0.7301,
      "step": 40
    },
    {
      "epoch": 0.11271477663230241,
      "grad_norm": 0.38122013544181327,
      "learning_rate": 1.8021978021978023e-05,
      "loss": 0.7481,
      "step": 41
    },
    {
      "epoch": 0.1154639175257732,
      "grad_norm": 0.3807629715206352,
      "learning_rate": 1.8461538461538465e-05,
      "loss": 0.7234,
      "step": 42
    },
    {
      "epoch": 0.11821305841924398,
      "grad_norm": 0.3690276896533298,
      "learning_rate": 1.8901098901098903e-05,
      "loss": 0.7382,
      "step": 43
    },
    {
      "epoch": 0.12096219931271478,
      "grad_norm": 0.39354181368093777,
      "learning_rate": 1.9340659340659342e-05,
      "loss": 0.7274,
      "step": 44
    },
    {
      "epoch": 0.12371134020618557,
      "grad_norm": 0.45930210506482494,
      "learning_rate": 1.9780219780219784e-05,
      "loss": 0.7222,
      "step": 45
    },
    {
      "epoch": 0.12646048109965635,
      "grad_norm": 0.3257242749282543,
      "learning_rate": 2.021978021978022e-05,
      "loss": 0.7062,
      "step": 46
    },
    {
      "epoch": 0.12920962199312716,
      "grad_norm": 0.4894322683246899,
      "learning_rate": 2.0659340659340665e-05,
      "loss": 0.7273,
      "step": 47
    },
    {
      "epoch": 0.13195876288659794,
      "grad_norm": 0.4217505425727167,
      "learning_rate": 2.10989010989011e-05,
      "loss": 0.7144,
      "step": 48
    },
    {
      "epoch": 0.13470790378006872,
      "grad_norm": 0.34926375879792276,
      "learning_rate": 2.153846153846154e-05,
      "loss": 0.7284,
      "step": 49
    },
    {
      "epoch": 0.13745704467353953,
      "grad_norm": 0.37706775972489026,
      "learning_rate": 2.197802197802198e-05,
      "loss": 0.7197,
      "step": 50
    },
    {
      "epoch": 0.1402061855670103,
      "grad_norm": 0.4461248020784725,
      "learning_rate": 2.241758241758242e-05,
      "loss": 0.7168,
      "step": 51
    },
    {
      "epoch": 0.1429553264604811,
      "grad_norm": 0.45683903751388155,
      "learning_rate": 2.2857142857142858e-05,
      "loss": 0.7047,
      "step": 52
    },
    {
      "epoch": 0.1457044673539519,
      "grad_norm": 0.4941785029111688,
      "learning_rate": 2.32967032967033e-05,
      "loss": 0.7203,
      "step": 53
    },
    {
      "epoch": 0.14845360824742268,
      "grad_norm": 0.7101247658482969,
      "learning_rate": 2.373626373626374e-05,
      "loss": 0.7132,
      "step": 54
    },
    {
      "epoch": 0.15120274914089346,
      "grad_norm": 1.3845226821923962,
      "learning_rate": 2.4175824175824177e-05,
      "loss": 0.7139,
      "step": 55
    },
    {
      "epoch": 0.15395189003436427,
      "grad_norm": 0.9145522126443197,
      "learning_rate": 2.461538461538462e-05,
      "loss": 0.7201,
      "step": 56
    },
    {
      "epoch": 0.15670103092783505,
      "grad_norm": 0.7316554124403013,
      "learning_rate": 2.5054945054945058e-05,
      "loss": 0.6997,
      "step": 57
    },
    {
      "epoch": 0.15945017182130583,
      "grad_norm": 1.525604886016204,
      "learning_rate": 2.5494505494505493e-05,
      "loss": 0.708,
      "step": 58
    },
    {
      "epoch": 0.16219931271477664,
      "grad_norm": 0.9569983399077112,
      "learning_rate": 2.593406593406594e-05,
      "loss": 0.7023,
      "step": 59
    },
    {
      "epoch": 0.16494845360824742,
      "grad_norm": 1.222421966279637,
      "learning_rate": 2.6373626373626374e-05,
      "loss": 0.699,
      "step": 60
    },
    {
      "epoch": 0.1676975945017182,
      "grad_norm": 1.1284003579139785,
      "learning_rate": 2.6813186813186813e-05,
      "loss": 0.6944,
      "step": 61
    },
    {
      "epoch": 0.170446735395189,
      "grad_norm": 1.323080634931126,
      "learning_rate": 2.7252747252747255e-05,
      "loss": 0.6897,
      "step": 62
    },
    {
      "epoch": 0.1731958762886598,
      "grad_norm": 0.9918797937768813,
      "learning_rate": 2.7692307692307694e-05,
      "loss": 0.6937,
      "step": 63
    },
    {
      "epoch": 0.1759450171821306,
      "grad_norm": 1.4485583433380003,
      "learning_rate": 2.8131868131868136e-05,
      "loss": 0.6924,
      "step": 64
    },
    {
      "epoch": 0.17869415807560138,
      "grad_norm": 0.9586990948210379,
      "learning_rate": 2.8571428571428574e-05,
      "loss": 0.7036,
      "step": 65
    },
    {
      "epoch": 0.18144329896907216,
      "grad_norm": 1.5401125351907554,
      "learning_rate": 2.9010989010989013e-05,
      "loss": 0.7067,
      "step": 66
    },
    {
      "epoch": 0.18419243986254297,
      "grad_norm": 1.149941673251316,
      "learning_rate": 2.9450549450549455e-05,
      "loss": 0.6846,
      "step": 67
    },
    {
      "epoch": 0.18694158075601375,
      "grad_norm": 1.398728105276103,
      "learning_rate": 2.9890109890109894e-05,
      "loss": 0.7001,
      "step": 68
    },
    {
      "epoch": 0.18969072164948453,
      "grad_norm": 1.3380685614311483,
      "learning_rate": 3.0329670329670332e-05,
      "loss": 0.7017,
      "step": 69
    },
    {
      "epoch": 0.19243986254295534,
      "grad_norm": 0.966967948159332,
      "learning_rate": 3.0769230769230774e-05,
      "loss": 0.6945,
      "step": 70
    },
    {
      "epoch": 0.19518900343642612,
      "grad_norm": 1.2949156130535089,
      "learning_rate": 3.120879120879121e-05,
      "loss": 0.6983,
      "step": 71
    },
    {
      "epoch": 0.1979381443298969,
      "grad_norm": 0.9121501279786434,
      "learning_rate": 3.164835164835165e-05,
      "loss": 0.6944,
      "step": 72
    },
    {
      "epoch": 0.2006872852233677,
      "grad_norm": 0.7744380611271706,
      "learning_rate": 3.2087912087912094e-05,
      "loss": 0.6797,
      "step": 73
    },
    {
      "epoch": 0.2034364261168385,
      "grad_norm": 1.171893254170078,
      "learning_rate": 3.252747252747253e-05,
      "loss": 0.6916,
      "step": 74
    },
    {
      "epoch": 0.20618556701030927,
      "grad_norm": 1.432067598703686,
      "learning_rate": 3.296703296703297e-05,
      "loss": 0.696,
      "step": 75
    },
    {
      "epoch": 0.20893470790378008,
      "grad_norm": 2.015800500571526,
      "learning_rate": 3.340659340659341e-05,
      "loss": 0.6946,
      "step": 76
    },
    {
      "epoch": 0.21168384879725086,
      "grad_norm": 1.1450267893677495,
      "learning_rate": 3.384615384615385e-05,
      "loss": 0.6938,
      "step": 77
    },
    {
      "epoch": 0.21443298969072164,
      "grad_norm": 3.012444154989314,
      "learning_rate": 3.4285714285714284e-05,
      "loss": 0.708,
      "step": 78
    },
    {
      "epoch": 0.21718213058419245,
      "grad_norm": 2.938842593069413,
      "learning_rate": 3.4725274725274726e-05,
      "loss": 0.7043,
      "step": 79
    },
    {
      "epoch": 0.21993127147766323,
      "grad_norm": 1.2620210326118846,
      "learning_rate": 3.516483516483517e-05,
      "loss": 0.7015,
      "step": 80
    },
    {
      "epoch": 0.22268041237113403,
      "grad_norm": 1.8136600496263213,
      "learning_rate": 3.56043956043956e-05,
      "loss": 0.6934,
      "step": 81
    },
    {
      "epoch": 0.22542955326460482,
      "grad_norm": 1.4037155277378568,
      "learning_rate": 3.6043956043956045e-05,
      "loss": 0.6923,
      "step": 82
    },
    {
      "epoch": 0.2281786941580756,
      "grad_norm": 1.3955889629454836,
      "learning_rate": 3.648351648351649e-05,
      "loss": 0.6932,
      "step": 83
    },
    {
      "epoch": 0.2309278350515464,
      "grad_norm": 1.4401203218805492,
      "learning_rate": 3.692307692307693e-05,
      "loss": 0.6837,
      "step": 84
    },
    {
      "epoch": 0.23367697594501718,
      "grad_norm": 1.1506286717804464,
      "learning_rate": 3.7362637362637365e-05,
      "loss": 0.6824,
      "step": 85
    },
    {
      "epoch": 0.23642611683848797,
      "grad_norm": 1.4015178509079977,
      "learning_rate": 3.7802197802197807e-05,
      "loss": 0.6818,
      "step": 86
    },
    {
      "epoch": 0.23917525773195877,
      "grad_norm": 0.8632967007668437,
      "learning_rate": 3.824175824175825e-05,
      "loss": 0.6881,
      "step": 87
    },
    {
      "epoch": 0.24192439862542955,
      "grad_norm": 1.7108832289817475,
      "learning_rate": 3.8681318681318684e-05,
      "loss": 0.694,
      "step": 88
    },
    {
      "epoch": 0.24467353951890033,
      "grad_norm": 1.2330487896649007,
      "learning_rate": 3.9120879120879126e-05,
      "loss": 0.682,
      "step": 89
    },
    {
      "epoch": 0.24742268041237114,
      "grad_norm": 1.2807204902880718,
      "learning_rate": 3.956043956043957e-05,
      "loss": 0.6851,
      "step": 90
    },
    {
      "epoch": 0.2501718213058419,
      "grad_norm": 1.327068957190879,
      "learning_rate": 4e-05,
      "loss": 0.6807,
      "step": 91
    },
    {
      "epoch": 0.2529209621993127,
      "grad_norm": 1.0022125576592875,
      "learning_rate": 4.043956043956044e-05,
      "loss": 0.6782,
      "step": 92
    },
    {
      "epoch": 0.2556701030927835,
      "grad_norm": 1.3221553663136452,
      "learning_rate": 4.087912087912088e-05,
      "loss": 0.6759,
      "step": 93
    },
    {
      "epoch": 0.2584192439862543,
      "grad_norm": 0.9568766990099716,
      "learning_rate": 4.131868131868133e-05,
      "loss": 0.6873,
      "step": 94
    },
    {
      "epoch": 0.2611683848797251,
      "grad_norm": 1.7172395359463215,
      "learning_rate": 4.1758241758241765e-05,
      "loss": 0.6814,
      "step": 95
    },
    {
      "epoch": 0.2639175257731959,
      "grad_norm": 1.5142722720152748,
      "learning_rate": 4.21978021978022e-05,
      "loss": 0.682,
      "step": 96
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.8979115368433407,
      "learning_rate": 4.263736263736264e-05,
      "loss": 0.6828,
      "step": 97
    },
    {
      "epoch": 0.26941580756013744,
      "grad_norm": 1.3321313501935168,
      "learning_rate": 4.307692307692308e-05,
      "loss": 0.687,
      "step": 98
    },
    {
      "epoch": 0.2721649484536082,
      "grad_norm": 1.6997841277714232,
      "learning_rate": 4.351648351648352e-05,
      "loss": 0.6759,
      "step": 99
    },
    {
      "epoch": 0.27491408934707906,
      "grad_norm": 1.0580779198597339,
      "learning_rate": 4.395604395604396e-05,
      "loss": 0.6743,
      "step": 100
    },
    {
      "epoch": 0.27766323024054984,
      "grad_norm": 2.3143610998320896,
      "learning_rate": 4.4395604395604403e-05,
      "loss": 0.6684,
      "step": 101
    },
    {
      "epoch": 0.2804123711340206,
      "grad_norm": 1.370198490883026,
      "learning_rate": 4.483516483516484e-05,
      "loss": 0.6643,
      "step": 102
    },
    {
      "epoch": 0.2831615120274914,
      "grad_norm": 2.7976008957802088,
      "learning_rate": 4.527472527472528e-05,
      "loss": 0.6878,
      "step": 103
    },
    {
      "epoch": 0.2859106529209622,
      "grad_norm": 2.539570133020213,
      "learning_rate": 4.5714285714285716e-05,
      "loss": 0.6889,
      "step": 104
    },
    {
      "epoch": 0.28865979381443296,
      "grad_norm": 1.7701837403812288,
      "learning_rate": 4.615384615384615e-05,
      "loss": 0.6794,
      "step": 105
    },
    {
      "epoch": 0.2914089347079038,
      "grad_norm": 1.6244664002638955,
      "learning_rate": 4.65934065934066e-05,
      "loss": 0.6798,
      "step": 106
    },
    {
      "epoch": 0.2941580756013746,
      "grad_norm": 1.658296656226508,
      "learning_rate": 4.7032967032967035e-05,
      "loss": 0.6852,
      "step": 107
    },
    {
      "epoch": 0.29690721649484536,
      "grad_norm": 1.3133889484198134,
      "learning_rate": 4.747252747252748e-05,
      "loss": 0.6695,
      "step": 108
    },
    {
      "epoch": 0.29965635738831614,
      "grad_norm": 1.5315701499343,
      "learning_rate": 4.791208791208791e-05,
      "loss": 0.6842,
      "step": 109
    },
    {
      "epoch": 0.3024054982817869,
      "grad_norm": 1.5106029998832964,
      "learning_rate": 4.8351648351648355e-05,
      "loss": 0.6728,
      "step": 110
    },
    {
      "epoch": 0.30515463917525776,
      "grad_norm": 1.2198017486153612,
      "learning_rate": 4.87912087912088e-05,
      "loss": 0.6804,
      "step": 111
    },
    {
      "epoch": 0.30790378006872854,
      "grad_norm": 0.9408294254788567,
      "learning_rate": 4.923076923076924e-05,
      "loss": 0.6673,
      "step": 112
    },
    {
      "epoch": 0.3106529209621993,
      "grad_norm": 1.4299436206183789,
      "learning_rate": 4.9670329670329674e-05,
      "loss": 0.6782,
      "step": 113
    },
    {
      "epoch": 0.3134020618556701,
      "grad_norm": 1.3010524254683744,
      "learning_rate": 5.0109890109890116e-05,
      "loss": 0.6719,
      "step": 114
    },
    {
      "epoch": 0.3161512027491409,
      "grad_norm": 1.0294736794964319,
      "learning_rate": 5.054945054945055e-05,
      "loss": 0.6734,
      "step": 115
    },
    {
      "epoch": 0.31890034364261166,
      "grad_norm": 1.9844841183716888,
      "learning_rate": 5.098901098901099e-05,
      "loss": 0.6689,
      "step": 116
    },
    {
      "epoch": 0.3216494845360825,
      "grad_norm": 1.0769609751269622,
      "learning_rate": 5.1428571428571436e-05,
      "loss": 0.6725,
      "step": 117
    },
    {
      "epoch": 0.3243986254295533,
      "grad_norm": 1.9412353253336612,
      "learning_rate": 5.186813186813188e-05,
      "loss": 0.6788,
      "step": 118
    },
    {
      "epoch": 0.32714776632302406,
      "grad_norm": 1.2983006823972247,
      "learning_rate": 5.230769230769231e-05,
      "loss": 0.6717,
      "step": 119
    },
    {
      "epoch": 0.32989690721649484,
      "grad_norm": 1.78349719868111,
      "learning_rate": 5.274725274725275e-05,
      "loss": 0.6754,
      "step": 120
    },
    {
      "epoch": 0.3326460481099656,
      "grad_norm": 1.2685409855209113,
      "learning_rate": 5.318681318681319e-05,
      "loss": 0.6689,
      "step": 121
    },
    {
      "epoch": 0.3353951890034364,
      "grad_norm": 1.601626046067794,
      "learning_rate": 5.3626373626373626e-05,
      "loss": 0.6822,
      "step": 122
    },
    {
      "epoch": 0.33814432989690724,
      "grad_norm": 1.6399160217312283,
      "learning_rate": 5.4065934065934074e-05,
      "loss": 0.6939,
      "step": 123
    },
    {
      "epoch": 0.340893470790378,
      "grad_norm": 1.0496938208520434,
      "learning_rate": 5.450549450549451e-05,
      "loss": 0.6685,
      "step": 124
    },
    {
      "epoch": 0.3436426116838488,
      "grad_norm": 1.8210641674833647,
      "learning_rate": 5.494505494505495e-05,
      "loss": 0.6726,
      "step": 125
    },
    {
      "epoch": 0.3463917525773196,
      "grad_norm": 1.0994677896279192,
      "learning_rate": 5.538461538461539e-05,
      "loss": 0.6609,
      "step": 126
    },
    {
      "epoch": 0.34914089347079036,
      "grad_norm": 1.7375889267050222,
      "learning_rate": 5.582417582417583e-05,
      "loss": 0.68,
      "step": 127
    },
    {
      "epoch": 0.3518900343642612,
      "grad_norm": 1.1687845090860516,
      "learning_rate": 5.626373626373627e-05,
      "loss": 0.6715,
      "step": 128
    },
    {
      "epoch": 0.354639175257732,
      "grad_norm": 1.395445256796641,
      "learning_rate": 5.670329670329671e-05,
      "loss": 0.6681,
      "step": 129
    },
    {
      "epoch": 0.35738831615120276,
      "grad_norm": 1.355862476688157,
      "learning_rate": 5.714285714285715e-05,
      "loss": 0.6818,
      "step": 130
    },
    {
      "epoch": 0.36013745704467354,
      "grad_norm": 1.0924489864173972,
      "learning_rate": 5.7582417582417584e-05,
      "loss": 0.6605,
      "step": 131
    },
    {
      "epoch": 0.3628865979381443,
      "grad_norm": 1.9384854103627827,
      "learning_rate": 5.8021978021978026e-05,
      "loss": 0.6731,
      "step": 132
    },
    {
      "epoch": 0.3656357388316151,
      "grad_norm": 1.5529834412680505,
      "learning_rate": 5.846153846153846e-05,
      "loss": 0.675,
      "step": 133
    },
    {
      "epoch": 0.36838487972508593,
      "grad_norm": 0.9671293876862966,
      "learning_rate": 5.890109890109891e-05,
      "loss": 0.661,
      "step": 134
    },
    {
      "epoch": 0.3711340206185567,
      "grad_norm": 1.5434547051402783,
      "learning_rate": 5.9340659340659345e-05,
      "loss": 0.673,
      "step": 135
    },
    {
      "epoch": 0.3738831615120275,
      "grad_norm": 1.466260971358487,
      "learning_rate": 5.978021978021979e-05,
      "loss": 0.6799,
      "step": 136
    },
    {
      "epoch": 0.3766323024054983,
      "grad_norm": 1.3025182971790092,
      "learning_rate": 6.021978021978022e-05,
      "loss": 0.656,
      "step": 137
    },
    {
      "epoch": 0.37938144329896906,
      "grad_norm": 1.5407519214603846,
      "learning_rate": 6.0659340659340665e-05,
      "loss": 0.6703,
      "step": 138
    },
    {
      "epoch": 0.38213058419243984,
      "grad_norm": 1.2435864649597408,
      "learning_rate": 6.10989010989011e-05,
      "loss": 0.6699,
      "step": 139
    },
    {
      "epoch": 0.3848797250859107,
      "grad_norm": 1.32415071563785,
      "learning_rate": 6.153846153846155e-05,
      "loss": 0.6557,
      "step": 140
    },
    {
      "epoch": 0.38762886597938145,
      "grad_norm": 1.5121087379761295,
      "learning_rate": 6.197802197802199e-05,
      "loss": 0.6659,
      "step": 141
    },
    {
      "epoch": 0.39037800687285223,
      "grad_norm": 1.3570253258903069,
      "learning_rate": 6.241758241758242e-05,
      "loss": 0.6568,
      "step": 142
    },
    {
      "epoch": 0.393127147766323,
      "grad_norm": 1.2775583571637077,
      "learning_rate": 6.285714285714286e-05,
      "loss": 0.6558,
      "step": 143
    },
    {
      "epoch": 0.3958762886597938,
      "grad_norm": 1.029079383256909,
      "learning_rate": 6.32967032967033e-05,
      "loss": 0.6569,
      "step": 144
    },
    {
      "epoch": 0.39862542955326463,
      "grad_norm": 1.379942185445747,
      "learning_rate": 6.373626373626373e-05,
      "loss": 0.6664,
      "step": 145
    },
    {
      "epoch": 0.4013745704467354,
      "grad_norm": 1.7317621636752105,
      "learning_rate": 6.417582417582419e-05,
      "loss": 0.6675,
      "step": 146
    },
    {
      "epoch": 0.4041237113402062,
      "grad_norm": 1.0553856530228192,
      "learning_rate": 6.461538461538462e-05,
      "loss": 0.6623,
      "step": 147
    },
    {
      "epoch": 0.406872852233677,
      "grad_norm": 1.310533202428338,
      "learning_rate": 6.505494505494506e-05,
      "loss": 0.6658,
      "step": 148
    },
    {
      "epoch": 0.40962199312714775,
      "grad_norm": 1.0365623836624396,
      "learning_rate": 6.54945054945055e-05,
      "loss": 0.6539,
      "step": 149
    },
    {
      "epoch": 0.41237113402061853,
      "grad_norm": 1.5824321098093788,
      "learning_rate": 6.593406593406594e-05,
      "loss": 0.672,
      "step": 150
    },
    {
      "epoch": 0.41512027491408937,
      "grad_norm": 1.2342253922087731,
      "learning_rate": 6.637362637362638e-05,
      "loss": 0.6609,
      "step": 151
    },
    {
      "epoch": 0.41786941580756015,
      "grad_norm": 2.2157482513280797,
      "learning_rate": 6.681318681318683e-05,
      "loss": 0.6614,
      "step": 152
    },
    {
      "epoch": 0.42061855670103093,
      "grad_norm": 0.8519496697821355,
      "learning_rate": 6.725274725274725e-05,
      "loss": 0.6558,
      "step": 153
    },
    {
      "epoch": 0.4233676975945017,
      "grad_norm": 2.585667143290517,
      "learning_rate": 6.76923076923077e-05,
      "loss": 0.6712,
      "step": 154
    },
    {
      "epoch": 0.4261168384879725,
      "grad_norm": 1.561949044082342,
      "learning_rate": 6.813186813186814e-05,
      "loss": 0.6606,
      "step": 155
    },
    {
      "epoch": 0.4288659793814433,
      "grad_norm": 1.6541712978836747,
      "learning_rate": 6.857142857142857e-05,
      "loss": 0.6662,
      "step": 156
    },
    {
      "epoch": 0.4316151202749141,
      "grad_norm": 1.471980620525164,
      "learning_rate": 6.901098901098902e-05,
      "loss": 0.6688,
      "step": 157
    },
    {
      "epoch": 0.4343642611683849,
      "grad_norm": 1.4565326176621909,
      "learning_rate": 6.945054945054945e-05,
      "loss": 0.668,
      "step": 158
    },
    {
      "epoch": 0.43711340206185567,
      "grad_norm": 1.3548150534245453,
      "learning_rate": 6.98901098901099e-05,
      "loss": 0.6628,
      "step": 159
    },
    {
      "epoch": 0.43986254295532645,
      "grad_norm": 1.2045551451888672,
      "learning_rate": 7.032967032967034e-05,
      "loss": 0.6616,
      "step": 160
    },
    {
      "epoch": 0.44261168384879723,
      "grad_norm": 1.7494121190993142,
      "learning_rate": 7.076923076923078e-05,
      "loss": 0.6581,
      "step": 161
    },
    {
      "epoch": 0.44536082474226807,
      "grad_norm": 1.0147994550817765,
      "learning_rate": 7.12087912087912e-05,
      "loss": 0.6608,
      "step": 162
    },
    {
      "epoch": 0.44810996563573885,
      "grad_norm": 1.0859192635780746,
      "learning_rate": 7.164835164835166e-05,
      "loss": 0.6451,
      "step": 163
    },
    {
      "epoch": 0.45085910652920963,
      "grad_norm": 1.0492942035079653,
      "learning_rate": 7.208791208791209e-05,
      "loss": 0.6591,
      "step": 164
    },
    {
      "epoch": 0.4536082474226804,
      "grad_norm": 2.379607115922538,
      "learning_rate": 7.252747252747253e-05,
      "loss": 0.6634,
      "step": 165
    },
    {
      "epoch": 0.4563573883161512,
      "grad_norm": 1.112381416151378,
      "learning_rate": 7.296703296703297e-05,
      "loss": 0.6611,
      "step": 166
    },
    {
      "epoch": 0.45910652920962197,
      "grad_norm": 2.0850617091598886,
      "learning_rate": 7.34065934065934e-05,
      "loss": 0.6569,
      "step": 167
    },
    {
      "epoch": 0.4618556701030928,
      "grad_norm": 1.5623269496352417,
      "learning_rate": 7.384615384615386e-05,
      "loss": 0.672,
      "step": 168
    },
    {
      "epoch": 0.4646048109965636,
      "grad_norm": 1.6783665454838335,
      "learning_rate": 7.42857142857143e-05,
      "loss": 0.6567,
      "step": 169
    },
    {
      "epoch": 0.46735395189003437,
      "grad_norm": 1.0376996339879248,
      "learning_rate": 7.472527472527473e-05,
      "loss": 0.6606,
      "step": 170
    },
    {
      "epoch": 0.47010309278350515,
      "grad_norm": 2.2216262071159356,
      "learning_rate": 7.516483516483517e-05,
      "loss": 0.6636,
      "step": 171
    },
    {
      "epoch": 0.47285223367697593,
      "grad_norm": 1.3858824784808275,
      "learning_rate": 7.560439560439561e-05,
      "loss": 0.6673,
      "step": 172
    },
    {
      "epoch": 0.4756013745704467,
      "grad_norm": 1.9605283100011852,
      "learning_rate": 7.604395604395604e-05,
      "loss": 0.6572,
      "step": 173
    },
    {
      "epoch": 0.47835051546391755,
      "grad_norm": 1.4278083885748436,
      "learning_rate": 7.64835164835165e-05,
      "loss": 0.6645,
      "step": 174
    },
    {
      "epoch": 0.48109965635738833,
      "grad_norm": 1.191754742216612,
      "learning_rate": 7.692307692307693e-05,
      "loss": 0.6631,
      "step": 175
    },
    {
      "epoch": 0.4838487972508591,
      "grad_norm": 0.9597081578943443,
      "learning_rate": 7.736263736263737e-05,
      "loss": 0.655,
      "step": 176
    },
    {
      "epoch": 0.4865979381443299,
      "grad_norm": 1.9487418698749246,
      "learning_rate": 7.780219780219781e-05,
      "loss": 0.6617,
      "step": 177
    },
    {
      "epoch": 0.48934707903780067,
      "grad_norm": 1.4578651378155227,
      "learning_rate": 7.824175824175825e-05,
      "loss": 0.665,
      "step": 178
    },
    {
      "epoch": 0.49209621993127145,
      "grad_norm": 0.9204633011470749,
      "learning_rate": 7.868131868131868e-05,
      "loss": 0.6559,
      "step": 179
    },
    {
      "epoch": 0.4948453608247423,
      "grad_norm": 2.3519272780997893,
      "learning_rate": 7.912087912087914e-05,
      "loss": 0.661,
      "step": 180
    },
    {
      "epoch": 0.49759450171821307,
      "grad_norm": 1.4425041211579737,
      "learning_rate": 7.956043956043956e-05,
      "loss": 0.6555,
      "step": 181
    },
    {
      "epoch": 0.5003436426116838,
      "grad_norm": 2.2406732058079917,
      "learning_rate": 8e-05,
      "loss": 0.6672,
      "step": 182
    },
    {
      "epoch": 0.5030927835051546,
      "grad_norm": 1.717445277482838,
      "learning_rate": 7.999992597860977e-05,
      "loss": 0.669,
      "step": 183
    },
    {
      "epoch": 0.5058419243986254,
      "grad_norm": 1.9554853415214135,
      "learning_rate": 7.999970391471297e-05,
      "loss": 0.6484,
      "step": 184
    },
    {
      "epoch": 0.5085910652920962,
      "grad_norm": 1.3337471142504986,
      "learning_rate": 7.99993338091315e-05,
      "loss": 0.6542,
      "step": 185
    },
    {
      "epoch": 0.511340206185567,
      "grad_norm": 2.3628021636514913,
      "learning_rate": 7.999881566323518e-05,
      "loss": 0.6666,
      "step": 186
    },
    {
      "epoch": 0.5140893470790378,
      "grad_norm": 1.6284990281451381,
      "learning_rate": 7.999814947894166e-05,
      "loss": 0.6713,
      "step": 187
    },
    {
      "epoch": 0.5168384879725086,
      "grad_norm": 2.208780281202899,
      "learning_rate": 7.999733525871655e-05,
      "loss": 0.6564,
      "step": 188
    },
    {
      "epoch": 0.5195876288659794,
      "grad_norm": 2.0324845611311653,
      "learning_rate": 7.999637300557334e-05,
      "loss": 0.6643,
      "step": 189
    },
    {
      "epoch": 0.5223367697594502,
      "grad_norm": 1.411207407439057,
      "learning_rate": 7.999526272307338e-05,
      "loss": 0.6466,
      "step": 190
    },
    {
      "epoch": 0.525085910652921,
      "grad_norm": 1.67795112571074,
      "learning_rate": 7.999400441532593e-05,
      "loss": 0.6567,
      "step": 191
    },
    {
      "epoch": 0.5278350515463918,
      "grad_norm": 1.0171507403303963,
      "learning_rate": 7.999259808698805e-05,
      "loss": 0.6535,
      "step": 192
    },
    {
      "epoch": 0.5305841924398625,
      "grad_norm": 1.8103499540778905,
      "learning_rate": 7.999104374326465e-05,
      "loss": 0.6546,
      "step": 193
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 1.3619232333701012,
      "learning_rate": 7.99893413899085e-05,
      "loss": 0.6495,
      "step": 194
    },
    {
      "epoch": 0.5360824742268041,
      "grad_norm": 1.7537074766579162,
      "learning_rate": 7.99874910332201e-05,
      "loss": 0.6513,
      "step": 195
    },
    {
      "epoch": 0.5388316151202749,
      "grad_norm": 1.5125359865682328,
      "learning_rate": 7.998549268004776e-05,
      "loss": 0.6627,
      "step": 196
    },
    {
      "epoch": 0.5415807560137457,
      "grad_norm": 1.8694904140870736,
      "learning_rate": 7.998334633778752e-05,
      "loss": 0.6552,
      "step": 197
    },
    {
      "epoch": 0.5443298969072164,
      "grad_norm": 1.0313289192419033,
      "learning_rate": 7.998105201438315e-05,
      "loss": 0.6572,
      "step": 198
    },
    {
      "epoch": 0.5470790378006873,
      "grad_norm": 1.7524234342414688,
      "learning_rate": 7.997860971832609e-05,
      "loss": 0.6485,
      "step": 199
    },
    {
      "epoch": 0.5498281786941581,
      "grad_norm": 1.674466921847352,
      "learning_rate": 7.997601945865545e-05,
      "loss": 0.6567,
      "step": 200
    },
    {
      "epoch": 0.5525773195876289,
      "grad_norm": 0.9044207656617541,
      "learning_rate": 7.997328124495797e-05,
      "loss": 0.6571,
      "step": 201
    },
    {
      "epoch": 0.5553264604810997,
      "grad_norm": 1.5698427976284253,
      "learning_rate": 7.997039508736794e-05,
      "loss": 0.6552,
      "step": 202
    },
    {
      "epoch": 0.5580756013745705,
      "grad_norm": 1.2149289312154958,
      "learning_rate": 7.996736099656728e-05,
      "loss": 0.6504,
      "step": 203
    },
    {
      "epoch": 0.5608247422680412,
      "grad_norm": 1.5016916008686747,
      "learning_rate": 7.996417898378532e-05,
      "loss": 0.6501,
      "step": 204
    },
    {
      "epoch": 0.563573883161512,
      "grad_norm": 1.3674415360691992,
      "learning_rate": 7.996084906079895e-05,
      "loss": 0.6521,
      "step": 205
    },
    {
      "epoch": 0.5663230240549828,
      "grad_norm": 1.012486835623592,
      "learning_rate": 7.995737123993242e-05,
      "loss": 0.6533,
      "step": 206
    },
    {
      "epoch": 0.5690721649484536,
      "grad_norm": 1.2333701554709846,
      "learning_rate": 7.99537455340574e-05,
      "loss": 0.6423,
      "step": 207
    },
    {
      "epoch": 0.5718213058419244,
      "grad_norm": 1.432895391786843,
      "learning_rate": 7.994997195659288e-05,
      "loss": 0.6588,
      "step": 208
    },
    {
      "epoch": 0.5745704467353951,
      "grad_norm": 1.3560618026372162,
      "learning_rate": 7.994605052150512e-05,
      "loss": 0.6419,
      "step": 209
    },
    {
      "epoch": 0.5773195876288659,
      "grad_norm": 1.1169005294274361,
      "learning_rate": 7.994198124330764e-05,
      "loss": 0.643,
      "step": 210
    },
    {
      "epoch": 0.5800687285223368,
      "grad_norm": 1.1551183527776216,
      "learning_rate": 7.99377641370611e-05,
      "loss": 0.6658,
      "step": 211
    },
    {
      "epoch": 0.5828178694158076,
      "grad_norm": 1.494094722393518,
      "learning_rate": 7.993339921837333e-05,
      "loss": 0.6566,
      "step": 212
    },
    {
      "epoch": 0.5855670103092784,
      "grad_norm": 1.1087501045845025,
      "learning_rate": 7.992888650339918e-05,
      "loss": 0.6389,
      "step": 213
    },
    {
      "epoch": 0.5883161512027492,
      "grad_norm": 1.9002883231044971,
      "learning_rate": 7.992422600884052e-05,
      "loss": 0.6539,
      "step": 214
    },
    {
      "epoch": 0.5910652920962199,
      "grad_norm": 0.7299685549898423,
      "learning_rate": 7.991941775194619e-05,
      "loss": 0.6465,
      "step": 215
    },
    {
      "epoch": 0.5938144329896907,
      "grad_norm": 1.712564742362983,
      "learning_rate": 7.991446175051184e-05,
      "loss": 0.6474,
      "step": 216
    },
    {
      "epoch": 0.5965635738831615,
      "grad_norm": 1.4830236426687247,
      "learning_rate": 7.990935802288002e-05,
      "loss": 0.6512,
      "step": 217
    },
    {
      "epoch": 0.5993127147766323,
      "grad_norm": 1.1265182423363587,
      "learning_rate": 7.990410658793994e-05,
      "loss": 0.6541,
      "step": 218
    },
    {
      "epoch": 0.6020618556701031,
      "grad_norm": 1.2971103607944785,
      "learning_rate": 7.989870746512756e-05,
      "loss": 0.6479,
      "step": 219
    },
    {
      "epoch": 0.6048109965635738,
      "grad_norm": 1.2538177052036752,
      "learning_rate": 7.989316067442539e-05,
      "loss": 0.6497,
      "step": 220
    },
    {
      "epoch": 0.6075601374570446,
      "grad_norm": 0.7957257777767685,
      "learning_rate": 7.98874662363625e-05,
      "loss": 0.6352,
      "step": 221
    },
    {
      "epoch": 0.6103092783505155,
      "grad_norm": 0.9768965818664068,
      "learning_rate": 7.988162417201437e-05,
      "loss": 0.6443,
      "step": 222
    },
    {
      "epoch": 0.6130584192439863,
      "grad_norm": 1.9580245398506602,
      "learning_rate": 7.987563450300293e-05,
      "loss": 0.6652,
      "step": 223
    },
    {
      "epoch": 0.6158075601374571,
      "grad_norm": 1.065435200900762,
      "learning_rate": 7.986949725149634e-05,
      "loss": 0.6456,
      "step": 224
    },
    {
      "epoch": 0.6185567010309279,
      "grad_norm": 1.9649018261160003,
      "learning_rate": 7.986321244020901e-05,
      "loss": 0.6479,
      "step": 225
    },
    {
      "epoch": 0.6213058419243986,
      "grad_norm": 1.6213807682895127,
      "learning_rate": 7.985678009240142e-05,
      "loss": 0.6595,
      "step": 226
    },
    {
      "epoch": 0.6240549828178694,
      "grad_norm": 1.4900550716535215,
      "learning_rate": 7.985020023188018e-05,
      "loss": 0.6359,
      "step": 227
    },
    {
      "epoch": 0.6268041237113402,
      "grad_norm": 1.3214648767288044,
      "learning_rate": 7.98434728829978e-05,
      "loss": 0.647,
      "step": 228
    },
    {
      "epoch": 0.629553264604811,
      "grad_norm": 1.4301295600290387,
      "learning_rate": 7.983659807065267e-05,
      "loss": 0.6475,
      "step": 229
    },
    {
      "epoch": 0.6323024054982818,
      "grad_norm": 1.0375560871972622,
      "learning_rate": 7.982957582028892e-05,
      "loss": 0.6364,
      "step": 230
    },
    {
      "epoch": 0.6350515463917525,
      "grad_norm": 1.3894087032049172,
      "learning_rate": 7.982240615789641e-05,
      "loss": 0.6426,
      "step": 231
    },
    {
      "epoch": 0.6378006872852233,
      "grad_norm": 0.9850693356026884,
      "learning_rate": 7.981508911001057e-05,
      "loss": 0.6459,
      "step": 232
    },
    {
      "epoch": 0.6405498281786941,
      "grad_norm": 1.4426682351463136,
      "learning_rate": 7.980762470371228e-05,
      "loss": 0.6474,
      "step": 233
    },
    {
      "epoch": 0.643298969072165,
      "grad_norm": 1.0547468177759594,
      "learning_rate": 7.980001296662784e-05,
      "loss": 0.6469,
      "step": 234
    },
    {
      "epoch": 0.6460481099656358,
      "grad_norm": 1.2056014455360582,
      "learning_rate": 7.979225392692882e-05,
      "loss": 0.6354,
      "step": 235
    },
    {
      "epoch": 0.6487972508591066,
      "grad_norm": 1.535046209332006,
      "learning_rate": 7.978434761333195e-05,
      "loss": 0.6432,
      "step": 236
    },
    {
      "epoch": 0.6515463917525773,
      "grad_norm": 0.7757101221034997,
      "learning_rate": 7.977629405509905e-05,
      "loss": 0.633,
      "step": 237
    },
    {
      "epoch": 0.6542955326460481,
      "grad_norm": 0.992595763686358,
      "learning_rate": 7.976809328203693e-05,
      "loss": 0.64,
      "step": 238
    },
    {
      "epoch": 0.6570446735395189,
      "grad_norm": 1.3919630066257997,
      "learning_rate": 7.975974532449718e-05,
      "loss": 0.6495,
      "step": 239
    },
    {
      "epoch": 0.6597938144329897,
      "grad_norm": 1.130072368133697,
      "learning_rate": 7.975125021337618e-05,
      "loss": 0.6337,
      "step": 240
    },
    {
      "epoch": 0.6625429553264605,
      "grad_norm": 1.0428416388066186,
      "learning_rate": 7.974260798011494e-05,
      "loss": 0.6515,
      "step": 241
    },
    {
      "epoch": 0.6652920962199312,
      "grad_norm": 1.4678385771525282,
      "learning_rate": 7.973381865669897e-05,
      "loss": 0.6446,
      "step": 242
    },
    {
      "epoch": 0.668041237113402,
      "grad_norm": 1.1731453018255897,
      "learning_rate": 7.972488227565814e-05,
      "loss": 0.6424,
      "step": 243
    },
    {
      "epoch": 0.6707903780068728,
      "grad_norm": 1.1037532875258624,
      "learning_rate": 7.971579887006663e-05,
      "loss": 0.6356,
      "step": 244
    },
    {
      "epoch": 0.6735395189003437,
      "grad_norm": 1.3601555121892395,
      "learning_rate": 7.970656847354277e-05,
      "loss": 0.6332,
      "step": 245
    },
    {
      "epoch": 0.6762886597938145,
      "grad_norm": 0.6674453682989576,
      "learning_rate": 7.969719112024889e-05,
      "loss": 0.6341,
      "step": 246
    },
    {
      "epoch": 0.6790378006872853,
      "grad_norm": 0.8185867421910552,
      "learning_rate": 7.968766684489122e-05,
      "loss": 0.6351,
      "step": 247
    },
    {
      "epoch": 0.681786941580756,
      "grad_norm": 1.2859620347833656,
      "learning_rate": 7.967799568271978e-05,
      "loss": 0.636,
      "step": 248
    },
    {
      "epoch": 0.6845360824742268,
      "grad_norm": 1.0894539357769442,
      "learning_rate": 7.96681776695282e-05,
      "loss": 0.6483,
      "step": 249
    },
    {
      "epoch": 0.6872852233676976,
      "grad_norm": 1.536917534466485,
      "learning_rate": 7.965821284165362e-05,
      "loss": 0.6358,
      "step": 250
    },
    {
      "epoch": 0.6900343642611684,
      "grad_norm": 0.8985662521364918,
      "learning_rate": 7.964810123597659e-05,
      "loss": 0.6345,
      "step": 251
    },
    {
      "epoch": 0.6927835051546392,
      "grad_norm": 1.274319886309132,
      "learning_rate": 7.963784288992085e-05,
      "loss": 0.6416,
      "step": 252
    },
    {
      "epoch": 0.6955326460481099,
      "grad_norm": 0.8642801778285559,
      "learning_rate": 7.962743784145323e-05,
      "loss": 0.6386,
      "step": 253
    },
    {
      "epoch": 0.6982817869415807,
      "grad_norm": 0.9360706797234668,
      "learning_rate": 7.961688612908358e-05,
      "loss": 0.6311,
      "step": 254
    },
    {
      "epoch": 0.7010309278350515,
      "grad_norm": 1.2347692000194708,
      "learning_rate": 7.96061877918645e-05,
      "loss": 0.6342,
      "step": 255
    },
    {
      "epoch": 0.7037800687285224,
      "grad_norm": 1.3067449184972966,
      "learning_rate": 7.959534286939126e-05,
      "loss": 0.6348,
      "step": 256
    },
    {
      "epoch": 0.7065292096219932,
      "grad_norm": 1.0853797825255465,
      "learning_rate": 7.95843514018017e-05,
      "loss": 0.6387,
      "step": 257
    },
    {
      "epoch": 0.709278350515464,
      "grad_norm": 1.3876365868084906,
      "learning_rate": 7.9573213429776e-05,
      "loss": 0.6408,
      "step": 258
    },
    {
      "epoch": 0.7120274914089347,
      "grad_norm": 1.2720820097889098,
      "learning_rate": 7.956192899453656e-05,
      "loss": 0.6403,
      "step": 259
    },
    {
      "epoch": 0.7147766323024055,
      "grad_norm": 1.083330147390025,
      "learning_rate": 7.955049813784787e-05,
      "loss": 0.6326,
      "step": 260
    },
    {
      "epoch": 0.7175257731958763,
      "grad_norm": 0.8536354360684208,
      "learning_rate": 7.953892090201633e-05,
      "loss": 0.6289,
      "step": 261
    },
    {
      "epoch": 0.7202749140893471,
      "grad_norm": 0.6669283801432064,
      "learning_rate": 7.952719732989007e-05,
      "loss": 0.6311,
      "step": 262
    },
    {
      "epoch": 0.7230240549828179,
      "grad_norm": 0.6028131310980143,
      "learning_rate": 7.951532746485886e-05,
      "loss": 0.6383,
      "step": 263
    },
    {
      "epoch": 0.7257731958762886,
      "grad_norm": 1.0830419136461447,
      "learning_rate": 7.950331135085389e-05,
      "loss": 0.6257,
      "step": 264
    },
    {
      "epoch": 0.7285223367697594,
      "grad_norm": 1.7725196403596395,
      "learning_rate": 7.949114903234766e-05,
      "loss": 0.6431,
      "step": 265
    },
    {
      "epoch": 0.7312714776632302,
      "grad_norm": 0.7205576501203266,
      "learning_rate": 7.947884055435371e-05,
      "loss": 0.636,
      "step": 266
    },
    {
      "epoch": 0.734020618556701,
      "grad_norm": 2.0339470836036386,
      "learning_rate": 7.946638596242661e-05,
      "loss": 0.6435,
      "step": 267
    },
    {
      "epoch": 0.7367697594501719,
      "grad_norm": 1.0158450970399455,
      "learning_rate": 7.945378530266166e-05,
      "loss": 0.6342,
      "step": 268
    },
    {
      "epoch": 0.7395189003436426,
      "grad_norm": 2.328741839753608,
      "learning_rate": 7.944103862169478e-05,
      "loss": 0.6496,
      "step": 269
    },
    {
      "epoch": 0.7422680412371134,
      "grad_norm": 1.5377779904132143,
      "learning_rate": 7.94281459667023e-05,
      "loss": 0.6536,
      "step": 270
    },
    {
      "epoch": 0.7450171821305842,
      "grad_norm": 2.1537049583171064,
      "learning_rate": 7.941510738540086e-05,
      "loss": 0.6411,
      "step": 271
    },
    {
      "epoch": 0.747766323024055,
      "grad_norm": 1.7517265079351534,
      "learning_rate": 7.940192292604714e-05,
      "loss": 0.6489,
      "step": 272
    },
    {
      "epoch": 0.7505154639175258,
      "grad_norm": 2.1942121182211527,
      "learning_rate": 7.938859263743776e-05,
      "loss": 0.6345,
      "step": 273
    },
    {
      "epoch": 0.7532646048109966,
      "grad_norm": 1.863240282026407,
      "learning_rate": 7.937511656890903e-05,
      "loss": 0.65,
      "step": 274
    },
    {
      "epoch": 0.7560137457044673,
      "grad_norm": 1.4298523411886686,
      "learning_rate": 7.936149477033682e-05,
      "loss": 0.6391,
      "step": 275
    },
    {
      "epoch": 0.7587628865979381,
      "grad_norm": 1.2516811961606267,
      "learning_rate": 7.934772729213634e-05,
      "loss": 0.638,
      "step": 276
    },
    {
      "epoch": 0.7615120274914089,
      "grad_norm": 1.5514968401316607,
      "learning_rate": 7.9333814185262e-05,
      "loss": 0.6464,
      "step": 277
    },
    {
      "epoch": 0.7642611683848797,
      "grad_norm": 1.1588148872170223,
      "learning_rate": 7.931975550120716e-05,
      "loss": 0.642,
      "step": 278
    },
    {
      "epoch": 0.7670103092783506,
      "grad_norm": 2.0239434861105403,
      "learning_rate": 7.930555129200402e-05,
      "loss": 0.6428,
      "step": 279
    },
    {
      "epoch": 0.7697594501718213,
      "grad_norm": 1.6949238476000754,
      "learning_rate": 7.929120161022329e-05,
      "loss": 0.6333,
      "step": 280
    },
    {
      "epoch": 0.7725085910652921,
      "grad_norm": 1.2740472233054914,
      "learning_rate": 7.927670650897421e-05,
      "loss": 0.6392,
      "step": 281
    },
    {
      "epoch": 0.7752577319587629,
      "grad_norm": 1.2888896268808947,
      "learning_rate": 7.92620660419041e-05,
      "loss": 0.6324,
      "step": 282
    },
    {
      "epoch": 0.7780068728522337,
      "grad_norm": 1.2148413669943177,
      "learning_rate": 7.924728026319837e-05,
      "loss": 0.646,
      "step": 283
    },
    {
      "epoch": 0.7807560137457045,
      "grad_norm": 0.7141756639924294,
      "learning_rate": 7.923234922758021e-05,
      "loss": 0.6403,
      "step": 284
    },
    {
      "epoch": 0.7835051546391752,
      "grad_norm": 1.0058322496482315,
      "learning_rate": 7.921727299031042e-05,
      "loss": 0.6418,
      "step": 285
    },
    {
      "epoch": 0.786254295532646,
      "grad_norm": 0.794851634713387,
      "learning_rate": 7.920205160718721e-05,
      "loss": 0.6383,
      "step": 286
    },
    {
      "epoch": 0.7890034364261168,
      "grad_norm": 0.7763515931120876,
      "learning_rate": 7.918668513454598e-05,
      "loss": 0.629,
      "step": 287
    },
    {
      "epoch": 0.7917525773195876,
      "grad_norm": 1.0206658407382474,
      "learning_rate": 7.917117362925907e-05,
      "loss": 0.6369,
      "step": 288
    },
    {
      "epoch": 0.7945017182130584,
      "grad_norm": 0.9044945170274734,
      "learning_rate": 7.915551714873571e-05,
      "loss": 0.6265,
      "step": 289
    },
    {
      "epoch": 0.7972508591065293,
      "grad_norm": 1.7861242632179295,
      "learning_rate": 7.913971575092157e-05,
      "loss": 0.6354,
      "step": 290
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.8352377362963846,
      "learning_rate": 7.912376949429875e-05,
      "loss": 0.6258,
      "step": 291
    },
    {
      "epoch": 0.8027491408934708,
      "grad_norm": 1.1285143530097073,
      "learning_rate": 7.910767843788543e-05,
      "loss": 0.6411,
      "step": 292
    },
    {
      "epoch": 0.8054982817869416,
      "grad_norm": 2.0986758482850774,
      "learning_rate": 7.909144264123575e-05,
      "loss": 0.6432,
      "step": 293
    },
    {
      "epoch": 0.8082474226804124,
      "grad_norm": 1.0319530318383152,
      "learning_rate": 7.90750621644395e-05,
      "loss": 0.6489,
      "step": 294
    },
    {
      "epoch": 0.8109965635738832,
      "grad_norm": 2.628617525239813,
      "learning_rate": 7.905853706812199e-05,
      "loss": 0.6439,
      "step": 295
    },
    {
      "epoch": 0.813745704467354,
      "grad_norm": 1.977433582449252,
      "learning_rate": 7.904186741344373e-05,
      "loss": 0.6482,
      "step": 296
    },
    {
      "epoch": 0.8164948453608247,
      "grad_norm": 1.8360229102761563,
      "learning_rate": 7.902505326210028e-05,
      "loss": 0.65,
      "step": 297
    },
    {
      "epoch": 0.8192439862542955,
      "grad_norm": 1.5433944283254346,
      "learning_rate": 7.900809467632197e-05,
      "loss": 0.6434,
      "step": 298
    },
    {
      "epoch": 0.8219931271477663,
      "grad_norm": 1.6379518485922502,
      "learning_rate": 7.899099171887373e-05,
      "loss": 0.6553,
      "step": 299
    },
    {
      "epoch": 0.8247422680412371,
      "grad_norm": 1.477099597856914,
      "learning_rate": 7.897374445305478e-05,
      "loss": 0.6392,
      "step": 300
    },
    {
      "epoch": 0.8274914089347079,
      "grad_norm": 1.1857730742135408,
      "learning_rate": 7.895635294269843e-05,
      "loss": 0.6316,
      "step": 301
    },
    {
      "epoch": 0.8302405498281787,
      "grad_norm": 1.361216152623904,
      "learning_rate": 7.893881725217191e-05,
      "loss": 0.6352,
      "step": 302
    },
    {
      "epoch": 0.8329896907216495,
      "grad_norm": 0.8010015883602253,
      "learning_rate": 7.892113744637599e-05,
      "loss": 0.6382,
      "step": 303
    },
    {
      "epoch": 0.8357388316151203,
      "grad_norm": 1.102241513039724,
      "learning_rate": 7.890331359074488e-05,
      "loss": 0.6447,
      "step": 304
    },
    {
      "epoch": 0.8384879725085911,
      "grad_norm": 1.1821390144057153,
      "learning_rate": 7.888534575124591e-05,
      "loss": 0.6292,
      "step": 305
    },
    {
      "epoch": 0.8412371134020619,
      "grad_norm": 0.7269296274858387,
      "learning_rate": 7.886723399437931e-05,
      "loss": 0.6319,
      "step": 306
    },
    {
      "epoch": 0.8439862542955326,
      "grad_norm": 1.630523895781267,
      "learning_rate": 7.884897838717792e-05,
      "loss": 0.6169,
      "step": 307
    },
    {
      "epoch": 0.8467353951890034,
      "grad_norm": 0.740638554856235,
      "learning_rate": 7.883057899720703e-05,
      "loss": 0.6355,
      "step": 308
    },
    {
      "epoch": 0.8494845360824742,
      "grad_norm": 1.8497443172662162,
      "learning_rate": 7.881203589256408e-05,
      "loss": 0.6274,
      "step": 309
    },
    {
      "epoch": 0.852233676975945,
      "grad_norm": 1.1910232798072693,
      "learning_rate": 7.879334914187836e-05,
      "loss": 0.6392,
      "step": 310
    },
    {
      "epoch": 0.8549828178694158,
      "grad_norm": 2.1003839885669815,
      "learning_rate": 7.877451881431086e-05,
      "loss": 0.6296,
      "step": 311
    },
    {
      "epoch": 0.8577319587628865,
      "grad_norm": 2.041585074702372,
      "learning_rate": 7.87555449795539e-05,
      "loss": 0.6352,
      "step": 312
    },
    {
      "epoch": 0.8604810996563574,
      "grad_norm": 1.0287133005035243,
      "learning_rate": 7.873642770783098e-05,
      "loss": 0.6361,
      "step": 313
    },
    {
      "epoch": 0.8632302405498282,
      "grad_norm": 1.4803932962834592,
      "learning_rate": 7.871716706989645e-05,
      "loss": 0.6433,
      "step": 314
    },
    {
      "epoch": 0.865979381443299,
      "grad_norm": 0.9388113471557215,
      "learning_rate": 7.869776313703528e-05,
      "loss": 0.6346,
      "step": 315
    },
    {
      "epoch": 0.8687285223367698,
      "grad_norm": 0.8575055727089447,
      "learning_rate": 7.867821598106275e-05,
      "loss": 0.6287,
      "step": 316
    },
    {
      "epoch": 0.8714776632302406,
      "grad_norm": 0.9753760114729692,
      "learning_rate": 7.865852567432428e-05,
      "loss": 0.6386,
      "step": 317
    },
    {
      "epoch": 0.8742268041237113,
      "grad_norm": 0.9688066608476287,
      "learning_rate": 7.863869228969501e-05,
      "loss": 0.631,
      "step": 318
    },
    {
      "epoch": 0.8769759450171821,
      "grad_norm": 0.9593995798512949,
      "learning_rate": 7.861871590057971e-05,
      "loss": 0.6347,
      "step": 319
    },
    {
      "epoch": 0.8797250859106529,
      "grad_norm": 1.2389383575654551,
      "learning_rate": 7.85985965809124e-05,
      "loss": 0.6346,
      "step": 320
    },
    {
      "epoch": 0.8824742268041237,
      "grad_norm": 0.8827816805243792,
      "learning_rate": 7.857833440515605e-05,
      "loss": 0.6265,
      "step": 321
    },
    {
      "epoch": 0.8852233676975945,
      "grad_norm": 0.6061777594451044,
      "learning_rate": 7.85579294483024e-05,
      "loss": 0.6286,
      "step": 322
    },
    {
      "epoch": 0.8879725085910652,
      "grad_norm": 0.6113638623646376,
      "learning_rate": 7.85373817858716e-05,
      "loss": 0.6256,
      "step": 323
    },
    {
      "epoch": 0.8907216494845361,
      "grad_norm": 0.8948079812828788,
      "learning_rate": 7.851669149391198e-05,
      "loss": 0.6238,
      "step": 324
    },
    {
      "epoch": 0.8934707903780069,
      "grad_norm": 1.4121615992032686,
      "learning_rate": 7.849585864899976e-05,
      "loss": 0.633,
      "step": 325
    },
    {
      "epoch": 0.8962199312714777,
      "grad_norm": 0.7940592789567055,
      "learning_rate": 7.847488332823873e-05,
      "loss": 0.6359,
      "step": 326
    },
    {
      "epoch": 0.8989690721649485,
      "grad_norm": 0.5764914369379771,
      "learning_rate": 7.845376560926002e-05,
      "loss": 0.6375,
      "step": 327
    },
    {
      "epoch": 0.9017182130584193,
      "grad_norm": 1.071092163645109,
      "learning_rate": 7.843250557022177e-05,
      "loss": 0.6196,
      "step": 328
    },
    {
      "epoch": 0.90446735395189,
      "grad_norm": 1.6020470680476175,
      "learning_rate": 7.841110328980887e-05,
      "loss": 0.6293,
      "step": 329
    },
    {
      "epoch": 0.9072164948453608,
      "grad_norm": 0.4752698709356062,
      "learning_rate": 7.838955884723265e-05,
      "loss": 0.626,
      "step": 330
    },
    {
      "epoch": 0.9099656357388316,
      "grad_norm": 1.5726092706691321,
      "learning_rate": 7.836787232223058e-05,
      "loss": 0.6294,
      "step": 331
    },
    {
      "epoch": 0.9127147766323024,
      "grad_norm": 1.01011634702882,
      "learning_rate": 7.8346043795066e-05,
      "loss": 0.6431,
      "step": 332
    },
    {
      "epoch": 0.9154639175257732,
      "grad_norm": 0.8236758132081481,
      "learning_rate": 7.83240733465278e-05,
      "loss": 0.6237,
      "step": 333
    },
    {
      "epoch": 0.9182130584192439,
      "grad_norm": 0.816087984512738,
      "learning_rate": 7.830196105793017e-05,
      "loss": 0.6304,
      "step": 334
    },
    {
      "epoch": 0.9209621993127147,
      "grad_norm": 0.7514093388017495,
      "learning_rate": 7.827970701111219e-05,
      "loss": 0.6311,
      "step": 335
    },
    {
      "epoch": 0.9237113402061856,
      "grad_norm": 0.9502215222777328,
      "learning_rate": 7.825731128843762e-05,
      "loss": 0.6317,
      "step": 336
    },
    {
      "epoch": 0.9264604810996564,
      "grad_norm": 1.2376836455505118,
      "learning_rate": 7.823477397279464e-05,
      "loss": 0.6199,
      "step": 337
    },
    {
      "epoch": 0.9292096219931272,
      "grad_norm": 0.7535199306698559,
      "learning_rate": 7.821209514759539e-05,
      "loss": 0.6248,
      "step": 338
    },
    {
      "epoch": 0.931958762886598,
      "grad_norm": 0.8490031147278277,
      "learning_rate": 7.818927489677577e-05,
      "loss": 0.6267,
      "step": 339
    },
    {
      "epoch": 0.9347079037800687,
      "grad_norm": 1.3764847868272945,
      "learning_rate": 7.816631330479514e-05,
      "loss": 0.6307,
      "step": 340
    },
    {
      "epoch": 0.9374570446735395,
      "grad_norm": 0.6440823293174261,
      "learning_rate": 7.814321045663594e-05,
      "loss": 0.6366,
      "step": 341
    },
    {
      "epoch": 0.9402061855670103,
      "grad_norm": 1.117461654132214,
      "learning_rate": 7.811996643780339e-05,
      "loss": 0.6284,
      "step": 342
    },
    {
      "epoch": 0.9429553264604811,
      "grad_norm": 1.198712384928446,
      "learning_rate": 7.809658133432526e-05,
      "loss": 0.6179,
      "step": 343
    },
    {
      "epoch": 0.9457044673539519,
      "grad_norm": 0.7800718683120617,
      "learning_rate": 7.807305523275142e-05,
      "loss": 0.6246,
      "step": 344
    },
    {
      "epoch": 0.9484536082474226,
      "grad_norm": 1.106890600114741,
      "learning_rate": 7.804938822015361e-05,
      "loss": 0.6186,
      "step": 345
    },
    {
      "epoch": 0.9512027491408934,
      "grad_norm": 1.1783968723372122,
      "learning_rate": 7.802558038412509e-05,
      "loss": 0.6249,
      "step": 346
    },
    {
      "epoch": 0.9539518900343643,
      "grad_norm": 0.909115741821025,
      "learning_rate": 7.800163181278033e-05,
      "loss": 0.6244,
      "step": 347
    },
    {
      "epoch": 0.9567010309278351,
      "grad_norm": 1.0133199245495836,
      "learning_rate": 7.797754259475464e-05,
      "loss": 0.6264,
      "step": 348
    },
    {
      "epoch": 0.9594501718213059,
      "grad_norm": 1.195705135014382,
      "learning_rate": 7.795331281920387e-05,
      "loss": 0.6278,
      "step": 349
    },
    {
      "epoch": 0.9621993127147767,
      "grad_norm": 0.7727285070098746,
      "learning_rate": 7.792894257580415e-05,
      "loss": 0.6322,
      "step": 350
    },
    {
      "epoch": 0.9649484536082474,
      "grad_norm": 0.9523131612793756,
      "learning_rate": 7.790443195475142e-05,
      "loss": 0.6251,
      "step": 351
    },
    {
      "epoch": 0.9676975945017182,
      "grad_norm": 1.3036288679788564,
      "learning_rate": 7.78797810467612e-05,
      "loss": 0.6218,
      "step": 352
    },
    {
      "epoch": 0.970446735395189,
      "grad_norm": 0.5634490374165979,
      "learning_rate": 7.785498994306821e-05,
      "loss": 0.6221,
      "step": 353
    },
    {
      "epoch": 0.9731958762886598,
      "grad_norm": 1.5260597480048004,
      "learning_rate": 7.783005873542605e-05,
      "loss": 0.619,
      "step": 354
    },
    {
      "epoch": 0.9759450171821306,
      "grad_norm": 0.6409092943149695,
      "learning_rate": 7.780498751610684e-05,
      "loss": 0.6251,
      "step": 355
    },
    {
      "epoch": 0.9786941580756013,
      "grad_norm": 1.6664465662420271,
      "learning_rate": 7.777977637790092e-05,
      "loss": 0.6357,
      "step": 356
    },
    {
      "epoch": 0.9814432989690721,
      "grad_norm": 0.928708660543268,
      "learning_rate": 7.775442541411647e-05,
      "loss": 0.6218,
      "step": 357
    },
    {
      "epoch": 0.9841924398625429,
      "grad_norm": 1.8449391562901856,
      "learning_rate": 7.772893471857915e-05,
      "loss": 0.6396,
      "step": 358
    },
    {
      "epoch": 0.9869415807560138,
      "grad_norm": 1.6259312779533432,
      "learning_rate": 7.77033043856318e-05,
      "loss": 0.6368,
      "step": 359
    },
    {
      "epoch": 0.9896907216494846,
      "grad_norm": 0.9340296294033434,
      "learning_rate": 7.767753451013408e-05,
      "loss": 0.6204,
      "step": 360
    },
    {
      "epoch": 0.9924398625429554,
      "grad_norm": 1.2307139404659375,
      "learning_rate": 7.765162518746207e-05,
      "loss": 0.6177,
      "step": 361
    },
    {
      "epoch": 0.9951890034364261,
      "grad_norm": 0.7744777080187526,
      "learning_rate": 7.762557651350798e-05,
      "loss": 0.6288,
      "step": 362
    },
    {
      "epoch": 0.9979381443298969,
      "grad_norm": 1.0676481742466821,
      "learning_rate": 7.759938858467979e-05,
      "loss": 0.6272,
      "step": 363
    },
    {
      "epoch": 1.0006872852233677,
      "grad_norm": 0.9232629543637857,
      "learning_rate": 7.757306149790082e-05,
      "loss": 0.7685,
      "step": 364
    },
    {
      "epoch": 1.0034364261168385,
      "grad_norm": 1.0678574538408068,
      "learning_rate": 7.754659535060943e-05,
      "loss": 0.5916,
      "step": 365
    },
    {
      "epoch": 1.0061855670103093,
      "grad_norm": 1.2551799774905172,
      "learning_rate": 7.751999024075871e-05,
      "loss": 0.5988,
      "step": 366
    },
    {
      "epoch": 1.00893470790378,
      "grad_norm": 0.9263667600041875,
      "learning_rate": 7.749324626681599e-05,
      "loss": 0.5966,
      "step": 367
    },
    {
      "epoch": 1.0116838487972508,
      "grad_norm": 0.731842781771422,
      "learning_rate": 7.746636352776259e-05,
      "loss": 0.5928,
      "step": 368
    },
    {
      "epoch": 1.0144329896907216,
      "grad_norm": 0.8372632610382791,
      "learning_rate": 7.74393421230934e-05,
      "loss": 0.5919,
      "step": 369
    },
    {
      "epoch": 1.0171821305841924,
      "grad_norm": 1.0188957663159135,
      "learning_rate": 7.741218215281652e-05,
      "loss": 0.5932,
      "step": 370
    },
    {
      "epoch": 1.0199312714776632,
| "grad_norm": 1.3394999167844803, |
| "learning_rate": 7.738488371745287e-05, |
| "loss": 0.5879, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.022680412371134, |
| "grad_norm": 0.8980665393748412, |
| "learning_rate": 7.735744691803588e-05, |
| "loss": 0.5942, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.0254295532646047, |
| "grad_norm": 0.9856701598024391, |
| "learning_rate": 7.732987185611102e-05, |
| "loss": 0.5793, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.0281786941580755, |
| "grad_norm": 0.8156719105288647, |
| "learning_rate": 7.730215863373554e-05, |
| "loss": 0.5953, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.0309278350515463, |
| "grad_norm": 0.5095314934411239, |
| "learning_rate": 7.727430735347799e-05, |
| "loss": 0.5848, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.0336769759450173, |
| "grad_norm": 0.5914859323410439, |
| "learning_rate": 7.724631811841789e-05, |
| "loss": 0.5877, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.036426116838488, |
| "grad_norm": 0.6410263828936001, |
| "learning_rate": 7.721819103214536e-05, |
| "loss": 0.5852, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.0391752577319588, |
| "grad_norm": 0.9724802661391164, |
| "learning_rate": 7.71899261987607e-05, |
| "loss": 0.5949, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.0419243986254296, |
| "grad_norm": 1.3805105132332318, |
| "learning_rate": 7.716152372287399e-05, |
| "loss": 0.5912, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.0446735395189004, |
| "grad_norm": 0.5961514316349491, |
| "learning_rate": 7.713298370960481e-05, |
| "loss": 0.5989, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.0474226804123712, |
| "grad_norm": 1.0381265315374555, |
| "learning_rate": 7.710430626458171e-05, |
| "loss": 0.5853, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.050171821305842, |
| "grad_norm": 0.88611297798641, |
| "learning_rate": 7.707549149394192e-05, |
| "loss": 0.5971, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.0529209621993127, |
| "grad_norm": 0.9894621130663098, |
| "learning_rate": 7.704653950433092e-05, |
| "loss": 0.5839, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.0556701030927835, |
| "grad_norm": 1.4525179754862534, |
| "learning_rate": 7.7017450402902e-05, |
| "loss": 0.5841, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.0584192439862543, |
| "grad_norm": 0.6031744444046739, |
| "learning_rate": 7.698822429731595e-05, |
| "loss": 0.577, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.061168384879725, |
| "grad_norm": 1.0814979553674235, |
| "learning_rate": 7.695886129574065e-05, |
| "loss": 0.5963, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.0639175257731959, |
| "grad_norm": 1.2035521726093799, |
| "learning_rate": 7.692936150685059e-05, |
| "loss": 0.59, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.0666666666666667, |
| "grad_norm": 1.065535271974634, |
| "learning_rate": 7.689972503982654e-05, |
| "loss": 0.59, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.0694158075601374, |
| "grad_norm": 0.9885511702158032, |
| "learning_rate": 7.686995200435513e-05, |
| "loss": 0.5908, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.0721649484536082, |
| "grad_norm": 1.0056851158221545, |
| "learning_rate": 7.684004251062844e-05, |
| "loss": 0.592, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.074914089347079, |
| "grad_norm": 1.0202284999862656, |
| "learning_rate": 7.680999666934358e-05, |
| "loss": 0.5809, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.0776632302405498, |
| "grad_norm": 1.1513878865642915, |
| "learning_rate": 7.677981459170228e-05, |
| "loss": 0.5878, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.0804123711340206, |
| "grad_norm": 0.8699810682515629, |
| "learning_rate": 7.674949638941053e-05, |
| "loss": 0.5915, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.0831615120274913, |
| "grad_norm": 0.4496053235798414, |
| "learning_rate": 7.671904217467808e-05, |
| "loss": 0.5801, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.0859106529209621, |
| "grad_norm": 0.6745209087686392, |
| "learning_rate": 7.668845206021812e-05, |
| "loss": 0.5979, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.088659793814433, |
| "grad_norm": 1.2754326592180227, |
| "learning_rate": 7.66577261592468e-05, |
| "loss": 0.5807, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.0914089347079037, |
| "grad_norm": 0.7069250995821895, |
| "learning_rate": 7.662686458548276e-05, |
| "loss": 0.582, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.0941580756013747, |
| "grad_norm": 0.4973387506195001, |
| "learning_rate": 7.659586745314689e-05, |
| "loss": 0.5855, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.0969072164948455, |
| "grad_norm": 0.5388764587620251, |
| "learning_rate": 7.65647348769617e-05, |
| "loss": 0.5901, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.0996563573883162, |
| "grad_norm": 0.5432430222716064, |
| "learning_rate": 7.653346697215103e-05, |
| "loss": 0.5852, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.102405498281787, |
| "grad_norm": 0.7845045531047907, |
| "learning_rate": 7.650206385443958e-05, |
| "loss": 0.5815, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.1051546391752578, |
| "grad_norm": 1.1593889092948502, |
| "learning_rate": 7.647052564005243e-05, |
| "loss": 0.5884, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.1079037800687286, |
| "grad_norm": 0.9849246083500859, |
| "learning_rate": 7.643885244571474e-05, |
| "loss": 0.5884, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.1106529209621994, |
| "grad_norm": 1.2327221523818575, |
| "learning_rate": 7.64070443886512e-05, |
| "loss": 0.5808, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.1134020618556701, |
| "grad_norm": 0.8894760613915432, |
| "learning_rate": 7.637510158658563e-05, |
| "loss": 0.5808, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.116151202749141, |
| "grad_norm": 0.8458010101721215, |
| "learning_rate": 7.634302415774057e-05, |
| "loss": 0.5921, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.1189003436426117, |
| "grad_norm": 1.0269578881644645, |
| "learning_rate": 7.63108122208368e-05, |
| "loss": 0.5876, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.1216494845360825, |
| "grad_norm": 0.9883024415526525, |
| "learning_rate": 7.627846589509295e-05, |
| "loss": 0.5938, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.1243986254295533, |
| "grad_norm": 1.0374738987854506, |
| "learning_rate": 7.624598530022502e-05, |
| "loss": 0.5759, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.127147766323024, |
| "grad_norm": 0.8968993730271081, |
| "learning_rate": 7.621337055644596e-05, |
| "loss": 0.5971, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.1298969072164948, |
| "grad_norm": 0.645336647760953, |
| "learning_rate": 7.618062178446518e-05, |
| "loss": 0.5843, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.1326460481099656, |
| "grad_norm": 0.8156198060430901, |
| "learning_rate": 7.614773910548816e-05, |
| "loss": 0.5834, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.1353951890034364, |
| "grad_norm": 0.9969205762211508, |
| "learning_rate": 7.6114722641216e-05, |
| "loss": 0.5874, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.1381443298969072, |
| "grad_norm": 0.9512507581396507, |
| "learning_rate": 7.608157251384493e-05, |
| "loss": 0.5907, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.140893470790378, |
| "grad_norm": 0.46469337738492333, |
| "learning_rate": 7.604828884606587e-05, |
| "loss": 0.5854, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.1436426116838487, |
| "grad_norm": 0.6875741181821637, |
| "learning_rate": 7.601487176106397e-05, |
| "loss": 0.5907, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.1463917525773195, |
| "grad_norm": 0.8229065410063605, |
| "learning_rate": 7.59813213825182e-05, |
| "loss": 0.5835, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.1491408934707903, |
| "grad_norm": 0.5535331052196206, |
| "learning_rate": 7.594763783460086e-05, |
| "loss": 0.5913, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.151890034364261, |
| "grad_norm": 0.609930416365321, |
| "learning_rate": 7.591382124197708e-05, |
| "loss": 0.5861, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.1546391752577319, |
| "grad_norm": 0.549391385583707, |
| "learning_rate": 7.587987172980443e-05, |
| "loss": 0.5774, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.1573883161512026, |
| "grad_norm": 0.5299626474050247, |
| "learning_rate": 7.58457894237324e-05, |
| "loss": 0.5915, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.1601374570446734, |
| "grad_norm": 0.5702316031822813, |
| "learning_rate": 7.581157444990199e-05, |
| "loss": 0.5771, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.1628865979381444, |
| "grad_norm": 0.8908922680435231, |
| "learning_rate": 7.577722693494519e-05, |
| "loss": 0.5932, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.1656357388316152, |
| "grad_norm": 1.4431986503442844, |
| "learning_rate": 7.574274700598453e-05, |
| "loss": 0.5813, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.168384879725086, |
| "grad_norm": 0.6369720186684446, |
| "learning_rate": 7.570813479063265e-05, |
| "loss": 0.5845, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.1711340206185568, |
| "grad_norm": 0.5256518804042303, |
| "learning_rate": 7.567339041699175e-05, |
| "loss": 0.5924, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.1738831615120275, |
| "grad_norm": 0.8165966324851138, |
| "learning_rate": 7.563851401365316e-05, |
| "loss": 0.588, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.1766323024054983, |
| "grad_norm": 1.1011809927414538, |
| "learning_rate": 7.56035057096969e-05, |
| "loss": 0.5878, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.179381443298969, |
| "grad_norm": 1.2202082201319733, |
| "learning_rate": 7.556836563469111e-05, |
| "loss": 0.589, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.1821305841924399, |
| "grad_norm": 0.8269982439978933, |
| "learning_rate": 7.553309391869167e-05, |
| "loss": 0.5846, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.1848797250859107, |
| "grad_norm": 1.0285455684872933, |
| "learning_rate": 7.549769069224164e-05, |
| "loss": 0.5883, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.1876288659793814, |
| "grad_norm": 1.4122193045161404, |
| "learning_rate": 7.546215608637083e-05, |
| "loss": 0.5832, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.1903780068728522, |
| "grad_norm": 0.3041741502072138, |
| "learning_rate": 7.542649023259527e-05, |
| "loss": 0.5813, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.193127147766323, |
| "grad_norm": 1.5989692268879376, |
| "learning_rate": 7.53906932629168e-05, |
| "loss": 0.5896, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.1958762886597938, |
| "grad_norm": 0.5519101760067153, |
| "learning_rate": 7.535476530982244e-05, |
| "loss": 0.5719, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.1986254295532646, |
| "grad_norm": 0.770417484741293, |
| "learning_rate": 7.53187065062841e-05, |
| "loss": 0.5881, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.2013745704467353, |
| "grad_norm": 1.3484068230851232, |
| "learning_rate": 7.528251698575788e-05, |
| "loss": 0.5868, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.2041237113402061, |
| "grad_norm": 0.5995693974246964, |
| "learning_rate": 7.524619688218372e-05, |
| "loss": 0.5812, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.206872852233677, |
| "grad_norm": 0.9168760247232807, |
| "learning_rate": 7.520974632998485e-05, |
| "loss": 0.5877, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.2096219931271477, |
| "grad_norm": 1.1013744178468492, |
| "learning_rate": 7.51731654640673e-05, |
| "loss": 0.592, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.2123711340206185, |
| "grad_norm": 0.5790863138582544, |
| "learning_rate": 7.51364544198194e-05, |
| "loss": 0.5749, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.2151202749140895, |
| "grad_norm": 0.5723305467246104, |
| "learning_rate": 7.509961333311126e-05, |
| "loss": 0.5905, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.2178694158075603, |
| "grad_norm": 0.6893985276435765, |
| "learning_rate": 7.506264234029432e-05, |
| "loss": 0.5799, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.220618556701031, |
| "grad_norm": 0.6306086582019484, |
| "learning_rate": 7.502554157820079e-05, |
| "loss": 0.583, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.2233676975945018, |
| "grad_norm": 0.5828509036657884, |
| "learning_rate": 7.498831118414316e-05, |
| "loss": 0.5791, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.2261168384879726, |
| "grad_norm": 0.9766394221436852, |
| "learning_rate": 7.495095129591373e-05, |
| "loss": 0.5876, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.2288659793814434, |
| "grad_norm": 1.416296834720271, |
| "learning_rate": 7.4913462051784e-05, |
| "loss": 0.5876, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.2316151202749142, |
| "grad_norm": 0.6702851111393332, |
| "learning_rate": 7.487584359050431e-05, |
| "loss": 0.5881, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.234364261168385, |
| "grad_norm": 0.9384647234340839, |
| "learning_rate": 7.483809605130319e-05, |
| "loss": 0.5959, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.2371134020618557, |
| "grad_norm": 1.2907110973467573, |
| "learning_rate": 7.480021957388691e-05, |
| "loss": 0.5785, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.2398625429553265, |
| "grad_norm": 0.8298126531860124, |
| "learning_rate": 7.476221429843894e-05, |
| "loss": 0.5934, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.2426116838487973, |
| "grad_norm": 1.1127124873762135, |
| "learning_rate": 7.472408036561942e-05, |
| "loss": 0.5833, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.245360824742268, |
| "grad_norm": 0.7148549185439407, |
| "learning_rate": 7.468581791656473e-05, |
| "loss": 0.5927, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.2481099656357388, |
| "grad_norm": 0.863808291677187, |
| "learning_rate": 7.464742709288683e-05, |
| "loss": 0.5955, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.2508591065292096, |
| "grad_norm": 0.8446688366988994, |
| "learning_rate": 7.460890803667283e-05, |
| "loss": 0.5939, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.2536082474226804, |
| "grad_norm": 1.0639580002350018, |
| "learning_rate": 7.457026089048445e-05, |
| "loss": 0.5839, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.2563573883161512, |
| "grad_norm": 1.1091140834360853, |
| "learning_rate": 7.453148579735743e-05, |
| "loss": 0.5829, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.259106529209622, |
| "grad_norm": 0.80462944337638, |
| "learning_rate": 7.449258290080112e-05, |
| "loss": 0.5845, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.2618556701030927, |
| "grad_norm": 0.8412084155727457, |
| "learning_rate": 7.445355234479783e-05, |
| "loss": 0.5767, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.2646048109965635, |
| "grad_norm": 0.6330472076770527, |
| "learning_rate": 7.441439427380235e-05, |
| "loss": 0.5816, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.2673539518900343, |
| "grad_norm": 0.597903788175847, |
| "learning_rate": 7.437510883274144e-05, |
| "loss": 0.5729, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.270103092783505, |
| "grad_norm": 0.4287714351702764, |
| "learning_rate": 7.433569616701324e-05, |
| "loss": 0.5932, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.2728522336769759, |
| "grad_norm": 0.47550714789032805, |
| "learning_rate": 7.429615642248677e-05, |
| "loss": 0.5929, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.2756013745704466, |
| "grad_norm": 0.5414468519874974, |
| "learning_rate": 7.425648974550138e-05, |
| "loss": 0.5774, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.2783505154639174, |
| "grad_norm": 0.5640877600721679, |
| "learning_rate": 7.421669628286617e-05, |
| "loss": 0.586, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.2810996563573882, |
| "grad_norm": 0.6048691932188638, |
| "learning_rate": 7.417677618185955e-05, |
| "loss": 0.597, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.283848797250859, |
| "grad_norm": 0.7508265029471074, |
| "learning_rate": 7.413672959022856e-05, |
| "loss": 0.5961, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.2865979381443298, |
| "grad_norm": 1.066042006928496, |
| "learning_rate": 7.409655665618843e-05, |
| "loss": 0.5837, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.2893470790378008, |
| "grad_norm": 1.4460442111626333, |
| "learning_rate": 7.405625752842198e-05, |
| "loss": 0.5771, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.2920962199312716, |
| "grad_norm": 0.40527253568707877, |
| "learning_rate": 7.401583235607908e-05, |
| "loss": 0.5769, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.2948453608247423, |
| "grad_norm": 0.8934936601794983, |
| "learning_rate": 7.397528128877611e-05, |
| "loss": 0.585, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.2975945017182131, |
| "grad_norm": 1.6705979909682174, |
| "learning_rate": 7.393460447659539e-05, |
| "loss": 0.5932, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.300343642611684, |
| "grad_norm": 0.4422750143504963, |
| "learning_rate": 7.389380207008462e-05, |
| "loss": 0.5878, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.3030927835051547, |
| "grad_norm": 1.5622517777634235, |
| "learning_rate": 7.385287422025635e-05, |
| "loss": 0.5895, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.3058419243986255, |
| "grad_norm": 0.6900852059622544, |
| "learning_rate": 7.381182107858738e-05, |
| "loss": 0.5883, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.3085910652920962, |
| "grad_norm": 0.9882097928385363, |
| "learning_rate": 7.377064279701827e-05, |
| "loss": 0.5894, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.311340206185567, |
| "grad_norm": 0.9430491893537721, |
| "learning_rate": 7.37293395279527e-05, |
| "loss": 0.5921, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.3140893470790378, |
| "grad_norm": 0.7682283056356898, |
| "learning_rate": 7.368791142425691e-05, |
| "loss": 0.5977, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.3168384879725086, |
| "grad_norm": 1.0703629836228026, |
| "learning_rate": 7.364635863925922e-05, |
| "loss": 0.5829, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.3195876288659794, |
| "grad_norm": 0.8976152595403926, |
| "learning_rate": 7.360468132674935e-05, |
| "loss": 0.5902, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.3223367697594501, |
| "grad_norm": 0.7540663375222781, |
| "learning_rate": 7.356287964097795e-05, |
| "loss": 0.588, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.325085910652921, |
| "grad_norm": 1.1037972218662753, |
| "learning_rate": 7.352095373665598e-05, |
| "loss": 0.5814, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.3278350515463917, |
| "grad_norm": 0.6721571418198156, |
| "learning_rate": 7.347890376895407e-05, |
| "loss": 0.586, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.3305841924398625, |
| "grad_norm": 0.6580601865881139, |
| "learning_rate": 7.343672989350214e-05, |
| "loss": 0.5726, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.3333333333333333, |
| "grad_norm": 0.9232629737168565, |
| "learning_rate": 7.33944322663886e-05, |
| "loss": 0.5841, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.3360824742268043, |
| "grad_norm": 0.921611093133066, |
| "learning_rate": 7.335201104415992e-05, |
| "loss": 0.5961, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.338831615120275, |
| "grad_norm": 0.7555439012244609, |
| "learning_rate": 7.330946638381998e-05, |
| "loss": 0.5877, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.3415807560137458, |
| "grad_norm": 0.8027350119891178, |
| "learning_rate": 7.326679844282953e-05, |
| "loss": 0.5917, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.3443298969072166, |
| "grad_norm": 1.1401383602299868, |
| "learning_rate": 7.322400737910558e-05, |
| "loss": 0.5852, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.3470790378006874, |
| "grad_norm": 0.805628145096471, |
| "learning_rate": 7.318109335102083e-05, |
| "loss": 0.5916, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.3498281786941582, |
| "grad_norm": 0.5776321913613638, |
| "learning_rate": 7.31380565174031e-05, |
| "loss": 0.5846, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.352577319587629, |
| "grad_norm": 0.730623963485602, |
| "learning_rate": 7.309489703753471e-05, |
| "loss": 0.5859, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.3553264604810997, |
| "grad_norm": 0.6579197440168587, |
| "learning_rate": 7.305161507115185e-05, |
| "loss": 0.5796, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.3580756013745705, |
| "grad_norm": 0.5630346265554919, |
| "learning_rate": 7.300821077844413e-05, |
| "loss": 0.5822, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.3608247422680413, |
| "grad_norm": 0.7908819829643783, |
| "learning_rate": 7.296468432005382e-05, |
| "loss": 0.5709, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.363573883161512, |
| "grad_norm": 1.1004873982746464, |
| "learning_rate": 7.292103585707541e-05, |
| "loss": 0.5923, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.3663230240549828, |
| "grad_norm": 1.0680631626760493, |
| "learning_rate": 7.287726555105485e-05, |
| "loss": 0.5932, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.3690721649484536, |
| "grad_norm": 0.9112494358591099, |
| "learning_rate": 7.283337356398911e-05, |
| "loss": 0.5785, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.3718213058419244, |
| "grad_norm": 1.0337260282053968, |
| "learning_rate": 7.278936005832549e-05, |
| "loss": 0.5882, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.3745704467353952, |
| "grad_norm": 1.3007806381700902, |
| "learning_rate": 7.274522519696102e-05, |
| "loss": 0.5798, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.377319587628866, |
| "grad_norm": 0.43519904375561846, |
| "learning_rate": 7.270096914324189e-05, |
| "loss": 0.5791, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.3800687285223368, |
| "grad_norm": 0.8544940867252295, |
| "learning_rate": 7.265659206096285e-05, |
| "loss": 0.5901, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.3828178694158075, |
| "grad_norm": 1.2429689674231412, |
| "learning_rate": 7.261209411436654e-05, |
| "loss": 0.5809, |
| "step": 503 |
| }, |
| { |
| "epoch": 1.3855670103092783, |
| "grad_norm": 0.6308748621084995, |
| "learning_rate": 7.256747546814298e-05, |
| "loss": 0.5791, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.388316151202749, |
| "grad_norm": 0.7148204479546915, |
| "learning_rate": 7.252273628742885e-05, |
| "loss": 0.5796, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.3910652920962199, |
| "grad_norm": 0.9925558904292439, |
| "learning_rate": 7.2477876737807e-05, |
| "loss": 0.5858, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.3938144329896907, |
| "grad_norm": 0.8578115038769086, |
| "learning_rate": 7.243289698530572e-05, |
| "loss": 0.5845, |
| "step": 507 |
| }, |
| { |
| "epoch": 1.3965635738831614, |
| "grad_norm": 0.7229862425355326, |
| "learning_rate": 7.23877971963982e-05, |
| "loss": 0.5755, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.3993127147766322, |
| "grad_norm": 0.8939365007653768, |
| "learning_rate": 7.234257753800191e-05, |
| "loss": 0.5875, |
| "step": 509 |
| }, |
| { |
| "epoch": 1.402061855670103, |
| "grad_norm": 0.9280862590900643, |
| "learning_rate": 7.229723817747793e-05, |
| "loss": 0.5916, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.4048109965635738, |
| "grad_norm": 0.7722106893797273, |
| "learning_rate": 7.225177928263042e-05, |
| "loss": 0.5817, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.4075601374570446, |
| "grad_norm": 0.5590378429645152, |
| "learning_rate": 7.220620102170585e-05, |
| "loss": 0.5804, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.4103092783505153, |
| "grad_norm": 0.5415497371424565, |
| "learning_rate": 7.21605035633926e-05, |
| "loss": 0.5829, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.4130584192439861, |
| "grad_norm": 0.6720235507203421, |
| "learning_rate": 7.21146870768201e-05, |
| "loss": 0.5818, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.4158075601374571, |
| "grad_norm": 0.5829950782167737, |
| "learning_rate": 7.206875173155834e-05, |
| "loss": 0.5922, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.418556701030928, |
| "grad_norm": 0.3621229457523989, |
| "learning_rate": 7.202269769761726e-05, |
| "loss": 0.5842, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.4213058419243987, |
| "grad_norm": 0.5023031289705603, |
| "learning_rate": 7.197652514544601e-05, |
| "loss": 0.576, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.4240549828178695, |
| "grad_norm": 0.6099312033435136, |
| "learning_rate": 7.193023424593245e-05, |
| "loss": 0.5795, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.4268041237113402, |
| "grad_norm": 0.5933868127441155, |
| "learning_rate": 7.188382517040237e-05, |
| "loss": 0.5736, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.429553264604811, |
| "grad_norm": 0.582367928707779, |
| "learning_rate": 7.183729809061904e-05, |
| "loss": 0.5794, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.4323024054982818, |
| "grad_norm": 0.7208839572689155, |
| "learning_rate": 7.179065317878237e-05, |
| "loss": 0.587, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.4350515463917526, |
| "grad_norm": 1.0049567135040567, |
| "learning_rate": 7.174389060752845e-05, |
| "loss": 0.5863, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.4378006872852234, |
| "grad_norm": 1.3116471278229, |
| "learning_rate": 7.16970105499288e-05, |
| "loss": 0.5873, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.4405498281786941, |
| "grad_norm": 0.7052150815292837, |
| "learning_rate": 7.165001317948976e-05, |
| "loss": 0.5854, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.443298969072165, |
| "grad_norm": 0.6674498164029434, |
| "learning_rate": 7.160289867015187e-05, |
| "loss": 0.58, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.4460481099656357, |
| "grad_norm": 0.6479049706905784, |
| "learning_rate": 7.155566719628921e-05, |
| "loss": 0.583, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.4487972508591065, |
| "grad_norm": 0.6003355447355602, |
| "learning_rate": 7.150831893270874e-05, |
| "loss": 0.5792, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.4515463917525773, |
| "grad_norm": 0.7215636928866146, |
| "learning_rate": 7.146085405464968e-05, |
| "loss": 0.5746, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.454295532646048, |
| "grad_norm": 0.6022527234482411, |
| "learning_rate": 7.141327273778284e-05, |
| "loss": 0.5808, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.4570446735395188, |
| "grad_norm": 0.44820302089966574, |
| "learning_rate": 7.136557515820999e-05, |
| "loss": 0.5745, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.4597938144329896, |
| "grad_norm": 0.42351586228242205, |
| "learning_rate": 7.13177614924632e-05, |
| "loss": 0.5858, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.4625429553264606, |
| "grad_norm": 0.4466996033955547, |
| "learning_rate": 7.126983191750412e-05, |
| "loss": 0.5746, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.4652920962199314, |
| "grad_norm": 0.40737004869624294, |
| "learning_rate": 7.12217866107235e-05, |
| "loss": 0.5818, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.4680412371134022, |
| "grad_norm": 0.47191169515150533, |
| "learning_rate": 7.11736257499403e-05, |
| "loss": 0.5809, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.470790378006873, |
| "grad_norm": 0.4813204578717719, |
| "learning_rate": 7.112534951340126e-05, |
| "loss": 0.5844, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.4735395189003437, |
| "grad_norm": 0.5549078938525966, |
| "learning_rate": 7.107695807978007e-05, |
| "loss": 0.5812, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.4762886597938145, |
| "grad_norm": 0.6761669063164707, |
| "learning_rate": 7.10284516281768e-05, |
| "loss": 0.5766, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.4790378006872853, |
| "grad_norm": 0.6480701339305898, |
| "learning_rate": 7.097983033811718e-05, |
| "loss": 0.5805, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.481786941580756, |
| "grad_norm": 0.5593193077179679, |
| "learning_rate": 7.0931094389552e-05, |
| "loss": 0.575, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.4845360824742269, |
| "grad_norm": 0.4942110243815134, |
| "learning_rate": 7.088224396285638e-05, |
| "loss": 0.5833, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.4872852233676976, |
| "grad_norm": 0.5797979347815979, |
| "learning_rate": 7.083327923882917e-05, |
| "loss": 0.5747, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.4900343642611684, |
| "grad_norm": 0.9315007373980465, |
| "learning_rate": 7.078420039869218e-05, |
| "loss": 0.592, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.4927835051546392, |
| "grad_norm": 1.616429951355777, |
| "learning_rate": 7.073500762408964e-05, |
| "loss": 0.5944, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.49553264604811, |
| "grad_norm": 0.5055431608140836, |
| "learning_rate": 7.068570109708741e-05, |
| "loss": 0.5901, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.4982817869415808, |
| "grad_norm": 1.0997422339745508, |
| "learning_rate": 7.06362810001724e-05, |
| "loss": 0.5902, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.5010309278350515, |
| "grad_norm": 1.7657567775705625, |
| "learning_rate": 7.05867475162518e-05, |
| "loss": 0.5885, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.5037800687285223, |
| "grad_norm": 0.7356302747726631, |
| "learning_rate": 7.053710082865247e-05, |
| "loss": 0.5861, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.506529209621993, |
| "grad_norm": 2.1759109547514943, |
| "learning_rate": 7.048734112112026e-05, |
| "loss": 0.5997, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.5092783505154639, |
| "grad_norm": 1.2978110027459113, |
| "learning_rate": 7.043746857781933e-05, |
| "loss": 0.6157, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.5120274914089347, |
| "grad_norm": 2.219306449832288, |
| "learning_rate": 7.03874833833314e-05, |
| "loss": 0.607, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.5147766323024054, |
| "grad_norm": 1.9591339401200079, |
| "learning_rate": 7.033738572265517e-05, |
| "loss": 0.6369, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.5175257731958762, |
| "grad_norm": 1.478246394702369, |
| "learning_rate": 7.028717578120555e-05, |
| "loss": 0.6063, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.520274914089347, |
| "grad_norm": 1.2579901505180804, |
| "learning_rate": 7.023685374481301e-05, |
| "loss": 0.6019, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.5230240549828178, |
| "grad_norm": 1.46105914035749, |
| "learning_rate": 7.018641979972295e-05, |
| "loss": 0.6094, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.5257731958762886, |
| "grad_norm": 0.8090152261985227, |
| "learning_rate": 7.013587413259486e-05, |
| "loss": 0.5913, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.5285223367697593, |
| "grad_norm": 1.2609062811324916, |
| "learning_rate": 7.008521693050179e-05, |
| "loss": 0.5856, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.5312714776632301, |
| "grad_norm": 1.0043747397077383, |
| "learning_rate": 7.003444838092957e-05, |
| "loss": 0.5989, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.534020618556701, |
| "grad_norm": 0.7445889690645772, |
| "learning_rate": 6.998356867177613e-05, |
| "loss": 0.5865, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.5367697594501717, |
| "grad_norm": 1.2566147555053582, |
| "learning_rate": 6.993257799135078e-05, |
| "loss": 0.5855, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.5395189003436425, |
| "grad_norm": 0.623650378420435, |
| "learning_rate": 6.988147652837362e-05, |
| "loss": 0.5931, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.5422680412371133, |
| "grad_norm": 1.0346995401552566, |
| "learning_rate": 6.983026447197469e-05, |
| "loss": 0.5914, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.545017182130584, |
| "grad_norm": 0.9620047133783202, |
| "learning_rate": 6.977894201169336e-05, |
| "loss": 0.5915, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.547766323024055, |
| "grad_norm": 0.6073721542597159, |
| "learning_rate": 6.972750933747765e-05, |
| "loss": 0.5834, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.5505154639175258, |
| "grad_norm": 0.9240637148715215, |
| "learning_rate": 6.967596663968346e-05, |
| "loss": 0.5891, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.5532646048109966, |
| "grad_norm": 0.7729700501754245, |
| "learning_rate": 6.962431410907387e-05, |
| "loss": 0.5864, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.5560137457044674, |
| "grad_norm": 0.7023186108460778, |
| "learning_rate": 6.957255193681852e-05, |
| "loss": 0.5792, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.5587628865979382, |
| "grad_norm": 0.8156415963129797, |
| "learning_rate": 6.952068031449278e-05, |
| "loss": 0.5804, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.561512027491409, |
| "grad_norm": 0.4418458963414597, |
| "learning_rate": 6.946869943407714e-05, |
| "loss": 0.5796, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.5642611683848797, |
| "grad_norm": 0.7517403935755494, |
| "learning_rate": 6.941660948795646e-05, |
| "loss": 0.5757, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.5670103092783505, |
| "grad_norm": 0.6880319452283528, |
| "learning_rate": 6.936441066891924e-05, |
| "loss": 0.5866, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.5697594501718213, |
| "grad_norm": 0.44431908466865444, |
| "learning_rate": 6.931210317015694e-05, |
| "loss": 0.5831, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.572508591065292, |
| "grad_norm": 0.5206213485516508, |
| "learning_rate": 6.925968718526325e-05, |
| "loss": 0.5835, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.5752577319587628, |
| "grad_norm": 0.4865149316480254, |
| "learning_rate": 6.920716290823337e-05, |
| "loss": 0.5872, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.5780068728522336, |
| "grad_norm": 0.45702506236242896, |
| "learning_rate": 6.915453053346332e-05, |
| "loss": 0.5918, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.5807560137457046, |
| "grad_norm": 0.44494079428514854, |
| "learning_rate": 6.910179025574916e-05, |
| "loss": 0.5889, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.5835051546391754, |
| "grad_norm": 0.40161179223147797, |
| "learning_rate": 6.904894227028631e-05, |
| "loss": 0.5793, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.5862542955326462, |
| "grad_norm": 0.40020351103635327, |
| "learning_rate": 6.899598677266888e-05, |
| "loss": 0.5791, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.589003436426117, |
| "grad_norm": 0.5533864087207034, |
| "learning_rate": 6.89429239588888e-05, |
| "loss": 0.5686, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.5917525773195877, |
| "grad_norm": 0.4609556358868059, |
| "learning_rate": 6.888975402533528e-05, |
| "loss": 0.5696, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.5945017182130585, |
| "grad_norm": 0.376485016123922, |
| "learning_rate": 6.883647716879389e-05, |
| "loss": 0.5787, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.5972508591065293, |
| "grad_norm": 0.3703464382281221, |
| "learning_rate": 6.878309358644602e-05, |
| "loss": 0.5786, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.36242738510972944, |
| "learning_rate": 6.8729603475868e-05, |
| "loss": 0.5838, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.6027491408934709, |
| "grad_norm": 0.4005050809946878, |
| "learning_rate": 6.867600703503044e-05, |
| "loss": 0.5828, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.6054982817869417, |
| "grad_norm": 0.42329454686286444, |
| "learning_rate": 6.86223044622975e-05, |
| "loss": 0.5803, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.6082474226804124, |
| "grad_norm": 0.3329212652227201, |
| "learning_rate": 6.856849595642617e-05, |
| "loss": 0.5822, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.6109965635738832, |
| "grad_norm": 0.46886870029058014, |
| "learning_rate": 6.851458171656541e-05, |
| "loss": 0.58, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.613745704467354, |
| "grad_norm": 0.5556659975442035, |
| "learning_rate": 6.846056194225562e-05, |
| "loss": 0.5702, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.6164948453608248, |
| "grad_norm": 0.5260721900968394, |
| "learning_rate": 6.84064368334277e-05, |
| "loss": 0.5867, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.6192439862542956, |
| "grad_norm": 0.40786759602060935, |
| "learning_rate": 6.835220659040246e-05, |
| "loss": 0.5834, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.6219931271477663, |
| "grad_norm": 0.2996276604956069, |
| "learning_rate": 6.82978714138898e-05, |
| "loss": 0.5776, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.6247422680412371, |
| "grad_norm": 0.35270615435100505, |
| "learning_rate": 6.824343150498798e-05, |
| "loss": 0.5783, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.627491408934708, |
| "grad_norm": 0.28376863619687237, |
| "learning_rate": 6.81888870651829e-05, |
| "loss": 0.5767, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.6302405498281787, |
| "grad_norm": 0.36443318149486764, |
| "learning_rate": 6.813423829634732e-05, |
| "loss": 0.579, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.6329896907216495, |
| "grad_norm": 0.481183239710988, |
| "learning_rate": 6.80794854007401e-05, |
| "loss": 0.5815, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.6357388316151202, |
| "grad_norm": 0.4948913070938149, |
| "learning_rate": 6.802462858100556e-05, |
| "loss": 0.5847, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.638487972508591, |
| "grad_norm": 0.3815071178544923, |
| "learning_rate": 6.796966804017258e-05, |
| "loss": 0.5724, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.6412371134020618, |
| "grad_norm": 0.4407973877140981, |
| "learning_rate": 6.791460398165393e-05, |
| "loss": 0.5811, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.6439862542955326, |
| "grad_norm": 0.43391575615156724, |
| "learning_rate": 6.785943660924553e-05, |
| "loss": 0.5827, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.6467353951890034, |
| "grad_norm": 0.4035737413547384, |
| "learning_rate": 6.780416612712568e-05, |
| "loss": 0.5819, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.6494845360824741, |
| "grad_norm": 0.5281104884380362, |
| "learning_rate": 6.774879273985423e-05, |
| "loss": 0.5775, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.652233676975945, |
| "grad_norm": 0.7760361977252223, |
| "learning_rate": 6.769331665237198e-05, |
| "loss": 0.5899, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.6549828178694157, |
| "grad_norm": 1.09403271674242, |
| "learning_rate": 6.763773806999977e-05, |
| "loss": 0.5859, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.6577319587628865, |
| "grad_norm": 0.9348914419022682, |
| "learning_rate": 6.75820571984378e-05, |
| "loss": 0.5918, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.6604810996563573, |
| "grad_norm": 0.7435836588260047, |
| "learning_rate": 6.752627424376484e-05, |
| "loss": 0.582, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.663230240549828, |
| "grad_norm": 0.5416202424064285, |
| "learning_rate": 6.747038941243748e-05, |
| "loss": 0.5845, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.6659793814432988, |
| "grad_norm": 0.4289104135369005, |
| "learning_rate": 6.741440291128938e-05, |
| "loss": 0.5841, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.6687285223367696, |
| "grad_norm": 0.3842777168497921, |
| "learning_rate": 6.735831494753046e-05, |
| "loss": 0.5783, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.6714776632302404, |
| "grad_norm": 0.3676465027204242, |
| "learning_rate": 6.730212572874618e-05, |
| "loss": 0.5708, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.6742268041237114, |
| "grad_norm": 0.3960487648888393, |
| "learning_rate": 6.724583546289672e-05, |
| "loss": 0.5724, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.6769759450171822, |
| "grad_norm": 0.4083333422479102, |
| "learning_rate": 6.71894443583163e-05, |
| "loss": 0.5925, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.679725085910653, |
| "grad_norm": 0.4578552218287685, |
| "learning_rate": 6.713295262371232e-05, |
| "loss": 0.5715, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.6824742268041237, |
| "grad_norm": 0.5656117537132986, |
| "learning_rate": 6.707636046816457e-05, |
| "loss": 0.5766, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.6852233676975945, |
| "grad_norm": 0.6850771753028508, |
| "learning_rate": 6.70196681011246e-05, |
| "loss": 0.5928, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.6879725085910653, |
| "grad_norm": 0.698725362743093, |
| "learning_rate": 6.696287573241478e-05, |
| "loss": 0.5897, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.690721649484536, |
| "grad_norm": 0.7382677867943621, |
| "learning_rate": 6.690598357222764e-05, |
| "loss": 0.5668, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.6934707903780069, |
| "grad_norm": 0.7105270810835767, |
| "learning_rate": 6.684899183112498e-05, |
| "loss": 0.5736, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.6962199312714776, |
| "grad_norm": 0.6862323082435818, |
| "learning_rate": 6.679190072003722e-05, |
| "loss": 0.5765, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.6989690721649484, |
| "grad_norm": 0.6299104085981699, |
| "learning_rate": 6.67347104502625e-05, |
| "loss": 0.5642, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.7017182130584192, |
| "grad_norm": 0.6026002947844302, |
| "learning_rate": 6.667742123346603e-05, |
| "loss": 0.5804, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.7044673539518902, |
| "grad_norm": 0.4462813007489562, |
| "learning_rate": 6.662003328167917e-05, |
| "loss": 0.5807, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.707216494845361, |
| "grad_norm": 0.29472463703813456, |
| "learning_rate": 6.65625468072987e-05, |
| "loss": 0.5702, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.7099656357388318, |
| "grad_norm": 0.320893033856143, |
| "learning_rate": 6.650496202308607e-05, |
| "loss": 0.5835, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.7127147766323025, |
| "grad_norm": 0.39010773443389357, |
| "learning_rate": 6.644727914216656e-05, |
| "loss": 0.576, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.7154639175257733, |
| "grad_norm": 0.5319160826521161, |
| "learning_rate": 6.638949837802855e-05, |
| "loss": 0.5798, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.718213058419244, |
| "grad_norm": 0.6498383386047935, |
| "learning_rate": 6.633161994452262e-05, |
| "loss": 0.5795, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.7209621993127149, |
| "grad_norm": 0.7259267311718851, |
| "learning_rate": 6.627364405586091e-05, |
| "loss": 0.5919, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.7237113402061857, |
| "grad_norm": 0.8162977870752441, |
| "learning_rate": 6.621557092661621e-05, |
| "loss": 0.5753, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.7264604810996564, |
| "grad_norm": 0.7728017259091454, |
| "learning_rate": 6.61574007717212e-05, |
| "loss": 0.5843, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.7292096219931272, |
| "grad_norm": 0.7443225127869316, |
| "learning_rate": 6.609913380646767e-05, |
| "loss": 0.5802, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.731958762886598, |
| "grad_norm": 0.7541481509168282, |
| "learning_rate": 6.604077024650571e-05, |
| "loss": 0.5841, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.7347079037800688, |
| "grad_norm": 0.6874877805251565, |
| "learning_rate": 6.598231030784289e-05, |
| "loss": 0.5861, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.7374570446735396, |
| "grad_norm": 0.6302784745574853, |
| "learning_rate": 6.592375420684354e-05, |
| "loss": 0.579, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.7402061855670103, |
| "grad_norm": 0.3605005517279551, |
| "learning_rate": 6.586510216022785e-05, |
| "loss": 0.5761, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.7429553264604811, |
| "grad_norm": 0.40851626295602705, |
| "learning_rate": 6.58063543850711e-05, |
| "loss": 0.5809, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.745704467353952, |
| "grad_norm": 0.5889121925134353, |
| "learning_rate": 6.574751109880295e-05, |
| "loss": 0.589, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.7484536082474227, |
| "grad_norm": 0.6855740977784578, |
| "learning_rate": 6.568857251920641e-05, |
| "loss": 0.5791, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.7512027491408935, |
| "grad_norm": 0.7763253623405283, |
| "learning_rate": 6.562953886441731e-05, |
| "loss": 0.5698, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.7539518900343642, |
| "grad_norm": 0.8027574724353924, |
| "learning_rate": 6.557041035292331e-05, |
| "loss": 0.5695, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.756701030927835, |
| "grad_norm": 0.8124682042384597, |
| "learning_rate": 6.551118720356313e-05, |
| "loss": 0.5793, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.7594501718213058, |
| "grad_norm": 0.7285676570172558, |
| "learning_rate": 6.545186963552578e-05, |
| "loss": 0.5809, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.7621993127147766, |
| "grad_norm": 0.5493034420656492, |
| "learning_rate": 6.539245786834965e-05, |
| "loss": 0.5779, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.7649484536082474, |
| "grad_norm": 0.4100237013175215, |
| "learning_rate": 6.533295212192189e-05, |
| "loss": 0.5634, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.7676975945017182, |
| "grad_norm": 0.44386016701061204, |
| "learning_rate": 6.527335261647735e-05, |
| "loss": 0.5865, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.770446735395189, |
| "grad_norm": 0.7605693123034108, |
| "learning_rate": 6.521365957259798e-05, |
| "loss": 0.5758, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.7731958762886597, |
| "grad_norm": 0.9531489941794372, |
| "learning_rate": 6.515387321121185e-05, |
| "loss": 0.5845, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.7759450171821305, |
| "grad_norm": 0.9748291614843003, |
| "learning_rate": 6.509399375359247e-05, |
| "loss": 0.5719, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.7786941580756013, |
| "grad_norm": 0.7628578645914652, |
| "learning_rate": 6.503402142135787e-05, |
| "loss": 0.5768, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.781443298969072, |
| "grad_norm": 0.5799935111185708, |
| "learning_rate": 6.497395643646981e-05, |
| "loss": 0.5701, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.7841924398625428, |
| "grad_norm": 0.5277145245316653, |
| "learning_rate": 6.491379902123297e-05, |
| "loss": 0.5683, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.7869415807560136, |
| "grad_norm": 0.4736528081812741, |
| "learning_rate": 6.485354939829414e-05, |
| "loss": 0.5779, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.7896907216494844, |
| "grad_norm": 0.5669836094412534, |
| "learning_rate": 6.479320779064134e-05, |
| "loss": 0.5764, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.7924398625429552, |
| "grad_norm": 0.559780538593531, |
| "learning_rate": 6.473277442160309e-05, |
| "loss": 0.5832, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.795189003436426, |
| "grad_norm": 0.5065885370576576, |
| "learning_rate": 6.467224951484746e-05, |
| "loss": 0.5783, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.797938144329897, |
| "grad_norm": 0.4559459768927469, |
| "learning_rate": 6.461163329438135e-05, |
| "loss": 0.5715, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.8006872852233677, |
| "grad_norm": 0.42548224770937315, |
| "learning_rate": 6.45509259845496e-05, |
| "loss": 0.5751, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.8034364261168385, |
| "grad_norm": 0.3455007524085001, |
| "learning_rate": 6.449012781003419e-05, |
| "loss": 0.5754, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.8061855670103093, |
| "grad_norm": 0.4400225976066889, |
| "learning_rate": 6.442923899585338e-05, |
| "loss": 0.5941, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.80893470790378, |
| "grad_norm": 0.5084943205879741, |
| "learning_rate": 6.436825976736092e-05, |
| "loss": 0.579, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.8116838487972509, |
| "grad_norm": 0.5358918155905726, |
| "learning_rate": 6.430719035024515e-05, |
| "loss": 0.5748, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.8144329896907216, |
| "grad_norm": 0.503047756943303, |
| "learning_rate": 6.424603097052826e-05, |
| "loss": 0.5794, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.8171821305841924, |
| "grad_norm": 0.44321729048134617, |
| "learning_rate": 6.418478185456534e-05, |
| "loss": 0.5865, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.8199312714776632, |
| "grad_norm": 0.3737976145350619, |
| "learning_rate": 6.412344322904362e-05, |
| "loss": 0.5762, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.822680412371134, |
| "grad_norm": 0.2731612660975183, |
| "learning_rate": 6.406201532098165e-05, |
| "loss": 0.5691, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.8254295532646048, |
| "grad_norm": 0.3311008105208644, |
| "learning_rate": 6.400049835772836e-05, |
| "loss": 0.5729, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.8281786941580758, |
| "grad_norm": 0.3627533269491876, |
| "learning_rate": 6.393889256696231e-05, |
| "loss": 0.5729, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.8309278350515465, |
| "grad_norm": 0.3316310187075983, |
| "learning_rate": 6.387719817669081e-05, |
| "loss": 0.582, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.8336769759450173, |
| "grad_norm": 0.3431221379909935, |
| "learning_rate": 6.381541541524911e-05, |
| "loss": 0.5775, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.836426116838488, |
| "grad_norm": 0.308724522020036, |
| "learning_rate": 6.375354451129949e-05, |
| "loss": 0.5759, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.839175257731959, |
| "grad_norm": 0.2799060976007718, |
| "learning_rate": 6.369158569383045e-05, |
| "loss": 0.5826, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.8419243986254297, |
| "grad_norm": 0.3820386705421886, |
| "learning_rate": 6.362953919215591e-05, |
| "loss": 0.5778, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.8446735395189005, |
| "grad_norm": 0.38246444479750036, |
| "learning_rate": 6.356740523591427e-05, |
| "loss": 0.5749, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.8474226804123712, |
| "grad_norm": 0.3271098200229515, |
| "learning_rate": 6.350518405506764e-05, |
| "loss": 0.5817, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.850171821305842, |
| "grad_norm": 0.34228419827242806, |
| "learning_rate": 6.344287587990091e-05, |
| "loss": 0.5819, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.8529209621993128, |
| "grad_norm": 0.32655548951800334, |
| "learning_rate": 6.338048094102096e-05, |
| "loss": 0.5749, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.8556701030927836, |
| "grad_norm": 0.32871268032009765, |
| "learning_rate": 6.331799946935583e-05, |
| "loss": 0.5685, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.8584192439862544, |
| "grad_norm": 0.4199684721453658, |
| "learning_rate": 6.325543169615378e-05, |
| "loss": 0.5725, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.8611683848797251, |
| "grad_norm": 0.5756048364369786, |
| "learning_rate": 6.319277785298247e-05, |
| "loss": 0.5805, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.863917525773196, |
| "grad_norm": 0.6547196962591697, |
| "learning_rate": 6.313003817172812e-05, |
| "loss": 0.5662, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.8666666666666667, |
| "grad_norm": 0.7656825534443144, |
| "learning_rate": 6.30672128845947e-05, |
| "loss": 0.5864, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.8694158075601375, |
| "grad_norm": 0.9358535366007116, |
| "learning_rate": 6.300430222410292e-05, |
| "loss": 0.5798, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.8721649484536083, |
| "grad_norm": 0.9703236167568001, |
| "learning_rate": 6.294130642308952e-05, |
| "loss": 0.5752, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.874914089347079, |
| "grad_norm": 0.8892433252848201, |
| "learning_rate": 6.287822571470636e-05, |
| "loss": 0.5709, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.8776632302405498, |
| "grad_norm": 0.8810114036796293, |
| "learning_rate": 6.281506033241948e-05, |
| "loss": 0.5733, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.8804123711340206, |
| "grad_norm": 0.853242199524319, |
| "learning_rate": 6.275181051000842e-05, |
| "loss": 0.5758, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.8831615120274914, |
| "grad_norm": 0.6715868178268443, |
| "learning_rate": 6.26884764815651e-05, |
| "loss": 0.5681, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.8859106529209622, |
| "grad_norm": 0.41927983624654164, |
| "learning_rate": 6.26250584814932e-05, |
| "loss": 0.5706, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.888659793814433, |
| "grad_norm": 0.35123714928190247, |
| "learning_rate": 6.256155674450712e-05, |
| "loss": 0.5742, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.8914089347079037, |
| "grad_norm": 0.5653996664319825, |
| "learning_rate": 6.249797150563124e-05, |
| "loss": 0.5731, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.8941580756013745, |
| "grad_norm": 0.6649259671430193, |
| "learning_rate": 6.243430300019891e-05, |
| "loss": 0.5797, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.8969072164948453, |
| "grad_norm": 0.60181480524739, |
| "learning_rate": 6.237055146385173e-05, |
| "loss": 0.5758, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.899656357388316, |
| "grad_norm": 0.6235277209689051, |
| "learning_rate": 6.230671713253854e-05, |
| "loss": 0.5778, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.9024054982817868, |
| "grad_norm": 0.6196187849528243, |
| "learning_rate": 6.224280024251466e-05, |
| "loss": 0.5698, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.9051546391752576, |
| "grad_norm": 0.42709196066527666, |
| "learning_rate": 6.217880103034091e-05, |
| "loss": 0.5736, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.9079037800687284, |
| "grad_norm": 0.34692776304267553, |
| "learning_rate": 6.211471973288285e-05, |
| "loss": 0.5712, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.9106529209621992, |
| "grad_norm": 0.3110542487628964, |
| "learning_rate": 6.205055658730983e-05, |
| "loss": 0.5653, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.91340206185567, |
| "grad_norm": 0.34540933819310177, |
| "learning_rate": 6.198631183109408e-05, |
| "loss": 0.576, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.9161512027491407, |
| "grad_norm": 0.3126265820113025, |
| "learning_rate": 6.192198570200992e-05, |
| "loss": 0.5906, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.9189003436426115, |
| "grad_norm": 0.29077210119155705, |
| "learning_rate": 6.185757843813282e-05, |
| "loss": 0.5799, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.9216494845360823, |
| "grad_norm": 0.43001232514386034, |
| "learning_rate": 6.179309027783856e-05, |
| "loss": 0.5784, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.9243986254295533, |
| "grad_norm": 0.4962679375465972, |
| "learning_rate": 6.172852145980228e-05, |
| "loss": 0.5731, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.927147766323024, |
| "grad_norm": 0.5311714605519653, |
| "learning_rate": 6.166387222299767e-05, |
| "loss": 0.5812, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.9298969072164949, |
| "grad_norm": 0.586368926903193, |
| "learning_rate": 6.159914280669607e-05, |
| "loss": 0.5638, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.9326460481099657, |
| "grad_norm": 0.6549081294272183, |
| "learning_rate": 6.153433345046552e-05, |
| "loss": 0.5785, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.9353951890034364, |
| "grad_norm": 0.7252681437363886, |
| "learning_rate": 6.146944439416997e-05, |
| "loss": 0.5751, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.9381443298969072, |
| "grad_norm": 0.7845460102188683, |
| "learning_rate": 6.140447587796832e-05, |
| "loss": 0.5646, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.940893470790378, |
| "grad_norm": 0.8048678290944055, |
| "learning_rate": 6.133942814231357e-05, |
| "loss": 0.5742, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.9436426116838488, |
| "grad_norm": 0.7734169093084938, |
| "learning_rate": 6.127430142795192e-05, |
| "loss": 0.5876, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.9463917525773196, |
| "grad_norm": 0.5484007054312127, |
| "learning_rate": 6.120909597592185e-05, |
| "loss": 0.5843, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.9491408934707903, |
| "grad_norm": 0.3242608423209931, |
| "learning_rate": 6.114381202755328e-05, |
| "loss": 0.5654, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.9518900343642611, |
| "grad_norm": 0.3539205469330434, |
| "learning_rate": 6.107844982446662e-05, |
| "loss": 0.5833, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.9546391752577321, |
| "grad_norm": 0.5156015064072683, |
| "learning_rate": 6.1013009608571954e-05, |
| "loss": 0.583, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.957388316151203, |
| "grad_norm": 0.6131503596319059, |
| "learning_rate": 6.094749162206807e-05, |
| "loss": 0.5742, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.9601374570446737, |
| "grad_norm": 0.6333732757531064, |
| "learning_rate": 6.0881896107441575e-05, |
| "loss": 0.5751, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.9628865979381445, |
| "grad_norm": 0.456390717989884, |
| "learning_rate": 6.081622330746603e-05, |
| "loss": 0.5844, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.9656357388316152, |
| "grad_norm": 0.41591397010146025, |
| "learning_rate": 6.075047346520105e-05, |
| "loss": 0.5756, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.968384879725086, |
| "grad_norm": 0.5054704227601636, |
| "learning_rate": 6.0684646823991345e-05, |
| "loss": 0.5688, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.9711340206185568, |
| "grad_norm": 0.6526494286710874, |
| "learning_rate": 6.061874362746592e-05, |
| "loss": 0.568, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.9738831615120276, |
| "grad_norm": 0.7155431076783062, |
| "learning_rate": 6.055276411953705e-05, |
| "loss": 0.5746, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.9766323024054984, |
| "grad_norm": 0.5467249394073967, |
| "learning_rate": 6.048670854439952e-05, |
| "loss": 0.5793, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.9793814432989691, |
| "grad_norm": 0.44545561246216775, |
| "learning_rate": 6.042057714652958e-05, |
| "loss": 0.5793, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.98213058419244, |
| "grad_norm": 0.43668565117704844, |
| "learning_rate": 6.035437017068413e-05, |
| "loss": 0.5711, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.9848797250859107, |
| "grad_norm": 0.4794911786968242, |
| "learning_rate": 6.02880878618998e-05, |
| "loss": 0.5764, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.9876288659793815, |
| "grad_norm": 0.45936527802619287, |
| "learning_rate": 6.022173046549201e-05, |
| "loss": 0.5728, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.9903780068728523, |
| "grad_norm": 0.3710590792543285, |
| "learning_rate": 6.0155298227054125e-05, |
| "loss": 0.5764, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.993127147766323, |
| "grad_norm": 0.3684226778884953, |
| "learning_rate": 6.008879139245645e-05, |
| "loss": 0.5711, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.9958762886597938, |
| "grad_norm": 0.37506727135987533, |
| "learning_rate": 6.00222102078454e-05, |
| "loss": 0.5655, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.9986254295532646, |
| "grad_norm": 0.42203860157012285, |
| "learning_rate": 5.995555491964259e-05, |
| "loss": 0.6314, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.0013745704467354, |
| "grad_norm": 0.6121901786626351, |
| "learning_rate": 5.988882577454386e-05, |
| "loss": 0.6437, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.004123711340206, |
| "grad_norm": 0.838029390831891, |
| "learning_rate": 5.982202301951841e-05, |
| "loss": 0.5246, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.006872852233677, |
| "grad_norm": 1.0035111815398425, |
| "learning_rate": 5.97551469018079e-05, |
| "loss": 0.532, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.0096219931271477, |
| "grad_norm": 1.0550172252909944, |
| "learning_rate": 5.968819766892546e-05, |
| "loss": 0.5356, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.0123711340206185, |
| "grad_norm": 0.7212990022902317, |
| "learning_rate": 5.962117556865489e-05, |
| "loss": 0.5149, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.0151202749140893, |
| "grad_norm": 0.5084983736290086, |
| "learning_rate": 5.955408084904962e-05, |
| "loss": 0.5248, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.01786941580756, |
| "grad_norm": 0.508857242926003, |
| "learning_rate": 5.948691375843187e-05, |
| "loss": 0.5309, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.020618556701031, |
| "grad_norm": 0.5715174569632097, |
| "learning_rate": 5.941967454539171e-05, |
| "loss": 0.5247, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.0233676975945016, |
| "grad_norm": 0.5565322020849539, |
| "learning_rate": 5.935236345878616e-05, |
| "loss": 0.5175, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.0261168384879724, |
| "grad_norm": 0.5745604042771598, |
| "learning_rate": 5.928498074773823e-05, |
| "loss": 0.517, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.028865979381443, |
| "grad_norm": 0.500350720005458, |
| "learning_rate": 5.921752666163598e-05, |
| "loss": 0.5215, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.031615120274914, |
| "grad_norm": 0.4067063681679997, |
| "learning_rate": 5.9150001450131716e-05, |
| "loss": 0.5212, |
| "step": 739 |
| }, |
| { |
| "epoch": 2.0343642611683848, |
| "grad_norm": 0.4770810449857095, |
| "learning_rate": 5.9082405363140916e-05, |
| "loss": 0.521, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.0371134020618555, |
| "grad_norm": 0.5487285643494075, |
| "learning_rate": 5.901473865084141e-05, |
| "loss": 0.5268, |
| "step": 741 |
| }, |
| { |
| "epoch": 2.0398625429553263, |
| "grad_norm": 0.4395320063115494, |
| "learning_rate": 5.894700156367238e-05, |
| "loss": 0.5243, |
| "step": 742 |
| }, |
| { |
| "epoch": 2.042611683848797, |
| "grad_norm": 0.29982074556969307, |
| "learning_rate": 5.887919435233352e-05, |
| "loss": 0.5298, |
| "step": 743 |
| }, |
| { |
| "epoch": 2.045360824742268, |
| "grad_norm": 0.41119709116602937, |
| "learning_rate": 5.881131726778402e-05, |
| "loss": 0.5145, |
| "step": 744 |
| }, |
| { |
| "epoch": 2.0481099656357387, |
| "grad_norm": 0.384320461752947, |
| "learning_rate": 5.8743370561241715e-05, |
| "loss": 0.5253, |
| "step": 745 |
| }, |
| { |
| "epoch": 2.0508591065292094, |
| "grad_norm": 0.3420792544611106, |
| "learning_rate": 5.867535448418203e-05, |
| "loss": 0.5183, |
| "step": 746 |
| }, |
| { |
| "epoch": 2.05360824742268, |
| "grad_norm": 0.36905823023095596, |
| "learning_rate": 5.8607269288337244e-05, |
| "loss": 0.5208, |
| "step": 747 |
| }, |
| { |
| "epoch": 2.056357388316151, |
| "grad_norm": 0.32511574569133067, |
| "learning_rate": 5.853911522569538e-05, |
| "loss": 0.5252, |
| "step": 748 |
| }, |
| { |
| "epoch": 2.059106529209622, |
| "grad_norm": 0.42000247500887966, |
| "learning_rate": 5.8470892548499384e-05, |
| "loss": 0.5182, |
| "step": 749 |
| }, |
| { |
| "epoch": 2.0618556701030926, |
| "grad_norm": 0.5749785701633202, |
| "learning_rate": 5.840260150924609e-05, |
| "loss": 0.5172, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.064604810996564, |
| "grad_norm": 0.6483835292108968, |
| "learning_rate": 5.833424236068541e-05, |
| "loss": 0.5336, |
| "step": 751 |
| }, |
| { |
| "epoch": 2.0673539518900346, |
| "grad_norm": 0.6231601301759575, |
| "learning_rate": 5.8265815355819284e-05, |
| "loss": 0.527, |
| "step": 752 |
| }, |
| { |
| "epoch": 2.0701030927835053, |
| "grad_norm": 0.6400482537396563, |
| "learning_rate": 5.819732074790084e-05, |
| "loss": 0.5187, |
| "step": 753 |
| }, |
| { |
| "epoch": 2.072852233676976, |
| "grad_norm": 0.6112696586750945, |
| "learning_rate": 5.812875879043336e-05, |
| "loss": 0.5198, |
| "step": 754 |
| }, |
| { |
| "epoch": 2.075601374570447, |
| "grad_norm": 0.5115706970358354, |
| "learning_rate": 5.8060129737169416e-05, |
| "loss": 0.5279, |
| "step": 755 |
| }, |
| { |
| "epoch": 2.0783505154639177, |
| "grad_norm": 0.4058486284297744, |
| "learning_rate": 5.799143384210992e-05, |
| "loss": 0.5268, |
| "step": 756 |
| }, |
| { |
| "epoch": 2.0810996563573885, |
| "grad_norm": 0.32047749300200185, |
| "learning_rate": 5.7922671359503145e-05, |
| "loss": 0.5252, |
| "step": 757 |
| }, |
| { |
| "epoch": 2.0838487972508593, |
| "grad_norm": 0.37004296113308727, |
| "learning_rate": 5.785384254384382e-05, |
| "loss": 0.5239, |
| "step": 758 |
| }, |
| { |
| "epoch": 2.08659793814433, |
| "grad_norm": 0.4458786395281163, |
| "learning_rate": 5.7784947649872176e-05, |
| "loss": 0.5294, |
| "step": 759 |
| }, |
| { |
| "epoch": 2.089347079037801, |
| "grad_norm": 0.4272640389491108, |
| "learning_rate": 5.7715986932573e-05, |
| "loss": 0.5249, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.0920962199312716, |
| "grad_norm": 0.3526739356483497, |
| "learning_rate": 5.7646960647174706e-05, |
| "loss": 0.5184, |
| "step": 761 |
| }, |
| { |
| "epoch": 2.0948453608247424, |
| "grad_norm": 0.27188265621194263, |
| "learning_rate": 5.757786904914838e-05, |
| "loss": 0.5223, |
| "step": 762 |
| }, |
| { |
| "epoch": 2.097594501718213, |
| "grad_norm": 0.3457734998315455, |
| "learning_rate": 5.750871239420681e-05, |
| "loss": 0.5227, |
| "step": 763 |
| }, |
| { |
| "epoch": 2.100343642611684, |
| "grad_norm": 0.4222137704724379, |
| "learning_rate": 5.7439490938303604e-05, |
| "loss": 0.5273, |
| "step": 764 |
| }, |
| { |
| "epoch": 2.1030927835051547, |
| "grad_norm": 0.45756362105195786, |
| "learning_rate": 5.7370204937632167e-05, |
| "loss": 0.5222, |
| "step": 765 |
| }, |
| { |
| "epoch": 2.1058419243986255, |
| "grad_norm": 0.3435489204211452, |
| "learning_rate": 5.7300854648624824e-05, |
| "loss": 0.5102, |
| "step": 766 |
| }, |
| { |
| "epoch": 2.1085910652920963, |
| "grad_norm": 0.24611635871938473, |
| "learning_rate": 5.723144032795179e-05, |
| "loss": 0.5221, |
| "step": 767 |
| }, |
| { |
| "epoch": 2.111340206185567, |
| "grad_norm": 0.3013065227913591, |
| "learning_rate": 5.7161962232520304e-05, |
| "loss": 0.532, |
| "step": 768 |
| }, |
| { |
| "epoch": 2.114089347079038, |
| "grad_norm": 0.3470862495086026, |
| "learning_rate": 5.7092420619473626e-05, |
| "loss": 0.5258, |
| "step": 769 |
| }, |
| { |
| "epoch": 2.1168384879725086, |
| "grad_norm": 0.38943011157017127, |
| "learning_rate": 5.70228157461901e-05, |
| "loss": 0.5264, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.1195876288659794, |
| "grad_norm": 0.39287644862912424, |
| "learning_rate": 5.69531478702822e-05, |
| "loss": 0.5114, |
| "step": 771 |
| }, |
| { |
| "epoch": 2.12233676975945, |
| "grad_norm": 0.2916898291723961, |
| "learning_rate": 5.688341724959557e-05, |
| "loss": 0.5275, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.125085910652921, |
| "grad_norm": 0.22962256743492102, |
| "learning_rate": 5.681362414220811e-05, |
| "loss": 0.5214, |
| "step": 773 |
| }, |
| { |
| "epoch": 2.1278350515463917, |
| "grad_norm": 0.27944270631912393, |
| "learning_rate": 5.674376880642893e-05, |
| "loss": 0.5264, |
| "step": 774 |
| }, |
| { |
| "epoch": 2.1305841924398625, |
| "grad_norm": 0.29044286341885533, |
| "learning_rate": 5.66738515007975e-05, |
| "loss": 0.5148, |
| "step": 775 |
| }, |
| { |
| "epoch": 2.1333333333333333, |
| "grad_norm": 0.28701568805373207, |
| "learning_rate": 5.6603872484082614e-05, |
| "loss": 0.5218, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.136082474226804, |
| "grad_norm": 0.33569332451853506, |
| "learning_rate": 5.653383201528151e-05, |
| "loss": 0.5244, |
| "step": 777 |
| }, |
| { |
| "epoch": 2.138831615120275, |
| "grad_norm": 0.2997007948052843, |
| "learning_rate": 5.6463730353618795e-05, |
| "loss": 0.5207, |
| "step": 778 |
| }, |
| { |
| "epoch": 2.1415807560137456, |
| "grad_norm": 0.21237174047717156, |
| "learning_rate": 5.6393567758545616e-05, |
| "loss": 0.5103, |
| "step": 779 |
| }, |
| { |
| "epoch": 2.1443298969072164, |
| "grad_norm": 0.2360600600158792, |
| "learning_rate": 5.63233444897386e-05, |
| "loss": 0.5293, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.147079037800687, |
| "grad_norm": 0.23546258348715238, |
| "learning_rate": 5.625306080709895e-05, |
| "loss": 0.526, |
| "step": 781 |
| }, |
| { |
| "epoch": 2.149828178694158, |
| "grad_norm": 0.23742640059602604, |
| "learning_rate": 5.618271697075147e-05, |
| "loss": 0.5207, |
| "step": 782 |
| }, |
| { |
| "epoch": 2.1525773195876288, |
| "grad_norm": 0.2697802862240549, |
| "learning_rate": 5.611231324104358e-05, |
| "loss": 0.5185, |
| "step": 783 |
| }, |
| { |
| "epoch": 2.1553264604810995, |
| "grad_norm": 0.33962286687598364, |
| "learning_rate": 5.604184987854437e-05, |
| "loss": 0.5324, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.1580756013745703, |
| "grad_norm": 0.3496574863191211, |
| "learning_rate": 5.597132714404366e-05, |
| "loss": 0.5149, |
| "step": 785 |
| }, |
| { |
| "epoch": 2.160824742268041, |
| "grad_norm": 0.3002103817256998, |
| "learning_rate": 5.590074529855099e-05, |
| "loss": 0.5271, |
| "step": 786 |
| }, |
| { |
| "epoch": 2.163573883161512, |
| "grad_norm": 0.33431676858651316, |
| "learning_rate": 5.583010460329465e-05, |
| "loss": 0.5304, |
| "step": 787 |
| }, |
| { |
| "epoch": 2.1663230240549827, |
| "grad_norm": 0.3298687279509311, |
| "learning_rate": 5.5759405319720806e-05, |
| "loss": 0.5167, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.1690721649484535, |
| "grad_norm": 0.21499388088920707, |
| "learning_rate": 5.568864770949237e-05, |
| "loss": 0.536, |
| "step": 789 |
| }, |
| { |
| "epoch": 2.1718213058419242, |
| "grad_norm": 0.26397548444272334, |
| "learning_rate": 5.5617832034488236e-05, |
| "loss": 0.5251, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.174570446735395, |
| "grad_norm": 0.40340132985540694, |
| "learning_rate": 5.554695855680209e-05, |
| "loss": 0.5283, |
| "step": 791 |
| }, |
| { |
| "epoch": 2.177319587628866, |
| "grad_norm": 0.4098335019644846, |
| "learning_rate": 5.547602753874163e-05, |
| "loss": 0.5238, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.1800687285223366, |
| "grad_norm": 0.3531497244525965, |
| "learning_rate": 5.540503924282746e-05, |
| "loss": 0.5231, |
| "step": 793 |
| }, |
| { |
| "epoch": 2.1828178694158074, |
| "grad_norm": 0.38987792040546504, |
| "learning_rate": 5.5333993931792224e-05, |
| "loss": 0.5366, |
| "step": 794 |
| }, |
| { |
| "epoch": 2.1855670103092786, |
| "grad_norm": 0.2647915747419579, |
| "learning_rate": 5.526289186857953e-05, |
| "loss": 0.5348, |
| "step": 795 |
| }, |
| { |
| "epoch": 2.1883161512027494, |
| "grad_norm": 0.26788341550791417, |
| "learning_rate": 5.5191733316343074e-05, |
| "loss": 0.5253, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.19106529209622, |
| "grad_norm": 0.31976809906665954, |
| "learning_rate": 5.5120518538445595e-05, |
| "loss": 0.5225, |
| "step": 797 |
| }, |
| { |
| "epoch": 2.193814432989691, |
| "grad_norm": 0.27229292264708094, |
| "learning_rate": 5.504924779845794e-05, |
| "loss": 0.5169, |
| "step": 798 |
| }, |
| { |
| "epoch": 2.1965635738831617, |
| "grad_norm": 0.3204542340097227, |
| "learning_rate": 5.4977921360158096e-05, |
| "loss": 0.5256, |
| "step": 799 |
| }, |
| { |
| "epoch": 2.1993127147766325, |
| "grad_norm": 0.33648692429827426, |
| "learning_rate": 5.490653948753013e-05, |
| "loss": 0.523, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.2020618556701033, |
| "grad_norm": 0.25849474311106246, |
| "learning_rate": 5.483510244476332e-05, |
| "loss": 0.5345, |
| "step": 801 |
| }, |
| { |
| "epoch": 2.204810996563574, |
| "grad_norm": 0.2777163411097152, |
| "learning_rate": 5.476361049625115e-05, |
| "loss": 0.5217, |
| "step": 802 |
| }, |
| { |
| "epoch": 2.207560137457045, |
| "grad_norm": 0.259120862015438, |
| "learning_rate": 5.469206390659028e-05, |
| "loss": 0.5293, |
| "step": 803 |
| }, |
| { |
| "epoch": 2.2103092783505156, |
| "grad_norm": 0.25014178976390844, |
| "learning_rate": 5.46204629405796e-05, |
| "loss": 0.5269, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.2130584192439864, |
| "grad_norm": 0.223711086431831, |
| "learning_rate": 5.454880786321928e-05, |
| "loss": 0.5187, |
| "step": 805 |
| }, |
| { |
| "epoch": 2.215807560137457, |
| "grad_norm": 0.2438903572639829, |
| "learning_rate": 5.447709893970974e-05, |
| "loss": 0.5209, |
| "step": 806 |
| }, |
| { |
| "epoch": 2.218556701030928, |
| "grad_norm": 0.2545794798823825, |
| "learning_rate": 5.4405336435450684e-05, |
| "loss": 0.527, |
| "step": 807 |
| }, |
| { |
| "epoch": 2.2213058419243987, |
| "grad_norm": 0.18607426816746073, |
| "learning_rate": 5.4333520616040116e-05, |
| "loss": 0.5184, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.2240549828178695, |
| "grad_norm": 0.29001196046138, |
| "learning_rate": 5.42616517472734e-05, |
| "loss": 0.5195, |
| "step": 809 |
| }, |
| { |
| "epoch": 2.2268041237113403, |
| "grad_norm": 0.2664754443847999, |
| "learning_rate": 5.418973009514221e-05, |
| "loss": 0.5254, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.229553264604811, |
| "grad_norm": 0.2763244689974836, |
| "learning_rate": 5.411775592583358e-05, |
| "loss": 0.5225, |
| "step": 811 |
| }, |
| { |
| "epoch": 2.232302405498282, |
| "grad_norm": 0.2952802637754747, |
| "learning_rate": 5.4045729505728884e-05, |
| "loss": 0.5227, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.2350515463917526, |
| "grad_norm": 0.23994298354796337, |
| "learning_rate": 5.397365110140295e-05, |
| "loss": 0.5305, |
| "step": 813 |
| }, |
| { |
| "epoch": 2.2378006872852234, |
| "grad_norm": 0.22190622512563482, |
| "learning_rate": 5.390152097962295e-05, |
| "loss": 0.532, |
| "step": 814 |
| }, |
| { |
| "epoch": 2.240549828178694, |
| "grad_norm": 0.2958910982821988, |
| "learning_rate": 5.382933940734747e-05, |
| "loss": 0.5215, |
| "step": 815 |
| }, |
| { |
| "epoch": 2.243298969072165, |
| "grad_norm": 0.3582968358435383, |
| "learning_rate": 5.375710665172554e-05, |
| "loss": 0.5206, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.2460481099656358, |
| "grad_norm": 0.4048685164576461, |
| "learning_rate": 5.368482298009559e-05, |
| "loss": 0.5322, |
| "step": 817 |
| }, |
| { |
| "epoch": 2.2487972508591065, |
| "grad_norm": 0.35928125952347756, |
| "learning_rate": 5.3612488659984534e-05, |
| "loss": 0.5256, |
| "step": 818 |
| }, |
| { |
| "epoch": 2.2515463917525773, |
| "grad_norm": 0.27863313688997904, |
| "learning_rate": 5.3540103959106696e-05, |
| "loss": 0.5243, |
| "step": 819 |
| }, |
| { |
| "epoch": 2.254295532646048, |
| "grad_norm": 0.3504201948483768, |
| "learning_rate": 5.3467669145362914e-05, |
| "loss": 0.5369, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.257044673539519, |
| "grad_norm": 0.4374644885930723, |
| "learning_rate": 5.339518448683945e-05, |
| "loss": 0.5206, |
| "step": 821 |
| }, |
| { |
| "epoch": 2.2597938144329897, |
| "grad_norm": 0.2820104015984913, |
| "learning_rate": 5.332265025180707e-05, |
| "loss": 0.5201, |
| "step": 822 |
| }, |
| { |
| "epoch": 2.2625429553264604, |
| "grad_norm": 0.3051225307345294, |
| "learning_rate": 5.325006670872002e-05, |
| "loss": 0.5248, |
| "step": 823 |
| }, |
| { |
| "epoch": 2.265292096219931, |
| "grad_norm": 0.41761093729690574, |
| "learning_rate": 5.3177434126215034e-05, |
| "loss": 0.5378, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.268041237113402, |
| "grad_norm": 0.3487478544658144, |
| "learning_rate": 5.3104752773110345e-05, |
| "loss": 0.5245, |
| "step": 825 |
| }, |
| { |
| "epoch": 2.270790378006873, |
| "grad_norm": 0.32194879135312154, |
| "learning_rate": 5.303202291840471e-05, |
| "loss": 0.526, |
| "step": 826 |
| }, |
| { |
| "epoch": 2.2735395189003436, |
| "grad_norm": 0.27245901521747345, |
| "learning_rate": 5.295924483127635e-05, |
| "loss": 0.5184, |
| "step": 827 |
| }, |
| { |
| "epoch": 2.2762886597938143, |
| "grad_norm": 0.26804164140224673, |
| "learning_rate": 5.2886418781082066e-05, |
| "loss": 0.5201, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.279037800687285, |
| "grad_norm": 0.23960902170409884, |
| "learning_rate": 5.2813545037356086e-05, |
| "loss": 0.5256, |
| "step": 829 |
| }, |
| { |
| "epoch": 2.281786941580756, |
| "grad_norm": 0.23457140639322555, |
| "learning_rate": 5.2740623869809234e-05, |
| "loss": 0.5209, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.2845360824742267, |
| "grad_norm": 0.27297058783320943, |
| "learning_rate": 5.2667655548327796e-05, |
| "loss": 0.5315, |
| "step": 831 |
| }, |
| { |
| "epoch": 2.2872852233676975, |
| "grad_norm": 0.28200029368869456, |
| "learning_rate": 5.259464034297262e-05, |
| "loss": 0.527, |
| "step": 832 |
| }, |
| { |
| "epoch": 2.2900343642611682, |
| "grad_norm": 0.34366587435096785, |
| "learning_rate": 5.2521578523978046e-05, |
| "loss": 0.5166, |
| "step": 833 |
| }, |
| { |
| "epoch": 2.292783505154639, |
| "grad_norm": 0.33716783962672386, |
| "learning_rate": 5.2448470361750955e-05, |
| "loss": 0.5271, |
| "step": 834 |
| }, |
| { |
| "epoch": 2.29553264604811, |
| "grad_norm": 0.2883538098562466, |
| "learning_rate": 5.237531612686973e-05, |
| "loss": 0.5326, |
| "step": 835 |
| }, |
| { |
| "epoch": 2.2982817869415806, |
| "grad_norm": 0.23816842715139308, |
| "learning_rate": 5.2302116090083274e-05, |
| "loss": 0.521, |
| "step": 836 |
| }, |
| { |
| "epoch": 2.3010309278350514, |
| "grad_norm": 0.2676295491191559, |
| "learning_rate": 5.222887052231003e-05, |
| "loss": 0.5307, |
| "step": 837 |
| }, |
| { |
| "epoch": 2.303780068728522, |
| "grad_norm": 0.21899952831754288, |
| "learning_rate": 5.2155579694636916e-05, |
| "loss": 0.5254, |
| "step": 838 |
| }, |
| { |
| "epoch": 2.306529209621993, |
| "grad_norm": 0.2725930609511096, |
| "learning_rate": 5.208224387831839e-05, |
| "loss": 0.5243, |
| "step": 839 |
| }, |
| { |
| "epoch": 2.3092783505154637, |
| "grad_norm": 0.2670269822849005, |
| "learning_rate": 5.200886334477541e-05, |
| "loss": 0.5263, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.3120274914089345, |
| "grad_norm": 0.2896259602041522, |
| "learning_rate": 5.1935438365594424e-05, |
| "loss": 0.5212, |
| "step": 841 |
| }, |
| { |
| "epoch": 2.3147766323024053, |
| "grad_norm": 0.3040525797226736, |
| "learning_rate": 5.186196921252638e-05, |
| "loss": 0.5272, |
| "step": 842 |
| }, |
| { |
| "epoch": 2.317525773195876, |
| "grad_norm": 0.402527650035371, |
| "learning_rate": 5.178845615748573e-05, |
| "loss": 0.5317, |
| "step": 843 |
| }, |
| { |
| "epoch": 2.320274914089347, |
| "grad_norm": 0.28589840098643327, |
| "learning_rate": 5.1714899472549394e-05, |
| "loss": 0.5302, |
| "step": 844 |
| }, |
| { |
| "epoch": 2.323024054982818, |
| "grad_norm": 0.2746598297288049, |
| "learning_rate": 5.164129942995578e-05, |
| "loss": 0.5237, |
| "step": 845 |
| }, |
| { |
| "epoch": 2.325773195876289, |
| "grad_norm": 0.35895254123689857, |
| "learning_rate": 5.156765630210375e-05, |
| "loss": 0.5321, |
| "step": 846 |
| }, |
| { |
| "epoch": 2.3285223367697596, |
| "grad_norm": 0.3075206472125044, |
| "learning_rate": 5.1493970361551666e-05, |
| "loss": 0.5261, |
| "step": 847 |
| }, |
| { |
| "epoch": 2.3312714776632304, |
| "grad_norm": 0.2631726158525417, |
| "learning_rate": 5.14202418810163e-05, |
| "loss": 0.5295, |
| "step": 848 |
| }, |
| { |
| "epoch": 2.334020618556701, |
| "grad_norm": 0.26253313412596246, |
| "learning_rate": 5.134647113337187e-05, |
| "loss": 0.5278, |
| "step": 849 |
| }, |
| { |
| "epoch": 2.336769759450172, |
| "grad_norm": 0.27292981975275177, |
| "learning_rate": 5.127265839164906e-05, |
| "loss": 0.5174, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.3395189003436427, |
| "grad_norm": 0.29830820148659387, |
| "learning_rate": 5.119880392903396e-05, |
| "loss": 0.5235, |
| "step": 851 |
| }, |
| { |
| "epoch": 2.3422680412371135, |
| "grad_norm": 0.319178636597951, |
| "learning_rate": 5.112490801886706e-05, |
| "loss": 0.5249, |
| "step": 852 |
| }, |
| { |
| "epoch": 2.3450171821305843, |
| "grad_norm": 0.39509509521849606, |
| "learning_rate": 5.105097093464225e-05, |
| "loss": 0.53, |
| "step": 853 |
| }, |
| { |
| "epoch": 2.347766323024055, |
| "grad_norm": 0.43481008500350815, |
| "learning_rate": 5.0976992950005836e-05, |
| "loss": 0.5222, |
| "step": 854 |
| }, |
| { |
| "epoch": 2.350515463917526, |
| "grad_norm": 0.35580076547200273, |
| "learning_rate": 5.090297433875549e-05, |
| "loss": 0.5341, |
| "step": 855 |
| }, |
| { |
| "epoch": 2.3532646048109966, |
| "grad_norm": 0.27387444348548706, |
| "learning_rate": 5.082891537483921e-05, |
| "loss": 0.527, |
| "step": 856 |
| }, |
| { |
| "epoch": 2.3560137457044674, |
| "grad_norm": 0.28242295705533316, |
| "learning_rate": 5.0754816332354384e-05, |
| "loss": 0.5313, |
| "step": 857 |
| }, |
| { |
| "epoch": 2.358762886597938, |
| "grad_norm": 0.23633783144067932, |
| "learning_rate": 5.0680677485546724e-05, |
| "loss": 0.5256, |
| "step": 858 |
| }, |
| { |
| "epoch": 2.361512027491409, |
| "grad_norm": 0.20611827366274735, |
| "learning_rate": 5.060649910880926e-05, |
| "loss": 0.5246, |
| "step": 859 |
| }, |
| { |
| "epoch": 2.3642611683848798, |
| "grad_norm": 0.24527029533975683, |
| "learning_rate": 5.0532281476681295e-05, |
| "loss": 0.526, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.3670103092783505, |
| "grad_norm": 0.2988866489058128, |
| "learning_rate": 5.0458024863847455e-05, |
| "loss": 0.5318, |
| "step": 861 |
| }, |
| { |
| "epoch": 2.3697594501718213, |
| "grad_norm": 0.3278941979649635, |
| "learning_rate": 5.038372954513664e-05, |
| "loss": 0.5208, |
| "step": 862 |
| }, |
| { |
| "epoch": 2.372508591065292, |
| "grad_norm": 0.3590976948020781, |
| "learning_rate": 5.030939579552098e-05, |
| "loss": 0.5288, |
| "step": 863 |
| }, |
| { |
| "epoch": 2.375257731958763, |
| "grad_norm": 0.3257482610923462, |
| "learning_rate": 5.0235023890114844e-05, |
| "loss": 0.5342, |
| "step": 864 |
| }, |
| { |
| "epoch": 2.3780068728522337, |
| "grad_norm": 0.26418250148170586, |
| "learning_rate": 5.016061410417384e-05, |
| "loss": 0.534, |
| "step": 865 |
| }, |
| { |
| "epoch": 2.3807560137457044, |
| "grad_norm": 0.25433731862506237, |
| "learning_rate": 5.008616671309374e-05, |
| "loss": 0.5349, |
| "step": 866 |
| }, |
| { |
| "epoch": 2.3835051546391752, |
| "grad_norm": 0.3098360496587967, |
| "learning_rate": 5.0011681992409526e-05, |
| "loss": 0.536, |
| "step": 867 |
| }, |
| { |
| "epoch": 2.386254295532646, |
| "grad_norm": 0.48700691274762764, |
| "learning_rate": 4.993716021779431e-05, |
| "loss": 0.5281, |
| "step": 868 |
| }, |
| { |
| "epoch": 2.389003436426117, |
| "grad_norm": 0.4325007530550783, |
| "learning_rate": 4.986260166505838e-05, |
| "loss": 0.5287, |
| "step": 869 |
| }, |
| { |
| "epoch": 2.3917525773195876, |
| "grad_norm": 0.40355654494603005, |
| "learning_rate": 4.978800661014811e-05, |
| "loss": 0.5211, |
| "step": 870 |
| }, |
| { |
| "epoch": 2.3945017182130583, |
| "grad_norm": 0.26544768260770013, |
| "learning_rate": 4.9713375329145e-05, |
| "loss": 0.5195, |
| "step": 871 |
| }, |
| { |
| "epoch": 2.397250859106529, |
| "grad_norm": 0.2791642651045501, |
| "learning_rate": 4.963870809826458e-05, |
| "loss": 0.5265, |
| "step": 872 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.3408862865130159, |
| "learning_rate": 4.9564005193855486e-05, |
| "loss": 0.5391, |
| "step": 873 |
| }, |
| { |
| "epoch": 2.4027491408934707, |
| "grad_norm": 0.3331718549433266, |
| "learning_rate": 4.9489266892398346e-05, |
| "loss": 0.5216, |
| "step": 874 |
| }, |
| { |
| "epoch": 2.4054982817869415, |
| "grad_norm": 0.33450428391908577, |
| "learning_rate": 4.9414493470504834e-05, |
| "loss": 0.5274, |
| "step": 875 |
| }, |
| { |
| "epoch": 2.4082474226804123, |
| "grad_norm": 0.324321171570019, |
| "learning_rate": 4.933968520491654e-05, |
| "loss": 0.5373, |
| "step": 876 |
| }, |
| { |
| "epoch": 2.410996563573883, |
| "grad_norm": 0.28829591882553246, |
| "learning_rate": 4.9264842372504095e-05, |
| "loss": 0.5286, |
| "step": 877 |
| }, |
| { |
| "epoch": 2.413745704467354, |
| "grad_norm": 0.28746004519536006, |
| "learning_rate": 4.9189965250265994e-05, |
| "loss": 0.5199, |
| "step": 878 |
| }, |
| { |
| "epoch": 2.4164948453608246, |
| "grad_norm": 0.31445005880135923, |
| "learning_rate": 4.911505411532769e-05, |
| "loss": 0.5278, |
| "step": 879 |
| }, |
| { |
| "epoch": 2.4192439862542954, |
| "grad_norm": 0.47892737901970533, |
| "learning_rate": 4.9040109244940505e-05, |
| "loss": 0.5234, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.421993127147766, |
| "grad_norm": 0.45215275967709107, |
| "learning_rate": 4.896513091648058e-05, |
| "loss": 0.535, |
| "step": 881 |
| }, |
| { |
| "epoch": 2.424742268041237, |
| "grad_norm": 0.34138582396119665, |
| "learning_rate": 4.889011940744796e-05, |
| "loss": 0.5164, |
| "step": 882 |
| }, |
| { |
| "epoch": 2.4274914089347077, |
| "grad_norm": 0.2682120490670279, |
| "learning_rate": 4.881507499546545e-05, |
| "loss": 0.5342, |
| "step": 883 |
| }, |
| { |
| "epoch": 2.430240549828179, |
| "grad_norm": 0.2692048784370494, |
| "learning_rate": 4.873999795827761e-05, |
| "loss": 0.5344, |
| "step": 884 |
| }, |
| { |
| "epoch": 2.4329896907216497, |
| "grad_norm": 0.3048032911190028, |
| "learning_rate": 4.866488857374979e-05, |
| "loss": 0.5131, |
| "step": 885 |
| }, |
| { |
| "epoch": 2.4357388316151205, |
| "grad_norm": 0.43798907642210033, |
| "learning_rate": 4.858974711986704e-05, |
| "loss": 0.5277, |
| "step": 886 |
| }, |
| { |
| "epoch": 2.4384879725085913, |
| "grad_norm": 0.4537544665998809, |
| "learning_rate": 4.851457387473312e-05, |
| "loss": 0.5272, |
| "step": 887 |
| }, |
| { |
| "epoch": 2.441237113402062, |
| "grad_norm": 0.275619974234131, |
| "learning_rate": 4.843936911656941e-05, |
| "loss": 0.5229, |
| "step": 888 |
| }, |
| { |
| "epoch": 2.443986254295533, |
| "grad_norm": 0.24697116802006067, |
| "learning_rate": 4.836413312371394e-05, |
| "loss": 0.5237, |
| "step": 889 |
| }, |
| { |
| "epoch": 2.4467353951890036, |
| "grad_norm": 0.2889992360936922, |
| "learning_rate": 4.828886617462039e-05, |
| "loss": 0.5214, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.4494845360824744, |
| "grad_norm": 0.34236892659433343, |
| "learning_rate": 4.821356854785695e-05, |
| "loss": 0.5204, |
| "step": 891 |
| }, |
| { |
| "epoch": 2.452233676975945, |
| "grad_norm": 0.26986931392665453, |
| "learning_rate": 4.8138240522105365e-05, |
| "loss": 0.5285, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.454982817869416, |
| "grad_norm": 0.2386727777106889, |
| "learning_rate": 4.806288237615989e-05, |
| "loss": 0.5258, |
| "step": 893 |
| }, |
| { |
| "epoch": 2.4577319587628867, |
| "grad_norm": 0.2400107390785469, |
| "learning_rate": 4.7987494388926275e-05, |
| "loss": 0.523, |
| "step": 894 |
| }, |
| { |
| "epoch": 2.4604810996563575, |
| "grad_norm": 0.30720815238105814, |
| "learning_rate": 4.7912076839420695e-05, |
| "loss": 0.5232, |
| "step": 895 |
| }, |
| { |
| "epoch": 2.4632302405498283, |
| "grad_norm": 0.25563044453886097, |
| "learning_rate": 4.7836630006768746e-05, |
| "loss": 0.5289, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.465979381443299, |
| "grad_norm": 0.2618995944120738, |
| "learning_rate": 4.77611541702044e-05, |
| "loss": 0.5256, |
| "step": 897 |
| }, |
| { |
| "epoch": 2.46872852233677, |
| "grad_norm": 0.2872261399026541, |
| "learning_rate": 4.768564960906897e-05, |
| "loss": 0.527, |
| "step": 898 |
| }, |
| { |
| "epoch": 2.4714776632302407, |
| "grad_norm": 0.22134537204990035, |
| "learning_rate": 4.7610116602810096e-05, |
| "loss": 0.5343, |
| "step": 899 |
| }, |
| { |
| "epoch": 2.4742268041237114, |
| "grad_norm": 0.2565389994931295, |
| "learning_rate": 4.753455543098067e-05, |
| "loss": 0.5375, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.476975945017182, |
| "grad_norm": 0.3740627121098048, |
| "learning_rate": 4.745896637323785e-05, |
| "loss": 0.5227, |
| "step": 901 |
| }, |
| { |
| "epoch": 2.479725085910653, |
| "grad_norm": 0.3714016967888814, |
| "learning_rate": 4.7383349709341994e-05, |
| "loss": 0.5216, |
| "step": 902 |
| }, |
| { |
| "epoch": 2.4824742268041238, |
| "grad_norm": 0.2863979507280106, |
| "learning_rate": 4.730770571915562e-05, |
| "loss": 0.5289, |
| "step": 903 |
| }, |
| { |
| "epoch": 2.4852233676975946, |
| "grad_norm": 0.2639569172110602, |
| "learning_rate": 4.7232034682642417e-05, |
| "loss": 0.5207, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.4879725085910653, |
| "grad_norm": 0.26506986793397436, |
| "learning_rate": 4.715633687986613e-05, |
| "loss": 0.5276, |
| "step": 905 |
| }, |
| { |
| "epoch": 2.490721649484536, |
| "grad_norm": 0.3034302978268054, |
| "learning_rate": 4.7080612590989596e-05, |
| "loss": 0.5185, |
| "step": 906 |
| }, |
| { |
| "epoch": 2.493470790378007, |
| "grad_norm": 0.3242741496552742, |
| "learning_rate": 4.7004862096273674e-05, |
| "loss": 0.5292, |
| "step": 907 |
| }, |
| { |
| "epoch": 2.4962199312714777, |
| "grad_norm": 0.25198934191589384, |
| "learning_rate": 4.692908567607621e-05, |
| "loss": 0.5209, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.4989690721649485, |
| "grad_norm": 0.2851823027369076, |
| "learning_rate": 4.6853283610851004e-05, |
| "loss": 0.5332, |
| "step": 909 |
| }, |
| { |
| "epoch": 2.5017182130584192, |
| "grad_norm": 0.2250586051327087, |
| "learning_rate": 4.677745618114674e-05, |
| "loss": 0.5326, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.50446735395189, |
| "grad_norm": 0.2137916864449506, |
| "learning_rate": 4.670160366760606e-05, |
| "loss": 0.5214, |
| "step": 911 |
| }, |
| { |
| "epoch": 2.507216494845361, |
| "grad_norm": 0.23692573516235416, |
| "learning_rate": 4.6625726350964355e-05, |
| "loss": 0.5209, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.5099656357388316, |
| "grad_norm": 0.2503089565651227, |
| "learning_rate": 4.654982451204885e-05, |
| "loss": 0.5228, |
| "step": 913 |
| }, |
| { |
| "epoch": 2.5127147766323024, |
| "grad_norm": 0.187246659026728, |
| "learning_rate": 4.6473898431777535e-05, |
| "loss": 0.5279, |
| "step": 914 |
| }, |
| { |
| "epoch": 2.515463917525773, |
| "grad_norm": 0.22968241737897607, |
| "learning_rate": 4.6397948391158104e-05, |
| "loss": 0.5229, |
| "step": 915 |
| }, |
| { |
| "epoch": 2.518213058419244, |
| "grad_norm": 0.34560044485812286, |
| "learning_rate": 4.632197467128695e-05, |
| "loss": 0.521, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.5209621993127147, |
| "grad_norm": 0.36369842042382355, |
| "learning_rate": 4.624597755334807e-05, |
| "loss": 0.5222, |
| "step": 917 |
| }, |
| { |
| "epoch": 2.5237113402061855, |
| "grad_norm": 0.3273459802424622, |
| "learning_rate": 4.6169957318612096e-05, |
| "loss": 0.5269, |
| "step": 918 |
| }, |
| { |
| "epoch": 2.5264604810996563, |
| "grad_norm": 0.27046737413984473, |
| "learning_rate": 4.609391424843519e-05, |
| "loss": 0.5207, |
| "step": 919 |
| }, |
| { |
| "epoch": 2.529209621993127, |
| "grad_norm": 0.23973509685168032, |
| "learning_rate": 4.601784862425807e-05, |
| "loss": 0.5305, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.531958762886598, |
| "grad_norm": 0.207584971564237, |
| "learning_rate": 4.594176072760485e-05, |
| "loss": 0.5391, |
| "step": 921 |
| }, |
| { |
| "epoch": 2.5347079037800686, |
| "grad_norm": 0.26389513176009577, |
| "learning_rate": 4.586565084008217e-05, |
| "loss": 0.5257, |
| "step": 922 |
| }, |
| { |
| "epoch": 2.5374570446735394, |
| "grad_norm": 0.27492964134545905, |
| "learning_rate": 4.5789519243377975e-05, |
| "loss": 0.5198, |
| "step": 923 |
| }, |
| { |
| "epoch": 2.54020618556701, |
| "grad_norm": 0.2550669423258001, |
| "learning_rate": 4.571336621926065e-05, |
| "loss": 0.5161, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.542955326460481, |
| "grad_norm": 0.2434323842617188, |
| "learning_rate": 4.563719204957776e-05, |
| "loss": 0.5266, |
| "step": 925 |
| }, |
| { |
| "epoch": 2.5457044673539517, |
| "grad_norm": 0.2381537603353079, |
| "learning_rate": 4.5560997016255265e-05, |
| "loss": 0.5306, |
| "step": 926 |
| }, |
| { |
| "epoch": 2.5484536082474225, |
| "grad_norm": 0.25877756360414456, |
| "learning_rate": 4.548478140129624e-05, |
| "loss": 0.5279, |
| "step": 927 |
| }, |
| { |
| "epoch": 2.5512027491408933, |
| "grad_norm": 0.23404565240810715, |
| "learning_rate": 4.5408545486779996e-05, |
| "loss": 0.5325, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.553951890034364, |
| "grad_norm": 0.20244068111276767, |
| "learning_rate": 4.533228955486094e-05, |
| "loss": 0.5245, |
| "step": 929 |
| }, |
| { |
| "epoch": 2.556701030927835, |
| "grad_norm": 0.20414770829038065, |
| "learning_rate": 4.525601388776758e-05, |
| "loss": 0.5193, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.5594501718213056, |
| "grad_norm": 0.22610091916102834, |
| "learning_rate": 4.517971876780147e-05, |
| "loss": 0.5335, |
| "step": 931 |
| }, |
| { |
| "epoch": 2.5621993127147764, |
| "grad_norm": 0.2030707662947604, |
| "learning_rate": 4.5103404477336144e-05, |
| "loss": 0.5247, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.564948453608247, |
| "grad_norm": 0.19565686931462759, |
| "learning_rate": 4.502707129881609e-05, |
| "loss": 0.5223, |
| "step": 933 |
| }, |
| { |
| "epoch": 2.567697594501718, |
| "grad_norm": 0.22241975043349305, |
| "learning_rate": 4.495071951475572e-05, |
| "loss": 0.5289, |
| "step": 934 |
| }, |
| { |
| "epoch": 2.5704467353951888, |
| "grad_norm": 0.25405146475942325, |
| "learning_rate": 4.487434940773828e-05, |
| "loss": 0.5216, |
| "step": 935 |
| }, |
| { |
| "epoch": 2.5731958762886595, |
| "grad_norm": 0.24090685844792697, |
| "learning_rate": 4.479796126041487e-05, |
| "loss": 0.5347, |
| "step": 936 |
| }, |
| { |
| "epoch": 2.5759450171821303, |
| "grad_norm": 0.20268315657201996, |
| "learning_rate": 4.472155535550331e-05, |
| "loss": 0.5174, |
| "step": 937 |
| }, |
| { |
| "epoch": 2.5786941580756015, |
| "grad_norm": 0.2164104030727069, |
| "learning_rate": 4.464513197578717e-05, |
| "loss": 0.5306, |
| "step": 938 |
| }, |
| { |
| "epoch": 2.5814432989690723, |
| "grad_norm": 0.20675687136906504, |
| "learning_rate": 4.45686914041147e-05, |
| "loss": 0.5217, |
| "step": 939 |
| }, |
| { |
| "epoch": 2.584192439862543, |
| "grad_norm": 0.2495476073768104, |
| "learning_rate": 4.449223392339776e-05, |
| "loss": 0.5233, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.586941580756014, |
| "grad_norm": 0.2188055318879783, |
| "learning_rate": 4.44157598166108e-05, |
| "loss": 0.536, |
| "step": 941 |
| }, |
| { |
| "epoch": 2.5896907216494847, |
| "grad_norm": 0.19661959776334886, |
| "learning_rate": 4.43392693667898e-05, |
| "loss": 0.5167, |
| "step": 942 |
| }, |
| { |
| "epoch": 2.5924398625429554, |
| "grad_norm": 0.21281895098022574, |
| "learning_rate": 4.426276285703125e-05, |
| "loss": 0.5241, |
| "step": 943 |
| }, |
| { |
| "epoch": 2.5951890034364262, |
| "grad_norm": 0.24612261354981668, |
| "learning_rate": 4.418624057049106e-05, |
| "loss": 0.5288, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.597938144329897, |
| "grad_norm": 0.26256604053792143, |
| "learning_rate": 4.410970279038351e-05, |
| "loss": 0.528, |
| "step": 945 |
| }, |
| { |
| "epoch": 2.600687285223368, |
| "grad_norm": 0.2146570261022671, |
| "learning_rate": 4.4033149799980265e-05, |
| "loss": 0.5336, |
| "step": 946 |
| }, |
| { |
| "epoch": 2.6034364261168386, |
| "grad_norm": 0.20864992098912383, |
| "learning_rate": 4.395658188260924e-05, |
| "loss": 0.5289, |
| "step": 947 |
| }, |
| { |
| "epoch": 2.6061855670103093, |
| "grad_norm": 0.23475812934644366, |
| "learning_rate": 4.3879999321653664e-05, |
| "loss": 0.5294, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.60893470790378, |
| "grad_norm": 0.21552160156786482, |
| "learning_rate": 4.380340240055087e-05, |
| "loss": 0.5225, |
| "step": 949 |
| }, |
| { |
| "epoch": 2.611683848797251, |
| "grad_norm": 0.21451249484405446, |
| "learning_rate": 4.3726791402791405e-05, |
| "loss": 0.5309, |
| "step": 950 |
| }, |
| { |
| "epoch": 2.6144329896907217, |
| "grad_norm": 0.2228499608780446, |
| "learning_rate": 4.3650166611917904e-05, |
| "loss": 0.5218, |
| "step": 951 |
| }, |
| { |
| "epoch": 2.6171821305841925, |
| "grad_norm": 0.19761673711898892, |
| "learning_rate": 4.3573528311524056e-05, |
| "loss": 0.5263, |
| "step": 952 |
| }, |
| { |
| "epoch": 2.6199312714776632, |
| "grad_norm": 0.2717760446302159, |
| "learning_rate": 4.34968767852535e-05, |
| "loss": 0.5192, |
| "step": 953 |
| }, |
| { |
| "epoch": 2.622680412371134, |
| "grad_norm": 0.2307774736780584, |
| "learning_rate": 4.3420212316798895e-05, |
| "loss": 0.5224, |
| "step": 954 |
| }, |
| { |
| "epoch": 2.625429553264605, |
| "grad_norm": 0.26499404963083745, |
| "learning_rate": 4.334353518990075e-05, |
| "loss": 0.5284, |
| "step": 955 |
| }, |
| { |
| "epoch": 2.6281786941580756, |
| "grad_norm": 0.33949301525073333, |
| "learning_rate": 4.326684568834647e-05, |
| "loss": 0.5215, |
| "step": 956 |
| }, |
| { |
| "epoch": 2.6309278350515464, |
| "grad_norm": 0.33940964847094485, |
| "learning_rate": 4.31901440959692e-05, |
| "loss": 0.527, |
| "step": 957 |
| }, |
| { |
| "epoch": 2.633676975945017, |
| "grad_norm": 0.3034703314779094, |
| "learning_rate": 4.311343069664688e-05, |
| "loss": 0.5315, |
| "step": 958 |
| }, |
| { |
| "epoch": 2.636426116838488, |
| "grad_norm": 0.3285064736514596, |
| "learning_rate": 4.303670577430111e-05, |
| "loss": 0.5328, |
| "step": 959 |
| }, |
| { |
| "epoch": 2.6391752577319587, |
| "grad_norm": 0.3453164976173602, |
| "learning_rate": 4.295996961289619e-05, |
| "loss": 0.5323, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.6419243986254295, |
| "grad_norm": 0.38444181745132805, |
| "learning_rate": 4.288322249643799e-05, |
| "loss": 0.5288, |
| "step": 961 |
| }, |
| { |
| "epoch": 2.6446735395189003, |
| "grad_norm": 0.2686311128572819, |
| "learning_rate": 4.2806464708972905e-05, |
| "loss": 0.525, |
| "step": 962 |
| }, |
| { |
| "epoch": 2.647422680412371, |
| "grad_norm": 0.3232991261053769, |
| "learning_rate": 4.272969653458685e-05, |
| "loss": 0.5278, |
| "step": 963 |
| }, |
| { |
| "epoch": 2.650171821305842, |
| "grad_norm": 0.32877598170142874, |
| "learning_rate": 4.265291825740416e-05, |
| "loss": 0.5176, |
| "step": 964 |
| }, |
| { |
| "epoch": 2.6529209621993126, |
| "grad_norm": 0.30844002727607134, |
| "learning_rate": 4.257613016158661e-05, |
| "loss": 0.5326, |
| "step": 965 |
| }, |
| { |
| "epoch": 2.6556701030927834, |
| "grad_norm": 0.2255639863284666, |
| "learning_rate": 4.249933253133224e-05, |
| "loss": 0.5266, |
| "step": 966 |
| }, |
| { |
| "epoch": 2.658419243986254, |
| "grad_norm": 0.25200678515207303, |
| "learning_rate": 4.2422525650874446e-05, |
| "loss": 0.5292, |
| "step": 967 |
| }, |
| { |
| "epoch": 2.661168384879725, |
| "grad_norm": 0.22993658765349623, |
| "learning_rate": 4.234570980448081e-05, |
| "loss": 0.5251, |
| "step": 968 |
| }, |
| { |
| "epoch": 2.6639175257731957, |
| "grad_norm": 0.2642010038722434, |
| "learning_rate": 4.226888527645215e-05, |
| "loss": 0.529, |
| "step": 969 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.28025388603567647, |
| "learning_rate": 4.219205235112135e-05, |
| "loss": 0.523, |
| "step": 970 |
| }, |
| { |
| "epoch": 2.6694158075601373, |
| "grad_norm": 0.24286835186859662, |
| "learning_rate": 4.2115211312852435e-05, |
| "loss": 0.5241, |
| "step": 971 |
| }, |
| { |
| "epoch": 2.6721649484536085, |
| "grad_norm": 0.260911275304071, |
| "learning_rate": 4.203836244603941e-05, |
| "loss": 0.5251, |
| "step": 972 |
| }, |
| { |
| "epoch": 2.6749140893470793, |
| "grad_norm": 0.23336974876081842, |
| "learning_rate": 4.1961506035105285e-05, |
| "loss": 0.5337, |
| "step": 973 |
| }, |
| { |
| "epoch": 2.67766323024055, |
| "grad_norm": 0.19717415494962492, |
| "learning_rate": 4.188464236450098e-05, |
| "loss": 0.518, |
| "step": 974 |
| }, |
| { |
| "epoch": 2.680412371134021, |
| "grad_norm": 0.2635705526316712, |
| "learning_rate": 4.180777171870427e-05, |
| "loss": 0.519, |
| "step": 975 |
| }, |
| { |
| "epoch": 2.6831615120274916, |
| "grad_norm": 0.2420795496999561, |
| "learning_rate": 4.173089438221876e-05, |
| "loss": 0.5329, |
| "step": 976 |
| }, |
| { |
| "epoch": 2.6859106529209624, |
| "grad_norm": 0.24604439574458017, |
| "learning_rate": 4.165401063957283e-05, |
| "loss": 0.5331, |
| "step": 977 |
| }, |
| { |
| "epoch": 2.688659793814433, |
| "grad_norm": 0.1948605941664403, |
| "learning_rate": 4.157712077531856e-05, |
| "loss": 0.5221, |
| "step": 978 |
| }, |
| { |
| "epoch": 2.691408934707904, |
| "grad_norm": 0.21270792355056445, |
| "learning_rate": 4.1500225074030654e-05, |
| "loss": 0.5355, |
| "step": 979 |
| }, |
| { |
| "epoch": 2.6941580756013748, |
| "grad_norm": 0.251861479262255, |
| "learning_rate": 4.142332382030547e-05, |
| "loss": 0.5336, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.6969072164948455, |
| "grad_norm": 0.2112157139724383, |
| "learning_rate": 4.13464172987599e-05, |
| "loss": 0.5243, |
| "step": 981 |
| }, |
| { |
| "epoch": 2.6996563573883163, |
| "grad_norm": 0.20387549860274823, |
| "learning_rate": 4.1269505794030296e-05, |
| "loss": 0.5237, |
| "step": 982 |
| }, |
| { |
| "epoch": 2.702405498281787, |
| "grad_norm": 0.2515717159268574, |
| "learning_rate": 4.119258959077151e-05, |
| "loss": 0.5319, |
| "step": 983 |
| }, |
| { |
| "epoch": 2.705154639175258, |
| "grad_norm": 0.20847504323318589, |
| "learning_rate": 4.111566897365575e-05, |
| "loss": 0.52, |
| "step": 984 |
| }, |
| { |
| "epoch": 2.7079037800687287, |
| "grad_norm": 0.21911840052600298, |
| "learning_rate": 4.103874422737157e-05, |
| "loss": 0.5283, |
| "step": 985 |
| }, |
| { |
| "epoch": 2.7106529209621995, |
| "grad_norm": 0.2049343201857368, |
| "learning_rate": 4.096181563662279e-05, |
| "loss": 0.5263, |
| "step": 986 |
| }, |
| { |
| "epoch": 2.7134020618556702, |
| "grad_norm": 0.1947687795273586, |
| "learning_rate": 4.088488348612748e-05, |
| "loss": 0.5239, |
| "step": 987 |
| }, |
| { |
| "epoch": 2.716151202749141, |
| "grad_norm": 0.23438600392965853, |
| "learning_rate": 4.080794806061688e-05, |
| "loss": 0.5235, |
| "step": 988 |
| }, |
| { |
| "epoch": 2.718900343642612, |
| "grad_norm": 0.19851824128921414, |
| "learning_rate": 4.073100964483435e-05, |
| "loss": 0.5236, |
| "step": 989 |
| }, |
| { |
| "epoch": 2.7216494845360826, |
| "grad_norm": 0.23677872375792366, |
| "learning_rate": 4.06540685235343e-05, |
| "loss": 0.5307, |
| "step": 990 |
| }, |
| { |
| "epoch": 2.7243986254295534, |
| "grad_norm": 0.19484452355877876, |
| "learning_rate": 4.057712498148119e-05, |
| "loss": 0.533, |
| "step": 991 |
| }, |
| { |
| "epoch": 2.727147766323024, |
| "grad_norm": 0.24691106342879912, |
| "learning_rate": 4.05001793034484e-05, |
| "loss": 0.5282, |
| "step": 992 |
| }, |
| { |
| "epoch": 2.729896907216495, |
| "grad_norm": 0.23911573502762684, |
| "learning_rate": 4.0423231774217246e-05, |
| "loss": 0.5291, |
| "step": 993 |
| }, |
| { |
| "epoch": 2.7326460481099657, |
| "grad_norm": 0.24784394785326042, |
| "learning_rate": 4.034628267857587e-05, |
| "loss": 0.5299, |
| "step": 994 |
| }, |
| { |
| "epoch": 2.7353951890034365, |
| "grad_norm": 0.2530406886888911, |
| "learning_rate": 4.026933230131823e-05, |
| "loss": 0.5155, |
| "step": 995 |
| }, |
| { |
| "epoch": 2.7381443298969073, |
| "grad_norm": 0.24815572290601853, |
| "learning_rate": 4.0192380927243014e-05, |
| "loss": 0.5331, |
| "step": 996 |
| }, |
| { |
| "epoch": 2.740893470790378, |
| "grad_norm": 0.2756774245673258, |
| "learning_rate": 4.0115428841152637e-05, |
| "loss": 0.5334, |
| "step": 997 |
| }, |
| { |
| "epoch": 2.743642611683849, |
| "grad_norm": 0.24077726898420207, |
| "learning_rate": 4.0038476327852065e-05, |
| "loss": 0.5337, |
| "step": 998 |
| }, |
| { |
| "epoch": 2.7463917525773196, |
| "grad_norm": 0.20128889819397291, |
| "learning_rate": 3.9961523672147955e-05, |
| "loss": 0.5302, |
| "step": 999 |
| }, |
| { |
| "epoch": 2.7491408934707904, |
| "grad_norm": 0.20443940819422327, |
| "learning_rate": 3.988457115884739e-05, |
| "loss": 0.5284, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.751890034364261, |
| "grad_norm": 0.2208826727313727, |
| "learning_rate": 3.9807619072757e-05, |
| "loss": 0.5271, |
| "step": 1001 |
| }, |
| { |
| "epoch": 2.754639175257732, |
| "grad_norm": 0.24447959308366699, |
| "learning_rate": 3.973066769868178e-05, |
| "loss": 0.5283, |
| "step": 1002 |
| }, |
| { |
| "epoch": 2.7573883161512027, |
| "grad_norm": 0.1966957237573264, |
| "learning_rate": 3.965371732142415e-05, |
| "loss": 0.5292, |
| "step": 1003 |
| }, |
| { |
| "epoch": 2.7601374570446735, |
| "grad_norm": 0.21603427189344282, |
| "learning_rate": 3.957676822578276e-05, |
| "loss": 0.5237, |
| "step": 1004 |
| }, |
| { |
| "epoch": 2.7628865979381443, |
| "grad_norm": 0.18664057521742963, |
| "learning_rate": 3.949982069655161e-05, |
| "loss": 0.5233, |
| "step": 1005 |
| }, |
| { |
| "epoch": 2.765635738831615, |
| "grad_norm": 0.2234326703320951, |
| "learning_rate": 3.942287501851881e-05, |
| "loss": 0.5256, |
| "step": 1006 |
| }, |
| { |
| "epoch": 2.768384879725086, |
| "grad_norm": 0.2278011506232875, |
| "learning_rate": 3.9345931476465706e-05, |
| "loss": 0.5279, |
| "step": 1007 |
| }, |
| { |
| "epoch": 2.7711340206185566, |
| "grad_norm": 0.19798832981596853, |
| "learning_rate": 3.9268990355165664e-05, |
| "loss": 0.5211, |
| "step": 1008 |
| }, |
| { |
| "epoch": 2.7738831615120274, |
| "grad_norm": 0.2183804005479682, |
| "learning_rate": 3.9192051939383126e-05, |
| "loss": 0.5239, |
| "step": 1009 |
| }, |
| { |
| "epoch": 2.776632302405498, |
| "grad_norm": 0.22246588980190424, |
| "learning_rate": 3.911511651387253e-05, |
| "loss": 0.527, |
| "step": 1010 |
| }, |
| { |
| "epoch": 2.779381443298969, |
| "grad_norm": 0.1780086146278406, |
| "learning_rate": 3.903818436337722e-05, |
| "loss": 0.5284, |
| "step": 1011 |
| }, |
| { |
| "epoch": 2.7821305841924397, |
| "grad_norm": 0.19604513054901485, |
| "learning_rate": 3.896125577262845e-05, |
| "loss": 0.5162, |
| "step": 1012 |
| }, |
| { |
| "epoch": 2.7848797250859105, |
| "grad_norm": 0.18794858233130177, |
| "learning_rate": 3.888433102634425e-05, |
| "loss": 0.5216, |
| "step": 1013 |
| }, |
| { |
| "epoch": 2.7876288659793813, |
| "grad_norm": 0.21435085046499355, |
| "learning_rate": 3.8807410409228496e-05, |
| "loss": 0.5305, |
| "step": 1014 |
| }, |
| { |
| "epoch": 2.790378006872852, |
| "grad_norm": 0.17410246023922102, |
| "learning_rate": 3.8730494205969724e-05, |
| "loss": 0.5312, |
| "step": 1015 |
| }, |
| { |
| "epoch": 2.793127147766323, |
| "grad_norm": 0.1822314934502394, |
| "learning_rate": 3.865358270124013e-05, |
| "loss": 0.5189, |
| "step": 1016 |
| }, |
| { |
| "epoch": 2.7958762886597937, |
| "grad_norm": 0.206717747798629, |
| "learning_rate": 3.857667617969454e-05, |
| "loss": 0.5269, |
| "step": 1017 |
| }, |
| { |
| "epoch": 2.7986254295532644, |
| "grad_norm": 0.16863448749904908, |
| "learning_rate": 3.849977492596936e-05, |
| "loss": 0.5303, |
| "step": 1018 |
| }, |
| { |
| "epoch": 2.801374570446735, |
| "grad_norm": 0.23449573918173383, |
| "learning_rate": 3.8422879224681456e-05, |
| "loss": 0.5322, |
| "step": 1019 |
| }, |
| { |
| "epoch": 2.804123711340206, |
| "grad_norm": 0.17234697630039422, |
| "learning_rate": 3.8345989360427174e-05, |
| "loss": 0.5229, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.8068728522336768, |
| "grad_norm": 0.21339720138725765, |
| "learning_rate": 3.826910561778124e-05, |
| "loss": 0.5271, |
| "step": 1021 |
| }, |
| { |
| "epoch": 2.8096219931271476, |
| "grad_norm": 0.22865589786306695, |
| "learning_rate": 3.819222828129574e-05, |
| "loss": 0.5261, |
| "step": 1022 |
| }, |
| { |
| "epoch": 2.8123711340206183, |
| "grad_norm": 0.19395333537771012, |
| "learning_rate": 3.8115357635499045e-05, |
| "loss": 0.5307, |
| "step": 1023 |
| }, |
| { |
| "epoch": 2.815120274914089, |
| "grad_norm": 0.26952248831929015, |
| "learning_rate": 3.803849396489473e-05, |
| "loss": 0.5304, |
| "step": 1024 |
| }, |
| { |
| "epoch": 2.81786941580756, |
| "grad_norm": 0.2078676429133822, |
| "learning_rate": 3.7961637553960605e-05, |
| "loss": 0.521, |
| "step": 1025 |
| }, |
| { |
| "epoch": 2.8206185567010307, |
| "grad_norm": 0.2437865253535147, |
| "learning_rate": 3.788478868714758e-05, |
| "loss": 0.5284, |
| "step": 1026 |
| }, |
| { |
| "epoch": 2.8233676975945015, |
| "grad_norm": 0.23244204464146223, |
| "learning_rate": 3.780794764887866e-05, |
| "loss": 0.5347, |
| "step": 1027 |
| }, |
| { |
| "epoch": 2.8261168384879722, |
| "grad_norm": 0.1883460034103066, |
| "learning_rate": 3.7731114723547856e-05, |
| "loss": 0.5332, |
| "step": 1028 |
| }, |
| { |
| "epoch": 2.8288659793814435, |
| "grad_norm": 0.18889412109996112, |
| "learning_rate": 3.7654290195519195e-05, |
| "loss": 0.527, |
| "step": 1029 |
| }, |
| { |
| "epoch": 2.8316151202749142, |
| "grad_norm": 0.23037525353226912, |
| "learning_rate": 3.757747434912556e-05, |
| "loss": 0.5293, |
| "step": 1030 |
| }, |
| { |
| "epoch": 2.834364261168385, |
| "grad_norm": 0.21033380977426863, |
| "learning_rate": 3.750066746866778e-05, |
| "loss": 0.5226, |
| "step": 1031 |
| }, |
| { |
| "epoch": 2.837113402061856, |
| "grad_norm": 0.17225724075961382, |
| "learning_rate": 3.742386983841341e-05, |
| "loss": 0.5244, |
| "step": 1032 |
| }, |
| { |
| "epoch": 2.8398625429553266, |
| "grad_norm": 0.22879502842160185, |
| "learning_rate": 3.734708174259585e-05, |
| "loss": 0.5291, |
| "step": 1033 |
| }, |
| { |
| "epoch": 2.8426116838487974, |
| "grad_norm": 0.1859425423671784, |
| "learning_rate": 3.727030346541317e-05, |
| "loss": 0.5207, |
| "step": 1034 |
| }, |
| { |
| "epoch": 2.845360824742268, |
| "grad_norm": 0.21138789242685446, |
| "learning_rate": 3.7193535291027115e-05, |
| "loss": 0.5283, |
| "step": 1035 |
| }, |
| { |
| "epoch": 2.848109965635739, |
| "grad_norm": 0.22384899016314974, |
| "learning_rate": 3.7116777503562016e-05, |
| "loss": 0.528, |
| "step": 1036 |
| }, |
| { |
| "epoch": 2.8508591065292097, |
| "grad_norm": 0.2431535434052746, |
| "learning_rate": 3.7040030387103815e-05, |
| "loss": 0.5187, |
| "step": 1037 |
| }, |
| { |
| "epoch": 2.8536082474226805, |
| "grad_norm": 0.24399503353207871, |
| "learning_rate": 3.696329422569889e-05, |
| "loss": 0.5179, |
| "step": 1038 |
| }, |
| { |
| "epoch": 2.8563573883161513, |
| "grad_norm": 0.16921161494449857, |
| "learning_rate": 3.6886569303353136e-05, |
| "loss": 0.5292, |
| "step": 1039 |
| }, |
| { |
| "epoch": 2.859106529209622, |
| "grad_norm": 0.26728522647583636, |
| "learning_rate": 3.680985590403082e-05, |
| "loss": 0.5262, |
| "step": 1040 |
| }, |
| { |
| "epoch": 2.861855670103093, |
| "grad_norm": 0.27421229932307095, |
| "learning_rate": 3.673315431165355e-05, |
| "loss": 0.5252, |
| "step": 1041 |
| }, |
| { |
| "epoch": 2.8646048109965636, |
| "grad_norm": 0.2601105298366, |
| "learning_rate": 3.665646481009926e-05, |
| "loss": 0.5203, |
| "step": 1042 |
| }, |
| { |
| "epoch": 2.8673539518900344, |
| "grad_norm": 0.2997837262368804, |
| "learning_rate": 3.657978768320111e-05, |
| "loss": 0.5266, |
| "step": 1043 |
| }, |
| { |
| "epoch": 2.870103092783505, |
| "grad_norm": 0.1739319697716107, |
| "learning_rate": 3.650312321474651e-05, |
| "loss": 0.5285, |
| "step": 1044 |
| }, |
| { |
| "epoch": 2.872852233676976, |
| "grad_norm": 0.2868154357660894, |
| "learning_rate": 3.642647168847596e-05, |
| "loss": 0.5349, |
| "step": 1045 |
| }, |
| { |
| "epoch": 2.8756013745704467, |
| "grad_norm": 0.3150816781576196, |
| "learning_rate": 3.6349833388082096e-05, |
| "loss": 0.5213, |
| "step": 1046 |
| }, |
| { |
| "epoch": 2.8783505154639175, |
| "grad_norm": 0.21703226563550534, |
| "learning_rate": 3.6273208597208595e-05, |
| "loss": 0.5302, |
| "step": 1047 |
| }, |
| { |
| "epoch": 2.8810996563573883, |
| "grad_norm": 0.24416409278757065, |
| "learning_rate": 3.619659759944916e-05, |
| "loss": 0.5249, |
| "step": 1048 |
| }, |
| { |
| "epoch": 2.883848797250859, |
| "grad_norm": 0.2807527048682551, |
| "learning_rate": 3.612000067834636e-05, |
| "loss": 0.5249, |
| "step": 1049 |
| }, |
| { |
| "epoch": 2.88659793814433, |
| "grad_norm": 0.2744380467215706, |
| "learning_rate": 3.604341811739077e-05, |
| "loss": 0.5305, |
| "step": 1050 |
| }, |
| { |
| "epoch": 2.8893470790378006, |
| "grad_norm": 0.2271292993546667, |
| "learning_rate": 3.596685020001975e-05, |
| "loss": 0.5194, |
| "step": 1051 |
| }, |
| { |
| "epoch": 2.8920962199312714, |
| "grad_norm": 0.21515311729133313, |
| "learning_rate": 3.5890297209616507e-05, |
| "loss": 0.5242, |
| "step": 1052 |
| }, |
| { |
| "epoch": 2.894845360824742, |
| "grad_norm": 0.2904407669489142, |
| "learning_rate": 3.581375942950895e-05, |
| "loss": 0.5163, |
| "step": 1053 |
| }, |
| { |
| "epoch": 2.897594501718213, |
| "grad_norm": 0.23987566400445187, |
| "learning_rate": 3.5737237142968755e-05, |
| "loss": 0.5321, |
| "step": 1054 |
| }, |
| { |
| "epoch": 2.9003436426116838, |
| "grad_norm": 0.203928269390139, |
| "learning_rate": 3.56607306332102e-05, |
| "loss": 0.5203, |
| "step": 1055 |
| }, |
| { |
| "epoch": 2.9030927835051545, |
| "grad_norm": 0.26314705579076864, |
| "learning_rate": 3.558424018338922e-05, |
| "loss": 0.5267, |
| "step": 1056 |
| }, |
| { |
| "epoch": 2.9058419243986253, |
| "grad_norm": 0.22978092693646193, |
| "learning_rate": 3.5507766076602264e-05, |
| "loss": 0.5211, |
| "step": 1057 |
| }, |
| { |
| "epoch": 2.908591065292096, |
| "grad_norm": 0.1831775606016915, |
| "learning_rate": 3.5431308595885316e-05, |
| "loss": 0.5186, |
| "step": 1058 |
| }, |
| { |
| "epoch": 2.911340206185567, |
| "grad_norm": 0.2557072250330763, |
| "learning_rate": 3.535486802421284e-05, |
| "loss": 0.5204, |
| "step": 1059 |
| }, |
| { |
| "epoch": 2.9140893470790377, |
| "grad_norm": 0.2285012181274199, |
| "learning_rate": 3.5278444644496695e-05, |
| "loss": 0.5273, |
| "step": 1060 |
| }, |
| { |
| "epoch": 2.9168384879725084, |
| "grad_norm": 0.18355449593774995, |
| "learning_rate": 3.520203873958514e-05, |
| "loss": 0.531, |
| "step": 1061 |
| }, |
| { |
| "epoch": 2.9195876288659792, |
| "grad_norm": 0.21328292549083275, |
| "learning_rate": 3.512565059226172e-05, |
| "loss": 0.5229, |
| "step": 1062 |
| }, |
| { |
| "epoch": 2.9223367697594504, |
| "grad_norm": 0.21044538382380515, |
| "learning_rate": 3.5049280485244286e-05, |
| "loss": 0.5231, |
| "step": 1063 |
| }, |
| { |
| "epoch": 2.9250859106529212, |
| "grad_norm": 0.17119562274819797, |
| "learning_rate": 3.4972928701183925e-05, |
| "loss": 0.522, |
| "step": 1064 |
| }, |
| { |
| "epoch": 2.927835051546392, |
| "grad_norm": 0.22581900383360864, |
| "learning_rate": 3.489659552266388e-05, |
| "loss": 0.5246, |
| "step": 1065 |
| }, |
| { |
| "epoch": 2.930584192439863, |
| "grad_norm": 0.301379881236332, |
| "learning_rate": 3.482028123219855e-05, |
| "loss": 0.529, |
| "step": 1066 |
| }, |
| { |
| "epoch": 2.9333333333333336, |
| "grad_norm": 0.295319936733804, |
| "learning_rate": 3.4743986112232434e-05, |
| "loss": 0.5289, |
| "step": 1067 |
| }, |
| { |
| "epoch": 2.9360824742268044, |
| "grad_norm": 0.2388909601572767, |
| "learning_rate": 3.466771044513907e-05, |
| "loss": 0.5252, |
| "step": 1068 |
| }, |
| { |
| "epoch": 2.938831615120275, |
| "grad_norm": 0.2678249461923104, |
| "learning_rate": 3.459145451322002e-05, |
| "loss": 0.5262, |
| "step": 1069 |
| }, |
| { |
| "epoch": 2.941580756013746, |
| "grad_norm": 0.22322825864211238, |
| "learning_rate": 3.4515218598703765e-05, |
| "loss": 0.5297, |
| "step": 1070 |
| }, |
| { |
| "epoch": 2.9443298969072167, |
| "grad_norm": 0.179417097733591, |
| "learning_rate": 3.443900298374475e-05, |
| "loss": 0.5249, |
| "step": 1071 |
| }, |
| { |
| "epoch": 2.9470790378006875, |
| "grad_norm": 0.2551012950816274, |
| "learning_rate": 3.436280795042225e-05, |
| "loss": 0.5212, |
| "step": 1072 |
| }, |
| { |
| "epoch": 2.9498281786941583, |
| "grad_norm": 0.25848561223138594, |
| "learning_rate": 3.428663378073937e-05, |
| "loss": 0.528, |
| "step": 1073 |
| }, |
| { |
| "epoch": 2.952577319587629, |
| "grad_norm": 0.17137059703558605, |
| "learning_rate": 3.421048075662203e-05, |
| "loss": 0.524, |
| "step": 1074 |
| }, |
| { |
| "epoch": 2.9553264604811, |
| "grad_norm": 0.21809992041709833, |
| "learning_rate": 3.413434915991784e-05, |
| "loss": 0.5225, |
| "step": 1075 |
| }, |
| { |
| "epoch": 2.9580756013745706, |
| "grad_norm": 0.19281232602243703, |
| "learning_rate": 3.4058239272395156e-05, |
| "loss": 0.5198, |
| "step": 1076 |
| }, |
| { |
| "epoch": 2.9608247422680414, |
| "grad_norm": 0.18758042777019787, |
| "learning_rate": 3.398215137574194e-05, |
| "loss": 0.5145, |
| "step": 1077 |
| }, |
| { |
| "epoch": 2.963573883161512, |
| "grad_norm": 0.24212160366698007, |
| "learning_rate": 3.390608575156481e-05, |
| "loss": 0.529, |
| "step": 1078 |
| }, |
| { |
| "epoch": 2.966323024054983, |
| "grad_norm": 0.19965723896338422, |
| "learning_rate": 3.3830042681387904e-05, |
| "loss": 0.5345, |
| "step": 1079 |
| }, |
| { |
| "epoch": 2.9690721649484537, |
| "grad_norm": 0.16936178177221572, |
| "learning_rate": 3.375402244665194e-05, |
| "loss": 0.5208, |
| "step": 1080 |
| }, |
| { |
| "epoch": 2.9718213058419245, |
| "grad_norm": 0.20843282321759193, |
| "learning_rate": 3.367802532871306e-05, |
| "loss": 0.5254, |
| "step": 1081 |
| }, |
| { |
| "epoch": 2.9745704467353953, |
| "grad_norm": 0.1837127741621114, |
| "learning_rate": 3.360205160884191e-05, |
| "loss": 0.5313, |
| "step": 1082 |
| }, |
| { |
| "epoch": 2.977319587628866, |
| "grad_norm": 0.199460376426822, |
| "learning_rate": 3.352610156822248e-05, |
| "loss": 0.5292, |
| "step": 1083 |
| }, |
| { |
| "epoch": 2.980068728522337, |
| "grad_norm": 0.24820207311148457, |
| "learning_rate": 3.345017548795116e-05, |
| "loss": 0.5257, |
| "step": 1084 |
| }, |
| { |
| "epoch": 2.9828178694158076, |
| "grad_norm": 0.24493105512545987, |
| "learning_rate": 3.337427364903565e-05, |
| "loss": 0.5224, |
| "step": 1085 |
| }, |
| { |
| "epoch": 2.9855670103092784, |
| "grad_norm": 0.23786424160796374, |
| "learning_rate": 3.329839633239395e-05, |
| "loss": 0.5304, |
| "step": 1086 |
| }, |
| { |
| "epoch": 2.988316151202749, |
| "grad_norm": 0.27502699218694276, |
| "learning_rate": 3.322254381885325e-05, |
| "loss": 0.5281, |
| "step": 1087 |
| }, |
| { |
| "epoch": 2.99106529209622, |
| "grad_norm": 0.27049323273784326, |
| "learning_rate": 3.314671638914902e-05, |
| "loss": 0.5301, |
| "step": 1088 |
| }, |
| { |
| "epoch": 2.9938144329896907, |
| "grad_norm": 0.19737402579388286, |
| "learning_rate": 3.307091432392382e-05, |
| "loss": 0.5288, |
| "step": 1089 |
| }, |
| { |
| "epoch": 2.9965635738831615, |
| "grad_norm": 0.26232022578272246, |
| "learning_rate": 3.299513790372634e-05, |
| "loss": 0.521, |
| "step": 1090 |
| }, |
| { |
| "epoch": 2.9993127147766323, |
| "grad_norm": 0.23892201559966744, |
| "learning_rate": 3.2919387409010424e-05, |
| "loss": 0.6184, |
| "step": 1091 |
| }, |
| { |
| "epoch": 3.002061855670103, |
| "grad_norm": 0.3594917888207816, |
| "learning_rate": 3.284366312013388e-05, |
| "loss": 0.5355, |
| "step": 1092 |
| }, |
| { |
| "epoch": 3.004810996563574, |
| "grad_norm": 0.25599834111682307, |
| "learning_rate": 3.27679653173576e-05, |
| "loss": 0.4746, |
| "step": 1093 |
| }, |
| { |
| "epoch": 3.0075601374570446, |
| "grad_norm": 0.2254148080474178, |
| "learning_rate": 3.2692294280844374e-05, |
| "loss": 0.4669, |
| "step": 1094 |
| }, |
| { |
| "epoch": 3.0103092783505154, |
| "grad_norm": 0.2506660939835137, |
| "learning_rate": 3.261665029065801e-05, |
| "loss": 0.4682, |
| "step": 1095 |
| }, |
| { |
| "epoch": 3.013058419243986, |
| "grad_norm": 0.267195831442756, |
| "learning_rate": 3.254103362676217e-05, |
| "loss": 0.4715, |
| "step": 1096 |
| }, |
| { |
| "epoch": 3.015807560137457, |
| "grad_norm": 0.24179014490822495, |
| "learning_rate": 3.246544456901934e-05, |
| "loss": 0.4778, |
| "step": 1097 |
| }, |
| { |
| "epoch": 3.0185567010309278, |
| "grad_norm": 0.2329847121267379, |
| "learning_rate": 3.238988339718992e-05, |
| "loss": 0.4747, |
| "step": 1098 |
| }, |
| { |
| "epoch": 3.0213058419243985, |
| "grad_norm": 0.2043691678048163, |
| "learning_rate": 3.231435039093104e-05, |
| "loss": 0.4786, |
| "step": 1099 |
| }, |
| { |
| "epoch": 3.0240549828178693, |
| "grad_norm": 0.22745344698265332, |
| "learning_rate": 3.223884582979561e-05, |
| "loss": 0.4657, |
| "step": 1100 |
| }, |
| { |
| "epoch": 3.02680412371134, |
| "grad_norm": 0.22934476133037818, |
| "learning_rate": 3.216336999323127e-05, |
| "loss": 0.4889, |
| "step": 1101 |
| }, |
| { |
| "epoch": 3.029553264604811, |
| "grad_norm": 0.21801654584396532, |
| "learning_rate": 3.208792316057931e-05, |
| "loss": 0.4784, |
| "step": 1102 |
| }, |
| { |
| "epoch": 3.0323024054982817, |
| "grad_norm": 0.2431718990003574, |
| "learning_rate": 3.201250561107374e-05, |
| "loss": 0.4797, |
| "step": 1103 |
| }, |
| { |
| "epoch": 3.0350515463917525, |
| "grad_norm": 0.26915847727829645, |
| "learning_rate": 3.1937117623840114e-05, |
| "loss": 0.4826, |
| "step": 1104 |
| }, |
| { |
| "epoch": 3.0378006872852232, |
| "grad_norm": 0.2351108919147438, |
| "learning_rate": 3.1861759477894656e-05, |
| "loss": 0.4772, |
| "step": 1105 |
| }, |
| { |
| "epoch": 3.040549828178694, |
| "grad_norm": 0.2904532318483722, |
| "learning_rate": 3.178643145214307e-05, |
| "loss": 0.4803, |
| "step": 1106 |
| }, |
| { |
| "epoch": 3.043298969072165, |
| "grad_norm": 0.19635036055007402, |
| "learning_rate": 3.171113382537962e-05, |
| "loss": 0.4701, |
| "step": 1107 |
| }, |
| { |
| "epoch": 3.0460481099656356, |
| "grad_norm": 0.22223894507419642, |
| "learning_rate": 3.1635866876286064e-05, |
| "loss": 0.4741, |
| "step": 1108 |
| }, |
| { |
| "epoch": 3.0487972508591064, |
| "grad_norm": 0.2152706605758009, |
| "learning_rate": 3.156063088343061e-05, |
| "loss": 0.4746, |
| "step": 1109 |
| }, |
| { |
| "epoch": 3.051546391752577, |
| "grad_norm": 0.17409361205810703, |
| "learning_rate": 3.1485426125266896e-05, |
| "loss": 0.478, |
| "step": 1110 |
| }, |
| { |
| "epoch": 3.054295532646048, |
| "grad_norm": 0.21539232573809372, |
| "learning_rate": 3.141025288013296e-05, |
| "loss": 0.4706, |
| "step": 1111 |
| }, |
| { |
| "epoch": 3.0570446735395187, |
| "grad_norm": 0.19308259974547204, |
| "learning_rate": 3.1335111426250216e-05, |
| "loss": 0.4777, |
| "step": 1112 |
| }, |
| { |
| "epoch": 3.0597938144329895, |
| "grad_norm": 0.2276406440928922, |
| "learning_rate": 3.1260002041722405e-05, |
| "loss": 0.4823, |
| "step": 1113 |
| }, |
| { |
| "epoch": 3.0625429553264603, |
| "grad_norm": 0.1977395826548686, |
| "learning_rate": 3.118492500453456e-05, |
| "loss": 0.4781, |
| "step": 1114 |
| }, |
| { |
| "epoch": 3.0652920962199315, |
| "grad_norm": 0.17401988491729847, |
| "learning_rate": 3.1109880592552046e-05, |
| "loss": 0.474, |
| "step": 1115 |
| }, |
| { |
| "epoch": 3.0680412371134023, |
| "grad_norm": 0.1912931420538338, |
| "learning_rate": 3.103486908351943e-05, |
| "loss": 0.4763, |
| "step": 1116 |
| }, |
| { |
| "epoch": 3.070790378006873, |
| "grad_norm": 0.16713338636093636, |
| "learning_rate": 3.0959890755059515e-05, |
| "loss": 0.4796, |
| "step": 1117 |
| }, |
| { |
| "epoch": 3.073539518900344, |
| "grad_norm": 0.20702313839329717, |
| "learning_rate": 3.088494588467232e-05, |
| "loss": 0.4778, |
| "step": 1118 |
| }, |
| { |
| "epoch": 3.0762886597938146, |
| "grad_norm": 0.1616831225744788, |
| "learning_rate": 3.081003474973401e-05, |
| "loss": 0.4756, |
| "step": 1119 |
| }, |
| { |
| "epoch": 3.0790378006872854, |
| "grad_norm": 0.24558159371403487, |
| "learning_rate": 3.0735157627495925e-05, |
| "loss": 0.4789, |
| "step": 1120 |
| }, |
| { |
| "epoch": 3.081786941580756, |
| "grad_norm": 0.17163392192032517, |
| "learning_rate": 3.0660314795083475e-05, |
| "loss": 0.4768, |
| "step": 1121 |
| }, |
| { |
| "epoch": 3.084536082474227, |
| "grad_norm": 0.21495669147653343, |
| "learning_rate": 3.0585506529495186e-05, |
| "loss": 0.4757, |
| "step": 1122 |
| }, |
| { |
| "epoch": 3.0872852233676977, |
| "grad_norm": 0.15045704205058852, |
| "learning_rate": 3.0510733107601664e-05, |
| "loss": 0.473, |
| "step": 1123 |
| }, |
| { |
| "epoch": 3.0900343642611685, |
| "grad_norm": 0.2020087192391845, |
| "learning_rate": 3.0435994806144524e-05, |
| "loss": 0.4735, |
| "step": 1124 |
| }, |
| { |
| "epoch": 3.0927835051546393, |
| "grad_norm": 0.15940495290357568, |
| "learning_rate": 3.0361291901735428e-05, |
| "loss": 0.4709, |
| "step": 1125 |
| }, |
| { |
| "epoch": 3.09553264604811, |
| "grad_norm": 0.18904118815363216, |
| "learning_rate": 3.028662467085501e-05, |
| "loss": 0.4838, |
| "step": 1126 |
| }, |
| { |
| "epoch": 3.098281786941581, |
| "grad_norm": 0.17129808608598124, |
| "learning_rate": 3.0211993389851893e-05, |
| "loss": 0.4768, |
| "step": 1127 |
| }, |
| { |
| "epoch": 3.1010309278350516, |
| "grad_norm": 0.18416866902814874, |
| "learning_rate": 3.013739833494162e-05, |
| "loss": 0.4718, |
| "step": 1128 |
| }, |
| { |
| "epoch": 3.1037800687285224, |
| "grad_norm": 0.1705293761247234, |
| "learning_rate": 3.00628397822057e-05, |
| "loss": 0.4791, |
| "step": 1129 |
| }, |
| { |
| "epoch": 3.106529209621993, |
| "grad_norm": 0.20209221200923272, |
| "learning_rate": 2.9988318007590494e-05, |
| "loss": 0.4823, |
| "step": 1130 |
| }, |
| { |
| "epoch": 3.109278350515464, |
| "grad_norm": 0.2118645657673755, |
| "learning_rate": 2.991383328690627e-05, |
| "loss": 0.4782, |
| "step": 1131 |
| }, |
| { |
| "epoch": 3.1120274914089348, |
| "grad_norm": 0.17556823451574127, |
| "learning_rate": 2.9839385895826175e-05, |
| "loss": 0.4783, |
| "step": 1132 |
| }, |
| { |
| "epoch": 3.1147766323024055, |
| "grad_norm": 0.19795461740770434, |
| "learning_rate": 2.9764976109885166e-05, |
| "loss": 0.4818, |
| "step": 1133 |
| }, |
| { |
| "epoch": 3.1175257731958763, |
| "grad_norm": 0.16701933368048089, |
| "learning_rate": 2.9690604204479026e-05, |
| "loss": 0.476, |
| "step": 1134 |
| }, |
| { |
| "epoch": 3.120274914089347, |
| "grad_norm": 0.20618027859459478, |
| "learning_rate": 2.9616270454863368e-05, |
| "loss": 0.4798, |
| "step": 1135 |
| }, |
| { |
| "epoch": 3.123024054982818, |
| "grad_norm": 0.1431586178012281, |
| "learning_rate": 2.9541975136152548e-05, |
| "loss": 0.478, |
| "step": 1136 |
| }, |
| { |
| "epoch": 3.1257731958762887, |
| "grad_norm": 0.17793879100992058, |
| "learning_rate": 2.946771852331873e-05, |
| "loss": 0.4756, |
| "step": 1137 |
| }, |
| { |
| "epoch": 3.1285223367697594, |
| "grad_norm": 0.16681103898774272, |
| "learning_rate": 2.939350089119077e-05, |
| "loss": 0.4715, |
| "step": 1138 |
| }, |
| { |
| "epoch": 3.13127147766323, |
| "grad_norm": 0.17261193531928995, |
| "learning_rate": 2.9319322514453286e-05, |
| "loss": 0.4759, |
| "step": 1139 |
| }, |
| { |
| "epoch": 3.134020618556701, |
| "grad_norm": 0.1582640372762462, |
| "learning_rate": 2.9245183667645626e-05, |
| "loss": 0.4749, |
| "step": 1140 |
| }, |
| { |
| "epoch": 3.136769759450172, |
| "grad_norm": 0.17322436049835935, |
| "learning_rate": 2.9171084625160797e-05, |
| "loss": 0.4703, |
| "step": 1141 |
| }, |
| { |
| "epoch": 3.1395189003436426, |
| "grad_norm": 0.17367955729692558, |
| "learning_rate": 2.909702566124452e-05, |
| "loss": 0.4792, |
| "step": 1142 |
| }, |
| { |
| "epoch": 3.1422680412371133, |
| "grad_norm": 0.16722751357988883, |
| "learning_rate": 2.9023007049994157e-05, |
| "loss": 0.4717, |
| "step": 1143 |
| }, |
| { |
| "epoch": 3.145017182130584, |
| "grad_norm": 0.1728042548273763, |
| "learning_rate": 2.8949029065357753e-05, |
| "loss": 0.4818, |
| "step": 1144 |
| }, |
| { |
| "epoch": 3.147766323024055, |
| "grad_norm": 0.18069700472194267, |
| "learning_rate": 2.8875091981132963e-05, |
| "loss": 0.476, |
| "step": 1145 |
| }, |
| { |
| "epoch": 3.1505154639175257, |
| "grad_norm": 0.15820067646687608, |
| "learning_rate": 2.8801196070966054e-05, |
| "loss": 0.4666, |
| "step": 1146 |
| }, |
| { |
| "epoch": 3.1532646048109965, |
| "grad_norm": 0.15258574864023677, |
| "learning_rate": 2.8727341608350952e-05, |
| "loss": 0.4759, |
| "step": 1147 |
| }, |
| { |
| "epoch": 3.1560137457044672, |
| "grad_norm": 0.1960418259666068, |
| "learning_rate": 2.8653528866628132e-05, |
| "loss": 0.4769, |
| "step": 1148 |
| }, |
| { |
| "epoch": 3.158762886597938, |
| "grad_norm": 0.15371731621163243, |
| "learning_rate": 2.8579758118983716e-05, |
| "loss": 0.4842, |
| "step": 1149 |
| }, |
| { |
| "epoch": 3.161512027491409, |
| "grad_norm": 0.19068409005542658, |
| "learning_rate": 2.8506029638448347e-05, |
| "loss": 0.4776, |
| "step": 1150 |
| }, |
| { |
| "epoch": 3.1642611683848796, |
| "grad_norm": 0.17886606119360748, |
| "learning_rate": 2.843234369789625e-05, |
| "loss": 0.4744, |
| "step": 1151 |
| }, |
| { |
| "epoch": 3.1670103092783504, |
| "grad_norm": 0.19710185432909114, |
| "learning_rate": 2.8358700570044232e-05, |
| "loss": 0.4813, |
| "step": 1152 |
| }, |
| { |
| "epoch": 3.169759450171821, |
| "grad_norm": 0.18041901946396155, |
| "learning_rate": 2.8285100527450623e-05, |
| "loss": 0.4833, |
| "step": 1153 |
| }, |
| { |
| "epoch": 3.172508591065292, |
| "grad_norm": 0.16581599590174512, |
| "learning_rate": 2.8211543842514288e-05, |
| "loss": 0.4739, |
| "step": 1154 |
| }, |
| { |
| "epoch": 3.1752577319587627, |
| "grad_norm": 0.15872506391538035, |
| "learning_rate": 2.8138030787473635e-05, |
| "loss": 0.4854, |
| "step": 1155 |
| }, |
| { |
| "epoch": 3.1780068728522335, |
| "grad_norm": 0.18595712745784665, |
| "learning_rate": 2.806456163440559e-05, |
| "loss": 0.4791, |
| "step": 1156 |
| }, |
| { |
| "epoch": 3.1807560137457043, |
| "grad_norm": 0.1585604017012495, |
| "learning_rate": 2.7991136655224602e-05, |
| "loss": 0.4794, |
| "step": 1157 |
| }, |
| { |
| "epoch": 3.183505154639175, |
| "grad_norm": 0.16724675223224328, |
| "learning_rate": 2.791775612168161e-05, |
| "loss": 0.484, |
| "step": 1158 |
| }, |
| { |
| "epoch": 3.1862542955326463, |
| "grad_norm": 0.1598241225286718, |
| "learning_rate": 2.784442030536309e-05, |
| "loss": 0.4816, |
| "step": 1159 |
| }, |
| { |
| "epoch": 3.189003436426117, |
| "grad_norm": 0.16901071883801533, |
| "learning_rate": 2.7771129477689972e-05, |
| "loss": 0.473, |
| "step": 1160 |
| }, |
| { |
| "epoch": 3.191752577319588, |
| "grad_norm": 0.13853638340372265, |
| "learning_rate": 2.7697883909916732e-05, |
| "loss": 0.4736, |
| "step": 1161 |
| }, |
| { |
| "epoch": 3.1945017182130586, |
| "grad_norm": 0.16091561527631862, |
| "learning_rate": 2.7624683873130287e-05, |
| "loss": 0.4805, |
| "step": 1162 |
| }, |
| { |
| "epoch": 3.1972508591065294, |
| "grad_norm": 0.14598087437104398, |
| "learning_rate": 2.755152963824905e-05, |
| "loss": 0.4835, |
| "step": 1163 |
| }, |
| { |
| "epoch": 3.2, |
| "grad_norm": 0.175532838361529, |
| "learning_rate": 2.7478421476021968e-05, |
| "loss": 0.4743, |
| "step": 1164 |
| }, |
| { |
| "epoch": 3.202749140893471, |
| "grad_norm": 0.1337685529203635, |
| "learning_rate": 2.740535965702739e-05, |
| "loss": 0.4734, |
| "step": 1165 |
| }, |
| { |
| "epoch": 3.2054982817869417, |
| "grad_norm": 0.1735634483137809, |
| "learning_rate": 2.7332344451672214e-05, |
| "loss": 0.4709, |
| "step": 1166 |
| }, |
| { |
| "epoch": 3.2082474226804125, |
| "grad_norm": 0.15693421713978023, |
| "learning_rate": 2.7259376130190783e-05, |
| "loss": 0.4741, |
| "step": 1167 |
| }, |
| { |
| "epoch": 3.2109965635738833, |
| "grad_norm": 0.1330436144853641, |
| "learning_rate": 2.718645496264392e-05, |
| "loss": 0.4739, |
| "step": 1168 |
| }, |
| { |
| "epoch": 3.213745704467354, |
| "grad_norm": 0.17309959852487516, |
| "learning_rate": 2.711358121891795e-05, |
| "loss": 0.4705, |
| "step": 1169 |
| }, |
| { |
| "epoch": 3.216494845360825, |
| "grad_norm": 0.1642984807751716, |
| "learning_rate": 2.704075516872366e-05, |
| "loss": 0.4726, |
| "step": 1170 |
| }, |
| { |
| "epoch": 3.2192439862542956, |
| "grad_norm": 0.14722892838781462, |
| "learning_rate": 2.6967977081595304e-05, |
| "loss": 0.4775, |
| "step": 1171 |
| }, |
| { |
| "epoch": 3.2219931271477664, |
| "grad_norm": 0.16887717979369643, |
| "learning_rate": 2.6895247226889672e-05, |
| "loss": 0.4738, |
| "step": 1172 |
| }, |
| { |
| "epoch": 3.224742268041237, |
| "grad_norm": 0.17049853048281954, |
| "learning_rate": 2.682256587378498e-05, |
| "loss": 0.4719, |
| "step": 1173 |
| }, |
| { |
| "epoch": 3.227491408934708, |
| "grad_norm": 0.15138446686573354, |
| "learning_rate": 2.6749933291279994e-05, |
| "loss": 0.4856, |
| "step": 1174 |
| }, |
| { |
| "epoch": 3.2302405498281788, |
| "grad_norm": 0.16148723512701157, |
| "learning_rate": 2.6677349748192934e-05, |
| "loss": 0.4762, |
| "step": 1175 |
| }, |
| { |
| "epoch": 3.2329896907216495, |
| "grad_norm": 0.14912677660800724, |
| "learning_rate": 2.6604815513160556e-05, |
| "loss": 0.4653, |
| "step": 1176 |
| }, |
| { |
| "epoch": 3.2357388316151203, |
| "grad_norm": 0.13862596201937702, |
| "learning_rate": 2.6532330854637086e-05, |
| "loss": 0.4743, |
| "step": 1177 |
| }, |
| { |
| "epoch": 3.238487972508591, |
| "grad_norm": 0.15927565474045197, |
| "learning_rate": 2.645989604089331e-05, |
| "loss": 0.4746, |
| "step": 1178 |
| }, |
| { |
| "epoch": 3.241237113402062, |
| "grad_norm": 0.17434623830674337, |
| "learning_rate": 2.638751134001549e-05, |
| "loss": 0.48, |
| "step": 1179 |
| }, |
| { |
| "epoch": 3.2439862542955327, |
| "grad_norm": 0.1447492759368682, |
| "learning_rate": 2.6315177019904423e-05, |
| "loss": 0.4781, |
| "step": 1180 |
| }, |
| { |
| "epoch": 3.2467353951890034, |
| "grad_norm": 0.16927963210867344, |
| "learning_rate": 2.624289334827448e-05, |
| "loss": 0.4704, |
| "step": 1181 |
| }, |
| { |
| "epoch": 3.2494845360824742, |
| "grad_norm": 0.1665258090219738, |
| "learning_rate": 2.6170660592652545e-05, |
| "loss": 0.4781, |
| "step": 1182 |
| }, |
| { |
| "epoch": 3.252233676975945, |
| "grad_norm": 0.15855482153444905, |
| "learning_rate": 2.609847902037706e-05, |
| "loss": 0.4743, |
| "step": 1183 |
| }, |
| { |
| "epoch": 3.254982817869416, |
| "grad_norm": 0.17971555626146168, |
| "learning_rate": 2.6026348898597057e-05, |
| "loss": 0.4803, |
| "step": 1184 |
| }, |
| { |
| "epoch": 3.2577319587628866, |
| "grad_norm": 0.2816535496494584, |
| "learning_rate": 2.5954270494271116e-05, |
| "loss": 0.4761, |
| "step": 1185 |
| }, |
| { |
| "epoch": 3.2604810996563574, |
| "grad_norm": 0.23781070038647936, |
| "learning_rate": 2.588224407416645e-05, |
| "loss": 0.4729, |
| "step": 1186 |
| }, |
| { |
| "epoch": 3.263230240549828, |
| "grad_norm": 0.23075459880557803, |
| "learning_rate": 2.581026990485781e-05, |
| "loss": 0.4764, |
| "step": 1187 |
| }, |
| { |
| "epoch": 3.265979381443299, |
| "grad_norm": 0.14086275865969622, |
| "learning_rate": 2.5738348252726607e-05, |
| "loss": 0.4809, |
| "step": 1188 |
| }, |
| { |
| "epoch": 3.2687285223367697, |
| "grad_norm": 0.18978376603435007, |
| "learning_rate": 2.566647938395989e-05, |
| "loss": 0.4817, |
| "step": 1189 |
| }, |
| { |
| "epoch": 3.2714776632302405, |
| "grad_norm": 0.16648197235336828, |
| "learning_rate": 2.559466356454933e-05, |
| "loss": 0.4703, |
| "step": 1190 |
| }, |
| { |
| "epoch": 3.2742268041237113, |
| "grad_norm": 0.17731919695127468, |
| "learning_rate": 2.5522901060290272e-05, |
| "loss": 0.4757, |
| "step": 1191 |
| }, |
| { |
| "epoch": 3.276975945017182, |
| "grad_norm": 0.22533634711616554, |
| "learning_rate": 2.545119213678072e-05, |
| "loss": 0.4779, |
| "step": 1192 |
| }, |
| { |
| "epoch": 3.279725085910653, |
| "grad_norm": 0.29341047419363175, |
| "learning_rate": 2.53795370594204e-05, |
| "loss": 0.4809, |
| "step": 1193 |
| }, |
| { |
| "epoch": 3.2824742268041236, |
| "grad_norm": 0.17396435349550476, |
| "learning_rate": 2.530793609340974e-05, |
| "loss": 0.4796, |
| "step": 1194 |
| }, |
| { |
| "epoch": 3.2852233676975944, |
| "grad_norm": 0.20383849713641408, |
| "learning_rate": 2.523638950374886e-05, |
| "loss": 0.4721, |
| "step": 1195 |
| }, |
| { |
| "epoch": 3.287972508591065, |
| "grad_norm": 0.16601271855097022, |
| "learning_rate": 2.5164897555236686e-05, |
| "loss": 0.4804, |
| "step": 1196 |
| }, |
| { |
| "epoch": 3.290721649484536, |
| "grad_norm": 0.30470051683065796, |
| "learning_rate": 2.509346051246988e-05, |
| "loss": 0.4792, |
| "step": 1197 |
| }, |
| { |
| "epoch": 3.2934707903780067, |
| "grad_norm": 0.2812298427453436, |
| "learning_rate": 2.5022078639841918e-05, |
| "loss": 0.4782, |
| "step": 1198 |
| }, |
| { |
| "epoch": 3.2962199312714775, |
| "grad_norm": 0.1597994542210946, |
| "learning_rate": 2.495075220154206e-05, |
| "loss": 0.4823, |
| "step": 1199 |
| }, |
| { |
| "epoch": 3.2989690721649483, |
| "grad_norm": 0.3095999568945906, |
| "learning_rate": 2.4879481461554405e-05, |
| "loss": 0.4762, |
| "step": 1200 |
| }, |
| { |
| "epoch": 3.301718213058419, |
| "grad_norm": 0.24822013052998354, |
| "learning_rate": 2.4808266683656932e-05, |
| "loss": 0.4805, |
| "step": 1201 |
| }, |
| { |
| "epoch": 3.30446735395189, |
| "grad_norm": 0.19304267454670382, |
| "learning_rate": 2.473710813142049e-05, |
| "loss": 0.4709, |
| "step": 1202 |
| }, |
| { |
| "epoch": 3.3072164948453606, |
| "grad_norm": 0.22511180187157134, |
| "learning_rate": 2.4666006068207793e-05, |
| "loss": 0.4791, |
| "step": 1203 |
| }, |
| { |
| "epoch": 3.3099656357388314, |
| "grad_norm": 0.17597702188477718, |
| "learning_rate": 2.4594960757172555e-05, |
| "loss": 0.4825, |
| "step": 1204 |
| }, |
| { |
| "epoch": 3.312714776632302, |
| "grad_norm": 0.18231300214622423, |
| "learning_rate": 2.4523972461258386e-05, |
| "loss": 0.4794, |
| "step": 1205 |
| }, |
| { |
| "epoch": 3.315463917525773, |
| "grad_norm": 0.25489199823717157, |
| "learning_rate": 2.445304144319792e-05, |
| "loss": 0.4795, |
| "step": 1206 |
| }, |
| { |
| "epoch": 3.3182130584192437, |
| "grad_norm": 0.15105215480831594, |
| "learning_rate": 2.4382167965511774e-05, |
| "loss": 0.4902, |
| "step": 1207 |
| }, |
| { |
| "epoch": 3.320962199312715, |
| "grad_norm": 0.2799009342897516, |
| "learning_rate": 2.431135229050763e-05, |
| "loss": 0.4818, |
| "step": 1208 |
| }, |
| { |
| "epoch": 3.3237113402061857, |
| "grad_norm": 0.21951158609310864, |
| "learning_rate": 2.4240594680279204e-05, |
| "loss": 0.479, |
| "step": 1209 |
| }, |
| { |
| "epoch": 3.3264604810996565, |
| "grad_norm": 0.19395691144843719, |
| "learning_rate": 2.416989539670536e-05, |
| "loss": 0.479, |
| "step": 1210 |
| }, |
| { |
| "epoch": 3.3292096219931273, |
| "grad_norm": 0.17094551246350398, |
| "learning_rate": 2.409925470144903e-05, |
| "loss": 0.4808, |
| "step": 1211 |
| }, |
| { |
| "epoch": 3.331958762886598, |
| "grad_norm": 0.23964152078624051, |
| "learning_rate": 2.402867285595635e-05, |
| "loss": 0.4764, |
| "step": 1212 |
| }, |
| { |
| "epoch": 3.334707903780069, |
| "grad_norm": 0.2782973993431327, |
| "learning_rate": 2.3958150121455638e-05, |
| "loss": 0.4811, |
| "step": 1213 |
| }, |
| { |
| "epoch": 3.3374570446735397, |
| "grad_norm": 0.1640795235166955, |
| "learning_rate": 2.3887686758956425e-05, |
| "loss": 0.4793, |
| "step": 1214 |
| }, |
| { |
| "epoch": 3.3402061855670104, |
| "grad_norm": 0.3070072205373487, |
| "learning_rate": 2.3817283029248536e-05, |
| "loss": 0.475, |
| "step": 1215 |
| }, |
| { |
| "epoch": 3.342955326460481, |
| "grad_norm": 0.23275102863915093, |
| "learning_rate": 2.3746939192901052e-05, |
| "loss": 0.479, |
| "step": 1216 |
| }, |
| { |
| "epoch": 3.345704467353952, |
| "grad_norm": 0.23028817088895479, |
| "learning_rate": 2.3676655510261402e-05, |
| "loss": 0.4735, |
| "step": 1217 |
| }, |
| { |
| "epoch": 3.3484536082474228, |
| "grad_norm": 0.320766094957754, |
| "learning_rate": 2.3606432241454398e-05, |
| "loss": 0.4877, |
| "step": 1218 |
| }, |
| { |
| "epoch": 3.3512027491408936, |
| "grad_norm": 0.2518366256607952, |
| "learning_rate": 2.353626964638122e-05, |
| "loss": 0.4733, |
| "step": 1219 |
| }, |
| { |
| "epoch": 3.3539518900343643, |
| "grad_norm": 0.1904395352124866, |
| "learning_rate": 2.34661679847185e-05, |
| "loss": 0.4895, |
| "step": 1220 |
| }, |
| { |
| "epoch": 3.356701030927835, |
| "grad_norm": 0.3279385370633572, |
| "learning_rate": 2.3396127515917392e-05, |
| "loss": 0.4777, |
| "step": 1221 |
| }, |
| { |
| "epoch": 3.359450171821306, |
| "grad_norm": 0.20836286167731985, |
| "learning_rate": 2.3326148499202518e-05, |
| "loss": 0.4765, |
| "step": 1222 |
| }, |
| { |
| "epoch": 3.3621993127147767, |
| "grad_norm": 0.22895782643501986, |
| "learning_rate": 2.325623119357107e-05, |
| "loss": 0.4883, |
| "step": 1223 |
| }, |
| { |
| "epoch": 3.3649484536082475, |
| "grad_norm": 0.29237657945287854, |
| "learning_rate": 2.3186375857791896e-05, |
| "loss": 0.483, |
| "step": 1224 |
| }, |
| { |
| "epoch": 3.3676975945017182, |
| "grad_norm": 0.18465319808299466, |
| "learning_rate": 2.3116582750404427e-05, |
| "loss": 0.472, |
| "step": 1225 |
| }, |
| { |
| "epoch": 3.370446735395189, |
| "grad_norm": 0.20550527495867124, |
| "learning_rate": 2.304685212971781e-05, |
| "loss": 0.477, |
| "step": 1226 |
| }, |
| { |
| "epoch": 3.37319587628866, |
| "grad_norm": 0.2692822007314835, |
| "learning_rate": 2.2977184253809913e-05, |
| "loss": 0.4763, |
| "step": 1227 |
| }, |
| { |
| "epoch": 3.3759450171821306, |
| "grad_norm": 0.1708056032464783, |
| "learning_rate": 2.2907579380526387e-05, |
| "loss": 0.48, |
| "step": 1228 |
| }, |
| { |
| "epoch": 3.3786941580756014, |
| "grad_norm": 0.368849523723143, |
| "learning_rate": 2.2838037767479713e-05, |
| "loss": 0.4797, |
| "step": 1229 |
| }, |
| { |
| "epoch": 3.381443298969072, |
| "grad_norm": 0.21646292002270615, |
| "learning_rate": 2.2768559672048218e-05, |
| "loss": 0.474, |
| "step": 1230 |
| }, |
| { |
| "epoch": 3.384192439862543, |
| "grad_norm": 0.26373109002232037, |
| "learning_rate": 2.2699145351375186e-05, |
| "loss": 0.4803, |
| "step": 1231 |
| }, |
| { |
| "epoch": 3.3869415807560137, |
| "grad_norm": 0.2843624854578763, |
| "learning_rate": 2.262979506236784e-05, |
| "loss": 0.4826, |
| "step": 1232 |
| }, |
| { |
| "epoch": 3.3896907216494845, |
| "grad_norm": 0.19833041217898748, |
| "learning_rate": 2.256050906169641e-05, |
| "loss": 0.491, |
| "step": 1233 |
| }, |
| { |
| "epoch": 3.3924398625429553, |
| "grad_norm": 0.2800244352558394, |
| "learning_rate": 2.2491287605793204e-05, |
| "loss": 0.4707, |
| "step": 1234 |
| }, |
| { |
| "epoch": 3.395189003436426, |
| "grad_norm": 0.18325778358692565, |
| "learning_rate": 2.2422130950851643e-05, |
| "loss": 0.4744, |
| "step": 1235 |
| }, |
| { |
| "epoch": 3.397938144329897, |
| "grad_norm": 0.3008410533335858, |
| "learning_rate": 2.235303935282531e-05, |
| "loss": 0.4846, |
| "step": 1236 |
| }, |
| { |
| "epoch": 3.4006872852233676, |
| "grad_norm": 0.19194203147930453, |
| "learning_rate": 2.2284013067427025e-05, |
| "loss": 0.4718, |
| "step": 1237 |
| }, |
| { |
| "epoch": 3.4034364261168384, |
| "grad_norm": 0.34898181416099955, |
| "learning_rate": 2.2215052350127834e-05, |
| "loss": 0.484, |
| "step": 1238 |
| }, |
| { |
| "epoch": 3.406185567010309, |
| "grad_norm": 0.2356425803363679, |
| "learning_rate": 2.214615745615619e-05, |
| "loss": 0.4774, |
| "step": 1239 |
| }, |
| { |
| "epoch": 3.40893470790378, |
| "grad_norm": 0.21466246650328097, |
| "learning_rate": 2.207732864049686e-05, |
| "loss": 0.4812, |
| "step": 1240 |
| }, |
| { |
| "epoch": 3.4116838487972507, |
| "grad_norm": 0.26377375192350677, |
| "learning_rate": 2.200856615789009e-05, |
| "loss": 0.4912, |
| "step": 1241 |
| }, |
| { |
| "epoch": 3.4144329896907215, |
| "grad_norm": 0.15695872082264978, |
| "learning_rate": 2.1939870262830577e-05, |
| "loss": 0.4715, |
| "step": 1242 |
| }, |
| { |
| "epoch": 3.4171821305841923, |
| "grad_norm": 0.28736994518322967, |
| "learning_rate": 2.187124120956666e-05, |
| "loss": 0.4811, |
| "step": 1243 |
| }, |
| { |
| "epoch": 3.419931271477663, |
| "grad_norm": 0.24847231178584678, |
| "learning_rate": 2.1802679252099184e-05, |
| "loss": 0.4784, |
| "step": 1244 |
| }, |
| { |
| "epoch": 3.422680412371134, |
| "grad_norm": 0.17957114598770363, |
| "learning_rate": 2.1734184644180715e-05, |
| "loss": 0.4706, |
| "step": 1245 |
| }, |
| { |
| "epoch": 3.4254295532646046, |
| "grad_norm": 0.3386000360336158, |
| "learning_rate": 2.16657576393146e-05, |
| "loss": 0.4764, |
| "step": 1246 |
| }, |
| { |
| "epoch": 3.4281786941580754, |
| "grad_norm": 0.14262617741372224, |
| "learning_rate": 2.1597398490753917e-05, |
| "loss": 0.4645, |
| "step": 1247 |
| }, |
| { |
| "epoch": 3.4309278350515466, |
| "grad_norm": 0.2371298714043362, |
| "learning_rate": 2.152910745150063e-05, |
| "loss": 0.4778, |
| "step": 1248 |
| }, |
| { |
| "epoch": 3.4336769759450174, |
| "grad_norm": 0.15007549750939145, |
| "learning_rate": 2.1460884774304614e-05, |
| "loss": 0.481, |
| "step": 1249 |
| }, |
| { |
| "epoch": 3.436426116838488, |
| "grad_norm": 0.2666157528473445, |
| "learning_rate": 2.1392730711662755e-05, |
| "loss": 0.473, |
| "step": 1250 |
| }, |
| { |
| "epoch": 3.439175257731959, |
| "grad_norm": 0.19370621015791017, |
| "learning_rate": 2.1324645515817988e-05, |
| "loss": 0.4916, |
| "step": 1251 |
| }, |
| { |
| "epoch": 3.4419243986254298, |
| "grad_norm": 0.1363626153767201, |
| "learning_rate": 2.125662943875832e-05, |
| "loss": 0.4783, |
| "step": 1252 |
| }, |
| { |
| "epoch": 3.4446735395189005, |
| "grad_norm": 0.1558927651589561, |
| "learning_rate": 2.1188682732215978e-05, |
| "loss": 0.4863, |
| "step": 1253 |
| }, |
| { |
| "epoch": 3.4474226804123713, |
| "grad_norm": 0.1878171719282548, |
| "learning_rate": 2.1120805647666484e-05, |
| "loss": 0.483, |
| "step": 1254 |
| }, |
| { |
| "epoch": 3.450171821305842, |
| "grad_norm": 0.1706433093556562, |
| "learning_rate": 2.1052998436327624e-05, |
| "loss": 0.4766, |
| "step": 1255 |
| }, |
| { |
| "epoch": 3.452920962199313, |
| "grad_norm": 0.14594078534672697, |
| "learning_rate": 2.0985261349158606e-05, |
| "loss": 0.4701, |
| "step": 1256 |
| }, |
| { |
| "epoch": 3.4556701030927837, |
| "grad_norm": 0.2713964023439289, |
| "learning_rate": 2.0917594636859084e-05, |
| "loss": 0.4734, |
| "step": 1257 |
| }, |
| { |
| "epoch": 3.4584192439862544, |
| "grad_norm": 0.20301631548067403, |
| "learning_rate": 2.084999854986829e-05, |
| "loss": 0.4664, |
| "step": 1258 |
| }, |
| { |
| "epoch": 3.4611683848797252, |
| "grad_norm": 0.21017329606796895, |
| "learning_rate": 2.0782473338364034e-05, |
| "loss": 0.4801, |
| "step": 1259 |
| }, |
| { |
| "epoch": 3.463917525773196, |
| "grad_norm": 0.2024174569645721, |
| "learning_rate": 2.0715019252261786e-05, |
| "loss": 0.473, |
| "step": 1260 |
| }, |
| { |
| "epoch": 3.466666666666667, |
| "grad_norm": 0.13513087820592787, |
| "learning_rate": 2.0647636541213843e-05, |
| "loss": 0.479, |
| "step": 1261 |
| }, |
| { |
| "epoch": 3.4694158075601376, |
| "grad_norm": 0.15323818419427804, |
| "learning_rate": 2.0580325454608294e-05, |
| "loss": 0.479, |
| "step": 1262 |
| }, |
| { |
| "epoch": 3.4721649484536083, |
| "grad_norm": 0.1883180981275938, |
| "learning_rate": 2.051308624156815e-05, |
| "loss": 0.4797, |
| "step": 1263 |
| }, |
| { |
| "epoch": 3.474914089347079, |
| "grad_norm": 0.18535688822536886, |
| "learning_rate": 2.0445919150950387e-05, |
| "loss": 0.4828, |
| "step": 1264 |
| }, |
| { |
| "epoch": 3.47766323024055, |
| "grad_norm": 0.1735738790195007, |
| "learning_rate": 2.0378824431345116e-05, |
| "loss": 0.4752, |
| "step": 1265 |
| }, |
| { |
| "epoch": 3.4804123711340207, |
| "grad_norm": 0.25380262888772, |
| "learning_rate": 2.0311802331074543e-05, |
| "loss": 0.479, |
| "step": 1266 |
| }, |
| { |
| "epoch": 3.4831615120274915, |
| "grad_norm": 0.17442121730064505, |
| "learning_rate": 2.024485309819213e-05, |
| "loss": 0.4739, |
| "step": 1267 |
| }, |
| { |
| "epoch": 3.4859106529209622, |
| "grad_norm": 0.284320944684907, |
| "learning_rate": 2.01779769804816e-05, |
| "loss": 0.4843, |
| "step": 1268 |
| }, |
| { |
| "epoch": 3.488659793814433, |
| "grad_norm": 0.1625892440951168, |
| "learning_rate": 2.011117422545616e-05, |
| "loss": 0.4763, |
| "step": 1269 |
| }, |
| { |
| "epoch": 3.491408934707904, |
| "grad_norm": 0.28275001646900066, |
| "learning_rate": 2.004444508035743e-05, |
| "loss": 0.4699, |
| "step": 1270 |
| }, |
| { |
| "epoch": 3.4941580756013746, |
| "grad_norm": 0.17127917891669825, |
| "learning_rate": 1.9977789792154615e-05, |
| "loss": 0.4882, |
| "step": 1271 |
| }, |
| { |
| "epoch": 3.4969072164948454, |
| "grad_norm": 0.155941741872769, |
| "learning_rate": 1.991120860754356e-05, |
| "loss": 0.4692, |
| "step": 1272 |
| }, |
| { |
| "epoch": 3.499656357388316, |
| "grad_norm": 0.15459720154148457, |
| "learning_rate": 1.984470177294588e-05, |
| "loss": 0.4841, |
| "step": 1273 |
| }, |
| { |
| "epoch": 3.502405498281787, |
| "grad_norm": 0.2196341305641022, |
| "learning_rate": 1.9778269534507987e-05, |
| "loss": 0.4713, |
| "step": 1274 |
| }, |
| { |
| "epoch": 3.5051546391752577, |
| "grad_norm": 0.22478563840945578, |
| "learning_rate": 1.971191213810021e-05, |
| "loss": 0.4892, |
| "step": 1275 |
| }, |
| { |
| "epoch": 3.5079037800687285, |
| "grad_norm": 0.1739449107806518, |
| "learning_rate": 1.964562982931588e-05, |
| "loss": 0.4765, |
| "step": 1276 |
| }, |
| { |
| "epoch": 3.5106529209621993, |
| "grad_norm": 0.27214067372219086, |
| "learning_rate": 1.9579422853470436e-05, |
| "loss": 0.4815, |
| "step": 1277 |
| }, |
| { |
| "epoch": 3.51340206185567, |
| "grad_norm": 0.17400788077272825, |
| "learning_rate": 1.95132914556005e-05, |
| "loss": 0.4727, |
| "step": 1278 |
| }, |
| { |
| "epoch": 3.516151202749141, |
| "grad_norm": 0.19337030209527964, |
| "learning_rate": 1.9447235880462947e-05, |
| "loss": 0.4804, |
| "step": 1279 |
| }, |
| { |
| "epoch": 3.5189003436426116, |
| "grad_norm": 0.1514665359437226, |
| "learning_rate": 1.938125637253409e-05, |
| "loss": 0.4729, |
| "step": 1280 |
| }, |
| { |
| "epoch": 3.5216494845360824, |
| "grad_norm": 0.27810047698132523, |
| "learning_rate": 1.9315353176008655e-05, |
| "loss": 0.478, |
| "step": 1281 |
| }, |
| { |
| "epoch": 3.524398625429553, |
| "grad_norm": 0.2052552676995858, |
| "learning_rate": 1.9249526534798965e-05, |
| "loss": 0.4825, |
| "step": 1282 |
| }, |
| { |
| "epoch": 3.527147766323024, |
| "grad_norm": 0.1638869338895521, |
| "learning_rate": 1.9183776692533977e-05, |
| "loss": 0.4754, |
| "step": 1283 |
| }, |
| { |
| "epoch": 3.5298969072164947, |
| "grad_norm": 0.1623552411012285, |
| "learning_rate": 1.9118103892558438e-05, |
| "loss": 0.4721, |
| "step": 1284 |
| }, |
| { |
| "epoch": 3.5326460481099655, |
| "grad_norm": 0.2765823979801074, |
| "learning_rate": 1.9052508377931945e-05, |
| "loss": 0.4721, |
| "step": 1285 |
| }, |
| { |
| "epoch": 3.5353951890034363, |
| "grad_norm": 0.20787425872060086, |
| "learning_rate": 1.8986990391428056e-05, |
| "loss": 0.4721, |
| "step": 1286 |
| }, |
| { |
| "epoch": 3.538144329896907, |
| "grad_norm": 0.23103945726058686, |
| "learning_rate": 1.8921550175533384e-05, |
| "loss": 0.4804, |
| "step": 1287 |
| }, |
| { |
| "epoch": 3.540893470790378, |
| "grad_norm": 0.14819667238986017, |
| "learning_rate": 1.885618797244674e-05, |
| "loss": 0.4781, |
| "step": 1288 |
| }, |
| { |
| "epoch": 3.5436426116838486, |
| "grad_norm": 0.34832469601339244, |
| "learning_rate": 1.8790904024078166e-05, |
| "loss": 0.4853, |
| "step": 1289 |
| }, |
| { |
| "epoch": 3.5463917525773194, |
| "grad_norm": 0.17762443199441277, |
| "learning_rate": 1.8725698572048096e-05, |
| "loss": 0.4687, |
| "step": 1290 |
| }, |
| { |
| "epoch": 3.54914089347079, |
| "grad_norm": 0.220110309410904, |
| "learning_rate": 1.866057185768644e-05, |
| "loss": 0.4722, |
| "step": 1291 |
| }, |
| { |
| "epoch": 3.551890034364261, |
| "grad_norm": 0.1568411202207363, |
| "learning_rate": 1.8595524122031695e-05, |
| "loss": 0.477, |
| "step": 1292 |
| }, |
| { |
| "epoch": 3.5546391752577318, |
| "grad_norm": 0.1487231862533628, |
| "learning_rate": 1.8530555605830048e-05, |
| "loss": 0.4812, |
| "step": 1293 |
| }, |
| { |
| "epoch": 3.5573883161512025, |
| "grad_norm": 0.27212850028975205, |
| "learning_rate": 1.8465666549534487e-05, |
| "loss": 0.4716, |
| "step": 1294 |
| }, |
| { |
| "epoch": 3.5601374570446733, |
| "grad_norm": 0.26360242336986534, |
| "learning_rate": 1.840085719330394e-05, |
| "loss": 0.4794, |
| "step": 1295 |
| }, |
| { |
| "epoch": 3.562886597938144, |
| "grad_norm": 0.20568964864290215, |
| "learning_rate": 1.8336127777002336e-05, |
| "loss": 0.4808, |
| "step": 1296 |
| }, |
| { |
| "epoch": 3.565635738831615, |
| "grad_norm": 0.282051987566169, |
| "learning_rate": 1.8271478540197735e-05, |
| "loss": 0.4761, |
| "step": 1297 |
| }, |
| { |
| "epoch": 3.5683848797250857, |
| "grad_norm": 0.16289176663769198, |
| "learning_rate": 1.8206909722161444e-05, |
| "loss": 0.4774, |
| "step": 1298 |
| }, |
| { |
| "epoch": 3.5711340206185564, |
| "grad_norm": 0.2575145343236498, |
| "learning_rate": 1.814242156186718e-05, |
| "loss": 0.4795, |
| "step": 1299 |
| }, |
| { |
| "epoch": 3.5738831615120272, |
| "grad_norm": 0.1486906599375562, |
| "learning_rate": 1.8078014297990104e-05, |
| "loss": 0.4824, |
| "step": 1300 |
| }, |
| { |
| "epoch": 3.576632302405498, |
| "grad_norm": 0.1501909622248065, |
| "learning_rate": 1.8013688168905946e-05, |
| "loss": 0.4804, |
| "step": 1301 |
| }, |
| { |
| "epoch": 3.5793814432989692, |
| "grad_norm": 0.15230793704069007, |
| "learning_rate": 1.7949443412690182e-05, |
| "loss": 0.4751, |
| "step": 1302 |
| }, |
| { |
| "epoch": 3.58213058419244, |
| "grad_norm": 0.16635368754820404, |
| "learning_rate": 1.788528026711715e-05, |
| "loss": 0.4806, |
| "step": 1303 |
| }, |
| { |
| "epoch": 3.584879725085911, |
| "grad_norm": 0.1347568536197528, |
| "learning_rate": 1.78211989696591e-05, |
| "loss": 0.4723, |
| "step": 1304 |
| }, |
| { |
| "epoch": 3.5876288659793816, |
| "grad_norm": 0.19251735027744715, |
| "learning_rate": 1.7757199757485363e-05, |
| "loss": 0.4796, |
| "step": 1305 |
| }, |
| { |
| "epoch": 3.5903780068728524, |
| "grad_norm": 0.14511887693354616, |
| "learning_rate": 1.7693282867461464e-05, |
| "loss": 0.4875, |
| "step": 1306 |
| }, |
| { |
| "epoch": 3.593127147766323, |
| "grad_norm": 0.21823490371477205, |
| "learning_rate": 1.762944853614828e-05, |
| "loss": 0.4831, |
| "step": 1307 |
| }, |
| { |
| "epoch": 3.595876288659794, |
| "grad_norm": 0.14789140619202265, |
| "learning_rate": 1.756569699980111e-05, |
| "loss": 0.4719, |
| "step": 1308 |
| }, |
| { |
| "epoch": 3.5986254295532647, |
| "grad_norm": 0.1537689543052464, |
| "learning_rate": 1.7502028494368777e-05, |
| "loss": 0.4775, |
| "step": 1309 |
| }, |
| { |
| "epoch": 3.6013745704467355, |
| "grad_norm": 0.16438166123180578, |
| "learning_rate": 1.743844325549289e-05, |
| "loss": 0.4778, |
| "step": 1310 |
| }, |
| { |
| "epoch": 3.6041237113402063, |
| "grad_norm": 0.1309598142497098, |
| "learning_rate": 1.737494151850682e-05, |
| "loss": 0.4799, |
| "step": 1311 |
| }, |
| { |
| "epoch": 3.606872852233677, |
| "grad_norm": 0.13501349366551338, |
| "learning_rate": 1.731152351843492e-05, |
| "loss": 0.4778, |
| "step": 1312 |
| }, |
| { |
| "epoch": 3.609621993127148, |
| "grad_norm": 0.12271151015542832, |
| "learning_rate": 1.7248189489991594e-05, |
| "loss": 0.4804, |
| "step": 1313 |
| }, |
| { |
| "epoch": 3.6123711340206186, |
| "grad_norm": 0.1579684841055174, |
| "learning_rate": 1.7184939667580512e-05, |
| "loss": 0.4756, |
| "step": 1314 |
| }, |
| { |
| "epoch": 3.6151202749140894, |
| "grad_norm": 0.11550758392122117, |
| "learning_rate": 1.712177428529365e-05, |
| "loss": 0.4813, |
| "step": 1315 |
| }, |
| { |
| "epoch": 3.61786941580756, |
| "grad_norm": 0.1446519336760662, |
| "learning_rate": 1.7058693576910495e-05, |
| "loss": 0.4761, |
| "step": 1316 |
| }, |
| { |
| "epoch": 3.620618556701031, |
| "grad_norm": 0.1387283733233433, |
| "learning_rate": 1.6995697775897097e-05, |
| "loss": 0.4855, |
| "step": 1317 |
| }, |
| { |
| "epoch": 3.6233676975945017, |
| "grad_norm": 0.12450900277305127, |
| "learning_rate": 1.6932787115405318e-05, |
| "loss": 0.4768, |
| "step": 1318 |
| }, |
| { |
| "epoch": 3.6261168384879725, |
| "grad_norm": 0.15118692423046173, |
| "learning_rate": 1.6869961828271892e-05, |
| "loss": 0.4795, |
| "step": 1319 |
| }, |
| { |
| "epoch": 3.6288659793814433, |
| "grad_norm": 0.1275953061475887, |
| "learning_rate": 1.6807222147017558e-05, |
| "loss": 0.4844, |
| "step": 1320 |
| }, |
| { |
| "epoch": 3.631615120274914, |
| "grad_norm": 0.12357741430968726, |
| "learning_rate": 1.6744568303846237e-05, |
| "loss": 0.4718, |
| "step": 1321 |
| }, |
| { |
| "epoch": 3.634364261168385, |
| "grad_norm": 0.13324292495631146, |
| "learning_rate": 1.6682000530644177e-05, |
| "loss": 0.4828, |
| "step": 1322 |
| }, |
| { |
| "epoch": 3.6371134020618556, |
| "grad_norm": 0.12727908902135235, |
| "learning_rate": 1.6619519058979044e-05, |
| "loss": 0.4731, |
| "step": 1323 |
| }, |
| { |
| "epoch": 3.6398625429553264, |
| "grad_norm": 0.14031695435202224, |
| "learning_rate": 1.6557124120099108e-05, |
| "loss": 0.4723, |
| "step": 1324 |
| }, |
| { |
| "epoch": 3.642611683848797, |
| "grad_norm": 0.1238822517218841, |
| "learning_rate": 1.6494815944932376e-05, |
| "loss": 0.4767, |
| "step": 1325 |
| }, |
| { |
| "epoch": 3.645360824742268, |
| "grad_norm": 0.14383655055562664, |
| "learning_rate": 1.6432594764085738e-05, |
| "loss": 0.4744, |
| "step": 1326 |
| }, |
| { |
| "epoch": 3.6481099656357387, |
| "grad_norm": 0.13074212391556805, |
| "learning_rate": 1.6370460807844096e-05, |
| "loss": 0.4719, |
| "step": 1327 |
| }, |
| { |
| "epoch": 3.6508591065292095, |
| "grad_norm": 0.1408486433749572, |
| "learning_rate": 1.6308414306169545e-05, |
| "loss": 0.4792, |
| "step": 1328 |
| }, |
| { |
| "epoch": 3.6536082474226803, |
| "grad_norm": 0.1425866228634816, |
| "learning_rate": 1.6246455488700522e-05, |
| "loss": 0.4759, |
| "step": 1329 |
| }, |
| { |
| "epoch": 3.656357388316151, |
| "grad_norm": 0.13452543614110554, |
| "learning_rate": 1.61845845847509e-05, |
| "loss": 0.4825, |
| "step": 1330 |
| }, |
| { |
| "epoch": 3.659106529209622, |
| "grad_norm": 0.123447733182089, |
| "learning_rate": 1.6122801823309195e-05, |
| "loss": 0.4839, |
| "step": 1331 |
| }, |
| { |
| "epoch": 3.6618556701030927, |
| "grad_norm": 0.141262084894595, |
| "learning_rate": 1.6061107433037707e-05, |
| "loss": 0.4803, |
| "step": 1332 |
| }, |
| { |
| "epoch": 3.6646048109965634, |
| "grad_norm": 0.16335769694535568, |
| "learning_rate": 1.5999501642271664e-05, |
| "loss": 0.491, |
| "step": 1333 |
| }, |
| { |
| "epoch": 3.667353951890034, |
| "grad_norm": 0.13731203188970648, |
| "learning_rate": 1.593798467901837e-05, |
| "loss": 0.4764, |
| "step": 1334 |
| }, |
| { |
| "epoch": 3.670103092783505, |
| "grad_norm": 0.14344242124627954, |
| "learning_rate": 1.5876556770956394e-05, |
| "loss": 0.4776, |
| "step": 1335 |
| }, |
| { |
| "epoch": 3.672852233676976, |
| "grad_norm": 0.14287518229033996, |
| "learning_rate": 1.581521814543467e-05, |
| "loss": 0.4729, |
| "step": 1336 |
| }, |
| { |
| "epoch": 3.675601374570447, |
| "grad_norm": 0.13919137334446374, |
| "learning_rate": 1.575396902947175e-05, |
| "loss": 0.4803, |
| "step": 1337 |
| }, |
| { |
| "epoch": 3.678350515463918, |
| "grad_norm": 0.1332949080422285, |
| "learning_rate": 1.5692809649754855e-05, |
| "loss": 0.4732, |
| "step": 1338 |
| }, |
| { |
| "epoch": 3.6810996563573886, |
| "grad_norm": 0.15251079258406947, |
| "learning_rate": 1.563174023263909e-05, |
| "loss": 0.4721, |
| "step": 1339 |
| }, |
| { |
| "epoch": 3.6838487972508593, |
| "grad_norm": 0.12884291771360323, |
| "learning_rate": 1.557076100414663e-05, |
| "loss": 0.4771, |
| "step": 1340 |
| }, |
| { |
| "epoch": 3.68659793814433, |
| "grad_norm": 0.14077765554999186, |
| "learning_rate": 1.5509872189965826e-05, |
| "loss": 0.4765, |
| "step": 1341 |
| }, |
| { |
| "epoch": 3.689347079037801, |
| "grad_norm": 0.12097139782888651, |
| "learning_rate": 1.5449074015450416e-05, |
| "loss": 0.4816, |
| "step": 1342 |
| }, |
| { |
| "epoch": 3.6920962199312717, |
| "grad_norm": 0.12394128179144817, |
| "learning_rate": 1.5388366705618656e-05, |
| "loss": 0.4719, |
| "step": 1343 |
| }, |
| { |
| "epoch": 3.6948453608247425, |
| "grad_norm": 0.12482467627547454, |
| "learning_rate": 1.5327750485152546e-05, |
| "loss": 0.4763, |
| "step": 1344 |
| }, |
| { |
| "epoch": 3.6975945017182132, |
| "grad_norm": 0.12282644196525853, |
| "learning_rate": 1.5267225578396923e-05, |
| "loss": 0.4773, |
| "step": 1345 |
| }, |
| { |
| "epoch": 3.700343642611684, |
| "grad_norm": 0.12030648328844293, |
| "learning_rate": 1.5206792209358665e-05, |
| "loss": 0.486, |
| "step": 1346 |
| }, |
| { |
| "epoch": 3.703092783505155, |
| "grad_norm": 0.1242904271715769, |
| "learning_rate": 1.5146450601705866e-05, |
| "loss": 0.4788, |
| "step": 1347 |
| }, |
| { |
| "epoch": 3.7058419243986256, |
| "grad_norm": 0.12938487720592814, |
| "learning_rate": 1.5086200978767051e-05, |
| "loss": 0.482, |
| "step": 1348 |
| }, |
| { |
| "epoch": 3.7085910652920964, |
| "grad_norm": 0.1181010757249468, |
| "learning_rate": 1.5026043563530213e-05, |
| "loss": 0.4817, |
| "step": 1349 |
| }, |
| { |
| "epoch": 3.711340206185567, |
| "grad_norm": 0.12542244186954696, |
| "learning_rate": 1.4965978578642152e-05, |
| "loss": 0.4829, |
| "step": 1350 |
| }, |
| { |
| "epoch": 3.714089347079038, |
| "grad_norm": 0.1301001364291388, |
| "learning_rate": 1.4906006246407531e-05, |
| "loss": 0.4801, |
| "step": 1351 |
| }, |
| { |
| "epoch": 3.7168384879725087, |
| "grad_norm": 0.11752586529006614, |
| "learning_rate": 1.4846126788788153e-05, |
| "loss": 0.4824, |
| "step": 1352 |
| }, |
| { |
| "epoch": 3.7195876288659795, |
| "grad_norm": 0.1250323838865372, |
| "learning_rate": 1.4786340427402034e-05, |
| "loss": 0.4735, |
| "step": 1353 |
| }, |
| { |
| "epoch": 3.7223367697594503, |
| "grad_norm": 0.11762297628012565, |
| "learning_rate": 1.472664738352266e-05, |
| "loss": 0.4738, |
| "step": 1354 |
| }, |
| { |
| "epoch": 3.725085910652921, |
| "grad_norm": 0.1295998839343961, |
| "learning_rate": 1.4667047878078115e-05, |
| "loss": 0.4797, |
| "step": 1355 |
| }, |
| { |
| "epoch": 3.727835051546392, |
| "grad_norm": 0.12318111875882015, |
| "learning_rate": 1.460754213165036e-05, |
| "loss": 0.4749, |
| "step": 1356 |
| }, |
| { |
| "epoch": 3.7305841924398626, |
| "grad_norm": 0.1372696026783743, |
| "learning_rate": 1.4548130364474253e-05, |
| "loss": 0.4828, |
| "step": 1357 |
| }, |
| { |
| "epoch": 3.7333333333333334, |
| "grad_norm": 0.11717512263035454, |
| "learning_rate": 1.4488812796436871e-05, |
| "loss": 0.4672, |
| "step": 1358 |
| }, |
| { |
| "epoch": 3.736082474226804, |
| "grad_norm": 0.13561308791703292, |
| "learning_rate": 1.4429589647076693e-05, |
| "loss": 0.4786, |
| "step": 1359 |
| }, |
| { |
| "epoch": 3.738831615120275, |
| "grad_norm": 0.12422009286706612, |
| "learning_rate": 1.4370461135582691e-05, |
| "loss": 0.4851, |
| "step": 1360 |
| }, |
| { |
| "epoch": 3.7415807560137457, |
| "grad_norm": 0.11832155403134417, |
| "learning_rate": 1.4311427480793597e-05, |
| "loss": 0.4766, |
| "step": 1361 |
| }, |
| { |
| "epoch": 3.7443298969072165, |
| "grad_norm": 0.12867243461981434, |
| "learning_rate": 1.4252488901197064e-05, |
| "loss": 0.4724, |
| "step": 1362 |
| }, |
| { |
| "epoch": 3.7470790378006873, |
| "grad_norm": 0.1313834080728087, |
| "learning_rate": 1.4193645614928886e-05, |
| "loss": 0.4714, |
| "step": 1363 |
| }, |
| { |
| "epoch": 3.749828178694158, |
| "grad_norm": 0.129183260330846, |
| "learning_rate": 1.4134897839772155e-05, |
| "loss": 0.4809, |
| "step": 1364 |
| }, |
| { |
| "epoch": 3.752577319587629, |
| "grad_norm": 0.12556934848044315, |
| "learning_rate": 1.4076245793156477e-05, |
| "loss": 0.4769, |
| "step": 1365 |
| }, |
| { |
| "epoch": 3.7553264604810996, |
| "grad_norm": 0.12147447097394133, |
| "learning_rate": 1.4017689692157114e-05, |
| "loss": 0.4821, |
| "step": 1366 |
| }, |
| { |
| "epoch": 3.7580756013745704, |
| "grad_norm": 0.13051373867173985, |
| "learning_rate": 1.3959229753494312e-05, |
| "loss": 0.472, |
| "step": 1367 |
| }, |
| { |
| "epoch": 3.760824742268041, |
| "grad_norm": 0.1294424563340561, |
| "learning_rate": 1.3900866193532347e-05, |
| "loss": 0.4822, |
| "step": 1368 |
| }, |
| { |
| "epoch": 3.763573883161512, |
| "grad_norm": 0.1319274091460674, |
| "learning_rate": 1.3842599228278819e-05, |
| "loss": 0.4808, |
| "step": 1369 |
| }, |
| { |
| "epoch": 3.7663230240549828, |
| "grad_norm": 0.14039131963053228, |
| "learning_rate": 1.3784429073383794e-05, |
| "loss": 0.4878, |
| "step": 1370 |
| }, |
| { |
| "epoch": 3.7690721649484535, |
| "grad_norm": 0.15726224881254797, |
| "learning_rate": 1.3726355944139091e-05, |
| "loss": 0.4858, |
| "step": 1371 |
| }, |
| { |
| "epoch": 3.7718213058419243, |
| "grad_norm": 0.1414080124007642, |
| "learning_rate": 1.3668380055477383e-05, |
| "loss": 0.4843, |
| "step": 1372 |
| }, |
| { |
| "epoch": 3.774570446735395, |
| "grad_norm": 0.14834309789500114, |
| "learning_rate": 1.3610501621971466e-05, |
| "loss": 0.4817, |
| "step": 1373 |
| }, |
| { |
| "epoch": 3.777319587628866, |
| "grad_norm": 0.17635910401341698, |
| "learning_rate": 1.3552720857833449e-05, |
| "loss": 0.4791, |
| "step": 1374 |
| }, |
| { |
| "epoch": 3.7800687285223367, |
| "grad_norm": 0.12924512390652784, |
| "learning_rate": 1.3495037976913947e-05, |
| "loss": 0.4786, |
| "step": 1375 |
| }, |
| { |
| "epoch": 3.7828178694158074, |
| "grad_norm": 0.15614756263557175, |
| "learning_rate": 1.3437453192701315e-05, |
| "loss": 0.4805, |
| "step": 1376 |
| }, |
| { |
| "epoch": 3.7855670103092782, |
| "grad_norm": 0.13352957029862794, |
| "learning_rate": 1.3379966718320839e-05, |
| "loss": 0.476, |
| "step": 1377 |
| }, |
| { |
| "epoch": 3.788316151202749, |
| "grad_norm": 0.14136665810120994, |
| "learning_rate": 1.3322578766533968e-05, |
| "loss": 0.473, |
| "step": 1378 |
| }, |
| { |
| "epoch": 3.79106529209622, |
| "grad_norm": 0.1527290850046059, |
| "learning_rate": 1.3265289549737501e-05, |
| "loss": 0.4826, |
| "step": 1379 |
| }, |
| { |
| "epoch": 3.7938144329896906, |
| "grad_norm": 0.13142189843945631, |
| "learning_rate": 1.32080992799628e-05, |
| "loss": 0.4807, |
| "step": 1380 |
| }, |
| { |
| "epoch": 3.7965635738831613, |
| "grad_norm": 0.13227897526677368, |
| "learning_rate": 1.3151008168875037e-05, |
| "loss": 0.47, |
| "step": 1381 |
| }, |
| { |
| "epoch": 3.799312714776632, |
| "grad_norm": 0.1292721449879669, |
| "learning_rate": 1.309401642777238e-05, |
| "loss": 0.4747, |
| "step": 1382 |
| }, |
| { |
| "epoch": 3.802061855670103, |
| "grad_norm": 0.11592891333671732, |
| "learning_rate": 1.3037124267585228e-05, |
| "loss": 0.4795, |
| "step": 1383 |
| }, |
| { |
| "epoch": 3.8048109965635737, |
| "grad_norm": 0.12251003727405267, |
| "learning_rate": 1.298033189887541e-05, |
| "loss": 0.4841, |
| "step": 1384 |
| }, |
| { |
| "epoch": 3.8075601374570445, |
| "grad_norm": 0.11989246216482814, |
| "learning_rate": 1.2923639531835433e-05, |
| "loss": 0.4688, |
| "step": 1385 |
| }, |
| { |
| "epoch": 3.8103092783505152, |
| "grad_norm": 0.12756974223833703, |
| "learning_rate": 1.28670473762877e-05, |
| "loss": 0.4746, |
| "step": 1386 |
| }, |
| { |
| "epoch": 3.813058419243986, |
| "grad_norm": 0.1180466527779192, |
| "learning_rate": 1.2810555641683706e-05, |
| "loss": 0.4779, |
| "step": 1387 |
| }, |
| { |
| "epoch": 3.815807560137457, |
| "grad_norm": 0.11529174071752626, |
| "learning_rate": 1.2754164537103289e-05, |
| "loss": 0.4721, |
| "step": 1388 |
| }, |
| { |
| "epoch": 3.8185567010309276, |
| "grad_norm": 0.11876628327578556, |
| "learning_rate": 1.2697874271253844e-05, |
| "loss": 0.4719, |
| "step": 1389 |
| }, |
| { |
| "epoch": 3.8213058419243984, |
| "grad_norm": 0.1212315987997364, |
| "learning_rate": 1.2641685052469556e-05, |
| "loss": 0.4901, |
| "step": 1390 |
| }, |
| { |
| "epoch": 3.824054982817869, |
| "grad_norm": 0.11790905831570075, |
| "learning_rate": 1.2585597088710637e-05, |
| "loss": 0.4755, |
| "step": 1391 |
| }, |
| { |
| "epoch": 3.82680412371134, |
| "grad_norm": 0.11385644404016117, |
| "learning_rate": 1.252961058756252e-05, |
| "loss": 0.4844, |
| "step": 1392 |
| }, |
| { |
| "epoch": 3.829553264604811, |
| "grad_norm": 0.11530386738039233, |
| "learning_rate": 1.2473725756235165e-05, |
| "loss": 0.4756, |
| "step": 1393 |
| }, |
| { |
| "epoch": 3.832302405498282, |
| "grad_norm": 0.12379904902641005, |
| "learning_rate": 1.2417942801562201e-05, |
| "loss": 0.4702, |
| "step": 1394 |
| }, |
| { |
| "epoch": 3.8350515463917527, |
| "grad_norm": 0.12225777044844523, |
| "learning_rate": 1.236226193000023e-05, |
| "loss": 0.4734, |
| "step": 1395 |
| }, |
| { |
| "epoch": 3.8378006872852235, |
| "grad_norm": 0.12161313208268175, |
| "learning_rate": 1.2306683347628012e-05, |
| "loss": 0.4809, |
| "step": 1396 |
| }, |
| { |
| "epoch": 3.8405498281786943, |
| "grad_norm": 0.13058946315743358, |
| "learning_rate": 1.2251207260145775e-05, |
| "loss": 0.4787, |
| "step": 1397 |
| }, |
| { |
| "epoch": 3.843298969072165, |
| "grad_norm": 0.11523497518057338, |
| "learning_rate": 1.2195833872874343e-05, |
| "loss": 0.4723, |
| "step": 1398 |
| }, |
| { |
| "epoch": 3.846048109965636, |
| "grad_norm": 0.13253286053473112, |
| "learning_rate": 1.2140563390754485e-05, |
| "loss": 0.4765, |
| "step": 1399 |
| }, |
| { |
| "epoch": 3.8487972508591066, |
| "grad_norm": 0.12147651056509255, |
| "learning_rate": 1.208539601834608e-05, |
| "loss": 0.4715, |
| "step": 1400 |
| }, |
| { |
| "epoch": 3.8515463917525774, |
| "grad_norm": 0.12552379756828472, |
| "learning_rate": 1.203033195982744e-05, |
| "loss": 0.4785, |
| "step": 1401 |
| }, |
| { |
| "epoch": 3.854295532646048, |
| "grad_norm": 0.12156559670865116, |
| "learning_rate": 1.1975371418994457e-05, |
| "loss": 0.4865, |
| "step": 1402 |
| }, |
| { |
| "epoch": 3.857044673539519, |
| "grad_norm": 0.13699296116050447, |
| "learning_rate": 1.1920514599259908e-05, |
| "loss": 0.4754, |
| "step": 1403 |
| }, |
| { |
| "epoch": 3.8597938144329897, |
| "grad_norm": 0.14619958749768325, |
| "learning_rate": 1.1865761703652691e-05, |
| "loss": 0.4896, |
| "step": 1404 |
| }, |
| { |
| "epoch": 3.8625429553264605, |
| "grad_norm": 0.11322022044471265, |
| "learning_rate": 1.1811112934817115e-05, |
| "loss": 0.4809, |
| "step": 1405 |
| }, |
| { |
| "epoch": 3.8652920962199313, |
| "grad_norm": 0.12792071165635296, |
| "learning_rate": 1.1756568495012032e-05, |
| "loss": 0.4777, |
| "step": 1406 |
| }, |
| { |
| "epoch": 3.868041237113402, |
| "grad_norm": 0.12840707488542488, |
| "learning_rate": 1.1702128586110204e-05, |
| "loss": 0.4704, |
| "step": 1407 |
| }, |
| { |
| "epoch": 3.870790378006873, |
| "grad_norm": 0.12236846928122713, |
| "learning_rate": 1.164779340959755e-05, |
| "loss": 0.4734, |
| "step": 1408 |
| }, |
| { |
| "epoch": 3.8735395189003436, |
| "grad_norm": 0.12828972122388524, |
| "learning_rate": 1.1593563166572315e-05, |
| "loss": 0.4728, |
| "step": 1409 |
| }, |
| { |
| "epoch": 3.8762886597938144, |
| "grad_norm": 0.11963041349113263, |
| "learning_rate": 1.1539438057744405e-05, |
| "loss": 0.4798, |
| "step": 1410 |
| }, |
| { |
| "epoch": 3.879037800687285, |
| "grad_norm": 0.11889755394099005, |
| "learning_rate": 1.1485418283434591e-05, |
| "loss": 0.4765, |
| "step": 1411 |
| }, |
| { |
| "epoch": 3.881786941580756, |
| "grad_norm": 0.1220924766559957, |
| "learning_rate": 1.1431504043573845e-05, |
| "loss": 0.4792, |
| "step": 1412 |
| }, |
| { |
| "epoch": 3.8845360824742268, |
| "grad_norm": 0.10991608717200368, |
| "learning_rate": 1.1377695537702506e-05, |
| "loss": 0.4758, |
| "step": 1413 |
| }, |
| { |
| "epoch": 3.8872852233676976, |
| "grad_norm": 0.1185758525402704, |
| "learning_rate": 1.1323992964969568e-05, |
| "loss": 0.4764, |
| "step": 1414 |
| }, |
| { |
| "epoch": 3.8900343642611683, |
| "grad_norm": 0.1285483122063387, |
| "learning_rate": 1.1270396524132016e-05, |
| "loss": 0.4726, |
| "step": 1415 |
| }, |
| { |
| "epoch": 3.892783505154639, |
| "grad_norm": 0.11257438797691342, |
| "learning_rate": 1.1216906413553996e-05, |
| "loss": 0.4853, |
| "step": 1416 |
| }, |
| { |
| "epoch": 3.89553264604811, |
| "grad_norm": 0.1265940578758485, |
| "learning_rate": 1.1163522831206119e-05, |
| "loss": 0.4853, |
| "step": 1417 |
| }, |
| { |
| "epoch": 3.8982817869415807, |
| "grad_norm": 0.12742498500479152, |
| "learning_rate": 1.1110245974664741e-05, |
| "loss": 0.4848, |
| "step": 1418 |
| }, |
| { |
| "epoch": 3.9010309278350515, |
| "grad_norm": 0.11699707249742979, |
| "learning_rate": 1.1057076041111197e-05, |
| "loss": 0.4728, |
| "step": 1419 |
| }, |
| { |
| "epoch": 3.9037800687285222, |
| "grad_norm": 0.12027686818075006, |
| "learning_rate": 1.100401322733113e-05, |
| "loss": 0.4697, |
| "step": 1420 |
| }, |
| { |
| "epoch": 3.906529209621993, |
| "grad_norm": 0.12028837431785216, |
| "learning_rate": 1.0951057729713698e-05, |
| "loss": 0.4735, |
| "step": 1421 |
| }, |
| { |
| "epoch": 3.909278350515464, |
| "grad_norm": 0.11366367375993695, |
| "learning_rate": 1.0898209744250852e-05, |
| "loss": 0.4798, |
| "step": 1422 |
| }, |
| { |
| "epoch": 3.9120274914089346, |
| "grad_norm": 0.12271108404053097, |
| "learning_rate": 1.0845469466536689e-05, |
| "loss": 0.4767, |
| "step": 1423 |
| }, |
| { |
| "epoch": 3.9147766323024054, |
| "grad_norm": 0.11169948199394023, |
| "learning_rate": 1.0792837091766635e-05, |
| "loss": 0.4773, |
| "step": 1424 |
| }, |
| { |
| "epoch": 3.917525773195876, |
| "grad_norm": 0.12185308524300838, |
| "learning_rate": 1.0740312814736766e-05, |
| "loss": 0.481, |
| "step": 1425 |
| }, |
| { |
| "epoch": 3.920274914089347, |
| "grad_norm": 0.11498767957292035, |
| "learning_rate": 1.0687896829843068e-05, |
| "loss": 0.4815, |
| "step": 1426 |
| }, |
| { |
| "epoch": 3.923024054982818, |
| "grad_norm": 0.10760167455092437, |
| "learning_rate": 1.0635589331080771e-05, |
| "loss": 0.4716, |
| "step": 1427 |
| }, |
| { |
| "epoch": 3.925773195876289, |
| "grad_norm": 0.11652282613458785, |
| "learning_rate": 1.0583390512043557e-05, |
| "loss": 0.4716, |
| "step": 1428 |
| }, |
| { |
| "epoch": 3.9285223367697597, |
| "grad_norm": 0.11095078761860352, |
| "learning_rate": 1.0531300565922873e-05, |
| "loss": 0.4773, |
| "step": 1429 |
| }, |
| { |
| "epoch": 3.9312714776632305, |
| "grad_norm": 0.10567263314079245, |
| "learning_rate": 1.0479319685507234e-05, |
| "loss": 0.4818, |
| "step": 1430 |
| }, |
| { |
| "epoch": 3.9340206185567013, |
| "grad_norm": 0.11290338751098791, |
| "learning_rate": 1.04274480631815e-05, |
| "loss": 0.4736, |
| "step": 1431 |
| }, |
| { |
| "epoch": 3.936769759450172, |
| "grad_norm": 0.11142975595031625, |
| "learning_rate": 1.0375685890926142e-05, |
| "loss": 0.4759, |
| "step": 1432 |
| }, |
| { |
| "epoch": 3.939518900343643, |
| "grad_norm": 0.12580032061681007, |
| "learning_rate": 1.0324033360316559e-05, |
| "loss": 0.4787, |
| "step": 1433 |
| }, |
| { |
| "epoch": 3.9422680412371136, |
| "grad_norm": 0.10935541303336552, |
| "learning_rate": 1.0272490662522347e-05, |
| "loss": 0.4771, |
| "step": 1434 |
| }, |
| { |
| "epoch": 3.9450171821305844, |
| "grad_norm": 0.11261196339303021, |
| "learning_rate": 1.0221057988306637e-05, |
| "loss": 0.4711, |
| "step": 1435 |
| }, |
| { |
| "epoch": 3.947766323024055, |
| "grad_norm": 0.11321244000910012, |
| "learning_rate": 1.0169735528025319e-05, |
| "loss": 0.4851, |
| "step": 1436 |
| }, |
| { |
| "epoch": 3.950515463917526, |
| "grad_norm": 0.11089992172259956, |
| "learning_rate": 1.011852347162639e-05, |
| "loss": 0.4751, |
| "step": 1437 |
| }, |
| { |
| "epoch": 3.9532646048109967, |
| "grad_norm": 0.11766021872483373, |
| "learning_rate": 1.0067422008649221e-05, |
| "loss": 0.4797, |
| "step": 1438 |
| }, |
| { |
| "epoch": 3.9560137457044675, |
| "grad_norm": 0.10628228690278965, |
| "learning_rate": 1.0016431328223887e-05, |
| "loss": 0.4782, |
| "step": 1439 |
| }, |
| { |
| "epoch": 3.9587628865979383, |
| "grad_norm": 0.11378648141949942, |
| "learning_rate": 9.96555161907044e-06, |
| "loss": 0.4766, |
| "step": 1440 |
| }, |
| { |
| "epoch": 3.961512027491409, |
| "grad_norm": 0.11864702105913649, |
| "learning_rate": 9.91478306949821e-06, |
| "loss": 0.4786, |
| "step": 1441 |
| }, |
| { |
| "epoch": 3.96426116838488, |
| "grad_norm": 0.12126170054240307, |
| "learning_rate": 9.864125867405146e-06, |
| "loss": 0.4797, |
| "step": 1442 |
| }, |
| { |
| "epoch": 3.9670103092783506, |
| "grad_norm": 0.11758636840862939, |
| "learning_rate": 9.813580200277063e-06, |
| "loss": 0.4768, |
| "step": 1443 |
| }, |
| { |
| "epoch": 3.9697594501718214, |
| "grad_norm": 0.11275441649847655, |
| "learning_rate": 9.76314625518699e-06, |
| "loss": 0.4668, |
| "step": 1444 |
| }, |
| { |
| "epoch": 3.972508591065292, |
| "grad_norm": 0.1131262643697182, |
| "learning_rate": 9.712824218794457e-06, |
| "loss": 0.4733, |
| "step": 1445 |
| }, |
| { |
| "epoch": 3.975257731958763, |
| "grad_norm": 0.10279589610315747, |
| "learning_rate": 9.662614277344846e-06, |
| "loss": 0.479, |
| "step": 1446 |
| }, |
| { |
| "epoch": 3.9780068728522338, |
| "grad_norm": 0.11106926427912463, |
| "learning_rate": 9.612516616668612e-06, |
| "loss": 0.4822, |
| "step": 1447 |
| }, |
| { |
| "epoch": 3.9807560137457045, |
| "grad_norm": 0.10564345176951702, |
| "learning_rate": 9.562531422180674e-06, |
| "loss": 0.4745, |
| "step": 1448 |
| }, |
| { |
| "epoch": 3.9835051546391753, |
| "grad_norm": 0.10829788647274623, |
| "learning_rate": 9.512658878879737e-06, |
| "loss": 0.4846, |
| "step": 1449 |
| }, |
| { |
| "epoch": 3.986254295532646, |
| "grad_norm": 0.11018495327771764, |
| "learning_rate": 9.462899171347538e-06, |
| "loss": 0.4802, |
| "step": 1450 |
| }, |
| { |
| "epoch": 3.989003436426117, |
| "grad_norm": 0.12608551718647237, |
| "learning_rate": 9.413252483748216e-06, |
| "loss": 0.4766, |
| "step": 1451 |
| }, |
| { |
| "epoch": 3.9917525773195877, |
| "grad_norm": 0.11337813442101644, |
| "learning_rate": 9.363718999827612e-06, |
| "loss": 0.479, |
| "step": 1452 |
| }, |
| { |
| "epoch": 3.9945017182130584, |
| "grad_norm": 0.12372671414303754, |
| "learning_rate": 9.314298902912583e-06, |
| "loss": 0.479, |
| "step": 1453 |
| }, |
| { |
| "epoch": 3.997250859106529, |
| "grad_norm": 0.11011001077642653, |
| "learning_rate": 9.26499237591037e-06, |
| "loss": 0.4695, |
| "step": 1454 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.14840181176358444, |
| "learning_rate": 9.215799601307828e-06, |
| "loss": 0.6096, |
| "step": 1455 |
| }, |
| { |
| "epoch": 4.002749140893471, |
| "grad_norm": 0.19421057731547003, |
| "learning_rate": 9.16672076117084e-06, |
| "loss": 0.4433, |
| "step": 1456 |
| }, |
| { |
| "epoch": 4.005498281786942, |
| "grad_norm": 0.15015912458886393, |
| "learning_rate": 9.117756037143622e-06, |
| "loss": 0.4425, |
| "step": 1457 |
| }, |
| { |
| "epoch": 4.008247422680412, |
| "grad_norm": 0.1383285525044493, |
| "learning_rate": 9.068905610448011e-06, |
| "loss": 0.4562, |
| "step": 1458 |
| }, |
| { |
| "epoch": 4.010996563573883, |
| "grad_norm": 0.15430721574490833, |
| "learning_rate": 9.02016966188283e-06, |
| "loss": 0.4484, |
| "step": 1459 |
| }, |
| { |
| "epoch": 4.013745704467354, |
| "grad_norm": 0.14905855161830828, |
| "learning_rate": 8.971548371823205e-06, |
| "loss": 0.446, |
| "step": 1460 |
| }, |
| { |
| "epoch": 4.016494845360825, |
| "grad_norm": 0.15470678416362293, |
| "learning_rate": 8.923041920219927e-06, |
| "loss": 0.4412, |
| "step": 1461 |
| }, |
| { |
| "epoch": 4.0192439862542955, |
| "grad_norm": 0.14564788987737176, |
| "learning_rate": 8.87465048659875e-06, |
| "loss": 0.4393, |
| "step": 1462 |
| }, |
| { |
| "epoch": 4.021993127147766, |
| "grad_norm": 0.13342656861373697, |
| "learning_rate": 8.826374250059704e-06, |
| "loss": 0.4498, |
| "step": 1463 |
| }, |
| { |
| "epoch": 4.024742268041237, |
| "grad_norm": 0.1302414374832906, |
| "learning_rate": 8.778213389276523e-06, |
| "loss": 0.4383, |
| "step": 1464 |
| }, |
| { |
| "epoch": 4.027491408934708, |
| "grad_norm": 0.14007280746183526, |
| "learning_rate": 8.730168082495885e-06, |
| "loss": 0.4474, |
| "step": 1465 |
| }, |
| { |
| "epoch": 4.030240549828179, |
| "grad_norm": 0.13171480431395038, |
| "learning_rate": 8.682238507536823e-06, |
| "loss": 0.4388, |
| "step": 1466 |
| }, |
| { |
| "epoch": 4.032989690721649, |
| "grad_norm": 0.12287866481381172, |
| "learning_rate": 8.634424841790014e-06, |
| "loss": 0.4384, |
| "step": 1467 |
| }, |
| { |
| "epoch": 4.03573883161512, |
| "grad_norm": 0.12293190753633977, |
| "learning_rate": 8.586727262217156e-06, |
| "loss": 0.4444, |
| "step": 1468 |
| }, |
| { |
| "epoch": 4.038487972508591, |
| "grad_norm": 0.12156568952497382, |
| "learning_rate": 8.539145945350324e-06, |
| "loss": 0.4393, |
| "step": 1469 |
| }, |
| { |
| "epoch": 4.041237113402062, |
| "grad_norm": 0.13520454469985085, |
| "learning_rate": 8.491681067291279e-06, |
| "loss": 0.4438, |
| "step": 1470 |
| }, |
| { |
| "epoch": 4.0439862542955325, |
| "grad_norm": 0.12818857279590956, |
| "learning_rate": 8.444332803710806e-06, |
| "loss": 0.4443, |
| "step": 1471 |
| }, |
| { |
| "epoch": 4.046735395189003, |
| "grad_norm": 0.11732641042539134, |
| "learning_rate": 8.397101329848146e-06, |
| "loss": 0.4443, |
| "step": 1472 |
| }, |
| { |
| "epoch": 4.049484536082474, |
| "grad_norm": 0.12736631288786282, |
| "learning_rate": 8.349986820510257e-06, |
| "loss": 0.4427, |
| "step": 1473 |
| }, |
| { |
| "epoch": 4.052233676975945, |
| "grad_norm": 0.1304818919917951, |
| "learning_rate": 8.302989450071219e-06, |
| "loss": 0.4478, |
| "step": 1474 |
| }, |
| { |
| "epoch": 4.054982817869416, |
| "grad_norm": 0.13641319860640636, |
| "learning_rate": 8.256109392471549e-06, |
| "loss": 0.4595, |
| "step": 1475 |
| }, |
| { |
| "epoch": 4.057731958762886, |
| "grad_norm": 0.11825978977034925, |
| "learning_rate": 8.20934682121763e-06, |
| "loss": 0.4461, |
| "step": 1476 |
| }, |
| { |
| "epoch": 4.060481099656357, |
| "grad_norm": 0.13030109067712098, |
| "learning_rate": 8.162701909380967e-06, |
| "loss": 0.4473, |
| "step": 1477 |
| }, |
| { |
| "epoch": 4.063230240549828, |
| "grad_norm": 0.12977389788164925, |
| "learning_rate": 8.116174829597625e-06, |
| "loss": 0.4506, |
| "step": 1478 |
| }, |
| { |
| "epoch": 4.065979381443299, |
| "grad_norm": 0.12148360180077224, |
| "learning_rate": 8.069765754067562e-06, |
| "loss": 0.4454, |
| "step": 1479 |
| }, |
| { |
| "epoch": 4.0687285223367695, |
| "grad_norm": 0.13076375704534893, |
| "learning_rate": 8.023474854553996e-06, |
| "loss": 0.4459, |
| "step": 1480 |
| }, |
| { |
| "epoch": 4.07147766323024, |
| "grad_norm": 0.13403343398060516, |
| "learning_rate": 7.977302302382757e-06, |
| "loss": 0.4392, |
| "step": 1481 |
| }, |
| { |
| "epoch": 4.074226804123711, |
| "grad_norm": 0.11166777845488875, |
| "learning_rate": 7.93124826844167e-06, |
| "loss": 0.4411, |
| "step": 1482 |
| }, |
| { |
| "epoch": 4.076975945017182, |
| "grad_norm": 0.13721899419089614, |
| "learning_rate": 7.885312923179912e-06, |
| "loss": 0.444, |
| "step": 1483 |
| }, |
| { |
| "epoch": 4.079725085910653, |
| "grad_norm": 0.13263433098857536, |
| "learning_rate": 7.839496436607406e-06, |
| "loss": 0.4459, |
| "step": 1484 |
| }, |
| { |
| "epoch": 4.082474226804123, |
| "grad_norm": 0.11590942502418243, |
| "learning_rate": 7.793798978294144e-06, |
| "loss": 0.4453, |
| "step": 1485 |
| }, |
| { |
| "epoch": 4.085223367697594, |
| "grad_norm": 0.13293144505263688, |
| "learning_rate": 7.748220717369594e-06, |
| "loss": 0.4413, |
| "step": 1486 |
| }, |
| { |
| "epoch": 4.087972508591065, |
| "grad_norm": 0.13483699801148805, |
| "learning_rate": 7.702761822522072e-06, |
| "loss": 0.4366, |
| "step": 1487 |
| }, |
| { |
| "epoch": 4.090721649484536, |
| "grad_norm": 0.11307879927149218, |
| "learning_rate": 7.6574224619981e-06, |
| "loss": 0.4352, |
| "step": 1488 |
| }, |
| { |
| "epoch": 4.0934707903780065, |
| "grad_norm": 0.11327258461306543, |
| "learning_rate": 7.612202803601812e-06, |
| "loss": 0.4513, |
| "step": 1489 |
| }, |
| { |
| "epoch": 4.096219931271477, |
| "grad_norm": 0.11998728903473971, |
| "learning_rate": 7.56710301469429e-06, |
| "loss": 0.4462, |
| "step": 1490 |
| }, |
| { |
| "epoch": 4.098969072164948, |
| "grad_norm": 0.11148518956657245, |
| "learning_rate": 7.522123262193011e-06, |
| "loss": 0.4414, |
| "step": 1491 |
| }, |
| { |
| "epoch": 4.101718213058419, |
| "grad_norm": 0.10980782127466164, |
| "learning_rate": 7.477263712571154e-06, |
| "loss": 0.4505, |
| "step": 1492 |
| }, |
| { |
| "epoch": 4.10446735395189, |
| "grad_norm": 0.13007209291051053, |
| "learning_rate": 7.432524531857037e-06, |
| "loss": 0.4404, |
| "step": 1493 |
| }, |
| { |
| "epoch": 4.10721649484536, |
| "grad_norm": 0.10633901487852752, |
| "learning_rate": 7.38790588563346e-06, |
| "loss": 0.4432, |
| "step": 1494 |
| }, |
| { |
| "epoch": 4.109965635738831, |
| "grad_norm": 0.10337495591575992, |
| "learning_rate": 7.343407939037166e-06, |
| "loss": 0.4442, |
| "step": 1495 |
| }, |
| { |
| "epoch": 4.112714776632302, |
| "grad_norm": 0.11699522345473407, |
| "learning_rate": 7.29903085675812e-06, |
| "loss": 0.4404, |
| "step": 1496 |
| }, |
| { |
| "epoch": 4.115463917525773, |
| "grad_norm": 0.1071569023006855, |
| "learning_rate": 7.254774803038986e-06, |
| "loss": 0.4448, |
| "step": 1497 |
| }, |
| { |
| "epoch": 4.118213058419244, |
| "grad_norm": 0.10590236218912877, |
| "learning_rate": 7.210639941674515e-06, |
| "loss": 0.4429, |
| "step": 1498 |
| }, |
| { |
| "epoch": 4.120962199312714, |
| "grad_norm": 0.10210210146806667, |
| "learning_rate": 7.166626436010893e-06, |
| "loss": 0.4415, |
| "step": 1499 |
| }, |
| { |
| "epoch": 4.123711340206185, |
| "grad_norm": 0.11125955970384037, |
| "learning_rate": 7.122734448945157e-06, |
| "loss": 0.4526, |
| "step": 1500 |
| }, |
| { |
| "epoch": 4.126460481099656, |
| "grad_norm": 0.12013542728499116, |
| "learning_rate": 7.078964142924607e-06, |
| "loss": 0.4419, |
| "step": 1501 |
| }, |
| { |
| "epoch": 4.129209621993128, |
| "grad_norm": 0.10484380190061886, |
| "learning_rate": 7.035315679946176e-06, |
| "loss": 0.4489, |
| "step": 1502 |
| }, |
| { |
| "epoch": 4.131958762886598, |
| "grad_norm": 0.09567503999966823, |
| "learning_rate": 6.991789221555887e-06, |
| "loss": 0.4372, |
| "step": 1503 |
| }, |
| { |
| "epoch": 4.134707903780069, |
| "grad_norm": 0.10540468880005585, |
| "learning_rate": 6.94838492884816e-06, |
| "loss": 0.4464, |
| "step": 1504 |
| }, |
| { |
| "epoch": 4.13745704467354, |
| "grad_norm": 0.11225166249471208, |
| "learning_rate": 6.905102962465302e-06, |
| "loss": 0.4439, |
| "step": 1505 |
| }, |
| { |
| "epoch": 4.140206185567011, |
| "grad_norm": 0.10759681388030853, |
| "learning_rate": 6.861943482596896e-06, |
| "loss": 0.4436, |
| "step": 1506 |
| }, |
| { |
| "epoch": 4.1429553264604815, |
| "grad_norm": 0.10130359403512029, |
| "learning_rate": 6.818906648979169e-06, |
| "loss": 0.4388, |
| "step": 1507 |
| }, |
| { |
| "epoch": 4.145704467353952, |
| "grad_norm": 0.1121174956770293, |
| "learning_rate": 6.775992620894434e-06, |
| "loss": 0.4507, |
| "step": 1508 |
| }, |
| { |
| "epoch": 4.148453608247423, |
| "grad_norm": 0.1014146829605852, |
| "learning_rate": 6.733201557170481e-06, |
| "loss": 0.4432, |
| "step": 1509 |
| }, |
| { |
| "epoch": 4.151202749140894, |
| "grad_norm": 0.09837158537375995, |
| "learning_rate": 6.690533616180031e-06, |
| "loss": 0.4416, |
| "step": 1510 |
| }, |
| { |
| "epoch": 4.153951890034365, |
| "grad_norm": 0.10857273055540849, |
| "learning_rate": 6.647988955840099e-06, |
| "loss": 0.4283, |
| "step": 1511 |
| }, |
| { |
| "epoch": 4.156701030927835, |
| "grad_norm": 0.11827148688247635, |
| "learning_rate": 6.6055677336114024e-06, |
| "loss": 0.4435, |
| "step": 1512 |
| }, |
| { |
| "epoch": 4.159450171821306, |
| "grad_norm": 0.10661184402237826, |
| "learning_rate": 6.563270106497866e-06, |
| "loss": 0.4493, |
| "step": 1513 |
| }, |
| { |
| "epoch": 4.162199312714777, |
| "grad_norm": 0.10027884158801532, |
| "learning_rate": 6.521096231045927e-06, |
| "loss": 0.4424, |
| "step": 1514 |
| }, |
| { |
| "epoch": 4.164948453608248, |
| "grad_norm": 0.10677714613702075, |
| "learning_rate": 6.479046263344044e-06, |
| "loss": 0.4407, |
| "step": 1515 |
| }, |
| { |
| "epoch": 4.1676975945017185, |
| "grad_norm": 0.10120025403165747, |
| "learning_rate": 6.43712035902206e-06, |
| "loss": 0.4482, |
| "step": 1516 |
| }, |
| { |
| "epoch": 4.170446735395189, |
| "grad_norm": 0.09576847158285014, |
| "learning_rate": 6.395318673250655e-06, |
| "loss": 0.4455, |
| "step": 1517 |
| }, |
| { |
| "epoch": 4.17319587628866, |
| "grad_norm": 0.10456502041919041, |
| "learning_rate": 6.353641360740796e-06, |
| "loss": 0.4442, |
| "step": 1518 |
| }, |
| { |
| "epoch": 4.175945017182131, |
| "grad_norm": 0.10975215070737157, |
| "learning_rate": 6.312088575743112e-06, |
| "loss": 0.4421, |
| "step": 1519 |
| }, |
| { |
| "epoch": 4.178694158075602, |
| "grad_norm": 0.09670376981272215, |
| "learning_rate": 6.270660472047318e-06, |
| "loss": 0.4357, |
| "step": 1520 |
| }, |
| { |
| "epoch": 4.181443298969072, |
| "grad_norm": 0.0989487056180283, |
| "learning_rate": 6.229357202981736e-06, |
| "loss": 0.4446, |
| "step": 1521 |
| }, |
| { |
| "epoch": 4.184192439862543, |
| "grad_norm": 0.11575820428367671, |
| "learning_rate": 6.188178921412622e-06, |
| "loss": 0.4376, |
| "step": 1522 |
| }, |
| { |
| "epoch": 4.186941580756014, |
| "grad_norm": 0.09506731515085455, |
| "learning_rate": 6.147125779743666e-06, |
| "loss": 0.4399, |
| "step": 1523 |
| }, |
| { |
| "epoch": 4.189690721649485, |
| "grad_norm": 0.10319725228868595, |
| "learning_rate": 6.106197929915385e-06, |
| "loss": 0.4494, |
| "step": 1524 |
| }, |
| { |
| "epoch": 4.1924398625429555, |
| "grad_norm": 0.10930094347383024, |
| "learning_rate": 6.065395523404616e-06, |
| "loss": 0.4416, |
| "step": 1525 |
| }, |
| { |
| "epoch": 4.195189003436426, |
| "grad_norm": 0.10333832350838476, |
| "learning_rate": 6.0247187112238936e-06, |
| "loss": 0.4419, |
| "step": 1526 |
| }, |
| { |
| "epoch": 4.197938144329897, |
| "grad_norm": 0.09863833187141094, |
| "learning_rate": 5.984167643920926e-06, |
| "loss": 0.45, |
| "step": 1527 |
| }, |
| { |
| "epoch": 4.200687285223368, |
| "grad_norm": 0.10338899938814769, |
| "learning_rate": 5.943742471578029e-06, |
| "loss": 0.4432, |
| "step": 1528 |
| }, |
| { |
| "epoch": 4.203436426116839, |
| "grad_norm": 0.10080408781457796, |
| "learning_rate": 5.903443343811583e-06, |
| "loss": 0.4498, |
| "step": 1529 |
| }, |
| { |
| "epoch": 4.206185567010309, |
| "grad_norm": 0.0995238925477759, |
| "learning_rate": 5.863270409771451e-06, |
| "loss": 0.4441, |
| "step": 1530 |
| }, |
| { |
| "epoch": 4.20893470790378, |
| "grad_norm": 0.09871347524798951, |
| "learning_rate": 5.823223818140458e-06, |
| "loss": 0.4465, |
| "step": 1531 |
| }, |
| { |
| "epoch": 4.211683848797251, |
| "grad_norm": 0.09741808192782717, |
| "learning_rate": 5.78330371713383e-06, |
| "loss": 0.4521, |
| "step": 1532 |
| }, |
| { |
| "epoch": 4.214432989690722, |
| "grad_norm": 0.10108809287384478, |
| "learning_rate": 5.7435102544986325e-06, |
| "loss": 0.4476, |
| "step": 1533 |
| }, |
| { |
| "epoch": 4.217182130584193, |
| "grad_norm": 0.09586236341789252, |
| "learning_rate": 5.70384357751323e-06, |
| "loss": 0.4417, |
| "step": 1534 |
| }, |
| { |
| "epoch": 4.219931271477663, |
| "grad_norm": 0.0982846297983466, |
| "learning_rate": 5.664303832986764e-06, |
| "loss": 0.4496, |
| "step": 1535 |
| }, |
| { |
| "epoch": 4.222680412371134, |
| "grad_norm": 0.09404435584314635, |
| "learning_rate": 5.624891167258569e-06, |
| "loss": 0.4415, |
| "step": 1536 |
| }, |
| { |
| "epoch": 4.225429553264605, |
| "grad_norm": 0.09792884175585087, |
| "learning_rate": 5.585605726197663e-06, |
| "loss": 0.4364, |
| "step": 1537 |
| }, |
| { |
| "epoch": 4.228178694158076, |
| "grad_norm": 0.0966404647926693, |
| "learning_rate": 5.54644765520219e-06, |
| "loss": 0.4464, |
| "step": 1538 |
| }, |
| { |
| "epoch": 4.2309278350515465, |
| "grad_norm": 0.09803372963199505, |
| "learning_rate": 5.507417099198886e-06, |
| "loss": 0.443, |
| "step": 1539 |
| }, |
| { |
| "epoch": 4.233676975945017, |
| "grad_norm": 0.10220414170891072, |
| "learning_rate": 5.468514202642574e-06, |
| "loss": 0.4416, |
| "step": 1540 |
| }, |
| { |
| "epoch": 4.236426116838488, |
| "grad_norm": 0.09940583554566296, |
| "learning_rate": 5.429739109515564e-06, |
| "loss": 0.4483, |
| "step": 1541 |
| }, |
| { |
| "epoch": 4.239175257731959, |
| "grad_norm": 0.09639208839569098, |
| "learning_rate": 5.3910919633271755e-06, |
| "loss": 0.442, |
| "step": 1542 |
| }, |
| { |
| "epoch": 4.24192439862543, |
| "grad_norm": 0.1117369320333374, |
| "learning_rate": 5.352572907113178e-06, |
| "loss": 0.4461, |
| "step": 1543 |
| }, |
| { |
| "epoch": 4.2446735395189, |
| "grad_norm": 0.09611791874864287, |
| "learning_rate": 5.314182083435282e-06, |
| "loss": 0.4346, |
| "step": 1544 |
| }, |
| { |
| "epoch": 4.247422680412371, |
| "grad_norm": 0.10794580998258632, |
| "learning_rate": 5.2759196343805885e-06, |
| "loss": 0.4415, |
| "step": 1545 |
| }, |
| { |
| "epoch": 4.250171821305842, |
| "grad_norm": 0.0952864724999734, |
| "learning_rate": 5.237785701561078e-06, |
| "loss": 0.4443, |
| "step": 1546 |
| }, |
| { |
| "epoch": 4.252920962199313, |
| "grad_norm": 0.10156924345931388, |
| "learning_rate": 5.1997804261131015e-06, |
| "loss": 0.449, |
| "step": 1547 |
| }, |
| { |
| "epoch": 4.2556701030927835, |
| "grad_norm": 0.09725478784660825, |
| "learning_rate": 5.161903948696813e-06, |
| "loss": 0.439, |
| "step": 1548 |
| }, |
| { |
| "epoch": 4.258419243986254, |
| "grad_norm": 0.10252085604277364, |
| "learning_rate": 5.124156409495693e-06, |
| "loss": 0.4385, |
| "step": 1549 |
| }, |
| { |
| "epoch": 4.261168384879725, |
| "grad_norm": 0.10131443009760514, |
| "learning_rate": 5.086537948216008e-06, |
| "loss": 0.4407, |
| "step": 1550 |
| }, |
| { |
| "epoch": 4.263917525773196, |
| "grad_norm": 0.09875784055342578, |
| "learning_rate": 5.049048704086295e-06, |
| "loss": 0.4469, |
| "step": 1551 |
| }, |
| { |
| "epoch": 4.266666666666667, |
| "grad_norm": 0.10157130046545569, |
| "learning_rate": 5.011688815856856e-06, |
| "loss": 0.4502, |
| "step": 1552 |
| }, |
| { |
| "epoch": 4.269415807560137, |
| "grad_norm": 0.10941012381717943, |
| "learning_rate": 4.974458421799231e-06, |
| "loss": 0.4411, |
| "step": 1553 |
| }, |
| { |
| "epoch": 4.272164948453608, |
| "grad_norm": 0.11064889415177875, |
| "learning_rate": 4.937357659705688e-06, |
| "loss": 0.4419, |
| "step": 1554 |
| }, |
| { |
| "epoch": 4.274914089347079, |
| "grad_norm": 0.10002641057536234, |
| "learning_rate": 4.90038666688875e-06, |
| "loss": 0.4558, |
| "step": 1555 |
| }, |
| { |
| "epoch": 4.27766323024055, |
| "grad_norm": 0.10016807122767976, |
| "learning_rate": 4.863545580180615e-06, |
| "loss": 0.4435, |
| "step": 1556 |
| }, |
| { |
| "epoch": 4.2804123711340205, |
| "grad_norm": 0.10851136506221658, |
| "learning_rate": 4.8268345359327075e-06, |
| "loss": 0.4398, |
| "step": 1557 |
| }, |
| { |
| "epoch": 4.283161512027491, |
| "grad_norm": 0.09488321817571037, |
| "learning_rate": 4.7902536700151504e-06, |
| "loss": 0.4429, |
| "step": 1558 |
| }, |
| { |
| "epoch": 4.285910652920962, |
| "grad_norm": 0.09962394867509372, |
| "learning_rate": 4.75380311781628e-06, |
| "loss": 0.4435, |
| "step": 1559 |
| }, |
| { |
| "epoch": 4.288659793814433, |
| "grad_norm": 0.10525730743423926, |
| "learning_rate": 4.717483014242134e-06, |
| "loss": 0.4488, |
| "step": 1560 |
| }, |
| { |
| "epoch": 4.291408934707904, |
| "grad_norm": 0.09444016052953329, |
| "learning_rate": 4.681293493715906e-06, |
| "loss": 0.4383, |
| "step": 1561 |
| }, |
| { |
| "epoch": 4.294158075601374, |
| "grad_norm": 0.0935555125757194, |
| "learning_rate": 4.645234690177556e-06, |
| "loss": 0.441, |
| "step": 1562 |
| }, |
| { |
| "epoch": 4.296907216494845, |
| "grad_norm": 0.09875362647621048, |
| "learning_rate": 4.6093067370832145e-06, |
| "loss": 0.4467, |
| "step": 1563 |
| }, |
| { |
| "epoch": 4.299656357388316, |
| "grad_norm": 0.10106687338028049, |
| "learning_rate": 4.573509767404733e-06, |
| "loss": 0.4398, |
| "step": 1564 |
| }, |
| { |
| "epoch": 4.302405498281787, |
| "grad_norm": 0.09430960821120914, |
| "learning_rate": 4.537843913629178e-06, |
| "loss": 0.4498, |
| "step": 1565 |
| }, |
| { |
| "epoch": 4.3051546391752575, |
| "grad_norm": 0.09341919086027475, |
| "learning_rate": 4.502309307758368e-06, |
| "loss": 0.4427, |
| "step": 1566 |
| }, |
| { |
| "epoch": 4.307903780068728, |
| "grad_norm": 0.09579646490730007, |
| "learning_rate": 4.46690608130834e-06, |
| "loss": 0.4504, |
| "step": 1567 |
| }, |
| { |
| "epoch": 4.310652920962199, |
| "grad_norm": 0.10140215061324537, |
| "learning_rate": 4.431634365308904e-06, |
| "loss": 0.4445, |
| "step": 1568 |
| }, |
| { |
| "epoch": 4.31340206185567, |
| "grad_norm": 0.09687704144452364, |
| "learning_rate": 4.39649429030311e-06, |
| "loss": 0.4478, |
| "step": 1569 |
| }, |
| { |
| "epoch": 4.316151202749141, |
| "grad_norm": 0.09438778342768989, |
| "learning_rate": 4.3614859863468425e-06, |
| "loss": 0.4386, |
| "step": 1570 |
| }, |
| { |
| "epoch": 4.318900343642611, |
| "grad_norm": 0.09156131739398227, |
| "learning_rate": 4.326609583008261e-06, |
| "loss": 0.4338, |
| "step": 1571 |
| }, |
| { |
| "epoch": 4.321649484536082, |
| "grad_norm": 0.09078660520618338, |
| "learning_rate": 4.2918652093673606e-06, |
| "loss": 0.442, |
| "step": 1572 |
| }, |
| { |
| "epoch": 4.324398625429553, |
| "grad_norm": 0.10260056112083875, |
| "learning_rate": 4.257252994015466e-06, |
| "loss": 0.4441, |
| "step": 1573 |
| }, |
| { |
| "epoch": 4.327147766323024, |
| "grad_norm": 0.10384708279251556, |
| "learning_rate": 4.222773065054817e-06, |
| "loss": 0.4466, |
| "step": 1574 |
| }, |
| { |
| "epoch": 4.329896907216495, |
| "grad_norm": 0.09444726168415894, |
| "learning_rate": 4.188425550098019e-06, |
| "loss": 0.444, |
| "step": 1575 |
| }, |
| { |
| "epoch": 4.332646048109965, |
| "grad_norm": 0.1139460077036934, |
| "learning_rate": 4.15421057626761e-06, |
| "loss": 0.4446, |
| "step": 1576 |
| }, |
| { |
| "epoch": 4.335395189003436, |
| "grad_norm": 0.09822740636361162, |
| "learning_rate": 4.120128270195585e-06, |
| "loss": 0.4424, |
| "step": 1577 |
| }, |
| { |
| "epoch": 4.338144329896907, |
| "grad_norm": 0.09913730861031846, |
| "learning_rate": 4.086178758022929e-06, |
| "loss": 0.4477, |
| "step": 1578 |
| }, |
| { |
| "epoch": 4.340893470790378, |
| "grad_norm": 0.09892692023825955, |
| "learning_rate": 4.052362165399148e-06, |
| "loss": 0.4462, |
| "step": 1579 |
| }, |
| { |
| "epoch": 4.3436426116838485, |
| "grad_norm": 0.09559985988178789, |
| "learning_rate": 4.018678617481797e-06, |
| "loss": 0.4518, |
| "step": 1580 |
| }, |
| { |
| "epoch": 4.346391752577319, |
| "grad_norm": 0.09645896728376689, |
| "learning_rate": 3.9851282389360336e-06, |
| "loss": 0.4411, |
| "step": 1581 |
| }, |
| { |
| "epoch": 4.34914089347079, |
| "grad_norm": 0.09621093774200687, |
| "learning_rate": 3.951711153934143e-06, |
| "loss": 0.4455, |
| "step": 1582 |
| }, |
| { |
| "epoch": 4.351890034364261, |
| "grad_norm": 0.09860601420290797, |
| "learning_rate": 3.918427486155079e-06, |
| "loss": 0.4452, |
| "step": 1583 |
| }, |
| { |
| "epoch": 4.354639175257732, |
| "grad_norm": 0.09460049776906895, |
| "learning_rate": 3.885277358784003e-06, |
| "loss": 0.4476, |
| "step": 1584 |
| }, |
| { |
| "epoch": 4.357388316151202, |
| "grad_norm": 0.0970914145620547, |
| "learning_rate": 3.85226089451185e-06, |
| "loss": 0.4486, |
| "step": 1585 |
| }, |
| { |
| "epoch": 4.360137457044673, |
| "grad_norm": 0.10970056178061441, |
| "learning_rate": 3.819378215534842e-06, |
| "loss": 0.4404, |
| "step": 1586 |
| }, |
| { |
| "epoch": 4.362886597938144, |
| "grad_norm": 0.09671374353988552, |
| "learning_rate": 3.7866294435540574e-06, |
| "loss": 0.4534, |
| "step": 1587 |
| }, |
| { |
| "epoch": 4.365635738831615, |
| "grad_norm": 0.09276644771447388, |
| "learning_rate": 3.7540146997749793e-06, |
| "loss": 0.4489, |
| "step": 1588 |
| }, |
| { |
| "epoch": 4.368384879725086, |
| "grad_norm": 0.09400993795205304, |
| "learning_rate": 3.72153410490705e-06, |
| "loss": 0.4467, |
| "step": 1589 |
| }, |
| { |
| "epoch": 4.371134020618557, |
| "grad_norm": 0.10312570233843393, |
| "learning_rate": 3.689187779163206e-06, |
| "loss": 0.4491, |
| "step": 1590 |
| }, |
| { |
| "epoch": 4.373883161512028, |
| "grad_norm": 0.10124141768595926, |
| "learning_rate": 3.6569758422594446e-06, |
| "loss": 0.4528, |
| "step": 1591 |
| }, |
| { |
| "epoch": 4.376632302405499, |
| "grad_norm": 0.0947775184692594, |
| "learning_rate": 3.6248984134143794e-06, |
| "loss": 0.4493, |
| "step": 1592 |
| }, |
| { |
| "epoch": 4.3793814432989695, |
| "grad_norm": 0.0949068368311883, |
| "learning_rate": 3.5929556113488117e-06, |
| "loss": 0.4502, |
| "step": 1593 |
| }, |
| { |
| "epoch": 4.38213058419244, |
| "grad_norm": 0.09225617183677981, |
| "learning_rate": 3.5611475542852714e-06, |
| "loss": 0.4378, |
| "step": 1594 |
| }, |
| { |
| "epoch": 4.384879725085911, |
| "grad_norm": 0.09712650449251893, |
| "learning_rate": 3.5294743599475754e-06, |
| "loss": 0.446, |
| "step": 1595 |
| }, |
| { |
| "epoch": 4.387628865979382, |
| "grad_norm": 0.10510272619911021, |
| "learning_rate": 3.4979361455604386e-06, |
| "loss": 0.4438, |
| "step": 1596 |
| }, |
| { |
| "epoch": 4.390378006872853, |
| "grad_norm": 0.10034299553088664, |
| "learning_rate": 3.4665330278489708e-06, |
| "loss": 0.4487, |
| "step": 1597 |
| }, |
| { |
| "epoch": 4.393127147766323, |
| "grad_norm": 0.09360138161898929, |
| "learning_rate": 3.4352651230383026e-06, |
| "loss": 0.4409, |
| "step": 1598 |
| }, |
| { |
| "epoch": 4.395876288659794, |
| "grad_norm": 0.09887187647716278, |
| "learning_rate": 3.4041325468531094e-06, |
| "loss": 0.4409, |
| "step": 1599 |
| }, |
| { |
| "epoch": 4.398625429553265, |
| "grad_norm": 0.10473071373698492, |
| "learning_rate": 3.3731354145172435e-06, |
| "loss": 0.4516, |
| "step": 1600 |
| }, |
| { |
| "epoch": 4.401374570446736, |
| "grad_norm": 0.1007375745415408, |
| "learning_rate": 3.342273840753221e-06, |
| "loss": 0.4383, |
| "step": 1601 |
| }, |
| { |
| "epoch": 4.4041237113402065, |
| "grad_norm": 0.09735792228082213, |
| "learning_rate": 3.311547939781887e-06, |
| "loss": 0.4409, |
| "step": 1602 |
| }, |
| { |
| "epoch": 4.406872852233677, |
| "grad_norm": 0.09459778267684918, |
| "learning_rate": 3.280957825321922e-06, |
| "loss": 0.4442, |
| "step": 1603 |
| }, |
| { |
| "epoch": 4.409621993127148, |
| "grad_norm": 0.09281792628016262, |
| "learning_rate": 3.250503610589482e-06, |
| "loss": 0.4491, |
| "step": 1604 |
| }, |
| { |
| "epoch": 4.412371134020619, |
| "grad_norm": 0.1007767987975053, |
| "learning_rate": 3.2201854082977292e-06, |
| "loss": 0.4517, |
| "step": 1605 |
| }, |
| { |
| "epoch": 4.41512027491409, |
| "grad_norm": 0.09789821294262532, |
| "learning_rate": 3.190003330656435e-06, |
| "loss": 0.4451, |
| "step": 1606 |
| }, |
| { |
| "epoch": 4.41786941580756, |
| "grad_norm": 0.10028034923814605, |
| "learning_rate": 3.159957489371559e-06, |
| "loss": 0.4367, |
| "step": 1607 |
| }, |
| { |
| "epoch": 4.420618556701031, |
| "grad_norm": 0.09081132192790976, |
| "learning_rate": 3.1300479956448693e-06, |
| "loss": 0.4387, |
| "step": 1608 |
| }, |
| { |
| "epoch": 4.423367697594502, |
| "grad_norm": 0.09711016308139389, |
| "learning_rate": 3.1002749601734618e-06, |
| "loss": 0.4428, |
| "step": 1609 |
| }, |
| { |
| "epoch": 4.426116838487973, |
| "grad_norm": 0.09435493012986142, |
| "learning_rate": 3.0706384931494137e-06, |
| "loss": 0.4328, |
| "step": 1610 |
| }, |
| { |
| "epoch": 4.4288659793814436, |
| "grad_norm": 0.09951785033015637, |
| "learning_rate": 3.0411387042593545e-06, |
| "loss": 0.4512, |
| "step": 1611 |
| }, |
| { |
| "epoch": 4.431615120274914, |
| "grad_norm": 0.0960124562000595, |
| "learning_rate": 3.0117757026840543e-06, |
| "loss": 0.4485, |
| "step": 1612 |
| }, |
| { |
| "epoch": 4.434364261168385, |
| "grad_norm": 0.09374569219824876, |
| "learning_rate": 2.9825495970980234e-06, |
| "loss": 0.4448, |
| "step": 1613 |
| }, |
| { |
| "epoch": 4.437113402061856, |
| "grad_norm": 0.09287833110723459, |
| "learning_rate": 2.953460495669096e-06, |
| "loss": 0.4473, |
| "step": 1614 |
| }, |
| { |
| "epoch": 4.439862542955327, |
| "grad_norm": 0.09330518761309428, |
| "learning_rate": 2.924508506058077e-06, |
| "loss": 0.4368, |
| "step": 1615 |
| }, |
| { |
| "epoch": 4.4426116838487975, |
| "grad_norm": 0.09851414675120082, |
| "learning_rate": 2.8956937354182923e-06, |
| "loss": 0.4438, |
| "step": 1616 |
| }, |
| { |
| "epoch": 4.445360824742268, |
| "grad_norm": 0.09503089697922397, |
| "learning_rate": 2.867016290395199e-06, |
| "loss": 0.4557, |
| "step": 1617 |
| }, |
| { |
| "epoch": 4.448109965635739, |
| "grad_norm": 0.09322925477662775, |
| "learning_rate": 2.838476277126012e-06, |
| "loss": 0.4428, |
| "step": 1618 |
| }, |
| { |
| "epoch": 4.45085910652921, |
| "grad_norm": 0.09242195549495436, |
| "learning_rate": 2.810073801239317e-06, |
| "loss": 0.446, |
| "step": 1619 |
| }, |
| { |
| "epoch": 4.453608247422681, |
| "grad_norm": 0.08945993365343624, |
| "learning_rate": 2.781808967854649e-06, |
| "loss": 0.4412, |
| "step": 1620 |
| }, |
| { |
| "epoch": 4.456357388316151, |
| "grad_norm": 0.09428643807864204, |
| "learning_rate": 2.753681881582115e-06, |
| "loss": 0.4416, |
| "step": 1621 |
| }, |
| { |
| "epoch": 4.459106529209622, |
| "grad_norm": 0.0931736838114746, |
| "learning_rate": 2.7256926465220177e-06, |
| "loss": 0.4472, |
| "step": 1622 |
| }, |
| { |
| "epoch": 4.461855670103093, |
| "grad_norm": 0.09181260953087753, |
| "learning_rate": 2.697841366264471e-06, |
| "loss": 0.4481, |
| "step": 1623 |
| }, |
| { |
| "epoch": 4.464604810996564, |
| "grad_norm": 0.09223991013911755, |
| "learning_rate": 2.670128143888988e-06, |
| "loss": 0.4417, |
| "step": 1624 |
| }, |
| { |
| "epoch": 4.4673539518900345, |
| "grad_norm": 0.09242485950713865, |
| "learning_rate": 2.6425530819641364e-06, |
| "loss": 0.4501, |
| "step": 1625 |
| }, |
| { |
| "epoch": 4.470103092783505, |
| "grad_norm": 0.09113105402491876, |
| "learning_rate": 2.6151162825471364e-06, |
| "loss": 0.4442, |
| "step": 1626 |
| }, |
| { |
| "epoch": 4.472852233676976, |
| "grad_norm": 0.09212808247265639, |
| "learning_rate": 2.587817847183489e-06, |
| "loss": 0.4429, |
| "step": 1627 |
| }, |
| { |
| "epoch": 4.475601374570447, |
| "grad_norm": 0.09767261371119439, |
| "learning_rate": 2.5606578769066026e-06, |
| "loss": 0.4432, |
| "step": 1628 |
| }, |
| { |
| "epoch": 4.478350515463918, |
| "grad_norm": 0.09743572389157769, |
| "learning_rate": 2.5336364722374106e-06, |
| "loss": 0.4487, |
| "step": 1629 |
| }, |
| { |
| "epoch": 4.481099656357388, |
| "grad_norm": 0.09102922314529087, |
| "learning_rate": 2.506753733184013e-06, |
| "loss": 0.4503, |
| "step": 1630 |
| }, |
| { |
| "epoch": 4.483848797250859, |
| "grad_norm": 0.09266255759834628, |
| "learning_rate": 2.480009759241302e-06, |
| "loss": 0.4405, |
| "step": 1631 |
| }, |
| { |
| "epoch": 4.48659793814433, |
| "grad_norm": 0.0925039627946117, |
| "learning_rate": 2.453404649390576e-06, |
| "loss": 0.4398, |
| "step": 1632 |
| }, |
| { |
| "epoch": 4.489347079037801, |
| "grad_norm": 0.09521064172876387, |
| "learning_rate": 2.4269385020991987e-06, |
| "loss": 0.4486, |
| "step": 1633 |
| }, |
| { |
| "epoch": 4.4920962199312715, |
| "grad_norm": 0.09113818118580695, |
| "learning_rate": 2.40061141532022e-06, |
| "loss": 0.4431, |
| "step": 1634 |
| }, |
| { |
| "epoch": 4.494845360824742, |
| "grad_norm": 0.10000887227672747, |
| "learning_rate": 2.374423486492021e-06, |
| "loss": 0.4388, |
| "step": 1635 |
| }, |
| { |
| "epoch": 4.497594501718213, |
| "grad_norm": 0.09939158503866538, |
| "learning_rate": 2.3483748125379434e-06, |
| "loss": 0.4526, |
| "step": 1636 |
| }, |
| { |
| "epoch": 4.500343642611684, |
| "grad_norm": 0.08865852194600819, |
| "learning_rate": 2.3224654898659348e-06, |
| "loss": 0.4458, |
| "step": 1637 |
| }, |
| { |
| "epoch": 4.503092783505155, |
| "grad_norm": 0.09118943093226906, |
| "learning_rate": 2.2966956143682094e-06, |
| "loss": 0.444, |
| "step": 1638 |
| }, |
| { |
| "epoch": 4.505841924398625, |
| "grad_norm": 0.09085329322674768, |
| "learning_rate": 2.2710652814208656e-06, |
| "loss": 0.4516, |
| "step": 1639 |
| }, |
| { |
| "epoch": 4.508591065292096, |
| "grad_norm": 0.09474948162033754, |
| "learning_rate": 2.2455745858835474e-06, |
| "loss": 0.4439, |
| "step": 1640 |
| }, |
| { |
| "epoch": 4.511340206185567, |
| "grad_norm": 0.09100873289075298, |
| "learning_rate": 2.2202236220990913e-06, |
| "loss": 0.4441, |
| "step": 1641 |
| }, |
| { |
| "epoch": 4.514089347079038, |
| "grad_norm": 0.09556416343413056, |
| "learning_rate": 2.1950124838931685e-06, |
| "loss": 0.4426, |
| "step": 1642 |
| }, |
| { |
| "epoch": 4.5168384879725085, |
| "grad_norm": 0.09124176627878805, |
| "learning_rate": 2.169941264573967e-06, |
| "loss": 0.4515, |
| "step": 1643 |
| }, |
| { |
| "epoch": 4.519587628865979, |
| "grad_norm": 0.08967523905753207, |
| "learning_rate": 2.1450100569317954e-06, |
| "loss": 0.4427, |
| "step": 1644 |
| }, |
| { |
| "epoch": 4.52233676975945, |
| "grad_norm": 0.08908910909605862, |
| "learning_rate": 2.1202189532387994e-06, |
| "loss": 0.4421, |
| "step": 1645 |
| }, |
| { |
| "epoch": 4.525085910652921, |
| "grad_norm": 0.08930544495492708, |
| "learning_rate": 2.0955680452485795e-06, |
| "loss": 0.4386, |
| "step": 1646 |
| }, |
| { |
| "epoch": 4.527835051546392, |
| "grad_norm": 0.09705957099996088, |
| "learning_rate": 2.0710574241958527e-06, |
| "loss": 0.4499, |
| "step": 1647 |
| }, |
| { |
| "epoch": 4.530584192439862, |
| "grad_norm": 0.09629213253097813, |
| "learning_rate": 2.0466871807961253e-06, |
| "loss": 0.4483, |
| "step": 1648 |
| }, |
| { |
| "epoch": 4.533333333333333, |
| "grad_norm": 0.08724520563465142, |
| "learning_rate": 2.0224574052453817e-06, |
| "loss": 0.4449, |
| "step": 1649 |
| }, |
| { |
| "epoch": 4.536082474226804, |
| "grad_norm": 0.09135773175337733, |
| "learning_rate": 1.9983681872196836e-06, |
| "loss": 0.4399, |
| "step": 1650 |
| }, |
| { |
| "epoch": 4.538831615120275, |
| "grad_norm": 0.09009008354611814, |
| "learning_rate": 1.9744196158749183e-06, |
| "loss": 0.4418, |
| "step": 1651 |
| }, |
| { |
| "epoch": 4.541580756013746, |
| "grad_norm": 0.09186018004376852, |
| "learning_rate": 1.9506117798463944e-06, |
| "loss": 0.4478, |
| "step": 1652 |
| }, |
| { |
| "epoch": 4.544329896907216, |
| "grad_norm": 0.08971975311980089, |
| "learning_rate": 1.9269447672485864e-06, |
| "loss": 0.4414, |
| "step": 1653 |
| }, |
| { |
| "epoch": 4.547079037800687, |
| "grad_norm": 0.08979949876575676, |
| "learning_rate": 1.9034186656747478e-06, |
| "loss": 0.4452, |
| "step": 1654 |
| }, |
| { |
| "epoch": 4.549828178694158, |
| "grad_norm": 0.0936250091293909, |
| "learning_rate": 1.8800335621966148e-06, |
| "loss": 0.4482, |
| "step": 1655 |
| }, |
| { |
| "epoch": 4.552577319587629, |
| "grad_norm": 0.08981143391720423, |
| "learning_rate": 1.8567895433640705e-06, |
| "loss": 0.4448, |
| "step": 1656 |
| }, |
| { |
| "epoch": 4.5553264604810995, |
| "grad_norm": 0.08587438163737723, |
| "learning_rate": 1.8336866952048683e-06, |
| "loss": 0.435, |
| "step": 1657 |
| }, |
| { |
| "epoch": 4.55807560137457, |
| "grad_norm": 0.09326486613993211, |
| "learning_rate": 1.8107251032242335e-06, |
| "loss": 0.4508, |
| "step": 1658 |
| }, |
| { |
| "epoch": 4.560824742268041, |
| "grad_norm": 0.09331518445515743, |
| "learning_rate": 1.7879048524046182e-06, |
| "loss": 0.4499, |
| "step": 1659 |
| }, |
| { |
| "epoch": 4.563573883161512, |
| "grad_norm": 0.08976792064689015, |
| "learning_rate": 1.765226027205369e-06, |
| "loss": 0.4466, |
| "step": 1660 |
| }, |
| { |
| "epoch": 4.566323024054983, |
| "grad_norm": 0.0875193893760576, |
| "learning_rate": 1.7426887115623791e-06, |
| "loss": 0.4452, |
| "step": 1661 |
| }, |
| { |
| "epoch": 4.569072164948453, |
| "grad_norm": 0.08920117183529058, |
| "learning_rate": 1.7202929888878329e-06, |
| "loss": 0.4401, |
| "step": 1662 |
| }, |
| { |
| "epoch": 4.571821305841924, |
| "grad_norm": 0.08832595498688149, |
| "learning_rate": 1.6980389420698395e-06, |
| "loss": 0.4451, |
| "step": 1663 |
| }, |
| { |
| "epoch": 4.574570446735395, |
| "grad_norm": 0.08859855283203406, |
| "learning_rate": 1.6759266534721952e-06, |
| "loss": 0.4431, |
| "step": 1664 |
| }, |
| { |
| "epoch": 4.577319587628866, |
| "grad_norm": 0.09152185332529074, |
| "learning_rate": 1.6539562049340085e-06, |
| "loss": 0.4402, |
| "step": 1665 |
| }, |
| { |
| "epoch": 4.5800687285223365, |
| "grad_norm": 0.08871925909133102, |
| "learning_rate": 1.6321276777694307e-06, |
| "loss": 0.4487, |
| "step": 1666 |
| }, |
| { |
| "epoch": 4.582817869415807, |
| "grad_norm": 0.09057802642441674, |
| "learning_rate": 1.6104411527673613e-06, |
| "loss": 0.4435, |
| "step": 1667 |
| }, |
| { |
| "epoch": 4.585567010309278, |
| "grad_norm": 0.09402629602885212, |
| "learning_rate": 1.588896710191139e-06, |
| "loss": 0.4489, |
| "step": 1668 |
| }, |
| { |
| "epoch": 4.588316151202749, |
| "grad_norm": 0.08990522146704653, |
| "learning_rate": 1.567494429778238e-06, |
| "loss": 0.4427, |
| "step": 1669 |
| }, |
| { |
| "epoch": 4.59106529209622, |
| "grad_norm": 0.09102458376196323, |
| "learning_rate": 1.546234390739998e-06, |
| "loss": 0.4498, |
| "step": 1670 |
| }, |
| { |
| "epoch": 4.59381443298969, |
| "grad_norm": 0.09383015002767303, |
| "learning_rate": 1.525116671761282e-06, |
| "loss": 0.4456, |
| "step": 1671 |
| }, |
| { |
| "epoch": 4.596563573883161, |
| "grad_norm": 0.087955107242829, |
| "learning_rate": 1.5041413510002544e-06, |
| "loss": 0.4495, |
| "step": 1672 |
| }, |
| { |
| "epoch": 4.599312714776632, |
| "grad_norm": 0.08874300385624094, |
| "learning_rate": 1.4833085060880349e-06, |
| "loss": 0.4412, |
| "step": 1673 |
| }, |
| { |
| "epoch": 4.602061855670103, |
| "grad_norm": 0.08466557475239722, |
| "learning_rate": 1.4626182141284085e-06, |
| "loss": 0.4455, |
| "step": 1674 |
| }, |
| { |
| "epoch": 4.6048109965635735, |
| "grad_norm": 0.08895580241312337, |
| "learning_rate": 1.4420705516976097e-06, |
| "loss": 0.4406, |
| "step": 1675 |
| }, |
| { |
| "epoch": 4.607560137457044, |
| "grad_norm": 0.08951149546757349, |
| "learning_rate": 1.421665594843953e-06, |
| "loss": 0.4485, |
| "step": 1676 |
| }, |
| { |
| "epoch": 4.610309278350515, |
| "grad_norm": 0.09239895115644584, |
| "learning_rate": 1.4014034190876057e-06, |
| "loss": 0.448, |
| "step": 1677 |
| }, |
| { |
| "epoch": 4.613058419243986, |
| "grad_norm": 0.08778403074207193, |
| "learning_rate": 1.3812840994202792e-06, |
| "loss": 0.44, |
| "step": 1678 |
| }, |
| { |
| "epoch": 4.615807560137457, |
| "grad_norm": 0.08817137101596591, |
| "learning_rate": 1.361307710304991e-06, |
| "loss": 0.4394, |
| "step": 1679 |
| }, |
| { |
| "epoch": 4.618556701030927, |
| "grad_norm": 0.09227710988150696, |
| "learning_rate": 1.3414743256757334e-06, |
| "loss": 0.4501, |
| "step": 1680 |
| }, |
| { |
| "epoch": 4.621305841924398, |
| "grad_norm": 0.08760382434561047, |
| "learning_rate": 1.3217840189372555e-06, |
| "loss": 0.4461, |
| "step": 1681 |
| }, |
| { |
| "epoch": 4.624054982817869, |
| "grad_norm": 0.09031853519071162, |
| "learning_rate": 1.3022368629647253e-06, |
| "loss": 0.4496, |
| "step": 1682 |
| }, |
| { |
| "epoch": 4.62680412371134, |
| "grad_norm": 0.09206239051131887, |
| "learning_rate": 1.2828329301035481e-06, |
| "loss": 0.4432, |
| "step": 1683 |
| }, |
| { |
| "epoch": 4.6295532646048105, |
| "grad_norm": 0.08843350392995919, |
| "learning_rate": 1.263572292169024e-06, |
| "loss": 0.4501, |
| "step": 1684 |
| }, |
| { |
| "epoch": 4.632302405498281, |
| "grad_norm": 0.09481473881768035, |
| "learning_rate": 1.2444550204461092e-06, |
| "loss": 0.4403, |
| "step": 1685 |
| }, |
| { |
| "epoch": 4.635051546391752, |
| "grad_norm": 0.08689601784166971, |
| "learning_rate": 1.2254811856891524e-06, |
| "loss": 0.4435, |
| "step": 1686 |
| }, |
| { |
| "epoch": 4.637800687285223, |
| "grad_norm": 0.08893870660419893, |
| "learning_rate": 1.2066508581216429e-06, |
| "loss": 0.4503, |
| "step": 1687 |
| }, |
| { |
| "epoch": 4.640549828178694, |
| "grad_norm": 0.08937431163006333, |
| "learning_rate": 1.187964107435926e-06, |
| "loss": 0.4456, |
| "step": 1688 |
| }, |
| { |
| "epoch": 4.643298969072165, |
| "grad_norm": 0.08865313646975571, |
| "learning_rate": 1.169421002792972e-06, |
| "loss": 0.4433, |
| "step": 1689 |
| }, |
| { |
| "epoch": 4.646048109965636, |
| "grad_norm": 0.08865228962259511, |
| "learning_rate": 1.151021612822092e-06, |
| "loss": 0.4491, |
| "step": 1690 |
| }, |
| { |
| "epoch": 4.648797250859107, |
| "grad_norm": 0.08885659723955662, |
| "learning_rate": 1.1327660056207113e-06, |
| "loss": 0.4368, |
| "step": 1691 |
| }, |
| { |
| "epoch": 4.651546391752578, |
| "grad_norm": 0.08962612308335349, |
| "learning_rate": 1.114654248754099e-06, |
| "loss": 0.4495, |
| "step": 1692 |
| }, |
| { |
| "epoch": 4.6542955326460484, |
| "grad_norm": 0.08766658442624285, |
| "learning_rate": 1.0966864092551233e-06, |
| "loss": 0.445, |
| "step": 1693 |
| }, |
| { |
| "epoch": 4.657044673539519, |
| "grad_norm": 0.08709960621618447, |
| "learning_rate": 1.0788625536240206e-06, |
| "loss": 0.4471, |
| "step": 1694 |
| }, |
| { |
| "epoch": 4.65979381443299, |
| "grad_norm": 0.09261985426164675, |
| "learning_rate": 1.0611827478281067e-06, |
| "loss": 0.4456, |
| "step": 1695 |
| }, |
| { |
| "epoch": 4.662542955326461, |
| "grad_norm": 0.08650196395240495, |
| "learning_rate": 1.0436470573015733e-06, |
| "loss": 0.4403, |
| "step": 1696 |
| }, |
| { |
| "epoch": 4.665292096219932, |
| "grad_norm": 0.08590676857219628, |
| "learning_rate": 1.026255546945234e-06, |
| "loss": 0.4449, |
| "step": 1697 |
| }, |
| { |
| "epoch": 4.668041237113402, |
| "grad_norm": 0.08741332778858373, |
| "learning_rate": 1.0090082811262802e-06, |
| "loss": 0.4492, |
| "step": 1698 |
| }, |
| { |
| "epoch": 4.670790378006873, |
| "grad_norm": 0.08738071184604224, |
| "learning_rate": 9.919053236780328e-07, |
| "loss": 0.444, |
| "step": 1699 |
| }, |
| { |
| "epoch": 4.673539518900344, |
| "grad_norm": 0.08949580602300157, |
| "learning_rate": 9.74946737899729e-07, |
| "loss": 0.4376, |
| "step": 1700 |
| }, |
| { |
| "epoch": 4.676288659793815, |
| "grad_norm": 0.08859670553407732, |
| "learning_rate": 9.581325865562775e-07, |
| "loss": 0.4486, |
| "step": 1701 |
| }, |
| { |
| "epoch": 4.6790378006872855, |
| "grad_norm": 0.08694870133225886, |
| "learning_rate": 9.414629318780189e-07, |
| "loss": 0.4483, |
| "step": 1702 |
| }, |
| { |
| "epoch": 4.681786941580756, |
| "grad_norm": 0.08702985035490321, |
| "learning_rate": 9.249378355605043e-07, |
| "loss": 0.4419, |
| "step": 1703 |
| }, |
| { |
| "epoch": 4.684536082474227, |
| "grad_norm": 0.08584597159667766, |
| "learning_rate": 9.085573587642637e-07, |
| "loss": 0.4517, |
| "step": 1704 |
| }, |
| { |
| "epoch": 4.687285223367698, |
| "grad_norm": 0.08578695182725778, |
| "learning_rate": 8.923215621145753e-07, |
| "loss": 0.4399, |
| "step": 1705 |
| }, |
| { |
| "epoch": 4.690034364261169, |
| "grad_norm": 0.08592571403893629, |
| "learning_rate": 8.762305057012654e-07, |
| "loss": 0.4515, |
| "step": 1706 |
| }, |
| { |
| "epoch": 4.692783505154639, |
| "grad_norm": 0.08693493065817048, |
| "learning_rate": 8.60284249078438e-07, |
| "loss": 0.4461, |
| "step": 1707 |
| }, |
| { |
| "epoch": 4.69553264604811, |
| "grad_norm": 0.08872474686819293, |
| "learning_rate": 8.444828512642966e-07, |
| "loss": 0.4545, |
| "step": 1708 |
| }, |
| { |
| "epoch": 4.698281786941581, |
| "grad_norm": 0.09336681238214921, |
| "learning_rate": 8.288263707409272e-07, |
| "loss": 0.4543, |
| "step": 1709 |
| }, |
| { |
| "epoch": 4.701030927835052, |
| "grad_norm": 0.0862849071211661, |
| "learning_rate": 8.133148654540402e-07, |
| "loss": 0.4409, |
| "step": 1710 |
| }, |
| { |
| "epoch": 4.7037800687285225, |
| "grad_norm": 0.08478357720964781, |
| "learning_rate": 7.979483928127974e-07, |
| "loss": 0.4422, |
| "step": 1711 |
| }, |
| { |
| "epoch": 4.706529209621993, |
| "grad_norm": 0.08654065268244254, |
| "learning_rate": 7.827270096895811e-07, |
| "loss": 0.4479, |
| "step": 1712 |
| }, |
| { |
| "epoch": 4.709278350515464, |
| "grad_norm": 0.08647792501003562, |
| "learning_rate": 7.676507724197946e-07, |
| "loss": 0.4484, |
| "step": 1713 |
| }, |
| { |
| "epoch": 4.712027491408935, |
| "grad_norm": 0.08597489991064104, |
| "learning_rate": 7.527197368016437e-07, |
| "loss": 0.4417, |
| "step": 1714 |
| }, |
| { |
| "epoch": 4.714776632302406, |
| "grad_norm": 0.0898277616665661, |
| "learning_rate": 7.37933958095911e-07, |
| "loss": 0.4514, |
| "step": 1715 |
| }, |
| { |
| "epoch": 4.717525773195876, |
| "grad_norm": 0.08697107341130339, |
| "learning_rate": 7.232934910258004e-07, |
| "loss": 0.4456, |
| "step": 1716 |
| }, |
| { |
| "epoch": 4.720274914089347, |
| "grad_norm": 0.08637312554758676, |
| "learning_rate": 7.087983897767059e-07, |
| "loss": 0.4477, |
| "step": 1717 |
| }, |
| { |
| "epoch": 4.723024054982818, |
| "grad_norm": 0.0882085833054912, |
| "learning_rate": 6.944487079959982e-07, |
| "loss": 0.4395, |
| "step": 1718 |
| }, |
| { |
| "epoch": 4.725773195876289, |
| "grad_norm": 0.085015391595313, |
| "learning_rate": 6.802444987928436e-07, |
| "loss": 0.4395, |
| "step": 1719 |
| }, |
| { |
| "epoch": 4.7285223367697595, |
| "grad_norm": 0.08451996713089384, |
| "learning_rate": 6.661858147380118e-07, |
| "loss": 0.437, |
| "step": 1720 |
| }, |
| { |
| "epoch": 4.73127147766323, |
| "grad_norm": 0.08738636017583253, |
| "learning_rate": 6.522727078636681e-07, |
| "loss": 0.4508, |
| "step": 1721 |
| }, |
| { |
| "epoch": 4.734020618556701, |
| "grad_norm": 0.08558858353740155, |
| "learning_rate": 6.385052296631955e-07, |
| "loss": 0.4464, |
| "step": 1722 |
| }, |
| { |
| "epoch": 4.736769759450172, |
| "grad_norm": 0.08661882774459452, |
| "learning_rate": 6.248834310909768e-07, |
| "loss": 0.4464, |
| "step": 1723 |
| }, |
| { |
| "epoch": 4.739518900343643, |
| "grad_norm": 0.08559450787680475, |
| "learning_rate": 6.114073625622396e-07, |
| "loss": 0.4372, |
| "step": 1724 |
| }, |
| { |
| "epoch": 4.742268041237113, |
| "grad_norm": 0.0894381151555916, |
| "learning_rate": 5.980770739528563e-07, |
| "loss": 0.4394, |
| "step": 1725 |
| }, |
| { |
| "epoch": 4.745017182130584, |
| "grad_norm": 0.0897214937051112, |
| "learning_rate": 5.84892614599144e-07, |
| "loss": 0.4511, |
| "step": 1726 |
| }, |
| { |
| "epoch": 4.747766323024055, |
| "grad_norm": 0.08890953858034158, |
| "learning_rate": 5.718540332977007e-07, |
| "loss": 0.4504, |
| "step": 1727 |
| }, |
| { |
| "epoch": 4.750515463917526, |
| "grad_norm": 0.08567497991113034, |
| "learning_rate": 5.589613783052317e-07, |
| "loss": 0.4422, |
| "step": 1728 |
| }, |
| { |
| "epoch": 4.7532646048109966, |
| "grad_norm": 0.08728822000081427, |
| "learning_rate": 5.462146973383453e-07, |
| "loss": 0.4589, |
| "step": 1729 |
| }, |
| { |
| "epoch": 4.756013745704467, |
| "grad_norm": 0.08723350175277615, |
| "learning_rate": 5.336140375733934e-07, |
| "loss": 0.4444, |
| "step": 1730 |
| }, |
| { |
| "epoch": 4.758762886597938, |
| "grad_norm": 0.0895784856053207, |
| "learning_rate": 5.211594456462932e-07, |
| "loss": 0.4502, |
| "step": 1731 |
| }, |
| { |
| "epoch": 4.761512027491409, |
| "grad_norm": 0.08733169216974808, |
| "learning_rate": 5.088509676523545e-07, |
| "loss": 0.4482, |
| "step": 1732 |
| }, |
| { |
| "epoch": 4.76426116838488, |
| "grad_norm": 0.08877952523971627, |
| "learning_rate": 4.966886491461109e-07, |
| "loss": 0.4478, |
| "step": 1733 |
| }, |
| { |
| "epoch": 4.7670103092783505, |
| "grad_norm": 0.08664355725321998, |
| "learning_rate": 4.846725351411507e-07, |
| "loss": 0.4463, |
| "step": 1734 |
| }, |
| { |
| "epoch": 4.769759450171821, |
| "grad_norm": 0.08722460501048601, |
| "learning_rate": 4.7280267010993974e-07, |
| "loss": 0.4484, |
| "step": 1735 |
| }, |
| { |
| "epoch": 4.772508591065292, |
| "grad_norm": 0.0869689702035606, |
| "learning_rate": 4.6107909798368324e-07, |
| "loss": 0.4449, |
| "step": 1736 |
| }, |
| { |
| "epoch": 4.775257731958763, |
| "grad_norm": 0.09408455047074887, |
| "learning_rate": 4.495018621521352e-07, |
| "loss": 0.4436, |
| "step": 1737 |
| }, |
| { |
| "epoch": 4.778006872852234, |
| "grad_norm": 0.09000664206402534, |
| "learning_rate": 4.3807100546344296e-07, |
| "loss": 0.44, |
| "step": 1738 |
| }, |
| { |
| "epoch": 4.780756013745704, |
| "grad_norm": 0.08893238330077334, |
| "learning_rate": 4.267865702240048e-07, |
| "loss": 0.4418, |
| "step": 1739 |
| }, |
| { |
| "epoch": 4.783505154639175, |
| "grad_norm": 0.08753276685918031, |
| "learning_rate": 4.1564859819830607e-07, |
| "loss": 0.4407, |
| "step": 1740 |
| }, |
| { |
| "epoch": 4.786254295532646, |
| "grad_norm": 0.08418852751539188, |
| "learning_rate": 4.046571306087499e-07, |
| "loss": 0.4429, |
| "step": 1741 |
| }, |
| { |
| "epoch": 4.789003436426117, |
| "grad_norm": 0.0880689357278319, |
| "learning_rate": 3.9381220813551555e-07, |
| "loss": 0.4517, |
| "step": 1742 |
| }, |
| { |
| "epoch": 4.7917525773195875, |
| "grad_norm": 0.08935991058559813, |
| "learning_rate": 3.831138709164295e-07, |
| "loss": 0.4464, |
| "step": 1743 |
| }, |
| { |
| "epoch": 4.794501718213058, |
| "grad_norm": 0.086751085582121, |
| "learning_rate": 3.725621585467698e-07, |
| "loss": 0.4504, |
| "step": 1744 |
| }, |
| { |
| "epoch": 4.797250859106529, |
| "grad_norm": 0.08660059471433466, |
| "learning_rate": 3.6215711007916434e-07, |
| "loss": 0.4457, |
| "step": 1745 |
| }, |
| { |
| "epoch": 4.8, |
| "grad_norm": 0.08795409500802658, |
| "learning_rate": 3.5189876402341727e-07, |
| "loss": 0.4487, |
| "step": 1746 |
| }, |
| { |
| "epoch": 4.802749140893471, |
| "grad_norm": 0.08517686102366333, |
| "learning_rate": 3.417871583463805e-07, |
| "loss": 0.442, |
| "step": 1747 |
| }, |
| { |
| "epoch": 4.805498281786941, |
| "grad_norm": 0.08656026625325718, |
| "learning_rate": 3.3182233047181154e-07, |
| "loss": 0.4482, |
| "step": 1748 |
| }, |
| { |
| "epoch": 4.808247422680412, |
| "grad_norm": 0.08742296055893738, |
| "learning_rate": 3.2200431728022676e-07, |
| "loss": 0.4497, |
| "step": 1749 |
| }, |
| { |
| "epoch": 4.810996563573883, |
| "grad_norm": 0.08706269427544457, |
| "learning_rate": 3.1233315510877714e-07, |
| "loss": 0.4399, |
| "step": 1750 |
| }, |
| { |
| "epoch": 4.813745704467354, |
| "grad_norm": 0.08541652658402427, |
| "learning_rate": 3.0280887975111087e-07, |
| "loss": 0.4463, |
| "step": 1751 |
| }, |
| { |
| "epoch": 4.8164948453608245, |
| "grad_norm": 0.08561518369119724, |
| "learning_rate": 2.9343152645723075e-07, |
| "loss": 0.4496, |
| "step": 1752 |
| }, |
| { |
| "epoch": 4.819243986254295, |
| "grad_norm": 0.08671209749993644, |
| "learning_rate": 2.8420112993337026e-07, |
| "loss": 0.4441, |
| "step": 1753 |
| }, |
| { |
| "epoch": 4.821993127147766, |
| "grad_norm": 0.0868113057408611, |
| "learning_rate": 2.75117724341869e-07, |
| "loss": 0.4539, |
| "step": 1754 |
| }, |
| { |
| "epoch": 4.824742268041237, |
| "grad_norm": 0.08787299513468588, |
| "learning_rate": 2.661813433010485e-07, |
| "loss": 0.4469, |
| "step": 1755 |
| }, |
| { |
| "epoch": 4.827491408934708, |
| "grad_norm": 0.08426954798208526, |
| "learning_rate": 2.5739201988506544e-07, |
| "loss": 0.4452, |
| "step": 1756 |
| }, |
| { |
| "epoch": 4.830240549828178, |
| "grad_norm": 0.08925999331930048, |
| "learning_rate": 2.487497866238231e-07, |
| "loss": 0.4431, |
| "step": 1757 |
| }, |
| { |
| "epoch": 4.832989690721649, |
| "grad_norm": 0.09009930022107024, |
| "learning_rate": 2.4025467550283345e-07, |
| "loss": 0.4428, |
| "step": 1758 |
| }, |
| { |
| "epoch": 4.83573883161512, |
| "grad_norm": 0.08903112575869974, |
| "learning_rate": 2.3190671796307963e-07, |
| "loss": 0.4376, |
| "step": 1759 |
| }, |
| { |
| "epoch": 4.838487972508591, |
| "grad_norm": 0.087097271598177, |
| "learning_rate": 2.237059449009449e-07, |
| "loss": 0.4514, |
| "step": 1760 |
| }, |
| { |
| "epoch": 4.8412371134020615, |
| "grad_norm": 0.08665721242897642, |
| "learning_rate": 2.1565238666805266e-07, |
| "loss": 0.4507, |
| "step": 1761 |
| }, |
| { |
| "epoch": 4.843986254295532, |
| "grad_norm": 0.08570901423189047, |
| "learning_rate": 2.0774607307118665e-07, |
| "loss": 0.444, |
| "step": 1762 |
| }, |
| { |
| "epoch": 4.846735395189003, |
| "grad_norm": 0.08855478539796897, |
| "learning_rate": 1.9998703337216207e-07, |
| "loss": 0.4459, |
| "step": 1763 |
| }, |
| { |
| "epoch": 4.849484536082474, |
| "grad_norm": 0.08830013933034192, |
| "learning_rate": 1.9237529628772345e-07, |
| "loss": 0.4471, |
| "step": 1764 |
| }, |
| { |
| "epoch": 4.852233676975945, |
| "grad_norm": 0.08632909822266122, |
| "learning_rate": 1.8491088998943806e-07, |
| "loss": 0.4443, |
| "step": 1765 |
| }, |
| { |
| "epoch": 4.854982817869415, |
| "grad_norm": 0.08788988835658808, |
| "learning_rate": 1.7759384210358943e-07, |
| "loss": 0.4495, |
| "step": 1766 |
| }, |
| { |
| "epoch": 4.857731958762886, |
| "grad_norm": 0.08641182871335162, |
| "learning_rate": 1.7042417971108837e-07, |
| "loss": 0.448, |
| "step": 1767 |
| }, |
| { |
| "epoch": 4.860481099656358, |
| "grad_norm": 0.08684528157216327, |
| "learning_rate": 1.6340192934734879e-07, |
| "loss": 0.4503, |
| "step": 1768 |
| }, |
| { |
| "epoch": 4.863230240549829, |
| "grad_norm": 0.08740681191652898, |
| "learning_rate": 1.5652711700220756e-07, |
| "loss": 0.4476, |
| "step": 1769 |
| }, |
| { |
| "epoch": 4.8659793814432994, |
| "grad_norm": 0.08455281840311087, |
| "learning_rate": 1.4979976811982267e-07, |
| "loss": 0.4501, |
| "step": 1770 |
| }, |
| { |
| "epoch": 4.86872852233677, |
| "grad_norm": 0.08621142637068548, |
| "learning_rate": 1.4321990759858406e-07, |
| "loss": 0.4427, |
| "step": 1771 |
| }, |
| { |
| "epoch": 4.871477663230241, |
| "grad_norm": 0.09048418177504676, |
| "learning_rate": 1.3678755979100734e-07, |
| "loss": 0.4414, |
| "step": 1772 |
| }, |
| { |
| "epoch": 4.874226804123712, |
| "grad_norm": 0.08552997152050767, |
| "learning_rate": 1.3050274850366252e-07, |
| "loss": 0.4452, |
| "step": 1773 |
| }, |
| { |
| "epoch": 4.876975945017183, |
| "grad_norm": 0.09163336347075388, |
| "learning_rate": 1.2436549699706756e-07, |
| "loss": 0.4421, |
| "step": 1774 |
| }, |
| { |
| "epoch": 4.879725085910653, |
| "grad_norm": 0.08581324177498632, |
| "learning_rate": 1.183758279856262e-07, |
| "loss": 0.445, |
| "step": 1775 |
| }, |
| { |
| "epoch": 4.882474226804124, |
| "grad_norm": 0.08520078540460027, |
| "learning_rate": 1.1253376363751234e-07, |
| "loss": 0.4398, |
| "step": 1776 |
| }, |
| { |
| "epoch": 4.885223367697595, |
| "grad_norm": 0.08389381645053694, |
| "learning_rate": 1.0683932557461696e-07, |
| "loss": 0.4509, |
| "step": 1777 |
| }, |
| { |
| "epoch": 4.887972508591066, |
| "grad_norm": 0.08463720514626481, |
| "learning_rate": 1.0129253487244584e-07, |
| "loss": 0.444, |
| "step": 1778 |
| }, |
| { |
| "epoch": 4.8907216494845365, |
| "grad_norm": 0.08376091905777829, |
| "learning_rate": 9.589341206006186e-08, |
| "loss": 0.4424, |
| "step": 1779 |
| }, |
| { |
| "epoch": 4.893470790378007, |
| "grad_norm": 0.0864473265773848, |
| "learning_rate": 9.064197711999178e-08, |
| "loss": 0.436, |
| "step": 1780 |
| }, |
| { |
| "epoch": 4.896219931271478, |
| "grad_norm": 0.08780409654999521, |
| "learning_rate": 8.553824948816402e-08, |
| "loss": 0.4439, |
| "step": 1781 |
| }, |
| { |
| "epoch": 4.898969072164949, |
| "grad_norm": 0.08643060928003499, |
| "learning_rate": 8.058224805382431e-08, |
| "loss": 0.457, |
| "step": 1782 |
| }, |
| { |
| "epoch": 4.90171821305842, |
| "grad_norm": 0.08999003837315114, |
| "learning_rate": 7.577399115948236e-08, |
| "loss": 0.4536, |
| "step": 1783 |
| }, |
| { |
| "epoch": 4.90446735395189, |
| "grad_norm": 0.08500920989818493, |
| "learning_rate": 7.111349660082756e-08, |
| "loss": 0.4524, |
| "step": 1784 |
| }, |
| { |
| "epoch": 4.907216494845361, |
| "grad_norm": 0.08713394706241256, |
| "learning_rate": 6.660078162667561e-08, |
| "loss": 0.4474, |
| "step": 1785 |
| }, |
| { |
| "epoch": 4.909965635738832, |
| "grad_norm": 0.08415301103290775, |
| "learning_rate": 6.223586293890193e-08, |
| "loss": 0.4388, |
| "step": 1786 |
| }, |
| { |
| "epoch": 4.912714776632303, |
| "grad_norm": 0.08641909751480724, |
| "learning_rate": 5.8018756692370673e-08, |
| "loss": 0.4452, |
| "step": 1787 |
| }, |
| { |
| "epoch": 4.9154639175257735, |
| "grad_norm": 0.085407296137395, |
| "learning_rate": 5.3949478494885745e-08, |
| "loss": 0.4442, |
| "step": 1788 |
| }, |
| { |
| "epoch": 4.918213058419244, |
| "grad_norm": 0.08620621228120745, |
| "learning_rate": 5.002804340712875e-08, |
| "loss": 0.4395, |
| "step": 1789 |
| }, |
| { |
| "epoch": 4.920962199312715, |
| "grad_norm": 0.08440435770555102, |
| "learning_rate": 4.625446594260563e-08, |
| "loss": 0.4475, |
| "step": 1790 |
| }, |
| { |
| "epoch": 4.923711340206186, |
| "grad_norm": 0.08498966775862096, |
| "learning_rate": 4.262876006758454e-08, |
| "loss": 0.4461, |
| "step": 1791 |
| }, |
| { |
| "epoch": 4.926460481099657, |
| "grad_norm": 0.0852940765401518, |
| "learning_rate": 3.915093920105584e-08, |
| "loss": 0.4508, |
| "step": 1792 |
| }, |
| { |
| "epoch": 4.929209621993127, |
| "grad_norm": 0.08708725826340913, |
| "learning_rate": 3.582101621467882e-08, |
| "loss": 0.4478, |
| "step": 1793 |
| }, |
| { |
| "epoch": 4.931958762886598, |
| "grad_norm": 0.08563954373853248, |
| "learning_rate": 3.263900343272841e-08, |
| "loss": 0.4506, |
| "step": 1794 |
| }, |
| { |
| "epoch": 4.934707903780069, |
| "grad_norm": 0.08351208793444787, |
| "learning_rate": 2.960491263205523e-08, |
| "loss": 0.4358, |
| "step": 1795 |
| }, |
| { |
| "epoch": 4.93745704467354, |
| "grad_norm": 0.08968304296903096, |
| "learning_rate": 2.6718755042041134e-08, |
| "loss": 0.4408, |
| "step": 1796 |
| }, |
| { |
| "epoch": 4.9402061855670105, |
| "grad_norm": 0.08687120751973657, |
| "learning_rate": 2.3980541344554852e-08, |
| "loss": 0.4472, |
| "step": 1797 |
| }, |
| { |
| "epoch": 4.942955326460481, |
| "grad_norm": 0.0843041818695698, |
| "learning_rate": 2.1390281673916435e-08, |
| "loss": 0.4433, |
| "step": 1798 |
| }, |
| { |
| "epoch": 4.945704467353952, |
| "grad_norm": 0.08678186311343745, |
| "learning_rate": 1.894798561685729e-08, |
| "loss": 0.4401, |
| "step": 1799 |
| }, |
| { |
| "epoch": 4.948453608247423, |
| "grad_norm": 0.08552567460187925, |
| "learning_rate": 1.6653662212484656e-08, |
| "loss": 0.4463, |
| "step": 1800 |
| }, |
| { |
| "epoch": 4.951202749140894, |
| "grad_norm": 0.08327662566119383, |
| "learning_rate": 1.4507319952246079e-08, |
| "loss": 0.4428, |
| "step": 1801 |
| }, |
| { |
| "epoch": 4.953951890034364, |
| "grad_norm": 0.08558816066436331, |
| "learning_rate": 1.2508966779907206e-08, |
| "loss": 0.4439, |
| "step": 1802 |
| }, |
| { |
| "epoch": 4.956701030927835, |
| "grad_norm": 0.08685318316335923, |
| "learning_rate": 1.0658610091507371e-08, |
| "loss": 0.4433, |
| "step": 1803 |
| }, |
| { |
| "epoch": 4.959450171821306, |
| "grad_norm": 0.08552301347515416, |
| "learning_rate": 8.956256735355162e-09, |
| "loss": 0.4462, |
| "step": 1804 |
| }, |
| { |
| "epoch": 4.962199312714777, |
| "grad_norm": 0.08918690199355128, |
| "learning_rate": 7.401913011966244e-09, |
| "loss": 0.4516, |
| "step": 1805 |
| }, |
| { |
| "epoch": 4.9649484536082475, |
| "grad_norm": 0.08439607746487938, |
| "learning_rate": 5.995584674085564e-09, |
| "loss": 0.4443, |
| "step": 1806 |
| }, |
| { |
| "epoch": 4.967697594501718, |
| "grad_norm": 0.0859086823143405, |
| "learning_rate": 4.737276926620738e-09, |
| "loss": 0.4477, |
| "step": 1807 |
| }, |
| { |
| "epoch": 4.970446735395189, |
| "grad_norm": 0.08603802799919767, |
| "learning_rate": 3.6269944266686953e-09, |
| "loss": 0.4398, |
| "step": 1808 |
| }, |
| { |
| "epoch": 4.97319587628866, |
| "grad_norm": 0.08797273612741212, |
| "learning_rate": 2.664741283453509e-09, |
| "loss": 0.4455, |
| "step": 1809 |
| }, |
| { |
| "epoch": 4.975945017182131, |
| "grad_norm": 0.08498147628736628, |
| "learning_rate": 1.8505210583441568e-09, |
| "loss": 0.4417, |
| "step": 1810 |
| }, |
| { |
| "epoch": 4.9786941580756015, |
| "grad_norm": 0.08573341738089828, |
| "learning_rate": 1.184336764823435e-09, |
| "loss": 0.4457, |
| "step": 1811 |
| }, |
| { |
| "epoch": 4.981443298969072, |
| "grad_norm": 0.08322231115466387, |
| "learning_rate": 6.661908684924002e-10, |
| "loss": 0.454, |
| "step": 1812 |
| }, |
| { |
| "epoch": 4.984192439862543, |
| "grad_norm": 0.08656315765889926, |
| "learning_rate": 2.9608528703928274e-10, |
| "loss": 0.4449, |
| "step": 1813 |
| }, |
| { |
| "epoch": 4.986941580756014, |
| "grad_norm": 0.08364091527237393, |
| "learning_rate": 7.402139024836886e-11, |
| "loss": 0.455, |
| "step": 1814 |
| }, |
| { |
| "epoch": 4.989690721649485, |
| "grad_norm": 0.08519652883753787, |
| "learning_rate": 0.0, |
| "loss": 0.4445, |
| "step": 1815 |
| }, |
| { |
| "epoch": 4.989690721649485, |
| "step": 1815, |
| "total_flos": 3.73703706563429e+19, |
| "train_loss": 0.5429748183767986, |
| "train_runtime": 94067.7478, |
| "train_samples_per_second": 9.898, |
| "train_steps_per_second": 0.019 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 1815, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.73703706563429e+19, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |