{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.992911392405063,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004050632911392405,
      "grad_norm": 6.565896938639643,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 1.0739,
      "step": 1
    },
    {
      "epoch": 0.00810126582278481,
      "grad_norm": 6.424877036189121,
      "learning_rate": 6.504065040650407e-07,
      "loss": 1.0304,
      "step": 2
    },
    {
      "epoch": 0.012151898734177215,
      "grad_norm": 6.675688024565499,
      "learning_rate": 9.75609756097561e-07,
      "loss": 1.0832,
      "step": 3
    },
    {
      "epoch": 0.01620253164556962,
      "grad_norm": 6.352567655471716,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 1.0432,
      "step": 4
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 6.1394666841542636,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 1.0445,
      "step": 5
    },
    {
      "epoch": 0.02430379746835443,
      "grad_norm": 5.872600490506451,
      "learning_rate": 1.951219512195122e-06,
      "loss": 1.0375,
      "step": 6
    },
    {
      "epoch": 0.028354430379746835,
      "grad_norm": 4.515773706171991,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 0.9992,
      "step": 7
    },
    {
      "epoch": 0.03240506329113924,
      "grad_norm": 4.200488354646107,
      "learning_rate": 2.601626016260163e-06,
      "loss": 1.0013,
      "step": 8
    },
    {
      "epoch": 0.03645569620253165,
      "grad_norm": 2.7539299966289255,
      "learning_rate": 2.926829268292683e-06,
      "loss": 0.9747,
      "step": 9
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 2.5384131132753165,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 0.9538,
      "step": 10
    },
    {
      "epoch": 0.044556962025316456,
      "grad_norm": 3.3940329365895665,
      "learning_rate": 3.577235772357724e-06,
      "loss": 1.0117,
      "step": 11
    },
    {
      "epoch": 0.04860759493670886,
      "grad_norm": 3.7615522437070803,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.9406,
      "step": 12
    },
    {
      "epoch": 0.052658227848101265,
      "grad_norm": 4.0566637383891235,
      "learning_rate": 4.227642276422765e-06,
      "loss": 0.9513,
      "step": 13
    },
    {
      "epoch": 0.05670886075949367,
      "grad_norm": 3.9336267477151248,
      "learning_rate": 4.552845528455285e-06,
      "loss": 0.9571,
      "step": 14
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 3.6469304032393213,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 0.9393,
      "step": 15
    },
    {
      "epoch": 0.06481012658227848,
      "grad_norm": 2.792596920083004,
      "learning_rate": 5.203252032520326e-06,
      "loss": 0.8866,
      "step": 16
    },
    {
      "epoch": 0.06886075949367089,
      "grad_norm": 2.62344696439575,
      "learning_rate": 5.528455284552846e-06,
      "loss": 0.894,
      "step": 17
    },
    {
      "epoch": 0.0729113924050633,
      "grad_norm": 2.1098626475414073,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.8684,
      "step": 18
    },
    {
      "epoch": 0.0769620253164557,
      "grad_norm": 1.6178665581895086,
      "learning_rate": 6.178861788617887e-06,
      "loss": 0.8719,
      "step": 19
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 1.1895765665603557,
      "learning_rate": 6.504065040650407e-06,
      "loss": 0.8529,
      "step": 20
    },
    {
      "epoch": 0.08506329113924051,
      "grad_norm": 1.0218687142914817,
      "learning_rate": 6.829268292682928e-06,
      "loss": 0.8539,
      "step": 21
    },
    {
      "epoch": 0.08911392405063291,
      "grad_norm": 1.1847985232807718,
      "learning_rate": 7.154471544715448e-06,
      "loss": 0.8475,
      "step": 22
    },
    {
      "epoch": 0.09316455696202532,
      "grad_norm": 1.1277919285473021,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 0.8133,
      "step": 23
    },
    {
      "epoch": 0.09721518987341772,
      "grad_norm": 1.0140428585227534,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.8151,
      "step": 24
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 0.9087338712275635,
      "learning_rate": 8.130081300813009e-06,
      "loss": 0.7938,
      "step": 25
    },
    {
      "epoch": 0.10531645569620253,
      "grad_norm": 0.9235267994779454,
      "learning_rate": 8.45528455284553e-06,
      "loss": 0.7922,
      "step": 26
    },
    {
      "epoch": 0.10936708860759493,
      "grad_norm": 0.8441016532415221,
      "learning_rate": 8.78048780487805e-06,
      "loss": 0.7916,
      "step": 27
    },
    {
      "epoch": 0.11341772151898734,
      "grad_norm": 0.9591795712065868,
      "learning_rate": 9.10569105691057e-06,
      "loss": 0.7853,
      "step": 28
    },
    {
      "epoch": 0.11746835443037974,
      "grad_norm": 0.804902855700884,
      "learning_rate": 9.43089430894309e-06,
      "loss": 0.7977,
      "step": 29
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 0.7975879463325245,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.7747,
      "step": 30
    },
    {
      "epoch": 0.12556962025316457,
      "grad_norm": 0.7974681707822487,
      "learning_rate": 1.008130081300813e-05,
      "loss": 0.7577,
      "step": 31
    },
    {
      "epoch": 0.12962025316455697,
      "grad_norm": 0.8343569527571383,
      "learning_rate": 1.0406504065040652e-05,
      "loss": 0.7612,
      "step": 32
    },
    {
      "epoch": 0.13367088607594937,
      "grad_norm": 0.6694182326150714,
      "learning_rate": 1.0731707317073172e-05,
      "loss": 0.7591,
      "step": 33
    },
    {
      "epoch": 0.13772151898734178,
      "grad_norm": 0.8789379796682085,
      "learning_rate": 1.1056910569105692e-05,
      "loss": 0.7603,
      "step": 34
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 0.6445767575441089,
      "learning_rate": 1.1382113821138213e-05,
      "loss": 0.7405,
      "step": 35
    },
    {
      "epoch": 0.1458227848101266,
      "grad_norm": 0.6770247955397397,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.7398,
      "step": 36
    },
    {
      "epoch": 0.149873417721519,
      "grad_norm": 0.6858280157354305,
      "learning_rate": 1.2032520325203254e-05,
      "loss": 0.7456,
      "step": 37
    },
    {
      "epoch": 0.1539240506329114,
      "grad_norm": 0.6403279583365179,
      "learning_rate": 1.2357723577235774e-05,
      "loss": 0.7243,
      "step": 38
    },
    {
      "epoch": 0.1579746835443038,
      "grad_norm": 0.6341403951861435,
      "learning_rate": 1.2682926829268294e-05,
      "loss": 0.748,
      "step": 39
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 0.672016954235725,
      "learning_rate": 1.3008130081300815e-05,
      "loss": 0.7496,
      "step": 40
    },
    {
      "epoch": 0.1660759493670886,
      "grad_norm": 0.6133279680326725,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.7449,
      "step": 41
    },
    {
      "epoch": 0.17012658227848101,
      "grad_norm": 0.4807301217267841,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.7462,
      "step": 42
    },
    {
      "epoch": 0.17417721518987342,
      "grad_norm": 0.6426264779936044,
      "learning_rate": 1.3983739837398376e-05,
      "loss": 0.7375,
      "step": 43
    },
    {
      "epoch": 0.17822784810126582,
      "grad_norm": 0.5349486950803585,
      "learning_rate": 1.4308943089430896e-05,
      "loss": 0.7165,
      "step": 44
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 0.6320496357729215,
      "learning_rate": 1.4634146341463415e-05,
      "loss": 0.7315,
      "step": 45
    },
    {
      "epoch": 0.18632911392405063,
      "grad_norm": 0.6111068775955827,
      "learning_rate": 1.4959349593495935e-05,
      "loss": 0.7083,
      "step": 46
    },
    {
      "epoch": 0.19037974683544304,
      "grad_norm": 0.4911821136694014,
      "learning_rate": 1.528455284552846e-05,
      "loss": 0.7036,
      "step": 47
    },
    {
      "epoch": 0.19443037974683544,
      "grad_norm": 0.5749373815175803,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.7249,
      "step": 48
    },
    {
      "epoch": 0.19848101265822785,
      "grad_norm": 0.514168465484637,
      "learning_rate": 1.5934959349593496e-05,
      "loss": 0.7134,
      "step": 49
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 0.5850932044942065,
      "learning_rate": 1.6260162601626018e-05,
      "loss": 0.7144,
      "step": 50
    },
    {
      "epoch": 0.20658227848101265,
      "grad_norm": 0.4904677239469707,
      "learning_rate": 1.6585365853658537e-05,
      "loss": 0.7287,
      "step": 51
    },
    {
      "epoch": 0.21063291139240506,
      "grad_norm": 0.5161204780147062,
      "learning_rate": 1.691056910569106e-05,
      "loss": 0.7019,
      "step": 52
    },
    {
      "epoch": 0.21468354430379746,
      "grad_norm": 0.4749012557029868,
      "learning_rate": 1.7235772357723578e-05,
      "loss": 0.6939,
      "step": 53
    },
    {
      "epoch": 0.21873417721518987,
      "grad_norm": 0.5739663936961527,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.7005,
      "step": 54
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 0.5839546976448743,
      "learning_rate": 1.788617886178862e-05,
      "loss": 0.7156,
      "step": 55
    },
    {
      "epoch": 0.22683544303797468,
      "grad_norm": 0.45211125628125676,
      "learning_rate": 1.821138211382114e-05,
      "loss": 0.688,
      "step": 56
    },
    {
      "epoch": 0.23088607594936708,
      "grad_norm": 0.5440773051379888,
      "learning_rate": 1.8536585365853663e-05,
      "loss": 0.6982,
      "step": 57
    },
    {
      "epoch": 0.23493670886075949,
      "grad_norm": 0.49066042383790304,
      "learning_rate": 1.886178861788618e-05,
      "loss": 0.7108,
      "step": 58
    },
    {
      "epoch": 0.2389873417721519,
      "grad_norm": 0.5404332535023633,
      "learning_rate": 1.91869918699187e-05,
      "loss": 0.7035,
      "step": 59
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 0.5749934615563418,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.6948,
      "step": 60
    },
    {
      "epoch": 0.2470886075949367,
      "grad_norm": 0.6250440749982804,
      "learning_rate": 1.983739837398374e-05,
      "loss": 0.6895,
      "step": 61
    },
    {
      "epoch": 0.25113924050632913,
      "grad_norm": 0.5479702576267381,
      "learning_rate": 2.016260162601626e-05,
      "loss": 0.6909,
      "step": 62
    },
    {
      "epoch": 0.25518987341772154,
      "grad_norm": 0.5840045603517738,
      "learning_rate": 2.048780487804878e-05,
      "loss": 0.6817,
      "step": 63
    },
    {
      "epoch": 0.25924050632911394,
      "grad_norm": 0.574793850525269,
      "learning_rate": 2.0813008130081303e-05,
      "loss": 0.6924,
      "step": 64
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 0.6676237052563165,
      "learning_rate": 2.1138211382113822e-05,
      "loss": 0.692,
      "step": 65
    },
    {
      "epoch": 0.26734177215189875,
      "grad_norm": 0.9619934541334264,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.6918,
      "step": 66
    },
    {
      "epoch": 0.27139240506329115,
      "grad_norm": 1.060898563576492,
      "learning_rate": 2.1788617886178863e-05,
      "loss": 0.6791,
      "step": 67
    },
    {
      "epoch": 0.27544303797468356,
      "grad_norm": 0.904861265958466,
      "learning_rate": 2.2113821138211385e-05,
      "loss": 0.7028,
      "step": 68
    },
    {
      "epoch": 0.27949367088607596,
      "grad_norm": 1.1853527291040817,
      "learning_rate": 2.2439024390243907e-05,
      "loss": 0.6812,
      "step": 69
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 0.7857110585191401,
      "learning_rate": 2.2764227642276426e-05,
      "loss": 0.6866,
      "step": 70
    },
    {
      "epoch": 0.28759493670886077,
      "grad_norm": 0.9682602431491651,
      "learning_rate": 2.3089430894308948e-05,
      "loss": 0.6885,
      "step": 71
    },
    {
      "epoch": 0.2916455696202532,
      "grad_norm": 1.4669981427432603,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.6984,
      "step": 72
    },
    {
      "epoch": 0.2956962025316456,
      "grad_norm": 0.633454232355496,
      "learning_rate": 2.3739837398373985e-05,
      "loss": 0.6999,
      "step": 73
    },
    {
      "epoch": 0.299746835443038,
      "grad_norm": 1.5971751443323539,
      "learning_rate": 2.4065040650406507e-05,
      "loss": 0.6849,
      "step": 74
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 0.5966050905818251,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.6729,
      "step": 75
    },
    {
      "epoch": 0.3078481012658228,
      "grad_norm": 1.2296293856099136,
      "learning_rate": 2.4715447154471548e-05,
      "loss": 0.6686,
      "step": 76
    },
    {
      "epoch": 0.3118987341772152,
      "grad_norm": 0.8070733998144802,
      "learning_rate": 2.5040650406504066e-05,
      "loss": 0.6824,
      "step": 77
    },
    {
      "epoch": 0.3159493670886076,
      "grad_norm": 0.8445446872314081,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.6755,
      "step": 78
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.9463490141931428,
      "learning_rate": 2.569105691056911e-05,
      "loss": 0.6931,
      "step": 79
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 1.27968689699765,
      "learning_rate": 2.601626016260163e-05,
      "loss": 0.6918,
      "step": 80
    },
    {
      "epoch": 0.3281012658227848,
      "grad_norm": 0.9643659417765228,
      "learning_rate": 2.634146341463415e-05,
      "loss": 0.6769,
      "step": 81
    },
    {
      "epoch": 0.3321518987341772,
      "grad_norm": 1.0082010830767494,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.6893,
      "step": 82
    },
    {
      "epoch": 0.3362025316455696,
      "grad_norm": 0.9118520007011225,
      "learning_rate": 2.699186991869919e-05,
      "loss": 0.6797,
      "step": 83
    },
    {
      "epoch": 0.34025316455696203,
      "grad_norm": 0.8467329391054232,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.683,
      "step": 84
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 1.0281287680667526,
      "learning_rate": 2.764227642276423e-05,
      "loss": 0.669,
      "step": 85
    },
    {
      "epoch": 0.34835443037974684,
      "grad_norm": 1.4383431261117385,
      "learning_rate": 2.796747967479675e-05,
      "loss": 0.6747,
      "step": 86
    },
    {
      "epoch": 0.35240506329113924,
      "grad_norm": 1.0668810949475278,
      "learning_rate": 2.829268292682927e-05,
      "loss": 0.6786,
      "step": 87
    },
    {
      "epoch": 0.35645569620253165,
      "grad_norm": 1.3109536031574585,
      "learning_rate": 2.8617886178861792e-05,
      "loss": 0.6762,
      "step": 88
    },
    {
      "epoch": 0.36050632911392405,
      "grad_norm": 1.3902022527615097,
      "learning_rate": 2.8943089430894314e-05,
      "loss": 0.652,
      "step": 89
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 1.0406067055206762,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.6741,
      "step": 90
    },
    {
      "epoch": 0.36860759493670886,
      "grad_norm": 1.7199696920849745,
      "learning_rate": 2.959349593495935e-05,
      "loss": 0.6857,
      "step": 91
    },
    {
      "epoch": 0.37265822784810126,
      "grad_norm": 0.9729377453147651,
      "learning_rate": 2.991869918699187e-05,
      "loss": 0.6656,
      "step": 92
    },
    {
      "epoch": 0.37670886075949367,
      "grad_norm": 2.382657070863399,
      "learning_rate": 3.0243902439024392e-05,
      "loss": 0.6775,
      "step": 93
    },
    {
      "epoch": 0.3807594936708861,
      "grad_norm": 2.028553778397811,
      "learning_rate": 3.056910569105692e-05,
      "loss": 0.6873,
      "step": 94
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 1.3478551809928314,
      "learning_rate": 3.089430894308943e-05,
      "loss": 0.6775,
      "step": 95
    },
    {
      "epoch": 0.3888607594936709,
      "grad_norm": 1.374453027501157,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.6735,
      "step": 96
    },
    {
      "epoch": 0.3929113924050633,
      "grad_norm": 1.2981715944174452,
      "learning_rate": 3.154471544715447e-05,
      "loss": 0.6808,
      "step": 97
    },
    {
      "epoch": 0.3969620253164557,
      "grad_norm": 1.068922695797907,
      "learning_rate": 3.186991869918699e-05,
      "loss": 0.665,
      "step": 98
    },
    {
      "epoch": 0.4010126582278481,
      "grad_norm": 1.1306548785963957,
      "learning_rate": 3.2195121951219514e-05,
      "loss": 0.679,
      "step": 99
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 1.0620355030662811,
      "learning_rate": 3.2520325203252037e-05,
      "loss": 0.6743,
      "step": 100
    },
    {
      "epoch": 0.4091139240506329,
      "grad_norm": 0.9440101070458233,
      "learning_rate": 3.284552845528456e-05,
      "loss": 0.6601,
      "step": 101
    },
    {
      "epoch": 0.4131645569620253,
      "grad_norm": 0.9872994254263004,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.6716,
      "step": 102
    },
    {
      "epoch": 0.4172151898734177,
      "grad_norm": 0.756761571024137,
      "learning_rate": 3.3495934959349596e-05,
      "loss": 0.7071,
      "step": 103
    },
    {
      "epoch": 0.4212658227848101,
      "grad_norm": 1.006814653029631,
      "learning_rate": 3.382113821138212e-05,
      "loss": 0.6734,
      "step": 104
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 1.0137736147627878,
      "learning_rate": 3.414634146341463e-05,
      "loss": 0.6594,
      "step": 105
    },
    {
      "epoch": 0.4293670886075949,
      "grad_norm": 1.5249612663871746,
      "learning_rate": 3.4471544715447155e-05,
      "loss": 0.6566,
      "step": 106
    },
    {
      "epoch": 0.43341772151898733,
      "grad_norm": 0.8150255853333629,
      "learning_rate": 3.479674796747968e-05,
      "loss": 0.6433,
      "step": 107
    },
    {
      "epoch": 0.43746835443037974,
      "grad_norm": 1.2585197395770062,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.6667,
      "step": 108
    },
    {
      "epoch": 0.44151898734177214,
      "grad_norm": 1.6565133256429025,
      "learning_rate": 3.544715447154472e-05,
      "loss": 0.6759,
      "step": 109
    },
    {
      "epoch": 0.44556962025316454,
      "grad_norm": 1.007712038975302,
      "learning_rate": 3.577235772357724e-05,
      "loss": 0.6476,
      "step": 110
    },
    {
      "epoch": 0.44962025316455695,
      "grad_norm": 1.9229767494277894,
      "learning_rate": 3.609756097560976e-05,
      "loss": 0.6556,
      "step": 111
    },
    {
      "epoch": 0.45367088607594935,
      "grad_norm": 1.301454435867178,
      "learning_rate": 3.642276422764228e-05,
      "loss": 0.6657,
      "step": 112
    },
    {
      "epoch": 0.45772151898734176,
      "grad_norm": 1.7229560416105487,
      "learning_rate": 3.67479674796748e-05,
      "loss": 0.6598,
      "step": 113
    },
    {
      "epoch": 0.46177215189873416,
      "grad_norm": 1.4396399477099036,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.6713,
      "step": 114
    },
    {
      "epoch": 0.46582278481012657,
      "grad_norm": 1.2966439069361446,
      "learning_rate": 3.739837398373984e-05,
      "loss": 0.6661,
      "step": 115
    },
    {
      "epoch": 0.46987341772151897,
      "grad_norm": 1.0897664154775006,
      "learning_rate": 3.772357723577236e-05,
      "loss": 0.6636,
      "step": 116
    },
    {
      "epoch": 0.4739240506329114,
      "grad_norm": 1.227331278015716,
      "learning_rate": 3.804878048780488e-05,
      "loss": 0.6611,
      "step": 117
    },
    {
      "epoch": 0.4779746835443038,
      "grad_norm": 0.9013064644008313,
      "learning_rate": 3.83739837398374e-05,
      "loss": 0.6778,
      "step": 118
    },
    {
      "epoch": 0.4820253164556962,
      "grad_norm": 1.2327668990972602,
      "learning_rate": 3.869918699186992e-05,
      "loss": 0.6623,
      "step": 119
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 1.009995390725031,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.6718,
      "step": 120
    },
    {
      "epoch": 0.490126582278481,
      "grad_norm": 1.122273097665521,
      "learning_rate": 3.9349593495934966e-05,
      "loss": 0.6671,
      "step": 121
    },
    {
      "epoch": 0.4941772151898734,
      "grad_norm": 1.0498199056181385,
      "learning_rate": 3.967479674796748e-05,
      "loss": 0.6571,
      "step": 122
    },
    {
      "epoch": 0.4982278481012658,
      "grad_norm": 1.2170239936489256,
      "learning_rate": 4e-05,
      "loss": 0.6619,
      "step": 123
    },
    {
      "epoch": 0.5022784810126583,
      "grad_norm": 1.0109905907140755,
      "learning_rate": 3.999991946137476e-05,
      "loss": 0.6451,
      "step": 124
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 0.9530381307445761,
      "learning_rate": 3.999967784614766e-05,
      "loss": 0.6567,
      "step": 125
    },
    {
      "epoch": 0.5103797468354431,
      "grad_norm": 1.0405593161569147,
      "learning_rate": 3.9999275156264656e-05,
      "loss": 0.675,
      "step": 126
    },
    {
      "epoch": 0.5144303797468355,
      "grad_norm": 1.1189545066408981,
      "learning_rate": 3.999871139496895e-05,
      "loss": 0.6481,
      "step": 127
    },
    {
      "epoch": 0.5184810126582279,
      "grad_norm": 1.2534782225609422,
      "learning_rate": 3.9997986566800995e-05,
      "loss": 0.6619,
      "step": 128
    },
    {
      "epoch": 0.5225316455696203,
      "grad_norm": 1.5144075297452226,
      "learning_rate": 3.999710067759846e-05,
      "loss": 0.6738,
      "step": 129
    },
    {
      "epoch": 0.5265822784810127,
      "grad_norm": 1.0761651131202516,
      "learning_rate": 3.999605373449617e-05,
      "loss": 0.673,
      "step": 130
    },
    {
      "epoch": 0.5306329113924051,
      "grad_norm": 1.1779176326124219,
      "learning_rate": 3.9994845745926075e-05,
      "loss": 0.6562,
      "step": 131
    },
    {
      "epoch": 0.5346835443037975,
      "grad_norm": 1.3185457139977619,
      "learning_rate": 3.999347672161713e-05,
      "loss": 0.6581,
      "step": 132
    },
    {
      "epoch": 0.5387341772151899,
      "grad_norm": 1.019959758162389,
      "learning_rate": 3.999194667259528e-05,
      "loss": 0.6619,
      "step": 133
    },
    {
      "epoch": 0.5427848101265823,
      "grad_norm": 1.3504252924134732,
      "learning_rate": 3.999025561118334e-05,
      "loss": 0.6587,
      "step": 134
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 0.722263604782836,
      "learning_rate": 3.998840355100086e-05,
      "loss": 0.6524,
      "step": 135
    },
    {
      "epoch": 0.5508860759493671,
      "grad_norm": 1.8938087297449309,
      "learning_rate": 3.998639050696409e-05,
      "loss": 0.6446,
      "step": 136
    },
    {
      "epoch": 0.5549367088607595,
      "grad_norm": 0.7993275874231304,
      "learning_rate": 3.998421649528582e-05,
      "loss": 0.664,
      "step": 137
    },
    {
      "epoch": 0.5589873417721519,
      "grad_norm": 2.374139086546867,
      "learning_rate": 3.9981881533475234e-05,
      "loss": 0.6644,
      "step": 138
    },
    {
      "epoch": 0.5630379746835443,
      "grad_norm": 1.7230492616698447,
      "learning_rate": 3.997938564033779e-05,
      "loss": 0.653,
      "step": 139
    },
    {
      "epoch": 0.5670886075949367,
      "grad_norm": 2.0119475440335752,
      "learning_rate": 3.9976728835975064e-05,
      "loss": 0.6692,
      "step": 140
    },
    {
      "epoch": 0.5711392405063291,
      "grad_norm": 1.9389135348306736,
      "learning_rate": 3.9973911141784605e-05,
      "loss": 0.6535,
      "step": 141
    },
    {
      "epoch": 0.5751898734177215,
      "grad_norm": 1.4403349978255873,
      "learning_rate": 3.997093258045973e-05,
      "loss": 0.6748,
      "step": 142
    },
    {
      "epoch": 0.579240506329114,
      "grad_norm": 1.530297008864757,
      "learning_rate": 3.996779317598936e-05,
      "loss": 0.6467,
      "step": 143
    },
    {
      "epoch": 0.5832911392405064,
      "grad_norm": 1.1291502250770709,
      "learning_rate": 3.996449295365782e-05,
      "loss": 0.6451,
      "step": 144
    },
    {
      "epoch": 0.5873417721518988,
      "grad_norm": 1.4895931863903717,
      "learning_rate": 3.996103194004467e-05,
      "loss": 0.6516,
      "step": 145
    },
    {
      "epoch": 0.5913924050632912,
      "grad_norm": 0.8277779348441653,
      "learning_rate": 3.995741016302441e-05,
      "loss": 0.645,
      "step": 146
    },
    {
      "epoch": 0.5954430379746836,
      "grad_norm": 1.400029223064251,
      "learning_rate": 3.9953627651766364e-05,
      "loss": 0.6358,
      "step": 147
    },
    {
      "epoch": 0.599493670886076,
      "grad_norm": 0.8151485747909567,
      "learning_rate": 3.9949684436734325e-05,
      "loss": 0.6508,
      "step": 148
    },
    {
      "epoch": 0.6035443037974684,
      "grad_norm": 1.127917981409227,
      "learning_rate": 3.994558054968643e-05,
      "loss": 0.6461,
      "step": 149
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 0.8170674425202605,
      "learning_rate": 3.994131602367481e-05,
      "loss": 0.6445,
      "step": 150
    },
    {
      "epoch": 0.6116455696202532,
      "grad_norm": 1.0560163118847938,
      "learning_rate": 3.9936890893045376e-05,
      "loss": 0.6611,
      "step": 151
    },
    {
      "epoch": 0.6156962025316456,
      "grad_norm": 0.8392257720896447,
      "learning_rate": 3.993230519343752e-05,
      "loss": 0.654,
      "step": 152
    },
    {
      "epoch": 0.619746835443038,
      "grad_norm": 0.8421618110581768,
      "learning_rate": 3.992755896178383e-05,
      "loss": 0.6504,
      "step": 153
    },
    {
      "epoch": 0.6237974683544304,
      "grad_norm": 0.928022090820385,
      "learning_rate": 3.992265223630981e-05,
      "loss": 0.6764,
      "step": 154
    },
    {
      "epoch": 0.6278481012658228,
      "grad_norm": 1.0583201526437274,
      "learning_rate": 3.991758505653355e-05,
      "loss": 0.6634,
      "step": 155
    },
    {
      "epoch": 0.6318987341772152,
      "grad_norm": 0.8148031316339325,
      "learning_rate": 3.991235746326543e-05,
      "loss": 0.6323,
      "step": 156
    },
    {
      "epoch": 0.6359493670886076,
      "grad_norm": 0.7237709529946899,
      "learning_rate": 3.9906969498607745e-05,
      "loss": 0.6474,
      "step": 157
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.6692226805131555,
      "learning_rate": 3.990142120595444e-05,
      "loss": 0.6368,
      "step": 158
    },
    {
      "epoch": 0.6440506329113924,
      "grad_norm": 0.6527067680990782,
      "learning_rate": 3.98957126299907e-05,
      "loss": 0.6373,
      "step": 159
    },
    {
      "epoch": 0.6481012658227848,
      "grad_norm": 0.6955730523626112,
      "learning_rate": 3.9889843816692596e-05,
      "loss": 0.6483,
      "step": 160
    },
    {
      "epoch": 0.6521518987341772,
      "grad_norm": 1.9073792810634957,
      "learning_rate": 3.9883814813326766e-05,
      "loss": 0.7331,
      "step": 161
    },
    {
      "epoch": 0.6562025316455696,
      "grad_norm": 0.7039249507523281,
      "learning_rate": 3.9877625668449956e-05,
      "loss": 0.6445,
      "step": 162
    },
    {
      "epoch": 0.660253164556962,
      "grad_norm": 0.7919618526474105,
      "learning_rate": 3.98712764319087e-05,
      "loss": 0.654,
      "step": 163
    },
    {
      "epoch": 0.6643037974683544,
      "grad_norm": 0.810962163186228,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.6469,
      "step": 164
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.5942821513181452,
      "learning_rate": 3.9858097889665277e-05,
      "loss": 0.6352,
      "step": 165
    },
    {
      "epoch": 0.6724050632911392,
      "grad_norm": 0.6195308429522167,
      "learning_rate": 3.985126869010129e-05,
      "loss": 0.635,
      "step": 166
    },
    {
      "epoch": 0.6764556962025317,
      "grad_norm": 0.5239498374223093,
      "learning_rate": 3.984427961114833e-05,
      "loss": 0.6609,
      "step": 167
    },
    {
      "epoch": 0.6805063291139241,
      "grad_norm": 0.8155264405765932,
      "learning_rate": 3.9837130709095475e-05,
      "loss": 0.6426,
      "step": 168
    },
    {
      "epoch": 0.6845569620253165,
      "grad_norm": 0.8064905763534658,
      "learning_rate": 3.982982204151901e-05,
      "loss": 0.6496,
      "step": 169
    },
    {
      "epoch": 0.6886075949367089,
      "grad_norm": 0.795942110772038,
      "learning_rate": 3.982235366728193e-05,
      "loss": 0.6547,
      "step": 170
    },
    {
      "epoch": 0.6926582278481013,
      "grad_norm": 0.5159545723250966,
      "learning_rate": 3.9814725646533505e-05,
      "loss": 0.6193,
      "step": 171
    },
    {
      "epoch": 0.6967088607594937,
      "grad_norm": 0.6189667183317581,
      "learning_rate": 3.9806938040708746e-05,
      "loss": 0.6425,
      "step": 172
    },
    {
      "epoch": 0.7007594936708861,
      "grad_norm": 0.7584327560828357,
      "learning_rate": 3.9798990912527976e-05,
      "loss": 0.6354,
      "step": 173
    },
    {
      "epoch": 0.7048101265822785,
      "grad_norm": 0.8269004642838677,
      "learning_rate": 3.979088432599627e-05,
      "loss": 0.6342,
      "step": 174
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 0.5983073457884152,
      "learning_rate": 3.9782618346402964e-05,
      "loss": 0.6396,
      "step": 175
    },
    {
      "epoch": 0.7129113924050633,
      "grad_norm": 0.7902187490402278,
      "learning_rate": 3.977419304032111e-05,
      "loss": 0.6478,
      "step": 176
    },
    {
      "epoch": 0.7169620253164557,
      "grad_norm": 0.8787042951906945,
      "learning_rate": 3.976560847560697e-05,
      "loss": 0.6399,
      "step": 177
    },
    {
      "epoch": 0.7210126582278481,
      "grad_norm": 0.7777724631014674,
      "learning_rate": 3.9756864721399456e-05,
      "loss": 0.6429,
      "step": 178
    },
    {
      "epoch": 0.7250632911392405,
      "grad_norm": 0.5027376283406825,
      "learning_rate": 3.974796184811956e-05,
      "loss": 0.6314,
      "step": 179
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 0.5876533354709571,
      "learning_rate": 3.973889992746979e-05,
      "loss": 0.6343,
      "step": 180
    },
    {
      "epoch": 0.7331645569620253,
      "grad_norm": 0.656008461780158,
      "learning_rate": 3.972967903243361e-05,
      "loss": 0.6337,
      "step": 181
    },
    {
      "epoch": 0.7372151898734177,
      "grad_norm": 0.627673955333263,
      "learning_rate": 3.972029923727486e-05,
      "loss": 0.6457,
      "step": 182
    },
    {
      "epoch": 0.7412658227848101,
      "grad_norm": 0.5869923804252781,
      "learning_rate": 3.971076061753709e-05,
      "loss": 0.6282,
      "step": 183
    },
    {
      "epoch": 0.7453164556962025,
      "grad_norm": 0.5979340937404183,
      "learning_rate": 3.9701063250043066e-05,
      "loss": 0.6365,
      "step": 184
    },
    {
      "epoch": 0.7493670886075949,
      "grad_norm": 0.6717037876248872,
      "learning_rate": 3.969120721289402e-05,
      "loss": 0.6419,
      "step": 185
    },
    {
      "epoch": 0.7534177215189873,
      "grad_norm": 0.6486136631874181,
      "learning_rate": 3.9681192585469146e-05,
      "loss": 0.6325,
      "step": 186
    },
    {
      "epoch": 0.7574683544303797,
      "grad_norm": 0.4775678489789169,
      "learning_rate": 3.9671019448424865e-05,
      "loss": 0.6521,
      "step": 187
    },
    {
      "epoch": 0.7615189873417721,
      "grad_norm": 0.4604126451601245,
      "learning_rate": 3.966068788369422e-05,
      "loss": 0.6359,
      "step": 188
    },
    {
      "epoch": 0.7655696202531646,
      "grad_norm": 0.49239548471778155,
      "learning_rate": 3.965019797448622e-05,
      "loss": 0.6456,
      "step": 189
    },
    {
      "epoch": 0.769620253164557,
      "grad_norm": 0.5089155791039236,
      "learning_rate": 3.963954980528515e-05,
      "loss": 0.6386,
      "step": 190
    },
    {
      "epoch": 0.7736708860759494,
      "grad_norm": 0.5429054262226821,
      "learning_rate": 3.9628743461849905e-05,
      "loss": 0.6326,
      "step": 191
    },
    {
      "epoch": 0.7777215189873418,
      "grad_norm": 0.5161100801700521,
      "learning_rate": 3.961777903121329e-05,
      "loss": 0.6373,
      "step": 192
    },
    {
      "epoch": 0.7817721518987342,
      "grad_norm": 0.48923838383485607,
      "learning_rate": 3.960665660168131e-05,
      "loss": 0.6118,
      "step": 193
    },
    {
      "epoch": 0.7858227848101266,
      "grad_norm": 0.6129248980503034,
      "learning_rate": 3.9595376262832485e-05,
      "loss": 0.6438,
      "step": 194
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.6625179518220522,
      "learning_rate": 3.9583938105517127e-05,
      "loss": 0.6398,
      "step": 195
    },
    {
      "epoch": 0.7939240506329114,
      "grad_norm": 0.5054913981384187,
      "learning_rate": 3.957234222185657e-05,
      "loss": 0.6351,
      "step": 196
    },
    {
      "epoch": 0.7979746835443038,
      "grad_norm": 0.5632102123455754,
      "learning_rate": 3.9560588705242474e-05,
      "loss": 0.6235,
      "step": 197
    },
    {
      "epoch": 0.8020253164556962,
      "grad_norm": 0.6864930362174487,
      "learning_rate": 3.954867765033605e-05,
      "loss": 0.6414,
      "step": 198
    },
    {
      "epoch": 0.8060759493670886,
      "grad_norm": 0.43503390222300553,
      "learning_rate": 3.953660915306728e-05,
      "loss": 0.6328,
      "step": 199
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 0.5128692063751873,
      "learning_rate": 3.952438331063419e-05,
      "loss": 0.6444,
      "step": 200
    },
    {
      "epoch": 0.8141772151898734,
      "grad_norm": 0.5808243467504917,
      "learning_rate": 3.951200022150205e-05,
      "loss": 0.6433,
      "step": 201
    },
    {
      "epoch": 0.8182278481012658,
      "grad_norm": 0.4536090142706016,
      "learning_rate": 3.949945998540253e-05,
      "loss": 0.6345,
      "step": 202
    },
    {
      "epoch": 0.8222784810126582,
      "grad_norm": 0.46078668530330275,
      "learning_rate": 3.9486762703332993e-05,
      "loss": 0.6368,
      "step": 203
    },
    {
      "epoch": 0.8263291139240506,
      "grad_norm": 0.4542042648346198,
      "learning_rate": 3.947390847755559e-05,
      "loss": 0.6337,
      "step": 204
    },
    {
      "epoch": 0.830379746835443,
      "grad_norm": 0.6116190084199781,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.6399,
      "step": 205
    },
    {
      "epoch": 0.8344303797468354,
      "grad_norm": 0.4686618053440677,
      "learning_rate": 3.944772961024501e-05,
      "loss": 0.6337,
      "step": 206
    },
    {
      "epoch": 0.8384810126582278,
      "grad_norm": 0.5465812813619887,
      "learning_rate": 3.943440517955285e-05,
      "loss": 0.6268,
      "step": 207
    },
    {
      "epoch": 0.8425316455696202,
      "grad_norm": 0.683566931403071,
      "learning_rate": 3.9420924226833126e-05,
      "loss": 0.6194,
      "step": 208
    },
    {
      "epoch": 0.8465822784810126,
      "grad_norm": 0.8456912417524167,
      "learning_rate": 3.9407286860659566e-05,
      "loss": 0.6421,
      "step": 209
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.7005968590348535,
      "learning_rate": 3.9393493190865657e-05,
      "loss": 0.6126,
      "step": 210
    },
    {
      "epoch": 0.8546835443037974,
      "grad_norm": 0.5752150912641875,
      "learning_rate": 3.937954332854371e-05,
      "loss": 0.6277,
      "step": 211
    },
    {
      "epoch": 0.8587341772151899,
      "grad_norm": 0.6196384020142428,
      "learning_rate": 3.9365437386044016e-05,
      "loss": 0.6275,
      "step": 212
    },
    {
      "epoch": 0.8627848101265823,
      "grad_norm": 0.6488415715968038,
      "learning_rate": 3.935117547697387e-05,
      "loss": 0.6309,
      "step": 213
    },
    {
      "epoch": 0.8668354430379747,
      "grad_norm": 0.6982459827435065,
      "learning_rate": 3.933675771619675e-05,
      "loss": 0.6337,
      "step": 214
    },
    {
      "epoch": 0.8708860759493671,
      "grad_norm": 0.5688983251630224,
      "learning_rate": 3.932218421983131e-05,
      "loss": 0.6343,
      "step": 215
    },
    {
      "epoch": 0.8749367088607595,
      "grad_norm": 0.4160966225198047,
      "learning_rate": 3.9307455105250484e-05,
      "loss": 0.6239,
      "step": 216
    },
    {
      "epoch": 0.8789873417721519,
      "grad_norm": 0.524669697339945,
      "learning_rate": 3.929257049108054e-05,
      "loss": 0.6178,
      "step": 217
    },
    {
      "epoch": 0.8830379746835443,
      "grad_norm": 0.5152289480745182,
      "learning_rate": 3.927753049720011e-05,
      "loss": 0.6369,
      "step": 218
    },
    {
      "epoch": 0.8870886075949367,
      "grad_norm": 0.473699341767848,
      "learning_rate": 3.9262335244739234e-05,
      "loss": 0.646,
      "step": 219
    },
    {
      "epoch": 0.8911392405063291,
      "grad_norm": 0.46064652637604775,
      "learning_rate": 3.92469848560784e-05,
      "loss": 0.6192,
      "step": 220
    },
    {
      "epoch": 0.8951898734177215,
      "grad_norm": 0.4242902276099433,
      "learning_rate": 3.923147945484751e-05,
      "loss": 0.6303,
      "step": 221
    },
    {
      "epoch": 0.8992405063291139,
      "grad_norm": 0.49401445340889244,
      "learning_rate": 3.9215819165924956e-05,
      "loss": 0.6236,
      "step": 222
    },
    {
      "epoch": 0.9032911392405063,
      "grad_norm": 0.4825647028044612,
      "learning_rate": 3.920000411543654e-05,
      "loss": 0.6107,
      "step": 223
    },
    {
      "epoch": 0.9073417721518987,
      "grad_norm": 0.4648926720618138,
      "learning_rate": 3.9184034430754495e-05,
      "loss": 0.62,
      "step": 224
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.5764704044149312,
      "learning_rate": 3.916791024049648e-05,
      "loss": 0.6264,
      "step": 225
    },
    {
      "epoch": 0.9154430379746835,
      "grad_norm": 0.6926696416581806,
      "learning_rate": 3.91516316745245e-05,
      "loss": 0.645,
      "step": 226
    },
    {
      "epoch": 0.9194936708860759,
      "grad_norm": 0.5661594135986946,
      "learning_rate": 3.913519886394389e-05,
      "loss": 0.6381,
      "step": 227
    },
    {
      "epoch": 0.9235443037974683,
      "grad_norm": 0.3772105422739256,
      "learning_rate": 3.911861194110225e-05,
      "loss": 0.6181,
      "step": 228
    },
    {
      "epoch": 0.9275949367088607,
      "grad_norm": 0.48879146836297693,
      "learning_rate": 3.910187103958837e-05,
      "loss": 0.6301,
      "step": 229
    },
    {
      "epoch": 0.9316455696202531,
      "grad_norm": 1.1850082021436543,
      "learning_rate": 3.908497629423117e-05,
      "loss": 0.6246,
      "step": 230
    },
    {
      "epoch": 0.9356962025316455,
      "grad_norm": 0.44330537384560836,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 0.6257,
      "step": 231
    },
    {
      "epoch": 0.9397468354430379,
      "grad_norm": 0.5515900442430017,
      "learning_rate": 3.9050725817496594e-05,
      "loss": 0.6292,
      "step": 232
    },
    {
      "epoch": 0.9437974683544303,
      "grad_norm": 0.6333829354802747,
      "learning_rate": 3.9033370361967844e-05,
      "loss": 0.6223,
      "step": 233
    },
    {
      "epoch": 0.9478481012658228,
      "grad_norm": 0.625907461156176,
      "learning_rate": 3.901586161429081e-05,
      "loss": 0.628,
      "step": 234
    },
    {
      "epoch": 0.9518987341772152,
      "grad_norm": 0.5912875446488828,
      "learning_rate": 3.8998199715478545e-05,
      "loss": 0.6388,
      "step": 235
    },
    {
      "epoch": 0.9559493670886076,
      "grad_norm": 2.534772848596236,
      "learning_rate": 3.8980384807777564e-05,
      "loss": 0.6504,
      "step": 236
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.7527199049628908,
      "learning_rate": 3.896241703466667e-05,
      "loss": 0.6302,
      "step": 237
    },
    {
      "epoch": 0.9640506329113924,
      "grad_norm": 1.195053650025741,
      "learning_rate": 3.894429654085585e-05,
      "loss": 0.6402,
      "step": 238
    },
    {
      "epoch": 0.9681012658227848,
      "grad_norm": 0.5980244768520538,
      "learning_rate": 3.892602347228505e-05,
      "loss": 0.6091,
      "step": 239
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.9985418414588404,
      "learning_rate": 3.890759797612307e-05,
      "loss": 0.6147,
      "step": 240
    },
    {
      "epoch": 0.9762025316455696,
      "grad_norm": 1.2747212246697324,
      "learning_rate": 3.888902020076632e-05,
      "loss": 0.6365,
      "step": 241
    },
    {
      "epoch": 0.980253164556962,
      "grad_norm": 0.9805542738778488,
      "learning_rate": 3.887029029583764e-05,
      "loss": 0.6365,
      "step": 242
    },
    {
      "epoch": 0.9843037974683544,
      "grad_norm": 1.2598365025741,
      "learning_rate": 3.8851408412185125e-05,
      "loss": 0.6164,
      "step": 243
    },
    {
      "epoch": 0.9883544303797468,
      "grad_norm": 0.8484777556375804,
      "learning_rate": 3.8832374701880855e-05,
      "loss": 0.6414,
      "step": 244
    },
    {
      "epoch": 0.9924050632911392,
      "grad_norm": 1.3333495679797724,
      "learning_rate": 3.881318931821972e-05,
      "loss": 0.6306,
      "step": 245
    },
    {
      "epoch": 0.9964556962025316,
      "grad_norm": 0.8805704782012573,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.6336,
      "step": 246
    },
    {
      "epoch": 1.0015189873417722,
      "grad_norm": 2.5265983356658173,
      "learning_rate": 3.8774364150112955e-05,
      "loss": 0.6258,
      "step": 247
    },
    {
      "epoch": 1.0055696202531645,
      "grad_norm": 2.2481644964234246,
      "learning_rate": 3.8754724678359884e-05,
      "loss": 0.6036,
      "step": 248
    },
    {
      "epoch": 1.009620253164557,
      "grad_norm": 0.6970741170778451,
      "learning_rate": 3.873493415863256e-05,
      "loss": 0.5784,
      "step": 249
    },
    {
      "epoch": 1.0136708860759494,
      "grad_norm": 1.7404018816795028,
      "learning_rate": 3.871499275032111e-05,
      "loss": 0.6056,
      "step": 250
    },
    {
      "epoch": 1.0177215189873419,
      "grad_norm": 2.694243508496181,
      "learning_rate": 3.869490061403091e-05,
      "loss": 0.6644,
      "step": 251
    },
    {
      "epoch": 1.0217721518987342,
      "grad_norm": 0.8196871020301907,
      "learning_rate": 3.867465791158124e-05,
      "loss": 0.5919,
      "step": 252
    },
    {
      "epoch": 1.0258227848101267,
      "grad_norm": 1.3237796521089902,
      "learning_rate": 3.865426480600407e-05,
      "loss": 0.5998,
      "step": 253
    },
    {
      "epoch": 1.029873417721519,
      "grad_norm": 1.3049962364368444,
      "learning_rate": 3.863372146154264e-05,
      "loss": 0.5928,
      "step": 254
    },
    {
      "epoch": 1.0339240506329115,
      "grad_norm": 0.5818349333370841,
      "learning_rate": 3.861302804365024e-05,
      "loss": 0.5828,
      "step": 255
    },
    {
      "epoch": 1.0379746835443038,
      "grad_norm": 0.97063078608405,
      "learning_rate": 3.85921847189888e-05,
      "loss": 0.5959,
      "step": 256
    },
    {
      "epoch": 1.0420253164556963,
      "grad_norm": 0.7713166513645306,
      "learning_rate": 3.85711916554276e-05,
      "loss": 0.5953,
      "step": 257
    },
    {
      "epoch": 1.0460759493670886,
      "grad_norm": 0.8942459730908692,
      "learning_rate": 3.85500490220419e-05,
      "loss": 0.6097,
      "step": 258
    },
    {
      "epoch": 1.050126582278481,
      "grad_norm": 0.8941261968922476,
      "learning_rate": 3.852875698911154e-05,
      "loss": 0.6028,
      "step": 259
    },
    {
      "epoch": 1.0541772151898734,
      "grad_norm": 0.7355014858586207,
      "learning_rate": 3.850731572811963e-05,
      "loss": 0.5955,
      "step": 260
    },
    {
      "epoch": 1.058227848101266,
      "grad_norm": 0.8129664540326133,
      "learning_rate": 3.848572541175116e-05,
      "loss": 0.5837,
      "step": 261
    },
    {
      "epoch": 1.0622784810126582,
      "grad_norm": 0.5650176145200243,
      "learning_rate": 3.846398621389154e-05,
      "loss": 0.5856,
      "step": 262
    },
    {
      "epoch": 1.0663291139240507,
      "grad_norm": 0.6848721166166323,
      "learning_rate": 3.84420983096253e-05,
      "loss": 0.5911,
      "step": 263
    },
    {
      "epoch": 1.070379746835443,
      "grad_norm": 0.5268100411296476,
      "learning_rate": 3.8420061875234606e-05,
      "loss": 0.5651,
      "step": 264
    },
    {
      "epoch": 1.0744303797468355,
      "grad_norm": 0.5792215769193122,
      "learning_rate": 3.839787708819787e-05,
      "loss": 0.581,
      "step": 265
    },
    {
      "epoch": 1.0784810126582278,
      "grad_norm": 0.9505730200739876,
      "learning_rate": 3.8375544127188325e-05,
      "loss": 0.5807,
      "step": 266
    },
    {
      "epoch": 1.0825316455696203,
      "grad_norm": 0.5370718196680457,
      "learning_rate": 3.8353063172072564e-05,
      "loss": 0.5925,
      "step": 267
    },
    {
      "epoch": 1.0865822784810126,
      "grad_norm": 0.5827574821201854,
      "learning_rate": 3.8330434403909105e-05,
      "loss": 0.5742,
      "step": 268
    },
    {
      "epoch": 1.0906329113924051,
      "grad_norm": 0.5656627540660353,
      "learning_rate": 3.8307658004946934e-05,
      "loss": 0.5819,
      "step": 269
    },
    {
      "epoch": 1.0946835443037974,
      "grad_norm": 1.0869401351883203,
      "learning_rate": 3.8284734158624046e-05,
      "loss": 0.64,
      "step": 270
    },
    {
      "epoch": 1.09873417721519,
      "grad_norm": 0.5212890436046597,
      "learning_rate": 3.826166304956594e-05,
      "loss": 0.5982,
      "step": 271
    },
    {
      "epoch": 1.1027848101265822,
      "grad_norm": 0.541285450616523,
      "learning_rate": 3.8238444863584164e-05,
      "loss": 0.5828,
      "step": 272
    },
    {
      "epoch": 1.1068354430379748,
      "grad_norm": 0.6414610142018482,
      "learning_rate": 3.821507978767479e-05,
      "loss": 0.579,
      "step": 273
    },
    {
      "epoch": 1.110886075949367,
      "grad_norm": 0.46427280043738534,
      "learning_rate": 3.819156801001693e-05,
      "loss": 0.598,
      "step": 274
    },
    {
      "epoch": 1.1149367088607596,
      "grad_norm": 0.4966969238220982,
      "learning_rate": 3.816790971997121e-05,
      "loss": 0.5885,
      "step": 275
    },
    {
      "epoch": 1.1189873417721519,
      "grad_norm": 0.5338345494964832,
      "learning_rate": 3.8144105108078246e-05,
      "loss": 0.5929,
      "step": 276
    },
    {
      "epoch": 1.1230379746835444,
      "grad_norm": 0.5701820414699905,
      "learning_rate": 3.81201543660571e-05,
      "loss": 0.587,
      "step": 277
    },
    {
      "epoch": 1.1270886075949367,
      "grad_norm": 0.42826027474842443,
      "learning_rate": 3.809605768680377e-05,
      "loss": 0.5812,
      "step": 278
    },
    {
      "epoch": 1.1311392405063292,
      "grad_norm": 0.46087228282886944,
      "learning_rate": 3.807181526438958e-05,
      "loss": 0.5863,
      "step": 279
    },
    {
      "epoch": 1.1351898734177215,
      "grad_norm": 0.38588376992106554,
      "learning_rate": 3.8047427294059697e-05,
      "loss": 0.5707,
      "step": 280
    },
    {
      "epoch": 1.139240506329114,
      "grad_norm": 0.38219530362553844,
      "learning_rate": 3.802289397223145e-05,
      "loss": 0.5748,
      "step": 281
    },
    {
      "epoch": 1.1432911392405063,
      "grad_norm": 0.39926936207110575,
      "learning_rate": 3.7998215496492854e-05,
      "loss": 0.5853,
      "step": 282
    },
    {
      "epoch": 1.1473417721518988,
      "grad_norm": 0.39486826691757215,
      "learning_rate": 3.797339206560096e-05,
      "loss": 0.5721,
      "step": 283
    },
    {
      "epoch": 1.151392405063291,
      "grad_norm": 0.4530402706844666,
      "learning_rate": 3.794842387948027e-05,
      "loss": 0.5753,
      "step": 284
    },
    {
      "epoch": 1.1554430379746836,
      "grad_norm": 0.36848362424424375,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 0.5666,
      "step": 285
    },
    {
      "epoch": 1.159493670886076,
      "grad_norm": 0.3939714830086508,
      "learning_rate": 3.7898054047078054e-05,
      "loss": 0.563,
      "step": 286
    },
    {
      "epoch": 1.1635443037974684,
      "grad_norm": 0.4185048527791837,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.5848,
      "step": 287
    },
    {
      "epoch": 1.1675949367088607,
      "grad_norm": 0.3779768398992768,
      "learning_rate": 3.7847107621969786e-05,
      "loss": 0.5755,
      "step": 288
    },
    {
      "epoch": 1.1716455696202532,
      "grad_norm": 0.397211278485165,
      "learning_rate": 3.7821418699320064e-05,
      "loss": 0.5723,
      "step": 289
    },
    {
      "epoch": 1.1756962025316455,
      "grad_norm": 0.36036921811835726,
      "learning_rate": 3.7795586245414145e-05,
      "loss": 0.5873,
      "step": 290
    },
    {
      "epoch": 1.179746835443038,
      "grad_norm": 0.3952858292291357,
      "learning_rate": 3.776961046830306e-05,
      "loss": 0.5784,
      "step": 291
    },
    {
      "epoch": 1.1837974683544303,
      "grad_norm": 0.3830737172540273,
      "learning_rate": 3.774349157719215e-05,
      "loss": 0.5737,
      "step": 292
    },
    {
      "epoch": 1.1878481012658229,
      "grad_norm": 0.3918236224719613,
      "learning_rate": 3.7717229782439365e-05,
      "loss": 0.5858,
      "step": 293
    },
    {
      "epoch": 1.1918987341772151,
      "grad_norm": 0.3358760445223252,
      "learning_rate": 3.769082529555359e-05,
      "loss": 0.5811,
      "step": 294
    },
    {
      "epoch": 1.1959493670886077,
      "grad_norm": 0.364141189926486,
      "learning_rate": 3.766427832919294e-05,
      "loss": 0.5742,
      "step": 295
    },
    {
      "epoch": 1.2,
      "grad_norm": 1.6399048279620578,
      "learning_rate": 3.7637589097163024e-05,
      "loss": 0.6369,
      "step": 296
    },
    {
      "epoch": 1.2040506329113925,
      "grad_norm": 0.4026373729251476,
      "learning_rate": 3.761075781441526e-05,
      "loss": 0.568,
      "step": 297
    },
    {
      "epoch": 1.2081012658227848,
      "grad_norm": 0.36953722405152084,
      "learning_rate": 3.75837846970451e-05,
      "loss": 0.5781,
      "step": 298
    },
    {
      "epoch": 1.2121518987341773,
      "grad_norm": 0.3621272665795014,
      "learning_rate": 3.755666996229032e-05,
      "loss": 0.5828,
      "step": 299
    },
    {
      "epoch": 1.2162025316455696,
      "grad_norm": 0.41254265012021796,
      "learning_rate": 3.752941382852927e-05,
      "loss": 0.5724,
      "step": 300
    },
    {
      "epoch": 1.220253164556962,
      "grad_norm": 0.45427768057765006,
      "learning_rate": 3.7502016515279115e-05,
      "loss": 0.5691,
      "step": 301
    },
    {
      "epoch": 1.2243037974683544,
      "grad_norm": 0.49130353756910294,
      "learning_rate": 3.7474478243194043e-05,
      "loss": 0.5691,
      "step": 302
    },
    {
      "epoch": 1.228354430379747,
      "grad_norm": 0.4382766111443482,
      "learning_rate": 3.744679923406351e-05,
      "loss": 0.5799,
      "step": 303
    },
    {
      "epoch": 1.2324050632911392,
      "grad_norm": 0.42072283804106375,
      "learning_rate": 3.741897971081043e-05,
      "loss": 0.5735,
      "step": 304
    },
    {
      "epoch": 1.2364556962025317,
      "grad_norm": 0.400473086382162,
      "learning_rate": 3.739101989748946e-05,
      "loss": 0.5747,
      "step": 305
    },
    {
      "epoch": 1.240506329113924,
      "grad_norm": 0.3609719734612689,
      "learning_rate": 3.7362920019285066e-05,
      "loss": 0.5629,
      "step": 306
    },
    {
      "epoch": 1.2445569620253165,
      "grad_norm": 0.5073168238187583,
      "learning_rate": 3.73346803025098e-05,
      "loss": 0.5756,
      "step": 307
    },
    {
      "epoch": 1.2486075949367088,
      "grad_norm": 0.3809375487334342,
      "learning_rate": 3.730630097460247e-05,
      "loss": 0.5758,
      "step": 308
    },
    {
      "epoch": 1.2526582278481013,
      "grad_norm": 0.4932504038487977,
      "learning_rate": 3.727778226412628e-05,
      "loss": 0.5683,
      "step": 309
    },
    {
      "epoch": 1.2567088607594936,
      "grad_norm": 0.6909279206928767,
      "learning_rate": 3.7249124400767006e-05,
      "loss": 0.5757,
      "step": 310
    },
    {
      "epoch": 1.2607594936708861,
      "grad_norm": 0.44384065428308855,
      "learning_rate": 3.722032761533114e-05,
      "loss": 0.5869,
      "step": 311
    },
    {
      "epoch": 1.2648101265822784,
      "grad_norm": 0.4808239799238354,
      "learning_rate": 3.719139213974403e-05,
      "loss": 0.5727,
      "step": 312
    },
    {
      "epoch": 1.268860759493671,
      "grad_norm": 0.4249471900763035,
      "learning_rate": 3.7162318207048006e-05,
      "loss": 0.5787,
      "step": 313
    },
    {
      "epoch": 1.2729113924050632,
      "grad_norm": 0.5059898678828224,
      "learning_rate": 3.713310605140055e-05,
      "loss": 0.5784,
      "step": 314
    },
    {
      "epoch": 1.2769620253164558,
      "grad_norm": 0.36890927100868337,
      "learning_rate": 3.710375590807233e-05,
      "loss": 0.5813,
      "step": 315
    },
    {
      "epoch": 1.281012658227848,
      "grad_norm": 0.42318635646802805,
      "learning_rate": 3.7074268013445365e-05,
      "loss": 0.577,
      "step": 316
    },
    {
      "epoch": 1.2850632911392406,
      "grad_norm": 0.34241492575118926,
      "learning_rate": 3.7044642605011114e-05,
      "loss": 0.5809,
      "step": 317
    },
    {
      "epoch": 1.2891139240506329,
      "grad_norm": 0.36325943216014706,
      "learning_rate": 3.701487992136854e-05,
      "loss": 0.5711,
      "step": 318
    },
    {
      "epoch": 1.2931645569620254,
      "grad_norm": 0.4921325337403487,
      "learning_rate": 3.69849802022222e-05,
      "loss": 0.5708,
      "step": 319
    },
    {
      "epoch": 1.2972151898734177,
      "grad_norm": 0.3916245950553964,
      "learning_rate": 3.6954943688380334e-05,
      "loss": 0.5659,
      "step": 320
    },
    {
      "epoch": 1.3012658227848102,
      "grad_norm": 0.3405700001935763,
      "learning_rate": 3.692477062175289e-05,
      "loss": 0.5724,
      "step": 321
    },
    {
      "epoch": 1.3053164556962025,
      "grad_norm": 0.46329867186771023,
      "learning_rate": 3.689446124534958e-05,
      "loss": 0.5774,
      "step": 322
    },
    {
      "epoch": 1.309367088607595,
      "grad_norm": 0.4367737028991729,
      "learning_rate": 3.686401580327799e-05,
      "loss": 0.5752,
      "step": 323
    },
    {
      "epoch": 1.3134177215189873,
      "grad_norm": 0.4386732527192421,
      "learning_rate": 3.683343454074149e-05,
      "loss": 0.5946,
      "step": 324
    },
    {
      "epoch": 1.3174683544303798,
      "grad_norm": 0.43535608173207146,
      "learning_rate": 3.6802717704037386e-05,
      "loss": 0.5856,
      "step": 325
    },
    {
      "epoch": 1.321518987341772,
      "grad_norm": 0.3686485160669868,
      "learning_rate": 3.6771865540554855e-05,
      "loss": 0.557,
      "step": 326
    },
    {
      "epoch": 1.3255696202531646,
      "grad_norm": 0.39394713290083455,
      "learning_rate": 3.674087829877297e-05,
      "loss": 0.5668,
      "step": 327
    },
    {
      "epoch": 1.329620253164557,
      "grad_norm": 0.42799419674768996,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.5666,
      "step": 328
    },
    {
      "epoch": 1.3336708860759494,
      "grad_norm": 0.3633642263412647,
      "learning_rate": 3.667849957966501e-05,
      "loss": 0.5679,
      "step": 329
    },
    {
      "epoch": 1.3377215189873417,
      "grad_norm": 0.326582172597283,
      "learning_rate": 3.6647108604728546e-05,
      "loss": 0.5868,
      "step": 330
    },
    {
      "epoch": 1.3417721518987342,
      "grad_norm": 0.41612279959130416,
      "learning_rate": 3.661558355626795e-05,
      "loss": 0.5738,
      "step": 331
    },
    {
      "epoch": 1.3458227848101265,
      "grad_norm": 0.43462062154179326,
      "learning_rate": 3.658392468818163e-05,
      "loss": 0.5774,
      "step": 332
    },
    {
      "epoch": 1.349873417721519,
      "grad_norm": 0.40455081261645787,
      "learning_rate": 3.655213225544574e-05,
      "loss": 0.5705,
      "step": 333
    },
    {
      "epoch": 1.3539240506329113,
      "grad_norm": 0.3736869264225449,
      "learning_rate": 3.652020651411218e-05,
      "loss": 0.5925,
      "step": 334
    },
    {
      "epoch": 1.3579746835443038,
      "grad_norm": 0.4430975534899666,
      "learning_rate": 3.6488147721306474e-05,
      "loss": 0.5874,
      "step": 335
    },
    {
      "epoch": 1.3620253164556961,
      "grad_norm": 1.0189664958190048,
      "learning_rate": 3.645595613522574e-05,
      "loss": 0.5773,
      "step": 336
    },
    {
      "epoch": 1.3660759493670886,
      "grad_norm": 0.48875718723687167,
      "learning_rate": 3.642363201513657e-05,
      "loss": 0.5979,
      "step": 337
    },
    {
      "epoch": 1.370126582278481,
      "grad_norm": 0.3463656250565052,
      "learning_rate": 3.6391175621373006e-05,
      "loss": 0.5808,
      "step": 338
    },
    {
      "epoch": 1.3741772151898735,
      "grad_norm": 0.44137718323205477,
      "learning_rate": 3.6358587215334355e-05,
      "loss": 0.5766,
      "step": 339
    },
    {
      "epoch": 1.3782278481012658,
      "grad_norm": 0.34985502503494387,
      "learning_rate": 3.632586705948318e-05,
      "loss": 0.576,
      "step": 340
    },
    {
      "epoch": 1.3822784810126583,
      "grad_norm": 0.5821872820183871,
      "learning_rate": 3.629301541734311e-05,
      "loss": 0.5824,
      "step": 341
    },
    {
      "epoch": 1.3863291139240506,
      "grad_norm": 0.3730994503739556,
      "learning_rate": 3.626003255349676e-05,
      "loss": 0.5705,
      "step": 342
    },
    {
      "epoch": 1.390379746835443,
      "grad_norm": 0.3272432308051581,
      "learning_rate": 3.622691873358357e-05,
      "loss": 0.5573,
      "step": 343
    },
    {
      "epoch": 1.3944303797468354,
      "grad_norm": 0.7127634010482963,
      "learning_rate": 3.61936742242977e-05,
      "loss": 0.5866,
      "step": 344
    },
    {
      "epoch": 1.3984810126582279,
      "grad_norm": 0.377930933824937,
      "learning_rate": 3.6160299293385864e-05,
      "loss": 0.5852,
      "step": 345
    },
    {
      "epoch": 1.4025316455696202,
      "grad_norm": 0.4335216318983424,
      "learning_rate": 3.612679420964516e-05,
      "loss": 0.566,
      "step": 346
    },
    {
      "epoch": 1.4065822784810127,
      "grad_norm": 0.3993077602229474,
      "learning_rate": 3.609315924292092e-05,
      "loss": 0.5674,
      "step": 347
    },
    {
      "epoch": 1.410632911392405,
      "grad_norm": 0.3821932469446836,
      "learning_rate": 3.6059394664104554e-05,
      "loss": 0.5834,
      "step": 348
    },
    {
      "epoch": 1.4146835443037975,
      "grad_norm": 0.8872312418284577,
      "learning_rate": 3.602550074513133e-05,
      "loss": 0.583,
      "step": 349
    },
    {
      "epoch": 1.4187341772151898,
      "grad_norm": 0.39572585566474605,
      "learning_rate": 3.599147775897822e-05,
      "loss": 0.5699,
      "step": 350
    },
    {
      "epoch": 1.4227848101265823,
      "grad_norm": 0.4314834289193324,
      "learning_rate": 3.595732597966167e-05,
      "loss": 0.5868,
      "step": 351
    },
    {
      "epoch": 1.4268354430379746,
      "grad_norm": 0.4343292769030793,
      "learning_rate": 3.592304568223542e-05,
      "loss": 0.5765,
      "step": 352
    },
    {
      "epoch": 1.4308860759493671,
      "grad_norm": 0.3713590404033252,
      "learning_rate": 3.588863714278826e-05,
      "loss": 0.5951,
      "step": 353
    },
    {
      "epoch": 1.4349367088607594,
      "grad_norm": 0.44242807680152524,
      "learning_rate": 3.585410063844186e-05,
      "loss": 0.591,
      "step": 354
    },
    {
      "epoch": 1.438987341772152,
      "grad_norm": 0.4696216029325405,
      "learning_rate": 3.581943644734846e-05,
      "loss": 0.5658,
      "step": 355
    },
    {
      "epoch": 1.4430379746835442,
      "grad_norm": 0.37964490771073856,
      "learning_rate": 3.578464484868869e-05,
      "loss": 0.575,
      "step": 356
    },
    {
      "epoch": 1.4470886075949367,
      "grad_norm": 0.45873216441443215,
      "learning_rate": 3.5749726122669316e-05,
      "loss": 0.5727,
      "step": 357
    },
    {
      "epoch": 1.451139240506329,
      "grad_norm": 0.4092841609229761,
      "learning_rate": 3.5714680550520943e-05,
      "loss": 0.5694,
      "step": 358
    },
    {
      "epoch": 1.4551898734177215,
      "grad_norm": 0.3911420062015203,
      "learning_rate": 3.5679508414495794e-05,
      "loss": 0.5575,
      "step": 359
    },
    {
      "epoch": 1.459240506329114,
      "grad_norm": 0.460331058224917,
      "learning_rate": 3.564420999786543e-05,
      "loss": 0.5647,
      "step": 360
    },
    {
      "epoch": 1.4632911392405064,
      "grad_norm": 0.4595744589630227,
      "learning_rate": 3.560878558491842e-05,
      "loss": 0.5821,
      "step": 361
    },
    {
      "epoch": 1.4673417721518986,
      "grad_norm": 0.44040955878200816,
      "learning_rate": 3.5573235460958145e-05,
      "loss": 0.5744,
      "step": 362
    },
    {
      "epoch": 1.4713924050632912,
      "grad_norm": 0.49304288909033295,
      "learning_rate": 3.553755991230039e-05,
      "loss": 0.5824,
      "step": 363
    },
    {
      "epoch": 1.4754430379746837,
      "grad_norm": 0.4075531219411201,
      "learning_rate": 3.5501759226271144e-05,
      "loss": 0.5658,
      "step": 364
    },
    {
      "epoch": 1.479493670886076,
      "grad_norm": 0.44950523013918753,
      "learning_rate": 3.546583369120419e-05,
      "loss": 0.5743,
      "step": 365
    },
    {
      "epoch": 1.4835443037974683,
      "grad_norm": 0.37867629804392505,
      "learning_rate": 3.5429783596438864e-05,
      "loss": 0.5843,
      "step": 366
    },
    {
      "epoch": 1.4875949367088608,
      "grad_norm": 0.49227265709677204,
      "learning_rate": 3.539360923231766e-05,
      "loss": 0.5806,
      "step": 367
    },
    {
      "epoch": 1.4916455696202533,
      "grad_norm": 0.5195662266219353,
      "learning_rate": 3.535731089018394e-05,
      "loss": 0.5625,
      "step": 368
    },
    {
      "epoch": 1.4956962025316456,
      "grad_norm": 0.47773928505088464,
      "learning_rate": 3.532088886237956e-05,
      "loss": 0.5994,
      "step": 369
    },
    {
      "epoch": 1.4997468354430379,
      "grad_norm": 0.4317675790547549,
      "learning_rate": 3.528434344224253e-05,
| "loss": 0.5618, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.5037974683544304, |
| "grad_norm": 0.40857417542561797, |
| "learning_rate": 3.524767492410464e-05, |
| "loss": 0.5801, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.507848101265823, |
| "grad_norm": 0.42704847835020093, |
| "learning_rate": 3.521088360328908e-05, |
| "loss": 0.5797, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.5118987341772152, |
| "grad_norm": 0.3942534211311338, |
| "learning_rate": 3.517396977610811e-05, |
| "loss": 0.5749, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.5159493670886075, |
| "grad_norm": 0.47206623009153625, |
| "learning_rate": 3.5136933739860595e-05, |
| "loss": 0.5715, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.52, |
| "grad_norm": 0.3770079874944735, |
| "learning_rate": 3.509977579282971e-05, |
| "loss": 0.5685, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.5240506329113925, |
| "grad_norm": 0.4665570968449033, |
| "learning_rate": 3.5062496234280424e-05, |
| "loss": 0.5616, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.5281012658227848, |
| "grad_norm": 0.39058609635383545, |
| "learning_rate": 3.502509536445719e-05, |
| "loss": 0.5695, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.5321518987341771, |
| "grad_norm": 0.35747363087285233, |
| "learning_rate": 3.498757348458147e-05, |
| "loss": 0.5708, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.5362025316455696, |
| "grad_norm": 0.35049406460212873, |
| "learning_rate": 3.4949930896849324e-05, |
| "loss": 0.5726, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.5402531645569622, |
| "grad_norm": 0.3777720756082823, |
| "learning_rate": 3.491216790442899e-05, |
| "loss": 0.5726, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.5443037974683544, |
| "grad_norm": 0.36077222637039236, |
| "learning_rate": 3.487428481145839e-05, |
| "loss": 0.5745, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.5483544303797467, |
| "grad_norm": 0.4860481746830461, |
| "learning_rate": 3.483628192304278e-05, |
| "loss": 0.5934, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.5524050632911393, |
| "grad_norm": 0.4961833237126473, |
| "learning_rate": 3.479815954525219e-05, |
| "loss": 0.5742, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.5564556962025318, |
| "grad_norm": 0.4320571894162534, |
| "learning_rate": 3.475991798511899e-05, |
| "loss": 0.5619, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.560506329113924, |
| "grad_norm": 0.3624282142614308, |
| "learning_rate": 3.4721557550635464e-05, |
| "loss": 0.5754, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.5645569620253164, |
| "grad_norm": 0.3792892878372777, |
| "learning_rate": 3.468307855075128e-05, |
| "loss": 0.5639, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.5686075949367089, |
| "grad_norm": 0.351906254692954, |
| "learning_rate": 3.4644481295371005e-05, |
| "loss": 0.5462, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.5726582278481014, |
| "grad_norm": 0.35547600407158053, |
| "learning_rate": 3.460576609535163e-05, |
| "loss": 0.568, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.5767088607594937, |
| "grad_norm": 0.41300117675252423, |
| "learning_rate": 3.456693326250006e-05, |
| "loss": 0.5708, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.580759493670886, |
| "grad_norm": 0.31098878674793945, |
| "learning_rate": 3.452798310957058e-05, |
| "loss": 0.5703, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.5848101265822785, |
| "grad_norm": 0.4022886818835067, |
| "learning_rate": 3.4488915950262386e-05, |
| "loss": 0.5698, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.588860759493671, |
| "grad_norm": 0.36074986564038686, |
| "learning_rate": 3.4449732099216985e-05, |
| "loss": 0.5828, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.5929113924050633, |
| "grad_norm": 0.3637626692634795, |
| "learning_rate": 3.441043187201574e-05, |
| "loss": 0.5683, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.5969620253164556, |
| "grad_norm": 0.3797078648102642, |
| "learning_rate": 3.437101558517728e-05, |
| "loss": 0.5699, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.601012658227848, |
| "grad_norm": 0.3427214079175517, |
| "learning_rate": 3.433148355615496e-05, |
| "loss": 0.5806, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.6050632911392406, |
| "grad_norm": 0.5118782908938658, |
| "learning_rate": 3.4291836103334294e-05, |
| "loss": 0.5729, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.609113924050633, |
| "grad_norm": 0.456414027269528, |
| "learning_rate": 3.425207354603043e-05, |
| "loss": 0.5763, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.6131645569620252, |
| "grad_norm": 0.41139278799296636, |
| "learning_rate": 3.421219620448553e-05, |
| "loss": 0.568, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.6172151898734177, |
| "grad_norm": 0.5029575176874077, |
| "learning_rate": 3.417220439986623e-05, |
| "loss": 0.5744, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.6212658227848102, |
| "grad_norm": 0.4134961342937195, |
| "learning_rate": 3.4132098454261024e-05, |
| "loss": 0.5876, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.6253164556962025, |
| "grad_norm": 0.3805649206415535, |
| "learning_rate": 3.4091878690677676e-05, |
| "loss": 0.5703, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.6293670886075948, |
| "grad_norm": 0.44832727737323136, |
| "learning_rate": 3.405154543304065e-05, |
| "loss": 0.5797, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.6334177215189873, |
| "grad_norm": 0.4908626913126581, |
| "learning_rate": 3.401109900618843e-05, |
| "loss": 0.5792, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.6374683544303799, |
| "grad_norm": 0.4490695781699458, |
| "learning_rate": 3.3970539735870996e-05, |
| "loss": 0.5724, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.6415189873417722, |
| "grad_norm": 0.43657789176184786, |
| "learning_rate": 3.392986794874714e-05, |
| "loss": 0.5722, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.6455696202531644, |
| "grad_norm": 0.6198314733587138, |
| "learning_rate": 3.388908397238184e-05, |
| "loss": 0.5751, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.649620253164557, |
| "grad_norm": 0.43161346418693336, |
| "learning_rate": 3.384818813524362e-05, |
| "loss": 0.5871, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.6536708860759495, |
| "grad_norm": 0.39798012794448717, |
| "learning_rate": 3.380718076670195e-05, |
| "loss": 0.5626, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.6577215189873418, |
| "grad_norm": 0.39123559382475837, |
| "learning_rate": 3.376606219702454e-05, |
| "loss": 0.569, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.661772151898734, |
| "grad_norm": 0.41276317292923453, |
| "learning_rate": 3.372483275737468e-05, |
| "loss": 0.5609, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.6658227848101266, |
| "grad_norm": 0.3346143871051394, |
| "learning_rate": 3.368349277980861e-05, |
| "loss": 0.574, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.669873417721519, |
| "grad_norm": 0.4160857928414006, |
| "learning_rate": 3.3642042597272844e-05, |
| "loss": 0.5624, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.6739240506329114, |
| "grad_norm": 0.31152974836808744, |
| "learning_rate": 3.360048254360144e-05, |
| "loss": 0.562, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.6779746835443037, |
| "grad_norm": 0.4353926176559077, |
| "learning_rate": 3.355881295351336e-05, |
| "loss": 0.5598, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.6820253164556962, |
| "grad_norm": 0.36321846103373717, |
| "learning_rate": 3.351703416260975e-05, |
| "loss": 0.5592, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.6860759493670887, |
| "grad_norm": 0.3810605979860431, |
| "learning_rate": 3.347514650737126e-05, |
| "loss": 0.5634, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.690126582278481, |
| "grad_norm": 0.4146387232722059, |
| "learning_rate": 3.3433150325155295e-05, |
| "loss": 0.5697, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.6941772151898733, |
| "grad_norm": 0.3676561380639018, |
| "learning_rate": 3.339104595419334e-05, |
| "loss": 0.5652, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.6982278481012658, |
| "grad_norm": 0.3259274662294342, |
| "learning_rate": 3.3348833733588204e-05, |
| "loss": 0.5863, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.7022784810126583, |
| "grad_norm": 0.4085543042755284, |
| "learning_rate": 3.3306514003311305e-05, |
| "loss": 0.5619, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.7063291139240506, |
| "grad_norm": 0.34158724652088956, |
| "learning_rate": 3.326408710419996e-05, |
| "loss": 0.5683, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.710379746835443, |
| "grad_norm": 0.38544286605443906, |
| "learning_rate": 3.322155337795454e-05, |
| "loss": 0.5758, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.7144303797468354, |
| "grad_norm": 0.3557036460172178, |
| "learning_rate": 3.317891316713587e-05, |
| "loss": 0.5788, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.718481012658228, |
| "grad_norm": 0.4851167897158188, |
| "learning_rate": 3.313616681516231e-05, |
| "loss": 0.5618, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.7225316455696202, |
| "grad_norm": 0.43205868808580067, |
| "learning_rate": 3.309331466630713e-05, |
| "loss": 0.578, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.7265822784810125, |
| "grad_norm": 0.3778094456896258, |
| "learning_rate": 3.305035706569563e-05, |
| "loss": 0.5721, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.730632911392405, |
| "grad_norm": 0.4319649342234142, |
| "learning_rate": 3.3007294359302433e-05, |
| "loss": 0.5663, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.7346835443037976, |
| "grad_norm": 0.4046007707000431, |
| "learning_rate": 3.296412689394864e-05, |
| "loss": 0.5786, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.7387341772151899, |
| "grad_norm": 0.3793042988828969, |
| "learning_rate": 3.292085501729909e-05, |
| "loss": 0.5715, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.7427848101265822, |
| "grad_norm": 0.44082623138629345, |
| "learning_rate": 3.2877479077859534e-05, |
| "loss": 0.5722, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.7468354430379747, |
| "grad_norm": 0.5250454659872341, |
| "learning_rate": 3.283399942497381e-05, |
| "loss": 0.5673, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.7508860759493672, |
| "grad_norm": 0.3752144502732557, |
| "learning_rate": 3.279041640882108e-05, |
| "loss": 0.571, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.7549367088607595, |
| "grad_norm": 0.3863601291822436, |
| "learning_rate": 3.2746730380412964e-05, |
| "loss": 0.5757, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.7589873417721518, |
| "grad_norm": 0.4994128077937738, |
| "learning_rate": 3.2702941691590726e-05, |
| "loss": 0.5646, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.7630379746835443, |
| "grad_norm": 0.5414244421550419, |
| "learning_rate": 3.265905069502244e-05, |
| "loss": 0.5688, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.7670886075949368, |
| "grad_norm": 0.3336201510392659, |
| "learning_rate": 3.261505774420016e-05, |
| "loss": 0.5571, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.771139240506329, |
| "grad_norm": 0.5457006648416834, |
| "learning_rate": 3.257096319343707e-05, |
| "loss": 0.5643, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.7751898734177214, |
| "grad_norm": 0.5073752741090407, |
| "learning_rate": 3.2526767397864614e-05, |
| "loss": 0.5486, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.779240506329114, |
| "grad_norm": 0.3698975676982019, |
| "learning_rate": 3.248247071342966e-05, |
| "loss": 0.5679, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.7832911392405064, |
| "grad_norm": 1.842136528384267, |
| "learning_rate": 3.243807349689161e-05, |
| "loss": 0.6044, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.7873417721518987, |
| "grad_norm": 0.4471643135967781, |
| "learning_rate": 3.2393576105819544e-05, |
| "loss": 0.5833, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.791392405063291, |
| "grad_norm": 0.5957429902192082, |
| "learning_rate": 3.2348978898589333e-05, |
| "loss": 0.5628, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.7954430379746835, |
| "grad_norm": 0.3950060285975274, |
| "learning_rate": 3.230428223438075e-05, |
| "loss": 0.5591, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.799493670886076, |
| "grad_norm": 0.40596889504337186, |
| "learning_rate": 3.225948647317459e-05, |
| "loss": 0.5668, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.8035443037974683, |
| "grad_norm": 0.47968250839779675, |
| "learning_rate": 3.2214591975749745e-05, |
| "loss": 0.5599, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.8075949367088606, |
| "grad_norm": 0.3402847291309061, |
| "learning_rate": 3.216959910368034e-05, |
| "loss": 0.581, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.8116455696202531, |
| "grad_norm": 0.4220764703829784, |
| "learning_rate": 3.212450821933277e-05, |
| "loss": 0.5609, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.8156962025316457, |
| "grad_norm": 0.44540374062912996, |
| "learning_rate": 3.207931968586281e-05, |
| "loss": 0.5753, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.819746835443038, |
| "grad_norm": 0.3719335051159528, |
| "learning_rate": 3.203403386721272e-05, |
| "loss": 0.5748, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.8237974683544302, |
| "grad_norm": 0.35686796975091434, |
| "learning_rate": 3.1988651128108245e-05, |
| "loss": 0.5725, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.8278481012658228, |
| "grad_norm": 0.330773493124629, |
| "learning_rate": 3.194317183405573e-05, |
| "loss": 0.5825, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.8318987341772153, |
| "grad_norm": 0.3621578817835101, |
| "learning_rate": 3.189759635133914e-05, |
| "loss": 0.5685, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.8359493670886076, |
| "grad_norm": 0.3115280721229672, |
| "learning_rate": 3.185192504701718e-05, |
| "loss": 0.5729, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.8399999999999999, |
| "grad_norm": 0.3668137335328413, |
| "learning_rate": 3.1806158288920234e-05, |
| "loss": 0.5664, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.8440506329113924, |
| "grad_norm": 0.32867430699224925, |
| "learning_rate": 3.1760296445647477e-05, |
| "loss": 0.5592, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.8481012658227849, |
| "grad_norm": 0.42040112527832035, |
| "learning_rate": 3.1714339886563896e-05, |
| "loss": 0.565, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.8521518987341772, |
| "grad_norm": 0.35926991262489943, |
| "learning_rate": 3.166828898179731e-05, |
| "loss": 0.5619, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.8562025316455695, |
| "grad_norm": 0.3783787101058612, |
| "learning_rate": 3.162214410223536e-05, |
| "loss": 0.5681, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.860253164556962, |
| "grad_norm": 0.4555582302366678, |
| "learning_rate": 3.157590561952257e-05, |
| "loss": 0.5464, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.8643037974683545, |
| "grad_norm": 0.37017848126456443, |
| "learning_rate": 3.152957390605732e-05, |
| "loss": 0.5672, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.8683544303797468, |
| "grad_norm": 0.4535867353677617, |
| "learning_rate": 3.148314933498886e-05, |
| "loss": 0.5599, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.872405063291139, |
| "grad_norm": 0.3595247742589157, |
| "learning_rate": 3.143663228021431e-05, |
| "loss": 0.5608, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.8764556962025316, |
| "grad_norm": 0.3909296354072627, |
| "learning_rate": 3.1390023116375624e-05, |
| "loss": 0.5668, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.8805063291139241, |
| "grad_norm": 0.3850122150894832, |
| "learning_rate": 3.134332221885661e-05, |
| "loss": 0.5626, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.8845569620253164, |
| "grad_norm": 0.35847880610050675, |
| "learning_rate": 3.129652996377987e-05, |
| "loss": 0.553, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.8886075949367087, |
| "grad_norm": 0.3890829355667952, |
| "learning_rate": 3.12496467280038e-05, |
| "loss": 0.574, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.8926582278481012, |
| "grad_norm": 0.33215517124161265, |
| "learning_rate": 3.120267288911952e-05, |
| "loss": 0.5612, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.8967088607594937, |
| "grad_norm": 0.3833081590364025, |
| "learning_rate": 3.11556088254479e-05, |
| "loss": 0.5708, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.900759493670886, |
| "grad_norm": 0.3264950642708518, |
| "learning_rate": 3.11084549160364e-05, |
| "loss": 0.56, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.9048101265822783, |
| "grad_norm": 0.36742296243500683, |
| "learning_rate": 3.106121154065615e-05, |
| "loss": 0.5678, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.9088607594936708, |
| "grad_norm": 0.3511884013179289, |
| "learning_rate": 3.1013879079798805e-05, |
| "loss": 0.5674, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.9129113924050634, |
| "grad_norm": 0.3391842662460048, |
| "learning_rate": 3.096645791467348e-05, |
| "loss": 0.5681, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.9169620253164557, |
| "grad_norm": 0.35842435864496713, |
| "learning_rate": 3.091894842720373e-05, |
| "loss": 0.5624, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.921012658227848, |
| "grad_norm": 0.3146241383321096, |
| "learning_rate": 3.0871351000024425e-05, |
| "loss": 0.577, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.9250632911392405, |
| "grad_norm": 0.35950747694425195, |
| "learning_rate": 3.0823666016478716e-05, |
| "loss": 0.5713, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.929113924050633, |
| "grad_norm": 0.36312961020617696, |
| "learning_rate": 3.0775893860614896e-05, |
| "loss": 0.5731, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.9331645569620253, |
| "grad_norm": 0.35514776529298997, |
| "learning_rate": 3.0728034917183336e-05, |
| "loss": 0.5607, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.9372151898734176, |
| "grad_norm": 0.37275444549435743, |
| "learning_rate": 3.06800895716334e-05, |
| "loss": 0.5759, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.94126582278481, |
| "grad_norm": 0.34464416643632506, |
| "learning_rate": 3.063205821011029e-05, |
| "loss": 0.5707, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.9453164556962026, |
| "grad_norm": 0.4189715418466446, |
| "learning_rate": 3.0583941219452016e-05, |
| "loss": 0.563, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.9493670886075949, |
| "grad_norm": 0.30929382102660324, |
| "learning_rate": 3.053573898718618e-05, |
| "loss": 0.569, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.9534177215189872, |
| "grad_norm": 0.43575707681764747, |
| "learning_rate": 3.0487451901526956e-05, |
| "loss": 0.566, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.9574683544303797, |
| "grad_norm": 0.3170323202309556, |
| "learning_rate": 3.0439080351371875e-05, |
| "loss": 0.584, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.9615189873417722, |
| "grad_norm": 0.38924093100017326, |
| "learning_rate": 3.0390624726298764e-05, |
| "loss": 0.5708, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.9655696202531645, |
| "grad_norm": 0.4397525102054439, |
| "learning_rate": 3.034208541656255e-05, |
| "loss": 0.5714, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.9696202531645568, |
| "grad_norm": 0.3152575842007319, |
| "learning_rate": 3.029346281309218e-05, |
| "loss": 0.5589, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.9736708860759493, |
| "grad_norm": 0.4142831268171398, |
| "learning_rate": 3.0244757307487415e-05, |
| "loss": 0.5762, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.9777215189873418, |
| "grad_norm": 0.40787495979316857, |
| "learning_rate": 3.019596929201569e-05, |
| "loss": 0.5728, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.9817721518987341, |
| "grad_norm": 0.3976360352039801, |
| "learning_rate": 3.0147099159608985e-05, |
| "loss": 0.5569, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.9858227848101264, |
| "grad_norm": 0.36648115705359713, |
| "learning_rate": 3.0098147303860616e-05, |
| "loss": 0.5581, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.989873417721519, |
| "grad_norm": 0.34707105229441365, |
| "learning_rate": 3.0049114119022117e-05, |
| "loss": 0.5571, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.9939240506329114, |
| "grad_norm": 0.3742955549261726, |
| "learning_rate": 3.0000000000000004e-05, |
| "loss": 0.5678, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.9979746835443037, |
| "grad_norm": 0.34251800878630034, |
| "learning_rate": 2.995080534235264e-05, |
| "loss": 0.5635, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.0030379746835445, |
| "grad_norm": 0.4362543607761948, |
| "learning_rate": 2.9901530542287044e-05, |
| "loss": 0.5149, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.0070886075949366, |
| "grad_norm": 0.6044338075146388, |
| "learning_rate": 2.9852175996655676e-05, |
| "loss": 0.5255, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.011139240506329, |
| "grad_norm": 0.6094678643782165, |
| "learning_rate": 2.980274210295326e-05, |
| "loss": 0.5172, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.0151898734177216, |
| "grad_norm": 0.6310916311739054, |
| "learning_rate": 2.9753229259313578e-05, |
| "loss": 0.5017, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.019240506329114, |
| "grad_norm": 0.42733899096450423, |
| "learning_rate": 2.9703637864506274e-05, |
| "loss": 0.5065, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.023291139240506, |
| "grad_norm": 0.5091815911683367, |
| "learning_rate": 2.965396831793362e-05, |
| "loss": 0.5079, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.0273417721518987, |
| "grad_norm": 0.4754496627102285, |
| "learning_rate": 2.9604221019627316e-05, |
| "loss": 0.5154, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.031392405063291, |
| "grad_norm": 0.502499817060006, |
| "learning_rate": 2.955439637024526e-05, |
| "loss": 0.5055, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.0354430379746837, |
| "grad_norm": 0.4659285885424091, |
| "learning_rate": 2.9504494771068334e-05, |
| "loss": 0.5084, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.039493670886076, |
| "grad_norm": 0.4328871325999897, |
| "learning_rate": 2.9454516623997156e-05, |
| "loss": 0.5187, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.0435443037974683, |
| "grad_norm": 0.5159447474542375, |
| "learning_rate": 2.9404462331548847e-05, |
| "loss": 0.5095, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.047594936708861, |
| "grad_norm": 0.4276938469253526, |
| "learning_rate": 2.93543322968538e-05, |
| "loss": 0.5017, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.0516455696202534, |
| "grad_norm": 0.4502542193506515, |
| "learning_rate": 2.9304126923652428e-05, |
| "loss": 0.5041, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.0556962025316454, |
| "grad_norm": 0.5427226532606654, |
| "learning_rate": 2.9253846616291896e-05, |
| "loss": 0.5166, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.059746835443038, |
| "grad_norm": 0.4180530805087877, |
| "learning_rate": 2.9203491779722896e-05, |
| "loss": 0.5095, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.0637974683544305, |
| "grad_norm": 0.4425293854834803, |
| "learning_rate": 2.9153062819496357e-05, |
| "loss": 0.5105, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.067848101265823, |
| "grad_norm": 0.38739108030297525, |
| "learning_rate": 2.9102560141760178e-05, |
| "loss": 0.4914, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.071898734177215, |
| "grad_norm": 0.4068360744955018, |
| "learning_rate": 2.9051984153256004e-05, |
| "loss": 0.5003, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.0759493670886076, |
| "grad_norm": 0.4405389725400748, |
| "learning_rate": 2.900133526131588e-05, |
| "loss": 0.5046, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.08, |
| "grad_norm": 0.44016508161766765, |
| "learning_rate": 2.8950613873859025e-05, |
| "loss": 0.4963, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.0840506329113926, |
| "grad_norm": 0.4586963782748912, |
| "learning_rate": 2.8899820399388515e-05, |
| "loss": 0.5093, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.0881012658227847, |
| "grad_norm": 0.3945673684778376, |
| "learning_rate": 2.8848955246988012e-05, |
| "loss": 0.4999, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.092151898734177, |
| "grad_norm": 0.5590971818747368, |
| "learning_rate": 2.879801882631847e-05, |
| "loss": 0.5088, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.0962025316455697, |
| "grad_norm": 0.37677115582428233, |
| "learning_rate": 2.8747011547614808e-05, |
| "loss": 0.4979, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.100253164556962, |
| "grad_norm": 0.3661284281355434, |
| "learning_rate": 2.8695933821682635e-05, |
| "loss": 0.5064, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.1043037974683543, |
| "grad_norm": 0.35469905076517183, |
| "learning_rate": 2.864478605989494e-05, |
| "loss": 0.5023, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.108354430379747, |
| "grad_norm": 0.3455233071744697, |
| "learning_rate": 2.8593568674188765e-05, |
| "loss": 0.4994, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.1124050632911393, |
| "grad_norm": 0.32008688016427805, |
| "learning_rate": 2.8542282077061892e-05, |
| "loss": 0.5119, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.116455696202532, |
| "grad_norm": 0.3472003510221478, |
| "learning_rate": 2.8490926681569523e-05, |
| "loss": 0.4959, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.120506329113924, |
| "grad_norm": 0.3335437194000592, |
| "learning_rate": 2.8439502901320956e-05, |
| "loss": 0.5042, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.1245569620253164, |
| "grad_norm": 0.3576435136514813, |
| "learning_rate": 2.8388011150476237e-05, |
| "loss": 0.487, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.128607594936709, |
| "grad_norm": 0.42173238761139303, |
| "learning_rate": 2.8336451843742866e-05, |
| "loss": 0.5191, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.1326582278481014, |
| "grad_norm": 0.331414119593145, |
| "learning_rate": 2.8284825396372387e-05, |
| "loss": 0.4905, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.1367088607594935, |
| "grad_norm": 0.4169111649501582, |
| "learning_rate": 2.8233132224157132e-05, |
| "loss": 0.5093, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.140759493670886, |
| "grad_norm": 0.4186849787866555, |
| "learning_rate": 2.8181372743426805e-05, |
| "loss": 0.5114, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.1448101265822785, |
| "grad_norm": 0.35590311834392824, |
| "learning_rate": 2.8129547371045128e-05, |
| "loss": 0.53, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.148860759493671, |
| "grad_norm": 0.31836013932504165, |
| "learning_rate": 2.8077656524406534e-05, |
| "loss": 0.5142, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.152911392405063, |
| "grad_norm": 0.3203491301601664, |
| "learning_rate": 2.802570062143278e-05, |
| "loss": 0.5161, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.1569620253164556, |
| "grad_norm": 0.35194036117672606, |
| "learning_rate": 2.7973680080569555e-05, |
| "loss": 0.4979, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.161012658227848, |
| "grad_norm": 0.316041489255002, |
| "learning_rate": 2.792159532078314e-05, |
| "loss": 0.4999, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.1650632911392407, |
| "grad_norm": 0.31946017561170714, |
| "learning_rate": 2.7869446761557033e-05, |
| "loss": 0.5101, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.1691139240506327, |
| "grad_norm": 0.32266144042244704, |
| "learning_rate": 2.781723482288857e-05, |
| "loss": 0.5132, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.1731645569620253, |
| "grad_norm": 0.2755813254617997, |
| "learning_rate": 2.7764959925285517e-05, |
| "loss": 0.5032, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.1772151898734178, |
| "grad_norm": 0.3396157840034228, |
| "learning_rate": 2.771262248976272e-05, |
| "loss": 0.5092, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.1812658227848103, |
| "grad_norm": 0.2978114487640706, |
| "learning_rate": 2.7660222937838677e-05, |
| "loss": 0.4958, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.1853164556962024, |
| "grad_norm": 0.32513598433424273, |
| "learning_rate": 2.7607761691532186e-05, |
| "loss": 0.4881, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.189367088607595, |
| "grad_norm": 0.31526881797262024, |
| "learning_rate": 2.7555239173358916e-05, |
| "loss": 0.5073, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.1934177215189874, |
| "grad_norm": 0.37850167385502187, |
| "learning_rate": 2.7502655806328e-05, |
| "loss": 0.5023, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.19746835443038, |
| "grad_norm": 0.42637945622650253, |
| "learning_rate": 2.7450012013938648e-05, |
| "loss": 0.5183, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.201518987341772, |
| "grad_norm": 0.3457837449268246, |
| "learning_rate": 2.739730822017673e-05, |
| "loss": 0.5061, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.2055696202531645, |
| "grad_norm": 0.3324771563994227, |
| "learning_rate": 2.7344544849511355e-05, |
| "loss": 0.5014, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.209620253164557, |
| "grad_norm": 0.30678553433744526, |
| "learning_rate": 2.7291722326891456e-05, |
| "loss": 0.5099, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.2136708860759495, |
| "grad_norm": 0.3250000712940461, |
| "learning_rate": 2.723884107774236e-05, |
| "loss": 0.5063, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.2177215189873416, |
| "grad_norm": 0.29525961487058333, |
| "learning_rate": 2.718590152796239e-05, |
| "loss": 0.4979, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.221772151898734, |
| "grad_norm": 0.3369261875433186, |
| "learning_rate": 2.71329041039194e-05, |
| "loss": 0.523, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.2258227848101266, |
| "grad_norm": 0.3545224203764559, |
| "learning_rate": 2.7079849232447357e-05, |
| "loss": 0.5104, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.229873417721519, |
| "grad_norm": 0.3064960961789428, |
| "learning_rate": 2.7026737340842895e-05, |
| "loss": 0.5046, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.233924050632911, |
| "grad_norm": 0.34093939837164977, |
| "learning_rate": 2.697356885686189e-05, |
| "loss": 0.5161, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.2379746835443037, |
| "grad_norm": 0.999526476089123, |
| "learning_rate": 2.6920344208716014e-05, |
| "loss": 0.5112, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.2420253164556962, |
| "grad_norm": 0.32974467219978054, |
| "learning_rate": 2.6867063825069252e-05, |
| "loss": 0.5061, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.2460759493670888, |
| "grad_norm": 0.3668415458336695, |
| "learning_rate": 2.6813728135034494e-05, |
| "loss": 0.5161, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.250126582278481, |
| "grad_norm": 0.3011506775998163, |
| "learning_rate": 2.6760337568170056e-05, |
| "loss": 0.5038, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.2541772151898734, |
| "grad_norm": 0.29239335199522387, |
| "learning_rate": 2.6706892554476226e-05, |
| "loss": 0.5045, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.258227848101266, |
| "grad_norm": 0.29023297758714983, |
| "learning_rate": 2.6653393524391795e-05, |
| "loss": 0.5118, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.2622784810126584, |
| "grad_norm": 0.3603401793168279, |
| "learning_rate": 2.6599840908790592e-05, |
| "loss": 0.492, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.2663291139240505, |
| "grad_norm": 0.29910310278560637, |
| "learning_rate": 2.6546235138978028e-05, |
| "loss": 0.5056, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.270379746835443, |
| "grad_norm": 0.36836357651622786, |
| "learning_rate": 2.6492576646687597e-05, |
| "loss": 0.5022, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.2744303797468355, |
| "grad_norm": 0.4156879554904488, |
| "learning_rate": 2.6438865864077425e-05, |
| "loss": 0.5278, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.278481012658228, |
| "grad_norm": 0.30541515154626997, |
| "learning_rate": 2.6385103223726766e-05, |
| "loss": 0.5056, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.28253164556962, |
| "grad_norm": 0.2898988116196288, |
| "learning_rate": 2.6331289158632537e-05, |
| "loss": 0.5181, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.2865822784810126, |
| "grad_norm": 0.3520995200184041, |
| "learning_rate": 2.6277424102205817e-05, |
| "loss": 0.5012, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.290632911392405, |
| "grad_norm": 0.27786924085437925, |
| "learning_rate": 2.6223508488268374e-05, |
| "loss": 0.5179, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.2946835443037976, |
| "grad_norm": 0.37059046267903256, |
| "learning_rate": 2.6169542751049148e-05, |
| "loss": 0.4897, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.2987341772151897, |
| "grad_norm": 0.26429510207637785, |
| "learning_rate": 2.6115527325180754e-05, |
| "loss": 0.5162, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.302784810126582, |
| "grad_norm": 0.37209784526692335, |
| "learning_rate": 2.606146264569603e-05, |
| "loss": 0.5065, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.3068354430379747, |
| "grad_norm": 0.2888162601608562, |
| "learning_rate": 2.6007349148024447e-05, |
| "loss": 0.5251, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.3108860759493672, |
| "grad_norm": 0.2839004775207586, |
| "learning_rate": 2.5953187267988694e-05, |
| "loss": 0.4858, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.3149367088607593, |
| "grad_norm": 0.309464143222048, |
| "learning_rate": 2.5898977441801097e-05, |
| "loss": 0.507, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.318987341772152, |
| "grad_norm": 0.28009784174259006, |
| "learning_rate": 2.584472010606015e-05, |
| "loss": 0.5084, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.3230379746835443, |
| "grad_norm": 0.32281507136606297, |
| "learning_rate": 2.5790415697746976e-05, |
| "loss": 0.5191, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.327088607594937, |
| "grad_norm": 0.2889899083597477, |
| "learning_rate": 2.5736064654221808e-05, |
| "loss": 0.5092, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.331139240506329, |
| "grad_norm": 0.30732774652812334, |
| "learning_rate": 2.568166741322048e-05, |
| "loss": 0.5004, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.3351898734177214, |
| "grad_norm": 0.34099884408405, |
| "learning_rate": 2.56272244128509e-05, |
| "loss": 0.524, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.339240506329114, |
| "grad_norm": 0.2938431487784586, |
| "learning_rate": 2.55727360915895e-05, |
| "loss": 0.5078, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.3432911392405065, |
| "grad_norm": 0.33801125684080935, |
| "learning_rate": 2.5518202888277734e-05, |
| "loss": 0.5028, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.3473417721518985, |
| "grad_norm": 0.27898819541957964, |
| "learning_rate": 2.5463625242118523e-05, |
| "loss": 0.5106, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.351392405063291, |
| "grad_norm": 0.3062629867609308, |
| "learning_rate": 2.5409003592672723e-05, |
| "loss": 0.5061, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.3554430379746836, |
| "grad_norm": 0.3082425801295347, |
| "learning_rate": 2.535433837985559e-05, |
| "loss": 0.5134, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.359493670886076, |
| "grad_norm": 0.31840731765876906, |
| "learning_rate": 2.529963004393324e-05, |
| "loss": 0.5076, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.363544303797468, |
| "grad_norm": 0.433413219210841, |
| "learning_rate": 2.524487902551908e-05, |
| "loss": 0.5101, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.3675949367088607, |
| "grad_norm": 0.309884010672882, |
| "learning_rate": 2.519008576557029e-05, |
| "loss": 0.4893, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.371645569620253, |
| "grad_norm": 0.3374344624519763, |
| "learning_rate": 2.5135250705384254e-05, |
| "loss": 0.5122, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.3756962025316457, |
| "grad_norm": 0.331267612176851, |
| "learning_rate": 2.5080374286595007e-05, |
| "loss": 0.518, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.379746835443038, |
| "grad_norm": 0.9721809024937123, |
| "learning_rate": 2.5025456951169677e-05, |
| "loss": 0.5195, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.3837974683544303, |
| "grad_norm": 0.3409272216112119, |
| "learning_rate": 2.4970499141404942e-05, |
| "loss": 0.5049, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.387848101265823, |
| "grad_norm": 0.37332594679406456, |
| "learning_rate": 2.491550129992345e-05, |
| "loss": 0.5073, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.3918987341772153, |
| "grad_norm": 0.33929043477258025, |
| "learning_rate": 2.486046386967024e-05, |
| "loss": 0.5046, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.3959493670886074, |
| "grad_norm": 0.4650410901820957, |
| "learning_rate": 2.4805387293909214e-05, |
| "loss": 0.5058, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.2986020442691154, |
| "learning_rate": 2.4750272016219552e-05, |
| "loss": 0.5126, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.4040506329113924, |
| "grad_norm": 0.45386360146811444, |
| "learning_rate": 2.4695118480492114e-05, |
| "loss": 0.5118, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.408101265822785, |
| "grad_norm": 0.8489673707365041, |
| "learning_rate": 2.4639927130925898e-05, |
| "loss": 0.5047, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.4121518987341775, |
| "grad_norm": 0.3567944348269758, |
| "learning_rate": 2.458469841202444e-05, |
| "loss": 0.5138, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.4162025316455695, |
| "grad_norm": 0.3918411956084219, |
| "learning_rate": 2.452943276859226e-05, |
| "loss": 0.5022, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.420253164556962, |
| "grad_norm": 0.3527432930207906, |
| "learning_rate": 2.447413064573125e-05, |
| "loss": 0.5029, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.4243037974683546, |
| "grad_norm": 0.38997798913102655, |
| "learning_rate": 2.4418792488837095e-05, |
| "loss": 0.4911, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.4283544303797466, |
| "grad_norm": 0.33674283825092577, |
| "learning_rate": 2.4363418743595713e-05, |
| "loss": 0.5091, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.432405063291139, |
| "grad_norm": 0.3948267859575767, |
| "learning_rate": 2.430800985597963e-05, |
| "loss": 0.4937, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.4364556962025317, |
| "grad_norm": 0.2993867271495416, |
| "learning_rate": 2.4252566272244415e-05, |
| "loss": 0.5046, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.440506329113924, |
| "grad_norm": 0.2806429003936777, |
| "learning_rate": 2.4197088438925063e-05, |
| "loss": 0.5006, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.4445569620253167, |
| "grad_norm": 0.3432823367656082, |
| "learning_rate": 2.4141576802832417e-05, |
| "loss": 0.5076, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.4486075949367088, |
| "grad_norm": 0.27773351259389273, |
| "learning_rate": 2.408603181104957e-05, |
| "loss": 0.4963, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.4526582278481013, |
| "grad_norm": 0.28400004028989323, |
| "learning_rate": 2.4030453910928245e-05, |
| "loss": 0.5094, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.456708860759494, |
| "grad_norm": 0.338456835148589, |
| "learning_rate": 2.397484355008521e-05, |
| "loss": 0.513, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.460759493670886, |
| "grad_norm": 0.2743884488857805, |
| "learning_rate": 2.3919201176398662e-05, |
| "loss": 0.4986, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.4648101265822784, |
| "grad_norm": 0.28488857787414157, |
| "learning_rate": 2.3863527238004633e-05, |
| "loss": 0.4915, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.468860759493671, |
| "grad_norm": 0.29045367358523294, |
| "learning_rate": 2.380782218329337e-05, |
| "loss": 0.5122, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.4729113924050634, |
| "grad_norm": 0.29380217034632755, |
| "learning_rate": 2.3752086460905725e-05, |
| "loss": 0.4944, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.476962025316456, |
| "grad_norm": 0.3326448986585829, |
| "learning_rate": 2.3696320519729544e-05, |
| "loss": 0.517, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.481012658227848, |
| "grad_norm": 0.28414991036376325, |
| "learning_rate": 2.3640524808896045e-05, |
| "loss": 0.4979, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.4850632911392405, |
| "grad_norm": 0.37423908224394287, |
| "learning_rate": 2.3584699777776222e-05, |
| "loss": 0.4939, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.489113924050633, |
| "grad_norm": 0.28982086767179355, |
| "learning_rate": 2.3528845875977195e-05, |
| "loss": 0.4982, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.493164556962025, |
| "grad_norm": 0.2736574719663669, |
| "learning_rate": 2.3472963553338614e-05, |
| "loss": 0.4976, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.4972151898734176, |
| "grad_norm": 0.32835595095044723, |
| "learning_rate": 2.341705325992901e-05, |
| "loss": 0.5076, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.50126582278481, |
| "grad_norm": 0.2668794310915097, |
| "learning_rate": 2.336111544604222e-05, |
| "loss": 0.5026, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.5053164556962026, |
| "grad_norm": 0.3203920518526405, |
| "learning_rate": 2.33051505621937e-05, |
| "loss": 0.5093, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.509367088607595, |
| "grad_norm": 0.3180391411280136, |
| "learning_rate": 2.324915905911693e-05, |
| "loss": 0.5377, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.5134177215189872, |
| "grad_norm": 0.33111819127611186, |
| "learning_rate": 2.319314138775977e-05, |
| "loss": 0.5024, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.5174683544303798, |
| "grad_norm": 0.3492570846425375, |
| "learning_rate": 2.3137097999280856e-05, |
| "loss": 0.5089, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.5215189873417723, |
| "grad_norm": 0.2681811700428865, |
| "learning_rate": 2.308102934504593e-05, |
| "loss": 0.5119, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.5255696202531643, |
| "grad_norm": 0.33221223068069633, |
| "learning_rate": 2.3024935876624222e-05, |
| "loss": 0.5171, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.529620253164557, |
| "grad_norm": 0.2420058706882021, |
| "learning_rate": 2.2968818045784813e-05, |
| "loss": 0.5118, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.5336708860759494, |
| "grad_norm": 0.31860573257044494, |
| "learning_rate": 2.2912676304493006e-05, |
| "loss": 0.5087, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.537721518987342, |
| "grad_norm": 0.2843774291495513, |
| "learning_rate": 2.2856511104906668e-05, |
| "loss": 0.5027, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.5417721518987344, |
| "grad_norm": 0.2885851507590268, |
| "learning_rate": 2.2800322899372586e-05, |
| "loss": 0.5113, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.5458227848101265, |
| "grad_norm": 0.26673090244533215, |
| "learning_rate": 2.2744112140422844e-05, |
| "loss": 0.5042, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.549873417721519, |
| "grad_norm": 0.2789874651777166, |
| "learning_rate": 2.2687879280771177e-05, |
| "loss": 0.5089, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.5539240506329115, |
| "grad_norm": 0.2895606835065868, |
| "learning_rate": 2.26316247733093e-05, |
| "loss": 0.5045, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.5579746835443036, |
| "grad_norm": 0.30624937524683893, |
| "learning_rate": 2.257534907110328e-05, |
| "loss": 0.5109, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.562025316455696, |
| "grad_norm": 0.2872117568262824, |
| "learning_rate": 2.2519052627389882e-05, |
| "loss": 0.5116, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.5660759493670886, |
| "grad_norm": 0.3142493509740597, |
| "learning_rate": 2.246273589557294e-05, |
| "loss": 0.4983, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.570126582278481, |
| "grad_norm": 0.3076885612445364, |
| "learning_rate": 2.240639932921966e-05, |
| "loss": 0.4918, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.5741772151898736, |
| "grad_norm": 0.29182501704852243, |
| "learning_rate": 2.2350043382056995e-05, |
| "loss": 0.4967, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.5782278481012657, |
| "grad_norm": 0.2713468499783712, |
| "learning_rate": 2.2293668507968015e-05, |
| "loss": 0.4856, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.5822784810126582, |
| "grad_norm": 0.2933560437629066, |
| "learning_rate": 2.2237275160988186e-05, |
| "loss": 0.5078, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.5863291139240507, |
| "grad_norm": 0.27312234099216515, |
| "learning_rate": 2.2180863795301787e-05, |
| "loss": 0.5023, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.590379746835443, |
| "grad_norm": 0.3239137266633085, |
| "learning_rate": 2.212443486523819e-05, |
| "loss": 0.5037, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.5944303797468353, |
| "grad_norm": 0.29625127350734937, |
| "learning_rate": 2.2067988825268243e-05, |
| "loss": 0.5056, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.598481012658228, |
| "grad_norm": 0.2778476385678652, |
| "learning_rate": 2.2011526130000596e-05, |
| "loss": 0.5149, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.6025316455696204, |
| "grad_norm": 1.0382939353768676, |
| "learning_rate": 2.1955047234178038e-05, |
| "loss": 0.5154, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.606582278481013, |
| "grad_norm": 0.29732384955295177, |
| "learning_rate": 2.1898552592673825e-05, |
| "loss": 0.5071, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.610632911392405, |
| "grad_norm": 0.3642196612636565, |
| "learning_rate": 2.184204266048803e-05, |
| "loss": 0.5128, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.6146835443037975, |
| "grad_norm": 0.38472714971044586, |
| "learning_rate": 2.1785517892743887e-05, |
| "loss": 0.5038, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.61873417721519, |
| "grad_norm": 0.3352925783664387, |
| "learning_rate": 2.17289787446841e-05, |
| "loss": 0.4994, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.622784810126582, |
| "grad_norm": 1.6804934027620009, |
| "learning_rate": 2.1672425671667198e-05, |
| "loss": 0.559, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.6268354430379746, |
| "grad_norm": 0.39093278045389956, |
| "learning_rate": 2.161585912916385e-05, |
| "loss": 0.5211, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.630886075949367, |
| "grad_norm": 0.29063479876017695, |
| "learning_rate": 2.1559279572753214e-05, |
| "loss": 0.507, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.6349367088607596, |
| "grad_norm": 0.3606291514430408, |
| "learning_rate": 2.1502687458119268e-05, |
| "loss": 0.5037, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.638987341772152, |
| "grad_norm": 0.3382753238108862, |
| "learning_rate": 2.1446083241047116e-05, |
| "loss": 0.5026, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.643037974683544, |
| "grad_norm": 0.2940172288697098, |
| "learning_rate": 2.1389467377419333e-05, |
| "loss": 0.5136, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.6470886075949367, |
| "grad_norm": 0.34534821062628707, |
| "learning_rate": 2.133284032321232e-05, |
| "loss": 0.5085, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.651139240506329, |
| "grad_norm": 0.28100703698032165, |
| "learning_rate": 2.1276202534492566e-05, |
| "loss": 0.5133, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.6551898734177213, |
| "grad_norm": 0.4486610323362489, |
| "learning_rate": 2.121955446741306e-05, |
| "loss": 0.5055, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.659240506329114, |
| "grad_norm": 0.3257240650636601, |
| "learning_rate": 2.1162896578209517e-05, |
| "loss": 0.4849, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.6632911392405063, |
| "grad_norm": 0.31412279331525406, |
| "learning_rate": 2.1106229323196813e-05, |
| "loss": 0.5, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.667341772151899, |
| "grad_norm": 0.31994061272647706, |
| "learning_rate": 2.1049553158765214e-05, |
| "loss": 0.4975, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.6713924050632913, |
| "grad_norm": 0.2839931566489129, |
| "learning_rate": 2.0992868541376764e-05, |
| "loss": 0.5179, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.6754430379746834, |
| "grad_norm": 0.3016023179954782, |
| "learning_rate": 2.093617592756158e-05, |
| "loss": 0.4949, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.679493670886076, |
| "grad_norm": 0.29850180257790276, |
| "learning_rate": 2.0879475773914167e-05, |
| "loss": 0.5198, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.6835443037974684, |
| "grad_norm": 0.3355756562453831, |
| "learning_rate": 2.082276853708978e-05, |
| "loss": 0.51, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.6875949367088605, |
| "grad_norm": 0.3010081223893235, |
| "learning_rate": 2.076605467380071e-05, |
| "loss": 0.5031, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.691645569620253, |
| "grad_norm": 0.29701508451506253, |
| "learning_rate": 2.0709334640812613e-05, |
| "loss": 0.5085, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.6956962025316455, |
| "grad_norm": 0.2949604570768768, |
| "learning_rate": 2.0652608894940824e-05, |
| "loss": 0.4975, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.699746835443038, |
| "grad_norm": 0.2741732621236274, |
| "learning_rate": 2.0595877893046722e-05, |
| "loss": 0.5146, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.7037974683544306, |
| "grad_norm": 0.2795984973352661, |
| "learning_rate": 2.0539142092033985e-05, |
| "loss": 0.5015, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.7078481012658226, |
| "grad_norm": 0.2932523919974111, |
| "learning_rate": 2.048240194884496e-05, |
| "loss": 0.489, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.711898734177215, |
| "grad_norm": 0.30701762189518805, |
| "learning_rate": 2.042565792045695e-05, |
| "loss": 0.5027, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.7159493670886077, |
| "grad_norm": 0.3664436381983604, |
| "learning_rate": 2.036891046387857e-05, |
| "loss": 0.5173, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.7199999999999998, |
| "grad_norm": 0.37218100023787803, |
| "learning_rate": 2.0312160036146036e-05, |
| "loss": 0.514, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.7240506329113923, |
| "grad_norm": 0.3005183642126687, |
| "learning_rate": 2.025540709431948e-05, |
| "loss": 0.507, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.728101265822785, |
| "grad_norm": 0.33948353510315105, |
| "learning_rate": 2.0198652095479298e-05, |
| "loss": 0.505, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.7321518987341773, |
| "grad_norm": 0.3037107569259369, |
| "learning_rate": 2.014189549672245e-05, |
| "loss": 0.4903, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.73620253164557, |
| "grad_norm": 0.3062561019648404, |
| "learning_rate": 2.0085137755158776e-05, |
| "loss": 0.5129, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.740253164556962, |
| "grad_norm": 0.38242934472108614, |
| "learning_rate": 2.0028379327907327e-05, |
| "loss": 0.5051, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.7443037974683544, |
| "grad_norm": 0.30682596520650823, |
| "learning_rate": 1.9971620672092676e-05, |
| "loss": 0.5047, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.748354430379747, |
| "grad_norm": 0.2832856546198741, |
| "learning_rate": 1.991486224484123e-05, |
| "loss": 0.5128, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.752405063291139, |
| "grad_norm": 0.27914005249526913, |
| "learning_rate": 1.985810450327756e-05, |
| "loss": 0.4957, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.7564556962025315, |
| "grad_norm": 0.2786977797008464, |
| "learning_rate": 1.9801347904520706e-05, |
| "loss": 0.5086, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.760506329113924, |
| "grad_norm": 0.2812342786299232, |
| "learning_rate": 1.974459290568053e-05, |
| "loss": 0.4963, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.7645569620253165, |
| "grad_norm": 0.3023182602774889, |
| "learning_rate": 1.968783996385397e-05, |
| "loss": 0.4913, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.768607594936709, |
| "grad_norm": 0.3024093320076416, |
| "learning_rate": 1.963108953612143e-05, |
| "loss": 0.5159, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.772658227848101, |
| "grad_norm": 0.3061180834281883, |
| "learning_rate": 1.9574342079543056e-05, |
| "loss": 0.51, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.7767088607594936, |
| "grad_norm": 0.2967213374015336, |
| "learning_rate": 1.9517598051155046e-05, |
| "loss": 0.5165, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.780759493670886, |
| "grad_norm": 0.2797594541616186, |
| "learning_rate": 1.9460857907966025e-05, |
| "loss": 0.5132, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.7848101265822782, |
| "grad_norm": 0.29622626681564856, |
| "learning_rate": 1.9404122106953285e-05, |
| "loss": 0.5135, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.7888607594936707, |
| "grad_norm": 0.29217232072580024, |
| "learning_rate": 1.9347391105059176e-05, |
| "loss": 0.5065, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.7929113924050633, |
| "grad_norm": 0.2733708606960058, |
| "learning_rate": 1.92906653591874e-05, |
| "loss": 0.5117, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.7969620253164558, |
| "grad_norm": 0.3147460990130967, |
| "learning_rate": 1.9233945326199295e-05, |
| "loss": 0.5149, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.8010126582278483, |
| "grad_norm": 0.2510551159832791, |
| "learning_rate": 1.917723146291022e-05, |
| "loss": 0.5116, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.8050632911392404, |
| "grad_norm": 0.26420729485531735, |
| "learning_rate": 1.912052422608584e-05, |
| "loss": 0.5045, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.809113924050633, |
| "grad_norm": 0.33359499470871423, |
| "learning_rate": 1.9063824072438428e-05, |
| "loss": 0.5034, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.8131645569620254, |
| "grad_norm": 0.26127104051150135, |
| "learning_rate": 1.9007131458623246e-05, |
| "loss": 0.5113, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.8172151898734175, |
| "grad_norm": 0.26965365618786763, |
| "learning_rate": 1.895044684123479e-05, |
| "loss": 0.5003, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.82126582278481, |
| "grad_norm": 0.2898296916682902, |
| "learning_rate": 1.8893770676803194e-05, |
| "loss": 0.5106, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.8253164556962025, |
| "grad_norm": 0.26716898288319324, |
| "learning_rate": 1.8837103421790486e-05, |
| "loss": 0.4997, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.829367088607595, |
| "grad_norm": 0.33580901490987125, |
| "learning_rate": 1.8780445532586952e-05, |
| "loss": 0.5121, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.8334177215189875, |
| "grad_norm": 0.23774930144481626, |
| "learning_rate": 1.872379746550743e-05, |
| "loss": 0.5055, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.8374683544303796, |
| "grad_norm": 0.25824578623713296, |
| "learning_rate": 1.866715967678769e-05, |
| "loss": 0.5137, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.841518987341772, |
| "grad_norm": 0.24826119905960592, |
| "learning_rate": 1.861053262258067e-05, |
| "loss": 0.5088, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.8455696202531646, |
| "grad_norm": 0.28138739885303093, |
| "learning_rate": 1.8553916758952897e-05, |
| "loss": 0.5091, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.8496202531645567, |
| "grad_norm": 0.270293555468743, |
| "learning_rate": 1.8497312541880735e-05, |
| "loss": 0.5165, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.853670886075949, |
| "grad_norm": 0.4004094680574716, |
| "learning_rate": 1.8440720427246786e-05, |
| "loss": 0.4946, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.8577215189873417, |
| "grad_norm": 0.2377063266423008, |
| "learning_rate": 1.8384140870836157e-05, |
| "loss": 0.499, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.8617721518987342, |
| "grad_norm": 0.32249069739683334, |
| "learning_rate": 1.8327574328332806e-05, |
| "loss": 0.519, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.8658227848101268, |
| "grad_norm": 0.2622748114409681, |
| "learning_rate": 1.8271021255315906e-05, |
| "loss": 0.5037, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.869873417721519, |
| "grad_norm": 0.2795567148966245, |
| "learning_rate": 1.8214482107256117e-05, |
| "loss": 0.5064, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.8739240506329113, |
| "grad_norm": 0.2606518401008203, |
| "learning_rate": 1.8157957339511968e-05, |
| "loss": 0.5141, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.877974683544304, |
| "grad_norm": 0.2828063041743011, |
| "learning_rate": 1.8101447407326182e-05, |
| "loss": 0.5213, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.882025316455696, |
| "grad_norm": 0.2698562441303304, |
| "learning_rate": 1.8044952765821966e-05, |
| "loss": 0.5027, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.8860759493670884, |
| "grad_norm": 0.2550079356221659, |
| "learning_rate": 1.7988473869999407e-05, |
| "loss": 0.5035, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.890126582278481, |
| "grad_norm": 0.2635556630343899, |
| "learning_rate": 1.7932011174731764e-05, |
| "loss": 0.5031, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.8941772151898735, |
| "grad_norm": 0.29218630533881185, |
| "learning_rate": 1.7875565134761817e-05, |
| "loss": 0.5067, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.898227848101266, |
| "grad_norm": 0.24441023189111422, |
| "learning_rate": 1.7819136204698226e-05, |
| "loss": 0.506, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.902278481012658, |
| "grad_norm": 0.2703630327642198, |
| "learning_rate": 1.776272483901182e-05, |
| "loss": 0.5201, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.9063291139240506, |
| "grad_norm": 0.28947337318152383, |
| "learning_rate": 1.7706331492031995e-05, |
| "loss": 0.5057, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.910379746835443, |
| "grad_norm": 1.2178768498556587, |
| "learning_rate": 1.764995661794301e-05, |
| "loss": 0.5533, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.9144303797468356, |
| "grad_norm": 0.2995506829503013, |
| "learning_rate": 1.759360067078035e-05, |
| "loss": 0.5007, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.918481012658228, |
| "grad_norm": 0.26203176344907514, |
| "learning_rate": 1.7537264104427064e-05, |
| "loss": 0.5121, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.92253164556962, |
| "grad_norm": 0.42316018259267496, |
| "learning_rate": 1.748094737261012e-05, |
| "loss": 0.5139, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.9265822784810127, |
| "grad_norm": 0.3566059582349299, |
| "learning_rate": 1.7424650928896726e-05, |
| "loss": 0.4989, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.9306329113924052, |
| "grad_norm": 0.29124146890473973, |
| "learning_rate": 1.7368375226690712e-05, |
| "loss": 0.5033, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.9346835443037973, |
| "grad_norm": 0.3334143799980178, |
| "learning_rate": 1.731212071922883e-05, |
| "loss": 0.5027, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.93873417721519, |
| "grad_norm": 0.27783302753308775, |
| "learning_rate": 1.7255887859577156e-05, |
| "loss": 0.5107, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.9427848101265823, |
| "grad_norm": 0.3473444837765498, |
| "learning_rate": 1.7199677100627427e-05, |
| "loss": 0.5139, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.946835443037975, |
| "grad_norm": 0.24737234729370994, |
| "learning_rate": 1.7143488895093343e-05, |
| "loss": 0.502, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.9508860759493674, |
| "grad_norm": 0.2916766346539096, |
| "learning_rate": 1.7087323695506994e-05, |
| "loss": 0.4993, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.9549367088607594, |
| "grad_norm": 0.2846705595440091, |
| "learning_rate": 1.7031181954215194e-05, |
| "loss": 0.5128, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.958987341772152, |
| "grad_norm": 0.26862201019889864, |
| "learning_rate": 1.6975064123375788e-05, |
| "loss": 0.4911, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.9630379746835445, |
| "grad_norm": 0.2720967053247452, |
| "learning_rate": 1.6918970654954084e-05, |
| "loss": 0.5048, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.9670886075949365, |
| "grad_norm": 0.2923310712244276, |
| "learning_rate": 1.686290200071915e-05, |
| "loss": 0.5096, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.971139240506329, |
| "grad_norm": 0.26812197569847157, |
| "learning_rate": 1.6806858612240234e-05, |
| "loss": 0.4983, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.9751898734177216, |
| "grad_norm": 0.2956764334134365, |
| "learning_rate": 1.6750840940883078e-05, |
| "loss": 0.511, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.979240506329114, |
| "grad_norm": 0.2578439144208077, |
| "learning_rate": 1.6694849437806305e-05, |
| "loss": 0.5051, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.9832911392405066, |
| "grad_norm": 0.2962687933472538, |
| "learning_rate": 1.663888455395778e-05, |
| "loss": 0.4983, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.9873417721518987, |
| "grad_norm": 0.2618450146312952, |
| "learning_rate": 1.6582946740070995e-05, |
| "loss": 0.5025, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.991392405063291, |
| "grad_norm": 0.2986155683395708, |
| "learning_rate": 1.6527036446661396e-05, |
| "loss": 0.5098, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.9954430379746837, |
| "grad_norm": 0.27189423924183115, |
| "learning_rate": 1.6471154124022818e-05, |
| "loss": 0.5045, |
| "step": 739 |
| }, |
| { |
| "epoch": 3.008101265822785, |
| "grad_norm": 0.4480462078259469, |
| "learning_rate": 1.6415300222223788e-05, |
| "loss": 0.4455, |
| "step": 740 |
| }, |
| { |
| "epoch": 3.012151898734177, |
| "grad_norm": 0.2948988602091823, |
| "learning_rate": 1.6359475191103958e-05, |
| "loss": 0.448, |
| "step": 741 |
| }, |
| { |
| "epoch": 3.0162025316455696, |
| "grad_norm": 0.5850443445119945, |
| "learning_rate": 1.6303679480270466e-05, |
| "loss": 0.441, |
| "step": 742 |
| }, |
| { |
| "epoch": 3.020253164556962, |
| "grad_norm": 0.37690646069046735, |
| "learning_rate": 1.624791353909428e-05, |
| "loss": 0.4445, |
| "step": 743 |
| }, |
| { |
| "epoch": 3.0243037974683546, |
| "grad_norm": 0.411535817005082, |
| "learning_rate": 1.619217781670663e-05, |
| "loss": 0.4532, |
| "step": 744 |
| }, |
| { |
| "epoch": 3.0283544303797467, |
| "grad_norm": 0.3736115784927986, |
| "learning_rate": 1.6136472761995373e-05, |
| "loss": 0.4464, |
| "step": 745 |
| }, |
| { |
| "epoch": 3.0324050632911392, |
| "grad_norm": 0.40085115167038027, |
| "learning_rate": 1.608079882360134e-05, |
| "loss": 0.4517, |
| "step": 746 |
| }, |
| { |
| "epoch": 3.0364556962025318, |
| "grad_norm": 0.3923020087422178, |
| "learning_rate": 1.60251564499148e-05, |
| "loss": 0.456, |
| "step": 747 |
| }, |
| { |
| "epoch": 3.0405063291139243, |
| "grad_norm": 0.4140926002430996, |
| "learning_rate": 1.596954608907176e-05, |
| "loss": 0.4592, |
| "step": 748 |
| }, |
| { |
| "epoch": 3.0445569620253163, |
| "grad_norm": 0.3346573211826702, |
| "learning_rate": 1.591396818895043e-05, |
| "loss": 0.4437, |
| "step": 749 |
| }, |
| { |
| "epoch": 3.048607594936709, |
| "grad_norm": 0.44256164162147354, |
| "learning_rate": 1.585842319716759e-05, |
| "loss": 0.4539, |
| "step": 750 |
| }, |
| { |
| "epoch": 3.0526582278481014, |
| "grad_norm": 0.34391866918648784, |
| "learning_rate": 1.5802911561074944e-05, |
| "loss": 0.4482, |
| "step": 751 |
| }, |
| { |
| "epoch": 3.056708860759494, |
| "grad_norm": 0.4418765080788587, |
| "learning_rate": 1.5747433727755595e-05, |
| "loss": 0.4331, |
| "step": 752 |
| }, |
| { |
| "epoch": 3.060759493670886, |
| "grad_norm": 0.389599409757935, |
| "learning_rate": 1.5691990144020376e-05, |
| "loss": 0.4419, |
| "step": 753 |
| }, |
| { |
| "epoch": 3.0648101265822785, |
| "grad_norm": 0.35153763922883846, |
| "learning_rate": 1.5636581256404297e-05, |
| "loss": 0.4445, |
| "step": 754 |
| }, |
| { |
| "epoch": 3.068860759493671, |
| "grad_norm": 0.4194834431533087, |
| "learning_rate": 1.558120751116291e-05, |
| "loss": 0.4354, |
| "step": 755 |
| }, |
| { |
| "epoch": 3.0729113924050635, |
| "grad_norm": 0.3617572854329102, |
| "learning_rate": 1.552586935426876e-05, |
| "loss": 0.4397, |
| "step": 756 |
| }, |
| { |
| "epoch": 3.0769620253164556, |
| "grad_norm": 0.37285406949562017, |
| "learning_rate": 1.547056723140774e-05, |
| "loss": 0.4674, |
| "step": 757 |
| }, |
| { |
| "epoch": 3.081012658227848, |
| "grad_norm": 0.3556184555337555, |
| "learning_rate": 1.5415301587975565e-05, |
| "loss": 0.4337, |
| "step": 758 |
| }, |
| { |
| "epoch": 3.0850632911392406, |
| "grad_norm": 0.2955860619750152, |
| "learning_rate": 1.536007286907411e-05, |
| "loss": 0.4533, |
| "step": 759 |
| }, |
| { |
| "epoch": 3.089113924050633, |
| "grad_norm": 0.3851654576654011, |
| "learning_rate": 1.5304881519507896e-05, |
| "loss": 0.4514, |
| "step": 760 |
| }, |
| { |
| "epoch": 3.093164556962025, |
| "grad_norm": 0.29445458166909577, |
| "learning_rate": 1.5249727983780453e-05, |
| "loss": 0.4393, |
| "step": 761 |
| }, |
| { |
| "epoch": 3.0972151898734177, |
| "grad_norm": 0.32275087962698024, |
| "learning_rate": 1.5194612706090786e-05, |
| "loss": 0.4469, |
| "step": 762 |
| }, |
| { |
| "epoch": 3.1012658227848102, |
| "grad_norm": 0.2846129893688357, |
| "learning_rate": 1.5139536130329771e-05, |
| "loss": 0.4511, |
| "step": 763 |
| }, |
| { |
| "epoch": 3.1053164556962027, |
| "grad_norm": 0.3174150928038643, |
| "learning_rate": 1.508449870007656e-05, |
| "loss": 0.4423, |
| "step": 764 |
| }, |
| { |
| "epoch": 3.109367088607595, |
| "grad_norm": 0.2745155597743709, |
| "learning_rate": 1.5029500858595056e-05, |
| "loss": 0.4596, |
| "step": 765 |
| }, |
| { |
| "epoch": 3.1134177215189873, |
| "grad_norm": 0.2949468858830593, |
| "learning_rate": 1.4974543048830328e-05, |
| "loss": 0.4412, |
| "step": 766 |
| }, |
| { |
| "epoch": 3.11746835443038, |
| "grad_norm": 0.26635986929998134, |
| "learning_rate": 1.4919625713405e-05, |
| "loss": 0.4434, |
| "step": 767 |
| }, |
| { |
| "epoch": 3.1215189873417724, |
| "grad_norm": 3.9024923411316554, |
| "learning_rate": 1.4864749294615756e-05, |
| "loss": 0.4763, |
| "step": 768 |
| }, |
| { |
| "epoch": 3.1255696202531644, |
| "grad_norm": 0.3570001426008588, |
| "learning_rate": 1.4809914234429716e-05, |
| "loss": 0.4393, |
| "step": 769 |
| }, |
| { |
| "epoch": 3.129620253164557, |
| "grad_norm": 0.2519058863117581, |
| "learning_rate": 1.4755120974480923e-05, |
| "loss": 0.453, |
| "step": 770 |
| }, |
| { |
| "epoch": 3.1336708860759495, |
| "grad_norm": 0.36146682605093167, |
| "learning_rate": 1.4700369956066771e-05, |
| "loss": 0.4446, |
| "step": 771 |
| }, |
| { |
| "epoch": 3.137721518987342, |
| "grad_norm": 0.25301849905408996, |
| "learning_rate": 1.4645661620144413e-05, |
| "loss": 0.4494, |
| "step": 772 |
| }, |
| { |
| "epoch": 3.141772151898734, |
| "grad_norm": 0.33066283387812356, |
| "learning_rate": 1.4590996407327284e-05, |
| "loss": 0.4461, |
| "step": 773 |
| }, |
| { |
| "epoch": 3.1458227848101266, |
| "grad_norm": 0.3083280283960555, |
| "learning_rate": 1.4536374757881487e-05, |
| "loss": 0.4475, |
| "step": 774 |
| }, |
| { |
| "epoch": 3.149873417721519, |
| "grad_norm": 0.29458631749354147, |
| "learning_rate": 1.4481797111722271e-05, |
| "loss": 0.4539, |
| "step": 775 |
| }, |
| { |
| "epoch": 3.1539240506329116, |
| "grad_norm": 0.3192958256439885, |
| "learning_rate": 1.4427263908410507e-05, |
| "loss": 0.4402, |
| "step": 776 |
| }, |
| { |
| "epoch": 3.1579746835443037, |
| "grad_norm": 0.2529527692710603, |
| "learning_rate": 1.4372775587149108e-05, |
| "loss": 0.4415, |
| "step": 777 |
| }, |
| { |
| "epoch": 3.162025316455696, |
| "grad_norm": 0.28468829635749326, |
| "learning_rate": 1.4318332586779522e-05, |
| "loss": 0.4618, |
| "step": 778 |
| }, |
| { |
| "epoch": 3.1660759493670887, |
| "grad_norm": 0.2816960244180696, |
| "learning_rate": 1.4263935345778202e-05, |
| "loss": 0.4346, |
| "step": 779 |
| }, |
| { |
| "epoch": 3.170126582278481, |
| "grad_norm": 0.2748734179578441, |
| "learning_rate": 1.420958430225303e-05, |
| "loss": 0.4387, |
| "step": 780 |
| }, |
| { |
| "epoch": 3.1741772151898733, |
| "grad_norm": 0.2684885437452951, |
| "learning_rate": 1.415527989393985e-05, |
| "loss": 0.4309, |
| "step": 781 |
| }, |
| { |
| "epoch": 3.178227848101266, |
| "grad_norm": 0.2896179872344483, |
| "learning_rate": 1.410102255819891e-05, |
| "loss": 0.4505, |
| "step": 782 |
| }, |
| { |
| "epoch": 3.1822784810126583, |
| "grad_norm": 0.24752096145610797, |
| "learning_rate": 1.404681273201131e-05, |
| "loss": 0.4465, |
| "step": 783 |
| }, |
| { |
| "epoch": 3.186329113924051, |
| "grad_norm": 0.25796722192870225, |
| "learning_rate": 1.399265085197556e-05, |
| "loss": 0.4405, |
| "step": 784 |
| }, |
| { |
| "epoch": 3.190379746835443, |
| "grad_norm": 0.26483190540661855, |
| "learning_rate": 1.393853735430398e-05, |
| "loss": 0.4519, |
| "step": 785 |
| }, |
| { |
| "epoch": 3.1944303797468354, |
| "grad_norm": 0.26023574728845195, |
| "learning_rate": 1.3884472674819246e-05, |
| "loss": 0.452, |
| "step": 786 |
| }, |
| { |
| "epoch": 3.198481012658228, |
| "grad_norm": 0.2894361315507193, |
| "learning_rate": 1.3830457248950864e-05, |
| "loss": 0.4387, |
| "step": 787 |
| }, |
| { |
| "epoch": 3.2025316455696204, |
| "grad_norm": 0.23722568890535395, |
| "learning_rate": 1.377649151173163e-05, |
| "loss": 0.4483, |
| "step": 788 |
| }, |
| { |
| "epoch": 3.2065822784810125, |
| "grad_norm": 0.27603711179915474, |
| "learning_rate": 1.3722575897794181e-05, |
| "loss": 0.4517, |
| "step": 789 |
| }, |
| { |
| "epoch": 3.210632911392405, |
| "grad_norm": 0.30752846596528577, |
| "learning_rate": 1.3668710841367472e-05, |
| "loss": 0.4463, |
| "step": 790 |
| }, |
| { |
| "epoch": 3.2146835443037975, |
| "grad_norm": 0.23334341701426106, |
| "learning_rate": 1.361489677627324e-05, |
| "loss": 0.4467, |
| "step": 791 |
| }, |
| { |
| "epoch": 3.21873417721519, |
| "grad_norm": 0.32447748363594636, |
| "learning_rate": 1.3561134135922585e-05, |
| "loss": 0.45, |
| "step": 792 |
| }, |
| { |
| "epoch": 3.222784810126582, |
| "grad_norm": 0.2690635491430007, |
| "learning_rate": 1.350742335331241e-05, |
| "loss": 0.447, |
| "step": 793 |
| }, |
| { |
| "epoch": 3.2268354430379746, |
| "grad_norm": 0.26988975088017486, |
| "learning_rate": 1.345376486102198e-05, |
| "loss": 0.4391, |
| "step": 794 |
| }, |
| { |
| "epoch": 3.230886075949367, |
| "grad_norm": 0.29515956410696, |
| "learning_rate": 1.3400159091209414e-05, |
| "loss": 0.4574, |
| "step": 795 |
| }, |
| { |
| "epoch": 3.2349367088607597, |
| "grad_norm": 0.23898253831567903, |
| "learning_rate": 1.3346606475608216e-05, |
| "loss": 0.4451, |
| "step": 796 |
| }, |
| { |
| "epoch": 3.2389873417721518, |
| "grad_norm": 0.2862788859016862, |
| "learning_rate": 1.3293107445523781e-05, |
| "loss": 0.4642, |
| "step": 797 |
| }, |
| { |
| "epoch": 3.2430379746835443, |
| "grad_norm": 0.2682210163323285, |
| "learning_rate": 1.3239662431829949e-05, |
| "loss": 0.4391, |
| "step": 798 |
| }, |
| { |
| "epoch": 3.247088607594937, |
| "grad_norm": 0.2740096986392261, |
| "learning_rate": 1.3186271864965509e-05, |
| "loss": 0.4436, |
| "step": 799 |
| }, |
| { |
| "epoch": 3.2511392405063293, |
| "grad_norm": 0.272987103020129, |
| "learning_rate": 1.3132936174930756e-05, |
| "loss": 0.4444, |
| "step": 800 |
| }, |
| { |
| "epoch": 3.2551898734177214, |
| "grad_norm": 0.2609251359543171, |
| "learning_rate": 1.3079655791283995e-05, |
| "loss": 0.4532, |
| "step": 801 |
| }, |
| { |
| "epoch": 3.259240506329114, |
| "grad_norm": 0.2857062156041436, |
| "learning_rate": 1.3026431143138108e-05, |
| "loss": 0.4362, |
| "step": 802 |
| }, |
| { |
| "epoch": 3.2632911392405064, |
| "grad_norm": 0.26781072641550147, |
| "learning_rate": 1.2973262659157114e-05, |
| "loss": 0.4498, |
| "step": 803 |
| }, |
| { |
| "epoch": 3.267341772151899, |
| "grad_norm": 0.2988879474196617, |
| "learning_rate": 1.2920150767552651e-05, |
| "loss": 0.4415, |
| "step": 804 |
| }, |
| { |
| "epoch": 3.271392405063291, |
| "grad_norm": 0.2551880645168497, |
| "learning_rate": 1.2867095896080607e-05, |
| "loss": 0.4528, |
| "step": 805 |
| }, |
| { |
| "epoch": 3.2754430379746835, |
| "grad_norm": 0.2725509304335135, |
| "learning_rate": 1.2814098472037612e-05, |
| "loss": 0.4446, |
| "step": 806 |
| }, |
| { |
| "epoch": 3.279493670886076, |
| "grad_norm": 0.28860753767449704, |
| "learning_rate": 1.276115892225764e-05, |
| "loss": 0.4467, |
| "step": 807 |
| }, |
| { |
| "epoch": 3.2835443037974685, |
| "grad_norm": 0.24835368663811058, |
| "learning_rate": 1.2708277673108555e-05, |
| "loss": 0.4379, |
| "step": 808 |
| }, |
| { |
| "epoch": 3.2875949367088606, |
| "grad_norm": 0.24188395073520846, |
| "learning_rate": 1.2655455150488649e-05, |
| "loss": 0.4477, |
| "step": 809 |
| }, |
| { |
| "epoch": 3.291645569620253, |
| "grad_norm": 0.24053812386589704, |
| "learning_rate": 1.2602691779823272e-05, |
| "loss": 0.4502, |
| "step": 810 |
| }, |
| { |
| "epoch": 3.2956962025316456, |
| "grad_norm": 0.2517940224683123, |
| "learning_rate": 1.2549987986061355e-05, |
| "loss": 0.4447, |
| "step": 811 |
| }, |
| { |
| "epoch": 3.299746835443038, |
| "grad_norm": 2.226880128609337, |
| "learning_rate": 1.2497344193672005e-05, |
| "loss": 0.4597, |
| "step": 812 |
| }, |
| { |
| "epoch": 3.3037974683544302, |
| "grad_norm": 0.2647040862390811, |
| "learning_rate": 1.2444760826641092e-05, |
| "loss": 0.4477, |
| "step": 813 |
| }, |
| { |
| "epoch": 3.3078481012658227, |
| "grad_norm": 0.24177747676376915, |
| "learning_rate": 1.2392238308467817e-05, |
| "loss": 0.4546, |
| "step": 814 |
| }, |
| { |
| "epoch": 3.3118987341772153, |
| "grad_norm": 0.23438126592184597, |
| "learning_rate": 1.2339777062161326e-05, |
| "loss": 0.4505, |
| "step": 815 |
| }, |
| { |
| "epoch": 3.3159493670886078, |
| "grad_norm": 0.2549665078136357, |
| "learning_rate": 1.2287377510237293e-05, |
| "loss": 0.4495, |
| "step": 816 |
| }, |
| { |
| "epoch": 3.32, |
| "grad_norm": 0.26213025519731087, |
| "learning_rate": 1.2235040074714488e-05, |
| "loss": 0.4428, |
| "step": 817 |
| }, |
| { |
| "epoch": 3.3240506329113924, |
| "grad_norm": 0.2512791673526738, |
| "learning_rate": 1.2182765177111434e-05, |
| "loss": 0.4431, |
| "step": 818 |
| }, |
| { |
| "epoch": 3.328101265822785, |
| "grad_norm": 0.24736045495283526, |
| "learning_rate": 1.213055323844297e-05, |
| "loss": 0.4616, |
| "step": 819 |
| }, |
| { |
| "epoch": 3.3321518987341774, |
| "grad_norm": 0.2724070508413059, |
| "learning_rate": 1.2078404679216864e-05, |
| "loss": 0.4439, |
| "step": 820 |
| }, |
| { |
| "epoch": 3.3362025316455695, |
| "grad_norm": 0.24874681109841867, |
| "learning_rate": 1.2026319919430458e-05, |
| "loss": 0.4466, |
| "step": 821 |
| }, |
| { |
| "epoch": 3.340253164556962, |
| "grad_norm": 0.2827229298471516, |
| "learning_rate": 1.1974299378567227e-05, |
| "loss": 0.4453, |
| "step": 822 |
| }, |
| { |
| "epoch": 3.3443037974683545, |
| "grad_norm": 0.2745954551055071, |
| "learning_rate": 1.1922343475593462e-05, |
| "loss": 0.4435, |
| "step": 823 |
| }, |
| { |
| "epoch": 3.348354430379747, |
| "grad_norm": 0.2523249361418178, |
| "learning_rate": 1.187045262895488e-05, |
| "loss": 0.461, |
| "step": 824 |
| }, |
| { |
| "epoch": 3.352405063291139, |
| "grad_norm": 0.26698312868456403, |
| "learning_rate": 1.1818627256573203e-05, |
| "loss": 0.4532, |
| "step": 825 |
| }, |
| { |
| "epoch": 3.3564556962025316, |
| "grad_norm": 0.253023092418923, |
| "learning_rate": 1.1766867775842864e-05, |
| "loss": 0.4533, |
| "step": 826 |
| }, |
| { |
| "epoch": 3.360506329113924, |
| "grad_norm": 0.30229793676303024, |
| "learning_rate": 1.1715174603627615e-05, |
| "loss": 0.4675, |
| "step": 827 |
| }, |
| { |
| "epoch": 3.3645569620253166, |
| "grad_norm": 0.26276392140515453, |
| "learning_rate": 1.1663548156257147e-05, |
| "loss": 0.428, |
| "step": 828 |
| }, |
| { |
| "epoch": 3.3686075949367087, |
| "grad_norm": 0.3081109354948334, |
| "learning_rate": 1.161198884952377e-05, |
| "loss": 0.4528, |
| "step": 829 |
| }, |
| { |
| "epoch": 3.372658227848101, |
| "grad_norm": 0.37218640233484546, |
| "learning_rate": 1.1560497098679056e-05, |
| "loss": 0.4519, |
| "step": 830 |
| }, |
| { |
| "epoch": 3.3767088607594937, |
| "grad_norm": 0.27083842631305505, |
| "learning_rate": 1.1509073318430479e-05, |
| "loss": 0.4495, |
| "step": 831 |
| }, |
| { |
| "epoch": 3.3807594936708862, |
| "grad_norm": 0.3079507867821389, |
| "learning_rate": 1.1457717922938116e-05, |
| "loss": 0.4618, |
| "step": 832 |
| }, |
| { |
| "epoch": 3.3848101265822783, |
| "grad_norm": 0.3220071376342831, |
| "learning_rate": 1.1406431325811233e-05, |
| "loss": 0.4546, |
| "step": 833 |
| }, |
| { |
| "epoch": 3.388860759493671, |
| "grad_norm": 0.3015492628183811, |
| "learning_rate": 1.135521394010506e-05, |
| "loss": 0.4395, |
| "step": 834 |
| }, |
| { |
| "epoch": 3.3929113924050633, |
| "grad_norm": 0.24249247331781212, |
| "learning_rate": 1.1304066178317367e-05, |
| "loss": 0.4678, |
| "step": 835 |
| }, |
| { |
| "epoch": 3.396962025316456, |
| "grad_norm": 0.2840200763663764, |
| "learning_rate": 1.1252988452385199e-05, |
| "loss": 0.436, |
| "step": 836 |
| }, |
| { |
| "epoch": 3.401012658227848, |
| "grad_norm": 0.26631631750450696, |
| "learning_rate": 1.1201981173681536e-05, |
| "loss": 0.4344, |
| "step": 837 |
| }, |
| { |
| "epoch": 3.4050632911392404, |
| "grad_norm": 0.29143816210129225, |
| "learning_rate": 1.1151044753011991e-05, |
| "loss": 0.4413, |
| "step": 838 |
| }, |
| { |
| "epoch": 3.409113924050633, |
| "grad_norm": 0.2816631895809057, |
| "learning_rate": 1.1100179600611491e-05, |
| "loss": 0.4521, |
| "step": 839 |
| }, |
| { |
| "epoch": 3.4131645569620255, |
| "grad_norm": 0.22337083360077625, |
| "learning_rate": 1.1049386126140985e-05, |
| "loss": 0.4357, |
| "step": 840 |
| }, |
| { |
| "epoch": 3.4172151898734175, |
| "grad_norm": 0.3392948090399526, |
| "learning_rate": 1.0998664738684128e-05, |
| "loss": 0.4497, |
| "step": 841 |
| }, |
| { |
| "epoch": 3.42126582278481, |
| "grad_norm": 0.25969747032477064, |
| "learning_rate": 1.0948015846744e-05, |
| "loss": 0.4638, |
| "step": 842 |
| }, |
| { |
| "epoch": 3.4253164556962026, |
| "grad_norm": 0.2862919584469846, |
| "learning_rate": 1.0897439858239832e-05, |
| "loss": 0.4534, |
| "step": 843 |
| }, |
| { |
| "epoch": 3.429367088607595, |
| "grad_norm": 0.30334437203138054, |
| "learning_rate": 1.0846937180503652e-05, |
| "loss": 0.4363, |
| "step": 844 |
| }, |
| { |
| "epoch": 3.433417721518987, |
| "grad_norm": 0.2522226542497337, |
| "learning_rate": 1.0796508220277117e-05, |
| "loss": 0.4411, |
| "step": 845 |
| }, |
| { |
| "epoch": 3.4374683544303797, |
| "grad_norm": 0.2815994171099675, |
| "learning_rate": 1.0746153383708107e-05, |
| "loss": 0.4497, |
| "step": 846 |
| }, |
| { |
| "epoch": 3.441518987341772, |
| "grad_norm": 0.27197342153356907, |
| "learning_rate": 1.0695873076347579e-05, |
| "loss": 0.4507, |
| "step": 847 |
| }, |
| { |
| "epoch": 3.4455696202531647, |
| "grad_norm": 0.2841837567830409, |
| "learning_rate": 1.0645667703146205e-05, |
| "loss": 0.4296, |
| "step": 848 |
| }, |
| { |
| "epoch": 3.449620253164557, |
| "grad_norm": 0.2216782555517014, |
| "learning_rate": 1.0595537668451161e-05, |
| "loss": 0.4486, |
| "step": 849 |
| }, |
| { |
| "epoch": 3.4536708860759493, |
| "grad_norm": 0.25716901697109323, |
| "learning_rate": 1.0545483376002854e-05, |
| "loss": 0.4486, |
| "step": 850 |
| }, |
| { |
| "epoch": 3.457721518987342, |
| "grad_norm": 0.26514314143696105, |
| "learning_rate": 1.0495505228931676e-05, |
| "loss": 0.4568, |
| "step": 851 |
| }, |
| { |
| "epoch": 3.4617721518987343, |
| "grad_norm": 0.2515763866947208, |
| "learning_rate": 1.044560362975474e-05, |
| "loss": 0.4387, |
| "step": 852 |
| }, |
| { |
| "epoch": 3.4658227848101264, |
| "grad_norm": 0.2454910705621161, |
| "learning_rate": 1.0395778980372695e-05, |
| "loss": 0.4433, |
| "step": 853 |
| }, |
| { |
| "epoch": 3.469873417721519, |
| "grad_norm": 0.2527162010266985, |
| "learning_rate": 1.0346031682066381e-05, |
| "loss": 0.4451, |
| "step": 854 |
| }, |
| { |
| "epoch": 3.4739240506329114, |
| "grad_norm": 0.24149295559415318, |
| "learning_rate": 1.0296362135493724e-05, |
| "loss": 0.4415, |
| "step": 855 |
| }, |
| { |
| "epoch": 3.477974683544304, |
| "grad_norm": 0.24505161308120224, |
| "learning_rate": 1.0246770740686422e-05, |
| "loss": 0.4391, |
| "step": 856 |
| }, |
| { |
| "epoch": 3.482025316455696, |
| "grad_norm": 0.22912696980809436, |
| "learning_rate": 1.0197257897046743e-05, |
| "loss": 0.45, |
| "step": 857 |
| }, |
| { |
| "epoch": 3.4860759493670885, |
| "grad_norm": 0.25416343558753696, |
| "learning_rate": 1.014782400334433e-05, |
| "loss": 0.4556, |
| "step": 858 |
| }, |
| { |
| "epoch": 3.490126582278481, |
| "grad_norm": 0.2515696741367962, |
| "learning_rate": 1.009846945771296e-05, |
| "loss": 0.4573, |
| "step": 859 |
| }, |
| { |
| "epoch": 3.4941772151898736, |
| "grad_norm": 0.23157930162649745, |
| "learning_rate": 1.0049194657647363e-05, |
| "loss": 0.4435, |
| "step": 860 |
| }, |
| { |
| "epoch": 3.4982278481012656, |
| "grad_norm": 0.22223191236002657, |
| "learning_rate": 1.0000000000000006e-05, |
| "loss": 0.4456, |
| "step": 861 |
| }, |
| { |
| "epoch": 3.502278481012658, |
| "grad_norm": 0.2397879415844327, |
| "learning_rate": 9.950885880977891e-06, |
| "loss": 0.4423, |
| "step": 862 |
| }, |
| { |
| "epoch": 3.5063291139240507, |
| "grad_norm": 0.2489180025093492, |
| "learning_rate": 9.901852696139382e-06, |
| "loss": 0.4599, |
| "step": 863 |
| }, |
| { |
| "epoch": 3.510379746835443, |
| "grad_norm": 0.22590353001969093, |
| "learning_rate": 9.852900840391027e-06, |
| "loss": 0.4439, |
| "step": 864 |
| }, |
| { |
| "epoch": 3.5144303797468357, |
| "grad_norm": 0.2361955313702804, |
| "learning_rate": 9.804030707984313e-06, |
| "loss": 0.4466, |
| "step": 865 |
| }, |
| { |
| "epoch": 3.5184810126582278, |
| "grad_norm": 0.22210611264513594, |
| "learning_rate": 9.755242692512599e-06, |
| "loss": 0.4377, |
| "step": 866 |
| }, |
| { |
| "epoch": 3.5225316455696203, |
| "grad_norm": 0.22744615892987946, |
| "learning_rate": 9.70653718690782e-06, |
| "loss": 0.4626, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.526582278481013, |
| "grad_norm": 0.2340469351934775, |
| "learning_rate": 9.657914583437454e-06, |
| "loss": 0.4458, |
| "step": 868 |
| }, |
| { |
| "epoch": 3.530632911392405, |
| "grad_norm": 0.22921031149353957, |
| "learning_rate": 9.609375273701246e-06, |
| "loss": 0.4503, |
| "step": 869 |
| }, |
| { |
| "epoch": 3.5346835443037974, |
| "grad_norm": 0.2345786490767904, |
| "learning_rate": 9.560919648628133e-06, |
| "loss": 0.4426, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.53873417721519, |
| "grad_norm": 0.24888013491079877, |
| "learning_rate": 9.512548098473047e-06, |
| "loss": 0.4425, |
| "step": 871 |
| }, |
| { |
| "epoch": 3.5427848101265824, |
| "grad_norm": 0.23733618538630719, |
| "learning_rate": 9.464261012813825e-06, |
| "loss": 0.4443, |
| "step": 872 |
| }, |
| { |
| "epoch": 3.546835443037975, |
| "grad_norm": 0.22496742797445174, |
| "learning_rate": 9.416058780547987e-06, |
| "loss": 0.4434, |
| "step": 873 |
| }, |
| { |
| "epoch": 3.550886075949367, |
| "grad_norm": 0.27609391798249, |
| "learning_rate": 9.367941789889714e-06, |
| "loss": 0.4557, |
| "step": 874 |
| }, |
| { |
| "epoch": 3.5549367088607595, |
| "grad_norm": 0.22858604378540825, |
| "learning_rate": 9.319910428366607e-06, |
| "loss": 0.4471, |
| "step": 875 |
| }, |
| { |
| "epoch": 3.558987341772152, |
| "grad_norm": 0.22417600322898307, |
| "learning_rate": 9.271965082816667e-06, |
| "loss": 0.4344, |
| "step": 876 |
| }, |
| { |
| "epoch": 3.563037974683544, |
| "grad_norm": 0.24037256575518703, |
| "learning_rate": 9.224106139385111e-06, |
| "loss": 0.4449, |
| "step": 877 |
| }, |
| { |
| "epoch": 3.5670886075949366, |
| "grad_norm": 0.25305201712794245, |
| "learning_rate": 9.176333983521291e-06, |
| "loss": 0.4632, |
| "step": 878 |
| }, |
| { |
| "epoch": 3.571139240506329, |
| "grad_norm": 0.2215682843750075, |
| "learning_rate": 9.12864899997558e-06, |
| "loss": 0.4551, |
| "step": 879 |
| }, |
| { |
| "epoch": 3.5751898734177217, |
| "grad_norm": 0.23854250045351436, |
| "learning_rate": 9.08105157279628e-06, |
| "loss": 0.4516, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.579240506329114, |
| "grad_norm": 0.226723674508071, |
| "learning_rate": 9.03354208532653e-06, |
| "loss": 0.4452, |
| "step": 881 |
| }, |
| { |
| "epoch": 3.5832911392405062, |
| "grad_norm": 0.25468929098778575, |
| "learning_rate": 8.986120920201205e-06, |
| "loss": 0.4544, |
| "step": 882 |
| }, |
| { |
| "epoch": 3.5873417721518988, |
| "grad_norm": 0.25374394294438435, |
| "learning_rate": 8.938788459343852e-06, |
| "loss": 0.4487, |
| "step": 883 |
| }, |
| { |
| "epoch": 3.5913924050632913, |
| "grad_norm": 0.22759570553222533, |
| "learning_rate": 8.8915450839636e-06, |
| "loss": 0.4413, |
| "step": 884 |
| }, |
| { |
| "epoch": 3.5954430379746833, |
| "grad_norm": 0.268226173248734, |
| "learning_rate": 8.844391174552116e-06, |
| "loss": 0.4485, |
| "step": 885 |
| }, |
| { |
| "epoch": 3.599493670886076, |
| "grad_norm": 0.23537054500964977, |
| "learning_rate": 8.797327110880479e-06, |
| "loss": 0.4432, |
| "step": 886 |
| }, |
| { |
| "epoch": 3.6035443037974684, |
| "grad_norm": 0.2142051016260504, |
| "learning_rate": 8.750353271996206e-06, |
| "loss": 0.448, |
| "step": 887 |
| }, |
| { |
| "epoch": 3.607594936708861, |
| "grad_norm": 0.24795470270948086, |
| "learning_rate": 8.703470036220132e-06, |
| "loss": 0.4422, |
| "step": 888 |
| }, |
| { |
| "epoch": 3.6116455696202534, |
| "grad_norm": 0.5342923385225277, |
| "learning_rate": 8.656677781143394e-06, |
| "loss": 0.464, |
| "step": 889 |
| }, |
| { |
| "epoch": 3.6156962025316455, |
| "grad_norm": 0.2413480866882507, |
| "learning_rate": 8.609976883624377e-06, |
| "loss": 0.4533, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.619746835443038, |
| "grad_norm": 0.2537287965388932, |
| "learning_rate": 8.563367719785698e-06, |
| "loss": 0.4569, |
| "step": 891 |
| }, |
| { |
| "epoch": 3.6237974683544305, |
| "grad_norm": 0.25139056613126115, |
| "learning_rate": 8.516850665011138e-06, |
| "loss": 0.4348, |
| "step": 892 |
| }, |
| { |
| "epoch": 3.6278481012658226, |
| "grad_norm": 0.25598509267375175, |
| "learning_rate": 8.47042609394269e-06, |
| "loss": 0.4479, |
| "step": 893 |
| }, |
| { |
| "epoch": 3.631898734177215, |
| "grad_norm": 0.23801143148708834, |
| "learning_rate": 8.424094380477432e-06, |
| "loss": 0.4504, |
| "step": 894 |
| }, |
| { |
| "epoch": 3.6359493670886076, |
| "grad_norm": 0.2468857041739758, |
| "learning_rate": 8.37785589776465e-06, |
| "loss": 0.4498, |
| "step": 895 |
| }, |
| { |
| "epoch": 3.64, |
| "grad_norm": 0.2364118922271143, |
| "learning_rate": 8.331711018202694e-06, |
| "loss": 0.4523, |
| "step": 896 |
| }, |
| { |
| "epoch": 3.6440506329113926, |
| "grad_norm": 0.24193704711965638, |
| "learning_rate": 8.285660113436104e-06, |
| "loss": 0.4455, |
| "step": 897 |
| }, |
| { |
| "epoch": 3.6481012658227847, |
| "grad_norm": 0.26198892925627765, |
| "learning_rate": 8.239703554352527e-06, |
| "loss": 0.4536, |
| "step": 898 |
| }, |
| { |
| "epoch": 3.6521518987341772, |
| "grad_norm": 0.22236305804444, |
| "learning_rate": 8.193841711079775e-06, |
| "loss": 0.4435, |
| "step": 899 |
| }, |
| { |
| "epoch": 3.6562025316455697, |
| "grad_norm": 0.2260602281779419, |
| "learning_rate": 8.148074952982828e-06, |
| "loss": 0.4561, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.660253164556962, |
| "grad_norm": 0.22945659532711024, |
| "learning_rate": 8.102403648660859e-06, |
| "loss": 0.46, |
| "step": 901 |
| }, |
| { |
| "epoch": 3.6643037974683543, |
| "grad_norm": 0.21871301064341642, |
| "learning_rate": 8.056828165944282e-06, |
| "loss": 0.4466, |
| "step": 902 |
| }, |
| { |
| "epoch": 3.668354430379747, |
| "grad_norm": 0.2254517732551904, |
| "learning_rate": 8.011348871891762e-06, |
| "loss": 0.4348, |
| "step": 903 |
| }, |
| { |
| "epoch": 3.6724050632911394, |
| "grad_norm": 0.25381001695210226, |
| "learning_rate": 7.965966132787287e-06, |
| "loss": 0.4527, |
| "step": 904 |
| }, |
| { |
| "epoch": 3.676455696202532, |
| "grad_norm": 0.22907779040566473, |
| "learning_rate": 7.920680314137189e-06, |
| "loss": 0.4449, |
| "step": 905 |
| }, |
| { |
| "epoch": 3.680506329113924, |
| "grad_norm": 0.2418655489582189, |
| "learning_rate": 7.875491780667246e-06, |
| "loss": 0.4643, |
| "step": 906 |
| }, |
| { |
| "epoch": 3.6845569620253165, |
| "grad_norm": 0.2377352751705066, |
| "learning_rate": 7.830400896319667e-06, |
| "loss": 0.4667, |
| "step": 907 |
| }, |
| { |
| "epoch": 3.688607594936709, |
| "grad_norm": 0.24174064294084707, |
| "learning_rate": 7.785408024250259e-06, |
| "loss": 0.4583, |
| "step": 908 |
| }, |
| { |
| "epoch": 3.692658227848101, |
| "grad_norm": 0.2546052177369739, |
| "learning_rate": 7.74051352682542e-06, |
| "loss": 0.4366, |
| "step": 909 |
| }, |
| { |
| "epoch": 3.6967088607594936, |
| "grad_norm": 0.2285217037700982, |
| "learning_rate": 7.695717765619257e-06, |
| "loss": 0.4501, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.700759493670886, |
| "grad_norm": 0.23001459168555385, |
| "learning_rate": 7.651021101410673e-06, |
| "loss": 0.4305, |
| "step": 911 |
| }, |
| { |
| "epoch": 3.7048101265822786, |
| "grad_norm": 0.23858650179108795, |
| "learning_rate": 7.606423894180464e-06, |
| "loss": 0.46, |
| "step": 912 |
| }, |
| { |
| "epoch": 3.708860759493671, |
| "grad_norm": 0.2227432347725773, |
| "learning_rate": 7.56192650310839e-06, |
| "loss": 0.4564, |
| "step": 913 |
| }, |
| { |
| "epoch": 3.712911392405063, |
| "grad_norm": 0.20953672534272721, |
| "learning_rate": 7.517529286570349e-06, |
| "loss": 0.4516, |
| "step": 914 |
| }, |
| { |
| "epoch": 3.7169620253164557, |
| "grad_norm": 0.22869256132794716, |
| "learning_rate": 7.473232602135387e-06, |
| "loss": 0.4437, |
| "step": 915 |
| }, |
| { |
| "epoch": 3.721012658227848, |
| "grad_norm": 0.24610419773558198, |
| "learning_rate": 7.429036806562935e-06, |
| "loss": 0.4394, |
| "step": 916 |
| }, |
| { |
| "epoch": 3.7250632911392403, |
| "grad_norm": 0.22243630350240703, |
| "learning_rate": 7.3849422557998455e-06, |
| "loss": 0.4544, |
| "step": 917 |
| }, |
| { |
| "epoch": 3.729113924050633, |
| "grad_norm": 0.24396594579508624, |
| "learning_rate": 7.340949304977567e-06, |
| "loss": 0.4554, |
| "step": 918 |
| }, |
| { |
| "epoch": 3.7331645569620253, |
| "grad_norm": 0.2233596355008928, |
| "learning_rate": 7.297058308409282e-06, |
| "loss": 0.4596, |
| "step": 919 |
| }, |
| { |
| "epoch": 3.737215189873418, |
| "grad_norm": 0.2393959449095979, |
| "learning_rate": 7.25326961958704e-06, |
| "loss": 0.4243, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.7412658227848103, |
| "grad_norm": 0.22245187631564373, |
| "learning_rate": 7.209583591178921e-06, |
| "loss": 0.452, |
| "step": 921 |
| }, |
| { |
| "epoch": 3.7453164556962024, |
| "grad_norm": 0.223403522703384, |
| "learning_rate": 7.1660005750261925e-06, |
| "loss": 0.4504, |
| "step": 922 |
| }, |
| { |
| "epoch": 3.749367088607595, |
| "grad_norm": 0.2130012608944047, |
| "learning_rate": 7.1225209221404765e-06, |
| "loss": 0.4589, |
| "step": 923 |
| }, |
| { |
| "epoch": 3.7534177215189874, |
| "grad_norm": 0.21288510841544123, |
| "learning_rate": 7.079144982700909e-06, |
| "loss": 0.4394, |
| "step": 924 |
| }, |
| { |
| "epoch": 3.7574683544303795, |
| "grad_norm": 0.2697782112494956, |
| "learning_rate": 7.0358731060513695e-06, |
| "loss": 0.445, |
| "step": 925 |
| }, |
| { |
| "epoch": 3.761518987341772, |
| "grad_norm": 0.21382157161791107, |
| "learning_rate": 6.99270564069757e-06, |
| "loss": 0.4494, |
| "step": 926 |
| }, |
| { |
| "epoch": 3.7655696202531646, |
| "grad_norm": 0.22398970803140192, |
| "learning_rate": 6.949642934304375e-06, |
| "loss": 0.4353, |
| "step": 927 |
| }, |
| { |
| "epoch": 3.769620253164557, |
| "grad_norm": 0.21131930630123275, |
| "learning_rate": 6.906685333692871e-06, |
| "loss": 0.4596, |
| "step": 928 |
| }, |
| { |
| "epoch": 3.7736708860759496, |
| "grad_norm": 0.22417152537100674, |
| "learning_rate": 6.86383318483769e-06, |
| "loss": 0.4506, |
| "step": 929 |
| }, |
| { |
| "epoch": 3.7777215189873417, |
| "grad_norm": 0.2113275651541379, |
| "learning_rate": 6.821086832864139e-06, |
| "loss": 0.45, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.781772151898734, |
| "grad_norm": 0.8529575841964336, |
| "learning_rate": 6.77844662204546e-06, |
| "loss": 0.4719, |
| "step": 931 |
| }, |
| { |
| "epoch": 3.7858227848101267, |
| "grad_norm": 0.2151269479584616, |
| "learning_rate": 6.7359128958000455e-06, |
| "loss": 0.4339, |
| "step": 932 |
| }, |
| { |
| "epoch": 3.7898734177215188, |
| "grad_norm": 0.20803957717161944, |
| "learning_rate": 6.693485996688695e-06, |
| "loss": 0.4515, |
| "step": 933 |
| }, |
| { |
| "epoch": 3.7939240506329113, |
| "grad_norm": 0.23175305221136308, |
| "learning_rate": 6.651166266411801e-06, |
| "loss": 0.4397, |
| "step": 934 |
| }, |
| { |
| "epoch": 3.797974683544304, |
| "grad_norm": 0.23161130502118327, |
| "learning_rate": 6.6089540458066725e-06, |
| "loss": 0.4414, |
| "step": 935 |
| }, |
| { |
| "epoch": 3.8020253164556963, |
| "grad_norm": 0.22337419434479508, |
| "learning_rate": 6.566849674844711e-06, |
| "loss": 0.4424, |
| "step": 936 |
| }, |
| { |
| "epoch": 3.806075949367089, |
| "grad_norm": 0.23319178895912746, |
| "learning_rate": 6.524853492628747e-06, |
| "loss": 0.4442, |
| "step": 937 |
| }, |
| { |
| "epoch": 3.810126582278481, |
| "grad_norm": 0.6170049189132342, |
| "learning_rate": 6.4829658373902536e-06, |
| "loss": 0.4482, |
| "step": 938 |
| }, |
| { |
| "epoch": 3.8141772151898734, |
| "grad_norm": 0.20672579224235424, |
| "learning_rate": 6.441187046486648e-06, |
| "loss": 0.4438, |
| "step": 939 |
| }, |
| { |
| "epoch": 3.818227848101266, |
| "grad_norm": 0.23920739804333963, |
| "learning_rate": 6.399517456398567e-06, |
| "loss": 0.4576, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.822278481012658, |
| "grad_norm": 0.2447337282034391, |
| "learning_rate": 6.357957402727164e-06, |
| "loss": 0.4415, |
| "step": 941 |
| }, |
| { |
| "epoch": 3.8263291139240505, |
| "grad_norm": 0.21470285332470745, |
| "learning_rate": 6.316507220191395e-06, |
| "loss": 0.4437, |
| "step": 942 |
| }, |
| { |
| "epoch": 3.830379746835443, |
| "grad_norm": 0.2217994448210214, |
| "learning_rate": 6.275167242625331e-06, |
| "loss": 0.4538, |
| "step": 943 |
| }, |
| { |
| "epoch": 3.8344303797468355, |
| "grad_norm": 0.23443749523337348, |
| "learning_rate": 6.233937802975471e-06, |
| "loss": 0.4423, |
| "step": 944 |
| }, |
| { |
| "epoch": 3.838481012658228, |
| "grad_norm": 0.2374445398141554, |
| "learning_rate": 6.192819233298046e-06, |
| "loss": 0.4556, |
| "step": 945 |
| }, |
| { |
| "epoch": 3.84253164556962, |
| "grad_norm": 0.21801224244705117, |
| "learning_rate": 6.151811864756383e-06, |
| "loss": 0.4489, |
| "step": 946 |
| }, |
| { |
| "epoch": 3.8465822784810126, |
| "grad_norm": 0.21699896641917796, |
| "learning_rate": 6.1109160276181655e-06, |
| "loss": 0.4497, |
| "step": 947 |
| }, |
| { |
| "epoch": 3.850632911392405, |
| "grad_norm": 0.20788344234666484, |
| "learning_rate": 6.070132051252868e-06, |
| "loss": 0.4593, |
| "step": 948 |
| }, |
| { |
| "epoch": 3.8546835443037972, |
| "grad_norm": 0.21257145944352526, |
| "learning_rate": 6.0294602641290034e-06, |
| "loss": 0.4359, |
| "step": 949 |
| }, |
| { |
| "epoch": 3.8587341772151897, |
| "grad_norm": 0.22901971510706456, |
| "learning_rate": 5.988900993811575e-06, |
| "loss": 0.4483, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.8627848101265823, |
| "grad_norm": 0.21517154700746094, |
| "learning_rate": 5.948454566959363e-06, |
| "loss": 0.429, |
| "step": 951 |
| }, |
| { |
| "epoch": 3.8668354430379748, |
| "grad_norm": 0.23004092937900136, |
| "learning_rate": 5.908121309322328e-06, |
| "loss": 0.4138, |
| "step": 952 |
| }, |
| { |
| "epoch": 3.8708860759493673, |
| "grad_norm": 0.22605559472909653, |
| "learning_rate": 5.867901545738976e-06, |
| "loss": 0.4412, |
| "step": 953 |
| }, |
| { |
| "epoch": 3.8749367088607594, |
| "grad_norm": 0.2283233452177007, |
| "learning_rate": 5.827795600133774e-06, |
| "loss": 0.4523, |
| "step": 954 |
| }, |
| { |
| "epoch": 3.878987341772152, |
| "grad_norm": 0.21848194441852775, |
| "learning_rate": 5.787803795514466e-06, |
| "loss": 0.432, |
| "step": 955 |
| }, |
| { |
| "epoch": 3.8830379746835444, |
| "grad_norm": 0.21777171915400334, |
| "learning_rate": 5.747926453969576e-06, |
| "loss": 0.4438, |
| "step": 956 |
| }, |
| { |
| "epoch": 3.8870886075949365, |
| "grad_norm": 0.236211157361395, |
| "learning_rate": 5.708163896665708e-06, |
| "loss": 0.4537, |
| "step": 957 |
| }, |
| { |
| "epoch": 3.891139240506329, |
| "grad_norm": 0.23636039876049214, |
| "learning_rate": 5.668516443845047e-06, |
| "loss": 0.4498, |
| "step": 958 |
| }, |
| { |
| "epoch": 3.8951898734177215, |
| "grad_norm": 0.23417170184720734, |
| "learning_rate": 5.6289844148227225e-06, |
| "loss": 0.4483, |
| "step": 959 |
| }, |
| { |
| "epoch": 3.899240506329114, |
| "grad_norm": 0.22080921954914626, |
| "learning_rate": 5.5895681279842615e-06, |
| "loss": 0.4679, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.9032911392405065, |
| "grad_norm": 0.22608126140570667, |
| "learning_rate": 5.550267900783019e-06, |
| "loss": 0.4427, |
| "step": 961 |
| }, |
| { |
| "epoch": 3.9073417721518986, |
| "grad_norm": 0.22125043574001615, |
| "learning_rate": 5.511084049737623e-06, |
| "loss": 0.4584, |
| "step": 962 |
| }, |
| { |
| "epoch": 3.911392405063291, |
| "grad_norm": 0.2154229094092577, |
| "learning_rate": 5.4720168904294215e-06, |
| "loss": 0.4482, |
| "step": 963 |
| }, |
| { |
| "epoch": 3.9154430379746836, |
| "grad_norm": 0.2144872996289274, |
| "learning_rate": 5.433066737499948e-06, |
| "loss": 0.4595, |
| "step": 964 |
| }, |
| { |
| "epoch": 3.9194936708860757, |
| "grad_norm": 0.21686982382197117, |
| "learning_rate": 5.394233904648376e-06, |
| "loss": 0.4466, |
| "step": 965 |
| }, |
| { |
| "epoch": 3.923544303797468, |
| "grad_norm": 0.22961980312072844, |
| "learning_rate": 5.355518704628997e-06, |
| "loss": 0.4504, |
| "step": 966 |
| }, |
| { |
| "epoch": 3.9275949367088607, |
| "grad_norm": 0.2092791024157416, |
| "learning_rate": 5.316921449248731e-06, |
| "loss": 0.4435, |
| "step": 967 |
| }, |
| { |
| "epoch": 3.9316455696202532, |
| "grad_norm": 0.22353783551699197, |
| "learning_rate": 5.278442449364538e-06, |
| "loss": 0.4552, |
| "step": 968 |
| }, |
| { |
| "epoch": 3.9356962025316458, |
| "grad_norm": 0.21355801601007518, |
| "learning_rate": 5.240082014881016e-06, |
| "loss": 0.4331, |
| "step": 969 |
| }, |
| { |
| "epoch": 3.939746835443038, |
| "grad_norm": 0.2182688753308274, |
| "learning_rate": 5.201840454747822e-06, |
| "loss": 0.4389, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.9437974683544303, |
| "grad_norm": 0.21407128965451894, |
| "learning_rate": 5.163718076957223e-06, |
| "loss": 0.4417, |
| "step": 971 |
| }, |
| { |
| "epoch": 3.947848101265823, |
| "grad_norm": 0.21897518749412, |
| "learning_rate": 5.125715188541609e-06, |
| "loss": 0.4567, |
| "step": 972 |
| }, |
| { |
| "epoch": 3.951898734177215, |
| "grad_norm": 0.20282833683806223, |
| "learning_rate": 5.087832095571021e-06, |
| "loss": 0.4581, |
| "step": 973 |
| }, |
| { |
| "epoch": 3.9559493670886074, |
| "grad_norm": 0.20086137430407427, |
| "learning_rate": 5.0500691031506766e-06, |
| "loss": 0.4353, |
| "step": 974 |
| }, |
| { |
| "epoch": 3.96, |
| "grad_norm": 0.21329418498097105, |
| "learning_rate": 5.01242651541854e-06, |
| "loss": 0.4517, |
| "step": 975 |
| }, |
| { |
| "epoch": 3.9640506329113925, |
| "grad_norm": 0.21709363524029268, |
| "learning_rate": 4.974904635542815e-06, |
| "loss": 0.4436, |
| "step": 976 |
| }, |
| { |
| "epoch": 3.968101265822785, |
| "grad_norm": 0.20489594684554546, |
| "learning_rate": 4.937503765719582e-06, |
| "loss": 0.4494, |
| "step": 977 |
| }, |
| { |
| "epoch": 3.972151898734177, |
| "grad_norm": 0.203016242345807, |
| "learning_rate": 4.900224207170299e-06, |
| "loss": 0.4523, |
| "step": 978 |
| }, |
| { |
| "epoch": 3.9762025316455696, |
| "grad_norm": 0.191967543318881, |
| "learning_rate": 4.8630662601394065e-06, |
| "loss": 0.4509, |
| "step": 979 |
| }, |
| { |
| "epoch": 3.980253164556962, |
| "grad_norm": 0.21084258805680936, |
| "learning_rate": 4.8260302238918995e-06, |
| "loss": 0.4488, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.984303797468354, |
| "grad_norm": 0.202437011909258, |
| "learning_rate": 4.789116396710924e-06, |
| "loss": 0.4441, |
| "step": 981 |
| }, |
| { |
| "epoch": 3.9883544303797467, |
| "grad_norm": 0.21011902077835237, |
| "learning_rate": 4.752325075895368e-06, |
| "loss": 0.4264, |
| "step": 982 |
| }, |
| { |
| "epoch": 3.992405063291139, |
| "grad_norm": 0.20708427512884406, |
| "learning_rate": 4.715656557757473e-06, |
| "loss": 0.4539, |
| "step": 983 |
| }, |
| { |
| "epoch": 3.9964556962025317, |
| "grad_norm": 0.20333666798156108, |
| "learning_rate": 4.679111137620442e-06, |
| "loss": 0.4534, |
| "step": 984 |
| }, |
| { |
| "epoch": 4.000506329113924, |
| "grad_norm": 0.22218992212198657, |
| "learning_rate": 4.6426891098160585e-06, |
| "loss": 0.4267, |
| "step": 985 |
| }, |
| { |
| "epoch": 4.004556962025316, |
| "grad_norm": 0.39968292538602546, |
| "learning_rate": 4.6063907676823474e-06, |
| "loss": 0.4025, |
| "step": 986 |
| }, |
| { |
| "epoch": 4.008607594936709, |
| "grad_norm": 0.2771926101773036, |
| "learning_rate": 4.570216403561141e-06, |
| "loss": 0.3942, |
| "step": 987 |
| }, |
| { |
| "epoch": 4.012658227848101, |
| "grad_norm": 0.2971660642048338, |
| "learning_rate": 4.534166308795815e-06, |
| "loss": 0.4035, |
| "step": 988 |
| }, |
| { |
| "epoch": 4.016708860759493, |
| "grad_norm": 0.44422915371298055, |
| "learning_rate": 4.498240773728859e-06, |
| "loss": 0.4161, |
| "step": 989 |
| }, |
| { |
| "epoch": 4.020759493670886, |
| "grad_norm": 0.3306704307138634, |
| "learning_rate": 4.462440087699609e-06, |
| "loss": 0.3917, |
| "step": 990 |
| }, |
| { |
| "epoch": 4.024810126582278, |
| "grad_norm": 0.25659051159985014, |
| "learning_rate": 4.426764539041861e-06, |
| "loss": 0.414, |
| "step": 991 |
| }, |
| { |
| "epoch": 4.0288607594936705, |
| "grad_norm": 0.354026356823384, |
| "learning_rate": 4.391214415081582e-06, |
| "loss": 0.4306, |
| "step": 992 |
| }, |
| { |
| "epoch": 4.0329113924050635, |
| "grad_norm": 0.3833769762852071, |
| "learning_rate": 4.355790002134579e-06, |
| "loss": 0.4014, |
| "step": 993 |
| }, |
| { |
| "epoch": 4.0369620253164555, |
| "grad_norm": 0.26465634253817855, |
| "learning_rate": 4.320491585504207e-06, |
| "loss": 0.4076, |
| "step": 994 |
| }, |
| { |
| "epoch": 4.0410126582278485, |
| "grad_norm": 0.2387210183874982, |
| "learning_rate": 4.2853194494790615e-06, |
| "loss": 0.4202, |
| "step": 995 |
| }, |
| { |
| "epoch": 4.045063291139241, |
| "grad_norm": 0.320700137650539, |
| "learning_rate": 4.250273877330691e-06, |
| "loss": 0.4045, |
| "step": 996 |
| }, |
| { |
| "epoch": 4.049113924050633, |
| "grad_norm": 0.31978988712437006, |
| "learning_rate": 4.215355151311313e-06, |
| "loss": 0.4063, |
| "step": 997 |
| }, |
| { |
| "epoch": 4.053164556962026, |
| "grad_norm": 0.26364898827145433, |
| "learning_rate": 4.180563552651542e-06, |
| "loss": 0.4149, |
| "step": 998 |
| }, |
| { |
| "epoch": 4.057215189873418, |
| "grad_norm": 0.21776925807999903, |
| "learning_rate": 4.145899361558147e-06, |
| "loss": 0.3982, |
| "step": 999 |
| }, |
| { |
| "epoch": 4.06126582278481, |
| "grad_norm": 0.24636623195282276, |
| "learning_rate": 4.111362857211738e-06, |
| "loss": 0.4191, |
| "step": 1000 |
| }, |
| { |
| "epoch": 4.065316455696203, |
| "grad_norm": 0.29169332637141365, |
| "learning_rate": 4.076954317764592e-06, |
| "loss": 0.4003, |
| "step": 1001 |
| }, |
| { |
| "epoch": 4.069367088607595, |
| "grad_norm": 0.2510059674699072, |
| "learning_rate": 4.042674020338335e-06, |
| "loss": 0.4118, |
| "step": 1002 |
| }, |
| { |
| "epoch": 4.073417721518988, |
| "grad_norm": 0.2204207439831788, |
| "learning_rate": 4.0085222410217835e-06, |
| "loss": 0.4041, |
| "step": 1003 |
| }, |
| { |
| "epoch": 4.07746835443038, |
| "grad_norm": 0.24585067873094368, |
| "learning_rate": 3.974499254868674e-06, |
| "loss": 0.4044, |
| "step": 1004 |
| }, |
| { |
| "epoch": 4.081518987341772, |
| "grad_norm": 0.2397601962996456, |
| "learning_rate": 3.940605335895451e-06, |
| "loss": 0.4058, |
| "step": 1005 |
| }, |
| { |
| "epoch": 4.085569620253165, |
| "grad_norm": 0.2574102471181848, |
| "learning_rate": 3.90684075707908e-06, |
| "loss": 0.4087, |
| "step": 1006 |
| }, |
| { |
| "epoch": 4.089620253164557, |
| "grad_norm": 0.23599988408076225, |
| "learning_rate": 3.8732057903548505e-06, |
| "loss": 0.4087, |
| "step": 1007 |
| }, |
| { |
| "epoch": 4.093670886075949, |
| "grad_norm": 0.22091197010493976, |
| "learning_rate": 3.8397007066141375e-06, |
| "loss": 0.4242, |
| "step": 1008 |
| }, |
| { |
| "epoch": 4.097721518987342, |
| "grad_norm": 0.24898062662916065, |
| "learning_rate": 3.806325775702304e-06, |
| "loss": 0.401, |
| "step": 1009 |
| }, |
| { |
| "epoch": 4.101772151898734, |
| "grad_norm": 0.25378626946621113, |
| "learning_rate": 3.773081266416434e-06, |
| "loss": 0.3983, |
| "step": 1010 |
| }, |
| { |
| "epoch": 4.105822784810127, |
| "grad_norm": 0.22433504421496767, |
| "learning_rate": 3.739967446503245e-06, |
| "loss": 0.4146, |
| "step": 1011 |
| }, |
| { |
| "epoch": 4.109873417721519, |
| "grad_norm": 0.23713901751110952, |
| "learning_rate": 3.706984582656894e-06, |
| "loss": 0.4188, |
| "step": 1012 |
| }, |
| { |
| "epoch": 4.113924050632911, |
| "grad_norm": 0.21893544084768105, |
| "learning_rate": 3.6741329405168237e-06, |
| "loss": 0.4087, |
| "step": 1013 |
| }, |
| { |
| "epoch": 4.117974683544304, |
| "grad_norm": 0.23256870395332865, |
| "learning_rate": 3.641412784665648e-06, |
| "loss": 0.4044, |
| "step": 1014 |
| }, |
| { |
| "epoch": 4.122025316455696, |
| "grad_norm": 0.2387361205305584, |
| "learning_rate": 3.608824378627005e-06, |
| "loss": 0.4029, |
| "step": 1015 |
| }, |
| { |
| "epoch": 4.126075949367088, |
| "grad_norm": 0.21070985612404913, |
| "learning_rate": 3.5763679848634337e-06, |
| "loss": 0.3954, |
| "step": 1016 |
| }, |
| { |
| "epoch": 4.130126582278481, |
| "grad_norm": 0.2366806559314245, |
| "learning_rate": 3.544043864774269e-06, |
| "loss": 0.4169, |
| "step": 1017 |
| }, |
| { |
| "epoch": 4.134177215189873, |
| "grad_norm": 0.2342468242978536, |
| "learning_rate": 3.5118522786935282e-06, |
| "loss": 0.4002, |
| "step": 1018 |
| }, |
| { |
| "epoch": 4.138227848101266, |
| "grad_norm": 0.22948624376573448, |
| "learning_rate": 3.479793485887819e-06, |
| "loss": 0.4049, |
| "step": 1019 |
| }, |
| { |
| "epoch": 4.142278481012658, |
| "grad_norm": 0.23078589376497644, |
| "learning_rate": 3.4478677445542653e-06, |
| "loss": 0.4224, |
| "step": 1020 |
| }, |
| { |
| "epoch": 4.14632911392405, |
| "grad_norm": 0.21449286621947375, |
| "learning_rate": 3.4160753118183767e-06, |
| "loss": 0.4089, |
| "step": 1021 |
| }, |
| { |
| "epoch": 4.150379746835443, |
| "grad_norm": 0.2370912362019178, |
| "learning_rate": 3.3844164437320527e-06, |
| "loss": 0.4112, |
| "step": 1022 |
| }, |
| { |
| "epoch": 4.154430379746835, |
| "grad_norm": 0.21891111792515203, |
| "learning_rate": 3.3528913952714558e-06, |
| "loss": 0.3981, |
| "step": 1023 |
| }, |
| { |
| "epoch": 4.1584810126582274, |
| "grad_norm": 0.25925301081768826, |
| "learning_rate": 3.321500420335e-06, |
| "loss": 0.404, |
| "step": 1024 |
| }, |
| { |
| "epoch": 4.16253164556962, |
| "grad_norm": 0.21255797141231741, |
| "learning_rate": 3.290243771741275e-06, |
| "loss": 0.4153, |
| "step": 1025 |
| }, |
| { |
| "epoch": 4.1665822784810125, |
| "grad_norm": 0.21103839575315822, |
| "learning_rate": 3.2591217012270325e-06, |
| "loss": 0.4106, |
| "step": 1026 |
| }, |
| { |
| "epoch": 4.170632911392405, |
| "grad_norm": 0.20778501937979213, |
| "learning_rate": 3.228134459445149e-06, |
| "loss": 0.4051, |
| "step": 1027 |
| }, |
| { |
| "epoch": 4.1746835443037975, |
| "grad_norm": 0.2241552673557978, |
| "learning_rate": 3.1972822959626205e-06, |
| "loss": 0.3909, |
| "step": 1028 |
| }, |
| { |
| "epoch": 4.17873417721519, |
| "grad_norm": 0.20249382762460283, |
| "learning_rate": 3.166565459258513e-06, |
| "loss": 0.4073, |
| "step": 1029 |
| }, |
| { |
| "epoch": 4.1827848101265825, |
| "grad_norm": 0.2136935552622026, |
| "learning_rate": 3.1359841967220193e-06, |
| "loss": 0.3919, |
| "step": 1030 |
| }, |
| { |
| "epoch": 4.186835443037975, |
| "grad_norm": 0.21337940682604117, |
| "learning_rate": 3.105538754650419e-06, |
| "loss": 0.418, |
| "step": 1031 |
| }, |
| { |
| "epoch": 4.190886075949367, |
| "grad_norm": 0.22055835347878183, |
| "learning_rate": 3.07522937824712e-06, |
| "loss": 0.4108, |
| "step": 1032 |
| }, |
| { |
| "epoch": 4.19493670886076, |
| "grad_norm": 0.22758801487764477, |
| "learning_rate": 3.0450563116196697e-06, |
| "loss": 0.4055, |
| "step": 1033 |
| }, |
| { |
| "epoch": 4.198987341772152, |
| "grad_norm": 0.3624594180179317, |
| "learning_rate": 3.0150197977778008e-06, |
| "loss": 0.4158, |
| "step": 1034 |
| }, |
| { |
| "epoch": 4.203037974683545, |
| "grad_norm": 0.20871345253760487, |
| "learning_rate": 2.985120078631465e-06, |
| "loss": 0.4064, |
| "step": 1035 |
| }, |
| { |
| "epoch": 4.207088607594937, |
| "grad_norm": 0.20380199223923667, |
| "learning_rate": 2.9553573949888893e-06, |
| "loss": 0.427, |
| "step": 1036 |
| }, |
| { |
| "epoch": 4.211139240506329, |
| "grad_norm": 0.20952319637807804, |
| "learning_rate": 2.9257319865546384e-06, |
| "loss": 0.4112, |
| "step": 1037 |
| }, |
| { |
| "epoch": 4.215189873417722, |
| "grad_norm": 0.22130972622472667, |
| "learning_rate": 2.896244091927678e-06, |
| "loss": 0.4056, |
| "step": 1038 |
| }, |
| { |
| "epoch": 4.219240506329114, |
| "grad_norm": 0.21280740863593872, |
| "learning_rate": 2.8668939485994584e-06, |
| "loss": 0.3978, |
| "step": 1039 |
| }, |
| { |
| "epoch": 4.223291139240506, |
| "grad_norm": 0.21176582761576637, |
| "learning_rate": 2.837681792951994e-06, |
| "loss": 0.4001, |
| "step": 1040 |
| }, |
| { |
| "epoch": 4.227341772151899, |
| "grad_norm": 0.2150208972132154, |
| "learning_rate": 2.808607860255981e-06, |
| "loss": 0.4172, |
| "step": 1041 |
| }, |
| { |
| "epoch": 4.231392405063291, |
| "grad_norm": 0.2368063542275589, |
| "learning_rate": 2.7796723846688634e-06, |
| "loss": 0.4067, |
| "step": 1042 |
| }, |
| { |
| "epoch": 4.235443037974684, |
| "grad_norm": 0.21460494789803117, |
| "learning_rate": 2.7508755992329937e-06, |
| "loss": 0.405, |
| "step": 1043 |
| }, |
| { |
| "epoch": 4.239493670886076, |
| "grad_norm": 0.20967153926382215, |
| "learning_rate": 2.722217735873718e-06, |
| "loss": 0.4226, |
| "step": 1044 |
| }, |
| { |
| "epoch": 4.243544303797468, |
| "grad_norm": 0.21900332027450736, |
| "learning_rate": 2.6936990253975315e-06, |
| "loss": 0.3921, |
| "step": 1045 |
| }, |
| { |
| "epoch": 4.247594936708861, |
| "grad_norm": 0.23239371135777678, |
| "learning_rate": 2.665319697490205e-06, |
| "loss": 0.407, |
| "step": 1046 |
| }, |
| { |
| "epoch": 4.251645569620253, |
| "grad_norm": 0.20160247221396504, |
| "learning_rate": 2.637079980714945e-06, |
| "loss": 0.419, |
| "step": 1047 |
| }, |
| { |
| "epoch": 4.255696202531645, |
| "grad_norm": 0.21141178170434677, |
| "learning_rate": 2.6089801025105453e-06, |
| "loss": 0.4077, |
| "step": 1048 |
| }, |
| { |
| "epoch": 4.259746835443038, |
| "grad_norm": 0.21723079138319984, |
| "learning_rate": 2.581020289189571e-06, |
| "loss": 0.4064, |
| "step": 1049 |
| }, |
| { |
| "epoch": 4.26379746835443, |
| "grad_norm": 0.20257416501196068, |
| "learning_rate": 2.553200765936501e-06, |
| "loss": 0.4044, |
| "step": 1050 |
| }, |
| { |
| "epoch": 4.267848101265823, |
| "grad_norm": 0.20541315035589677, |
| "learning_rate": 2.525521756805962e-06, |
| "loss": 0.3908, |
| "step": 1051 |
| }, |
| { |
| "epoch": 4.271898734177215, |
| "grad_norm": 0.20184588804459785, |
| "learning_rate": 2.497983484720885e-06, |
| "loss": 0.4093, |
| "step": 1052 |
| }, |
| { |
| "epoch": 4.275949367088607, |
| "grad_norm": 0.21645415733723788, |
| "learning_rate": 2.470586171470728e-06, |
| "loss": 0.3987, |
| "step": 1053 |
| }, |
| { |
| "epoch": 4.28, |
| "grad_norm": 0.20496248603072353, |
| "learning_rate": 2.4433300377096836e-06, |
| "loss": 0.4134, |
| "step": 1054 |
| }, |
| { |
| "epoch": 4.284050632911392, |
| "grad_norm": 0.1925908208032047, |
| "learning_rate": 2.4162153029549073e-06, |
| "loss": 0.4139, |
| "step": 1055 |
| }, |
| { |
| "epoch": 4.288101265822785, |
| "grad_norm": 0.20483259191127556, |
| "learning_rate": 2.3892421855847458e-06, |
| "loss": 0.3866, |
| "step": 1056 |
| }, |
| { |
| "epoch": 4.292151898734177, |
| "grad_norm": 0.21024021394313674, |
| "learning_rate": 2.362410902836978e-06, |
| "loss": 0.3968, |
| "step": 1057 |
| }, |
| { |
| "epoch": 4.296202531645569, |
| "grad_norm": 0.2052231066564629, |
| "learning_rate": 2.3357216708070653e-06, |
| "loss": 0.4043, |
| "step": 1058 |
| }, |
| { |
| "epoch": 4.300253164556962, |
| "grad_norm": 0.21341223330107098, |
| "learning_rate": 2.309174704446411e-06, |
| "loss": 0.3869, |
| "step": 1059 |
| }, |
| { |
| "epoch": 4.3043037974683545, |
| "grad_norm": 0.21548818863830582, |
| "learning_rate": 2.2827702175606437e-06, |
| "loss": 0.4197, |
| "step": 1060 |
| }, |
| { |
| "epoch": 4.3083544303797465, |
| "grad_norm": 0.20269856952825668, |
| "learning_rate": 2.256508422807855e-06, |
| "loss": 0.3997, |
| "step": 1061 |
| }, |
| { |
| "epoch": 4.3124050632911395, |
| "grad_norm": 0.2004000202760375, |
| "learning_rate": 2.230389531696946e-06, |
| "loss": 0.4104, |
| "step": 1062 |
| }, |
| { |
| "epoch": 4.3164556962025316, |
| "grad_norm": 0.18937084470328602, |
| "learning_rate": 2.204413754585857e-06, |
| "loss": 0.4134, |
| "step": 1063 |
| }, |
| { |
| "epoch": 4.320506329113924, |
| "grad_norm": 0.19864639441663118, |
| "learning_rate": 2.1785813006799406e-06, |
| "loss": 0.4085, |
| "step": 1064 |
| }, |
| { |
| "epoch": 4.324556962025317, |
| "grad_norm": 0.20983440818341706, |
| "learning_rate": 2.1528923780302224e-06, |
| "loss": 0.4052, |
| "step": 1065 |
| }, |
| { |
| "epoch": 4.328607594936709, |
| "grad_norm": 0.2046796994952921, |
| "learning_rate": 2.127347193531757e-06, |
| "loss": 0.3998, |
| "step": 1066 |
| }, |
| { |
| "epoch": 4.332658227848102, |
| "grad_norm": 0.1949948575565398, |
| "learning_rate": 2.101945952921942e-06, |
| "loss": 0.4067, |
| "step": 1067 |
| }, |
| { |
| "epoch": 4.336708860759494, |
| "grad_norm": 0.1958311443069672, |
| "learning_rate": 2.0766888607788906e-06, |
| "loss": 0.4175, |
| "step": 1068 |
| }, |
| { |
| "epoch": 4.340759493670886, |
| "grad_norm": 0.21302169795783985, |
| "learning_rate": 2.0515761205197337e-06, |
| "loss": 0.4197, |
| "step": 1069 |
| }, |
| { |
| "epoch": 4.344810126582279, |
| "grad_norm": 0.20879585758156688, |
| "learning_rate": 2.0266079343990453e-06, |
| "loss": 0.406, |
| "step": 1070 |
| }, |
| { |
| "epoch": 4.348860759493671, |
| "grad_norm": 0.20273942434392045, |
| "learning_rate": 2.0017845035071494e-06, |
| "loss": 0.4166, |
| "step": 1071 |
| }, |
| { |
| "epoch": 4.352911392405064, |
| "grad_norm": 0.1986512450684521, |
| "learning_rate": 1.9771060277685537e-06, |
| "loss": 0.4037, |
| "step": 1072 |
| }, |
| { |
| "epoch": 4.356962025316456, |
| "grad_norm": 0.19028994142065184, |
| "learning_rate": 1.95257270594031e-06, |
| "loss": 0.4215, |
| "step": 1073 |
| }, |
| { |
| "epoch": 4.361012658227848, |
| "grad_norm": 0.19673314520536642, |
| "learning_rate": 1.9281847356104188e-06, |
| "loss": 0.4059, |
| "step": 1074 |
| }, |
| { |
| "epoch": 4.365063291139241, |
| "grad_norm": 0.1996437089826445, |
| "learning_rate": 1.9039423131962365e-06, |
| "loss": 0.4079, |
| "step": 1075 |
| }, |
| { |
| "epoch": 4.369113924050633, |
| "grad_norm": 0.19868587592022566, |
| "learning_rate": 1.8798456339429027e-06, |
| "loss": 0.4066, |
| "step": 1076 |
| }, |
| { |
| "epoch": 4.373164556962025, |
| "grad_norm": 0.19843659089121346, |
| "learning_rate": 1.8558948919217612e-06, |
| "loss": 0.4201, |
| "step": 1077 |
| }, |
| { |
| "epoch": 4.377215189873418, |
| "grad_norm": 0.20075972462808211, |
| "learning_rate": 1.8320902800287954e-06, |
| "loss": 0.4032, |
| "step": 1078 |
| }, |
| { |
| "epoch": 4.38126582278481, |
| "grad_norm": 0.2043812950964407, |
| "learning_rate": 1.8084319899830726e-06, |
| "loss": 0.3985, |
| "step": 1079 |
| }, |
| { |
| "epoch": 4.385316455696202, |
| "grad_norm": 0.20164998196815076, |
| "learning_rate": 1.7849202123252097e-06, |
| "loss": 0.3974, |
| "step": 1080 |
| }, |
| { |
| "epoch": 4.389367088607595, |
| "grad_norm": 0.20036682958281063, |
| "learning_rate": 1.7615551364158401e-06, |
| "loss": 0.3995, |
| "step": 1081 |
| }, |
| { |
| "epoch": 4.393417721518987, |
| "grad_norm": 0.19781502592390007, |
| "learning_rate": 1.738336950434061e-06, |
| "loss": 0.4272, |
| "step": 1082 |
| }, |
| { |
| "epoch": 4.39746835443038, |
| "grad_norm": 0.19731198073507403, |
| "learning_rate": 1.715265841375957e-06, |
| "loss": 0.4047, |
| "step": 1083 |
| }, |
| { |
| "epoch": 4.401518987341772, |
| "grad_norm": 0.21095595427030997, |
| "learning_rate": 1.6923419950530684e-06, |
| "loss": 0.4016, |
| "step": 1084 |
| }, |
| { |
| "epoch": 4.405569620253164, |
| "grad_norm": 0.21177090636246354, |
| "learning_rate": 1.6695655960909008e-06, |
| "loss": 0.4071, |
| "step": 1085 |
| }, |
| { |
| "epoch": 4.409620253164557, |
| "grad_norm": 0.19368885640026698, |
| "learning_rate": 1.646936827927441e-06, |
| "loss": 0.4098, |
| "step": 1086 |
| }, |
| { |
| "epoch": 4.413670886075949, |
| "grad_norm": 0.19719948804618387, |
| "learning_rate": 1.6244558728116766e-06, |
| "loss": 0.4033, |
| "step": 1087 |
| }, |
| { |
| "epoch": 4.417721518987342, |
| "grad_norm": 0.20065414409516802, |
| "learning_rate": 1.6021229118021265e-06, |
| "loss": 0.4003, |
| "step": 1088 |
| }, |
| { |
| "epoch": 4.421772151898734, |
| "grad_norm": 0.31253618278740614, |
| "learning_rate": 1.5799381247653967e-06, |
| "loss": 0.3849, |
| "step": 1089 |
| }, |
| { |
| "epoch": 4.425822784810126, |
| "grad_norm": 0.19842650687770316, |
| "learning_rate": 1.5579016903747013e-06, |
| "loss": 0.397, |
| "step": 1090 |
| }, |
| { |
| "epoch": 4.429873417721519, |
| "grad_norm": 0.20287128683094288, |
| "learning_rate": 1.5360137861084656e-06, |
| "loss": 0.4035, |
| "step": 1091 |
| }, |
| { |
| "epoch": 4.433924050632911, |
| "grad_norm": 0.2061143611217459, |
| "learning_rate": 1.5142745882488475e-06, |
| "loss": 0.4356, |
| "step": 1092 |
| }, |
| { |
| "epoch": 4.4379746835443035, |
| "grad_norm": 0.1933633790537764, |
| "learning_rate": 1.4926842718803691e-06, |
| "loss": 0.4243, |
| "step": 1093 |
| }, |
| { |
| "epoch": 4.442025316455696, |
| "grad_norm": 0.19650481841408374, |
| "learning_rate": 1.4712430108884657e-06, |
| "loss": 0.4198, |
| "step": 1094 |
| }, |
| { |
| "epoch": 4.4460759493670885, |
| "grad_norm": 0.19901968265054434, |
| "learning_rate": 1.4499509779581078e-06, |
| "loss": 0.4062, |
| "step": 1095 |
| }, |
| { |
| "epoch": 4.450126582278481, |
| "grad_norm": 0.19929823776817737, |
| "learning_rate": 1.4288083445723988e-06, |
| "loss": 0.4102, |
| "step": 1096 |
| }, |
| { |
| "epoch": 4.4541772151898735, |
| "grad_norm": 0.20789276606331528, |
| "learning_rate": 1.4078152810112045e-06, |
| "loss": 0.3947, |
| "step": 1097 |
| }, |
| { |
| "epoch": 4.458227848101266, |
| "grad_norm": 0.19640310783438553, |
| "learning_rate": 1.3869719563497697e-06, |
| "loss": 0.4081, |
| "step": 1098 |
| }, |
| { |
| "epoch": 4.462278481012659, |
| "grad_norm": 0.20353642714714354, |
| "learning_rate": 1.3662785384573663e-06, |
| "loss": 0.407, |
| "step": 1099 |
| }, |
| { |
| "epoch": 4.466329113924051, |
| "grad_norm": 0.1936980994774482, |
| "learning_rate": 1.3457351939959383e-06, |
| "loss": 0.4243, |
| "step": 1100 |
| }, |
| { |
| "epoch": 4.470379746835443, |
| "grad_norm": 0.2013438080027856, |
| "learning_rate": 1.3253420884187551e-06, |
| "loss": 0.4043, |
| "step": 1101 |
| }, |
| { |
| "epoch": 4.474430379746836, |
| "grad_norm": 0.19741984089922573, |
| "learning_rate": 1.3050993859690953e-06, |
| "loss": 0.3979, |
| "step": 1102 |
| }, |
| { |
| "epoch": 4.478481012658228, |
| "grad_norm": 0.21278735025311674, |
| "learning_rate": 1.2850072496788869e-06, |
| "loss": 0.4029, |
| "step": 1103 |
| }, |
| { |
| "epoch": 4.482531645569621, |
| "grad_norm": 0.19600032037701556, |
| "learning_rate": 1.2650658413674434e-06, |
| "loss": 0.4159, |
| "step": 1104 |
| }, |
| { |
| "epoch": 4.486582278481013, |
| "grad_norm": 0.19048065693642574, |
| "learning_rate": 1.2452753216401226e-06, |
| "loss": 0.3994, |
| "step": 1105 |
| }, |
| { |
| "epoch": 4.490632911392405, |
| "grad_norm": 0.19449058625454602, |
| "learning_rate": 1.2256358498870503e-06, |
| "loss": 0.4177, |
| "step": 1106 |
| }, |
| { |
| "epoch": 4.494683544303798, |
| "grad_norm": 0.19707018061437773, |
| "learning_rate": 1.2061475842818337e-06, |
| "loss": 0.4069, |
| "step": 1107 |
| }, |
| { |
| "epoch": 4.49873417721519, |
| "grad_norm": 0.2018138565282781, |
| "learning_rate": 1.1868106817802816e-06, |
| "loss": 0.426, |
| "step": 1108 |
| }, |
| { |
| "epoch": 4.502784810126582, |
| "grad_norm": 0.19943849060186936, |
| "learning_rate": 1.1676252981191482e-06, |
| "loss": 0.397, |
| "step": 1109 |
| }, |
| { |
| "epoch": 4.506835443037975, |
| "grad_norm": 0.19487864594432122, |
| "learning_rate": 1.1485915878148823e-06, |
| "loss": 0.4166, |
| "step": 1110 |
| }, |
| { |
| "epoch": 4.510886075949367, |
| "grad_norm": 0.19441819871752783, |
| "learning_rate": 1.1297097041623584e-06, |
| "loss": 0.4132, |
| "step": 1111 |
| }, |
| { |
| "epoch": 4.514936708860759, |
| "grad_norm": 0.19442294270755198, |
| "learning_rate": 1.1109797992336847e-06, |
| "loss": 0.4133, |
| "step": 1112 |
| }, |
| { |
| "epoch": 4.518987341772152, |
| "grad_norm": 0.1917967537745069, |
| "learning_rate": 1.092402023876933e-06, |
| "loss": 0.3969, |
| "step": 1113 |
| }, |
| { |
| "epoch": 4.523037974683544, |
| "grad_norm": 0.2013911719872365, |
| "learning_rate": 1.0739765277149527e-06, |
| "loss": 0.4081, |
| "step": 1114 |
| }, |
| { |
| "epoch": 4.527088607594937, |
| "grad_norm": 0.19131336543516486, |
| "learning_rate": 1.0557034591441596e-06, |
| "loss": 0.427, |
| "step": 1115 |
| }, |
| { |
| "epoch": 4.531139240506329, |
| "grad_norm": 0.19685136376219184, |
| "learning_rate": 1.0375829653333324e-06, |
| "loss": 0.4226, |
| "step": 1116 |
| }, |
| { |
| "epoch": 4.535189873417721, |
| "grad_norm": 0.19389834390549243, |
| "learning_rate": 1.0196151922224385e-06, |
| "loss": 0.4123, |
| "step": 1117 |
| }, |
| { |
| "epoch": 4.539240506329114, |
| "grad_norm": 0.19699226662396452, |
| "learning_rate": 1.0018002845214526e-06, |
| "loss": 0.413, |
| "step": 1118 |
| }, |
| { |
| "epoch": 4.543291139240506, |
| "grad_norm": 0.19782175385497514, |
| "learning_rate": 9.841383857091947e-07, |
| "loss": 0.4168, |
| "step": 1119 |
| }, |
| { |
| "epoch": 4.547341772151899, |
| "grad_norm": 0.2058143940421534, |
| "learning_rate": 9.666296380321616e-07, |
| "loss": 0.4011, |
| "step": 1120 |
| }, |
| { |
| "epoch": 4.551392405063291, |
| "grad_norm": 0.1892912621106266, |
| "learning_rate": 9.492741825034124e-07, |
| "loss": 0.432, |
| "step": 1121 |
| }, |
| { |
| "epoch": 4.555443037974683, |
| "grad_norm": 0.1911324355140839, |
| "learning_rate": 9.320721589013892e-07, |
| "loss": 0.4059, |
| "step": 1122 |
| }, |
| { |
| "epoch": 4.559493670886076, |
| "grad_norm": 0.20546617819948967, |
| "learning_rate": 9.150237057688339e-07, |
| "loss": 0.4041, |
| "step": 1123 |
| }, |
| { |
| "epoch": 4.563544303797468, |
| "grad_norm": 0.2379317276672048, |
| "learning_rate": 8.981289604116328e-07, |
| "loss": 0.4072, |
| "step": 1124 |
| }, |
| { |
| "epoch": 4.56759493670886, |
| "grad_norm": 0.19436980815844065, |
| "learning_rate": 8.813880588977542e-07, |
| "loss": 0.4214, |
| "step": 1125 |
| }, |
| { |
| "epoch": 4.571645569620253, |
| "grad_norm": 0.1909807429183004, |
| "learning_rate": 8.648011360561126e-07, |
| "loss": 0.3931, |
| "step": 1126 |
| }, |
| { |
| "epoch": 4.575696202531645, |
| "grad_norm": 0.19543970630120377, |
| "learning_rate": 8.483683254755037e-07, |
| "loss": 0.4157, |
| "step": 1127 |
| }, |
| { |
| "epoch": 4.5797468354430375, |
| "grad_norm": 0.19621320682917884, |
| "learning_rate": 8.320897595035227e-07, |
| "loss": 0.4219, |
| "step": 1128 |
| }, |
| { |
| "epoch": 4.5837974683544305, |
| "grad_norm": 0.19848108621617705, |
| "learning_rate": 8.159655692455093e-07, |
| "loss": 0.403, |
| "step": 1129 |
| }, |
| { |
| "epoch": 4.5878481012658225, |
| "grad_norm": 0.268144410225894, |
| "learning_rate": 7.999958845634648e-07, |
| "loss": 0.4049, |
| "step": 1130 |
| }, |
| { |
| "epoch": 4.5918987341772155, |
| "grad_norm": 0.19915661509101446, |
| "learning_rate": 7.841808340750478e-07, |
| "loss": 0.4026, |
| "step": 1131 |
| }, |
| { |
| "epoch": 4.595949367088608, |
| "grad_norm": 0.19005772015544425, |
| "learning_rate": 7.685205451524869e-07, |
| "loss": 0.3966, |
| "step": 1132 |
| }, |
| { |
| "epoch": 4.6, |
| "grad_norm": 0.19663846136475643, |
| "learning_rate": 7.530151439216027e-07, |
| "loss": 0.4043, |
| "step": 1133 |
| }, |
| { |
| "epoch": 4.604050632911393, |
| "grad_norm": 0.1926782073253448, |
| "learning_rate": 7.376647552607675e-07, |
| "loss": 0.3922, |
| "step": 1134 |
| }, |
| { |
| "epoch": 4.608101265822785, |
| "grad_norm": 0.20224089494094388, |
| "learning_rate": 7.224695027998963e-07, |
| "loss": 0.4094, |
| "step": 1135 |
| }, |
| { |
| "epoch": 4.612151898734178, |
| "grad_norm": 0.2009618790535245, |
| "learning_rate": 7.07429508919466e-07, |
| "loss": 0.4145, |
| "step": 1136 |
| }, |
| { |
| "epoch": 4.61620253164557, |
| "grad_norm": 0.1917329577765777, |
| "learning_rate": 6.925448947495206e-07, |
| "loss": 0.4056, |
| "step": 1137 |
| }, |
| { |
| "epoch": 4.620253164556962, |
| "grad_norm": 0.1892481972939858, |
| "learning_rate": 6.778157801686936e-07, |
| "loss": 0.4068, |
| "step": 1138 |
| }, |
| { |
| "epoch": 4.624303797468355, |
| "grad_norm": 0.20532372251931647, |
| "learning_rate": 6.632422838032515e-07, |
| "loss": 0.3981, |
| "step": 1139 |
| }, |
| { |
| "epoch": 4.628354430379747, |
| "grad_norm": 0.1948336164757943, |
| "learning_rate": 6.488245230261281e-07, |
| "loss": 0.4019, |
| "step": 1140 |
| }, |
| { |
| "epoch": 4.632405063291139, |
| "grad_norm": 0.20192826013147716, |
| "learning_rate": 6.345626139559868e-07, |
| "loss": 0.3984, |
| "step": 1141 |
| }, |
| { |
| "epoch": 4.636455696202532, |
| "grad_norm": 0.18592417943669104, |
| "learning_rate": 6.204566714562866e-07, |
| "loss": 0.4224, |
| "step": 1142 |
| }, |
| { |
| "epoch": 4.640506329113924, |
| "grad_norm": 0.20037193323217048, |
| "learning_rate": 6.06506809134344e-07, |
| "loss": 0.3905, |
| "step": 1143 |
| }, |
| { |
| "epoch": 4.644556962025316, |
| "grad_norm": 0.1942735124759689, |
| "learning_rate": 5.927131393404373e-07, |
| "loss": 0.407, |
| "step": 1144 |
| }, |
| { |
| "epoch": 4.648607594936709, |
| "grad_norm": 0.19661159184744803, |
| "learning_rate": 5.790757731668817e-07, |
| "loss": 0.3975, |
| "step": 1145 |
| }, |
| { |
| "epoch": 4.652658227848101, |
| "grad_norm": 0.1955082344899654, |
| "learning_rate": 5.655948204471507e-07, |
| "loss": 0.4126, |
| "step": 1146 |
| }, |
| { |
| "epoch": 4.656708860759494, |
| "grad_norm": 0.2165645847003483, |
| "learning_rate": 5.522703897549875e-07, |
| "loss": 0.4162, |
| "step": 1147 |
| }, |
| { |
| "epoch": 4.660759493670886, |
| "grad_norm": 0.19152749637056807, |
| "learning_rate": 5.391025884035239e-07, |
| "loss": 0.4236, |
| "step": 1148 |
| }, |
| { |
| "epoch": 4.664810126582278, |
| "grad_norm": 0.1908724332393585, |
| "learning_rate": 5.260915224444207e-07, |
| "loss": 0.4166, |
| "step": 1149 |
| }, |
| { |
| "epoch": 4.668860759493671, |
| "grad_norm": 0.19387659183449066, |
| "learning_rate": 5.132372966670129e-07, |
| "loss": 0.4175, |
| "step": 1150 |
| }, |
| { |
| "epoch": 4.672911392405063, |
| "grad_norm": 0.19478543740739343, |
| "learning_rate": 5.005400145974704e-07, |
| "loss": 0.4034, |
| "step": 1151 |
| }, |
| { |
| "epoch": 4.676962025316456, |
| "grad_norm": 0.19726977762514972, |
| "learning_rate": 4.879997784979562e-07, |
| "loss": 0.3864, |
| "step": 1152 |
| }, |
| { |
| "epoch": 4.681012658227848, |
| "grad_norm": 0.1987206671686503, |
| "learning_rate": 4.7561668936580984e-07, |
| "loss": 0.3998, |
| "step": 1153 |
| }, |
| { |
| "epoch": 4.68506329113924, |
| "grad_norm": 0.19712011652284303, |
| "learning_rate": 4.6339084693272306e-07, |
| "loss": 0.3969, |
| "step": 1154 |
| }, |
| { |
| "epoch": 4.689113924050633, |
| "grad_norm": 0.2127136667685158, |
| "learning_rate": 4.5132234966395847e-07, |
| "loss": 0.3848, |
| "step": 1155 |
| }, |
| { |
| "epoch": 4.693164556962025, |
| "grad_norm": 0.199708408412489, |
| "learning_rate": 4.3941129475752795e-07, |
| "loss": 0.4074, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.697215189873417, |
| "grad_norm": 0.19985563863453643, |
| "learning_rate": 4.27657778143431e-07, |
| "loss": 0.4101, |
| "step": 1157 |
| }, |
| { |
| "epoch": 4.70126582278481, |
| "grad_norm": 0.1936775807336959, |
| "learning_rate": 4.1606189448287757e-07, |
| "loss": 0.4163, |
| "step": 1158 |
| }, |
| { |
| "epoch": 4.705316455696202, |
| "grad_norm": 0.18804738712775498, |
| "learning_rate": 4.046237371675177e-07, |
| "loss": 0.4157, |
| "step": 1159 |
| }, |
| { |
| "epoch": 4.7093670886075945, |
| "grad_norm": 0.18953256319310097, |
| "learning_rate": 3.9334339831869963e-07, |
| "loss": 0.4052, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.713417721518987, |
| "grad_norm": 0.20678604856908117, |
| "learning_rate": 3.8222096878671955e-07, |
| "loss": 0.3979, |
| "step": 1161 |
| }, |
| { |
| "epoch": 4.7174683544303795, |
| "grad_norm": 0.19405685388328026, |
| "learning_rate": 3.7125653815009545e-07, |
| "loss": 0.409, |
| "step": 1162 |
| }, |
| { |
| "epoch": 4.7215189873417724, |
| "grad_norm": 0.20433228419384958, |
| "learning_rate": 3.6045019471484974e-07, |
| "loss": 0.4156, |
| "step": 1163 |
| }, |
| { |
| "epoch": 4.7255696202531645, |
| "grad_norm": 0.1971794748286958, |
| "learning_rate": 3.498020255137813e-07, |
| "loss": 0.4193, |
| "step": 1164 |
| }, |
| { |
| "epoch": 4.729620253164557, |
| "grad_norm": 0.1968983370573593, |
| "learning_rate": 3.393121163057811e-07, |
| "loss": 0.4008, |
| "step": 1165 |
| }, |
| { |
| "epoch": 4.7336708860759495, |
| "grad_norm": 0.20170768001612216, |
| "learning_rate": 3.289805515751399e-07, |
| "loss": 0.4093, |
| "step": 1166 |
| }, |
| { |
| "epoch": 4.737721518987342, |
| "grad_norm": 0.1993102731656133, |
| "learning_rate": 3.188074145308573e-07, |
| "loss": 0.4027, |
| "step": 1167 |
| }, |
| { |
| "epoch": 4.741772151898735, |
| "grad_norm": 0.19829749231068863, |
| "learning_rate": 3.087927871059804e-07, |
| "loss": 0.4069, |
| "step": 1168 |
| }, |
| { |
| "epoch": 4.745822784810127, |
| "grad_norm": 0.18917158830349837, |
| "learning_rate": 2.989367499569418e-07, |
| "loss": 0.4111, |
| "step": 1169 |
| }, |
| { |
| "epoch": 4.749873417721519, |
| "grad_norm": 0.1938693435821872, |
| "learning_rate": 2.8923938246290917e-07, |
| "loss": 0.4136, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.753924050632912, |
| "grad_norm": 0.1968048922502675, |
| "learning_rate": 2.7970076272514804e-07, |
| "loss": 0.4129, |
| "step": 1171 |
| }, |
| { |
| "epoch": 4.757974683544304, |
| "grad_norm": 0.1922001043529497, |
| "learning_rate": 2.703209675663887e-07, |
| "loss": 0.4177, |
| "step": 1172 |
| }, |
| { |
| "epoch": 4.762025316455696, |
| "grad_norm": 0.19570487848771065, |
| "learning_rate": 2.6110007253021374e-07, |
| "loss": 0.4122, |
| "step": 1173 |
| }, |
| { |
| "epoch": 4.766075949367089, |
| "grad_norm": 0.1969134259230168, |
| "learning_rate": 2.520381518804471e-07, |
| "loss": 0.3921, |
| "step": 1174 |
| }, |
| { |
| "epoch": 4.770126582278481, |
| "grad_norm": 0.19102248268397867, |
| "learning_rate": 2.4313527860054585e-07, |
| "loss": 0.4154, |
| "step": 1175 |
| }, |
| { |
| "epoch": 4.774177215189873, |
| "grad_norm": 0.2067022765139133, |
| "learning_rate": 2.343915243930317e-07, |
| "loss": 0.3966, |
| "step": 1176 |
| }, |
| { |
| "epoch": 4.778227848101266, |
| "grad_norm": 0.19471559086327075, |
| "learning_rate": 2.2580695967889367e-07, |
| "loss": 0.4008, |
| "step": 1177 |
| }, |
| { |
| "epoch": 4.782278481012658, |
| "grad_norm": 0.19398652633304167, |
| "learning_rate": 2.1738165359704189e-07, |
| "loss": 0.3979, |
| "step": 1178 |
| }, |
| { |
| "epoch": 4.786329113924051, |
| "grad_norm": 0.19522359290894015, |
| "learning_rate": 2.0911567400373257e-07, |
| "loss": 0.4077, |
| "step": 1179 |
| }, |
| { |
| "epoch": 4.790379746835443, |
| "grad_norm": 0.189950203243214, |
| "learning_rate": 2.0100908747202607e-07, |
| "loss": 0.4152, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.794430379746835, |
| "grad_norm": 0.19324423312288133, |
| "learning_rate": 1.9306195929125638e-07, |
| "loss": 0.408, |
| "step": 1181 |
| }, |
| { |
| "epoch": 4.798481012658228, |
| "grad_norm": 0.19478245809441858, |
| "learning_rate": 1.8527435346650247e-07, |
| "loss": 0.4047, |
| "step": 1182 |
| }, |
| { |
| "epoch": 4.80253164556962, |
| "grad_norm": 0.19130517220825186, |
| "learning_rate": 1.7764633271807108e-07, |
| "loss": 0.4173, |
| "step": 1183 |
| }, |
| { |
| "epoch": 4.806582278481013, |
| "grad_norm": 0.18678710119920797, |
| "learning_rate": 1.7017795848099262e-07, |
| "loss": 0.4121, |
| "step": 1184 |
| }, |
| { |
| "epoch": 4.810632911392405, |
| "grad_norm": 0.19784703077808952, |
| "learning_rate": 1.6286929090452596e-07, |
| "loss": 0.3998, |
| "step": 1185 |
| }, |
| { |
| "epoch": 4.814683544303797, |
| "grad_norm": 0.19590286701770654, |
| "learning_rate": 1.557203888516745e-07, |
| "loss": 0.4198, |
| "step": 1186 |
| }, |
| { |
| "epoch": 4.81873417721519, |
| "grad_norm": 0.21039256128858974, |
| "learning_rate": 1.487313098987131e-07, |
| "loss": 0.4225, |
| "step": 1187 |
| }, |
| { |
| "epoch": 4.822784810126582, |
| "grad_norm": 0.19639123037185469, |
| "learning_rate": 1.4190211033472402e-07, |
| "loss": 0.4146, |
| "step": 1188 |
| }, |
| { |
| "epoch": 4.826835443037975, |
| "grad_norm": 0.20134686389534417, |
| "learning_rate": 1.3523284516113955e-07, |
| "loss": 0.4111, |
| "step": 1189 |
| }, |
| { |
| "epoch": 4.830886075949367, |
| "grad_norm": 0.19643064749663203, |
| "learning_rate": 1.2872356809130682e-07, |
| "loss": 0.409, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.834936708860759, |
| "grad_norm": 0.19019150007159574, |
| "learning_rate": 1.2237433155004807e-07, |
| "loss": 0.4018, |
| "step": 1191 |
| }, |
| { |
| "epoch": 4.838987341772152, |
| "grad_norm": 0.19554607621213563, |
| "learning_rate": 1.1618518667323886e-07, |
| "loss": 0.4036, |
| "step": 1192 |
| }, |
| { |
| "epoch": 4.843037974683544, |
| "grad_norm": 0.2025314340224753, |
| "learning_rate": 1.1015618330740385e-07, |
| "loss": 0.4102, |
| "step": 1193 |
| }, |
| { |
| "epoch": 4.847088607594936, |
| "grad_norm": 0.1924195773776015, |
| "learning_rate": 1.042873700093061e-07, |
| "loss": 0.408, |
| "step": 1194 |
| }, |
| { |
| "epoch": 4.851139240506329, |
| "grad_norm": 0.19321977931224246, |
| "learning_rate": 9.857879404556291e-08, |
| "loss": 0.406, |
| "step": 1195 |
| }, |
| { |
| "epoch": 4.8551898734177215, |
| "grad_norm": 0.1935975000057012, |
| "learning_rate": 9.303050139225722e-08, |
| "loss": 0.3981, |
| "step": 1196 |
| }, |
| { |
| "epoch": 4.8592405063291135, |
| "grad_norm": 0.19912951851897948, |
| "learning_rate": 8.76425367345779e-08, |
| "loss": 0.3933, |
| "step": 1197 |
| }, |
| { |
| "epoch": 4.8632911392405065, |
| "grad_norm": 0.1910697932318618, |
| "learning_rate": 8.241494346644897e-08, |
| "loss": 0.4247, |
| "step": 1198 |
| }, |
| { |
| "epoch": 4.867341772151899, |
| "grad_norm": 0.20903917111655343, |
| "learning_rate": 7.734776369019204e-08, |
| "loss": 0.3964, |
| "step": 1199 |
| }, |
| { |
| "epoch": 4.8713924050632915, |
| "grad_norm": 0.1956168149430498, |
| "learning_rate": 7.244103821617332e-08, |
| "loss": 0.4085, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.875443037974684, |
| "grad_norm": 0.20056352395985239, |
| "learning_rate": 6.769480656248606e-08, |
| "loss": 0.4087, |
| "step": 1201 |
| }, |
| { |
| "epoch": 4.879493670886076, |
| "grad_norm": 0.19359013776617792, |
| "learning_rate": 6.310910695462635e-08, |
| "loss": 0.4145, |
| "step": 1202 |
| }, |
| { |
| "epoch": 4.883544303797469, |
| "grad_norm": 0.1963318238376584, |
| "learning_rate": 5.8683976325191185e-08, |
| "loss": 0.4019, |
| "step": 1203 |
| }, |
| { |
| "epoch": 4.887594936708861, |
| "grad_norm": 0.20250618720140826, |
| "learning_rate": 5.4419450313571984e-08, |
| "loss": 0.3965, |
| "step": 1204 |
| }, |
| { |
| "epoch": 4.891645569620254, |
| "grad_norm": 0.1965590931767785, |
| "learning_rate": 5.031556326567488e-08, |
| "loss": 0.3908, |
| "step": 1205 |
| }, |
| { |
| "epoch": 4.895696202531646, |
| "grad_norm": 0.19970576600769369, |
| "learning_rate": 4.637234823364312e-08, |
| "loss": 0.4085, |
| "step": 1206 |
| }, |
| { |
| "epoch": 4.899746835443038, |
| "grad_norm": 1.092825416188952, |
| "learning_rate": 4.258983697558838e-08, |
| "loss": 0.4312, |
| "step": 1207 |
| }, |
| { |
| "epoch": 4.903797468354431, |
| "grad_norm": 0.19376106812055138, |
| "learning_rate": 3.896805995533548e-08, |
| "loss": 0.3973, |
| "step": 1208 |
| }, |
| { |
| "epoch": 4.907848101265823, |
| "grad_norm": 0.18920875413162927, |
| "learning_rate": 3.550704634218028e-08, |
| "loss": 0.4112, |
| "step": 1209 |
| }, |
| { |
| "epoch": 4.911898734177215, |
| "grad_norm": 0.19662436039025016, |
| "learning_rate": 3.2206824010647676e-08, |
| "loss": 0.4194, |
| "step": 1210 |
| }, |
| { |
| "epoch": 4.915949367088608, |
| "grad_norm": 0.19645951110220322, |
| "learning_rate": 2.9067419540278476e-08, |
| "loss": 0.3887, |
| "step": 1211 |
| }, |
| { |
| "epoch": 4.92, |
| "grad_norm": 0.19727738998156405, |
| "learning_rate": 2.6088858215400638e-08, |
| "loss": 0.4116, |
| "step": 1212 |
| }, |
| { |
| "epoch": 4.924050632911392, |
| "grad_norm": 0.19089159131580866, |
| "learning_rate": 2.3271164024940564e-08, |
| "loss": 0.4201, |
| "step": 1213 |
| }, |
| { |
| "epoch": 4.928101265822785, |
| "grad_norm": 0.1822956248629886, |
| "learning_rate": 2.061435966221881e-08, |
| "loss": 0.4066, |
| "step": 1214 |
| }, |
| { |
| "epoch": 4.932151898734177, |
| "grad_norm": 0.19270793327685123, |
| "learning_rate": 1.811846652477245e-08, |
| "loss": 0.4116, |
| "step": 1215 |
| }, |
| { |
| "epoch": 4.93620253164557, |
| "grad_norm": 0.19687328239892615, |
| "learning_rate": 1.5783504714184106e-08, |
| "loss": 0.4079, |
| "step": 1216 |
| }, |
| { |
| "epoch": 4.940253164556962, |
| "grad_norm": 0.19527011814408435, |
| "learning_rate": 1.360949303591097e-08, |
| "loss": 0.3969, |
| "step": 1217 |
| }, |
| { |
| "epoch": 4.944303797468354, |
| "grad_norm": 0.19336700871817514, |
| "learning_rate": 1.1596448999144916e-08, |
| "loss": 0.3963, |
| "step": 1218 |
| }, |
| { |
| "epoch": 4.948354430379747, |
| "grad_norm": 0.19640490017571324, |
| "learning_rate": 9.744388816668172e-09, |
| "loss": 0.4053, |
| "step": 1219 |
| }, |
| { |
| "epoch": 4.952405063291139, |
| "grad_norm": 0.19836171357971175, |
| "learning_rate": 8.05332740472009e-09, |
| "loss": 0.4083, |
| "step": 1220 |
| }, |
| { |
| "epoch": 4.956455696202532, |
| "grad_norm": 0.19325805439099555, |
| "learning_rate": 6.523278382872811e-09, |
| "loss": 0.4175, |
| "step": 1221 |
| }, |
| { |
| "epoch": 4.960506329113924, |
| "grad_norm": 0.19477291004744382, |
| "learning_rate": 5.15425407393133e-09, |
| "loss": 0.4032, |
| "step": 1222 |
| }, |
| { |
| "epoch": 4.964556962025316, |
| "grad_norm": 0.1886607271551426, |
| "learning_rate": 3.94626550383137e-09, |
| "loss": 0.403, |
| "step": 1223 |
| }, |
| { |
| "epoch": 4.968607594936709, |
| "grad_norm": 0.1905650260583666, |
| "learning_rate": 2.899322401546112e-09, |
| "loss": 0.4032, |
| "step": 1224 |
| }, |
| { |
| "epoch": 4.972658227848101, |
| "grad_norm": 0.19674728155365995, |
| "learning_rate": 2.013433199010706e-09, |
| "loss": 0.4214, |
| "step": 1225 |
| }, |
| { |
| "epoch": 4.976708860759493, |
| "grad_norm": 0.19091742819070942, |
| "learning_rate": 1.2886050310556563e-09, |
| "loss": 0.4065, |
| "step": 1226 |
| }, |
| { |
| "epoch": 4.980759493670886, |
| "grad_norm": 0.18837404134781832, |
| "learning_rate": 7.248437353468695e-10, |
| "loss": 0.3883, |
| "step": 1227 |
| }, |
| { |
| "epoch": 4.984810126582278, |
| "grad_norm": 0.18964811102574436, |
| "learning_rate": 3.221538523412449e-10, |
| "loss": 0.4137, |
| "step": 1228 |
| }, |
| { |
| "epoch": 4.9888607594936705, |
| "grad_norm": 0.18970199994451659, |
| "learning_rate": 8.053862524670663e-11, |
| "loss": 0.4111, |
| "step": 1229 |
| }, |
| { |
| "epoch": 4.992911392405063, |
| "grad_norm": 0.19440843930659504, |
| "learning_rate": 0.0, |
| "loss": 0.3859, |
| "step": 1230 |
| }, |
| { |
| "epoch": 4.992911392405063, |
| "step": 1230, |
| "total_flos": 6.085998278900974e+18, |
| "train_loss": 0.17071973751715527, |
| "train_runtime": 22522.1157, |
| "train_samples_per_second": 7.015, |
| "train_steps_per_second": 0.055 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1230, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6.085998278900974e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |