{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.988354430379747,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004050632911392405,
      "grad_norm": 7.190901404481192,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 1.1692,
      "step": 1
    },
    {
      "epoch": 0.00810126582278481,
      "grad_norm": 7.051225638402173,
      "learning_rate": 6.504065040650407e-07,
      "loss": 1.1442,
      "step": 2
    },
    {
      "epoch": 0.012151898734177215,
      "grad_norm": 6.959607679118453,
      "learning_rate": 9.75609756097561e-07,
      "loss": 1.1135,
      "step": 3
    },
    {
      "epoch": 0.01620253164556962,
      "grad_norm": 6.692895434364896,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 1.1069,
      "step": 4
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 6.397078554675219,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 1.1049,
      "step": 5
    },
    {
      "epoch": 0.02430379746835443,
      "grad_norm": 6.574571057049284,
      "learning_rate": 1.951219512195122e-06,
      "loss": 1.1323,
      "step": 6
    },
    {
      "epoch": 0.028354430379746835,
      "grad_norm": 5.020295253441259,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 1.0449,
      "step": 7
    },
    {
      "epoch": 0.03240506329113924,
      "grad_norm": 4.6177388981952525,
      "learning_rate": 2.601626016260163e-06,
      "loss": 1.0436,
      "step": 8
    },
    {
      "epoch": 0.03645569620253165,
      "grad_norm": 2.499832899413368,
      "learning_rate": 2.926829268292683e-06,
      "loss": 0.9866,
      "step": 9
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 2.282490535353663,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 0.9979,
      "step": 10
    },
    {
      "epoch": 0.044556962025316456,
      "grad_norm": 2.1324561936519553,
      "learning_rate": 3.577235772357724e-06,
      "loss": 1.0051,
      "step": 11
    },
    {
      "epoch": 0.04860759493670886,
      "grad_norm": 4.740506429279618,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.9647,
      "step": 12
    },
    {
      "epoch": 0.052658227848101265,
      "grad_norm": 5.3350448217325095,
      "learning_rate": 4.227642276422765e-06,
      "loss": 1.0202,
      "step": 13
    },
    {
      "epoch": 0.05670886075949367,
      "grad_norm": 5.095096718919127,
      "learning_rate": 4.552845528455285e-06,
      "loss": 0.9594,
      "step": 14
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 4.721768939186621,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 0.9656,
      "step": 15
    },
    {
      "epoch": 0.06481012658227848,
      "grad_norm": 3.3470449831162106,
      "learning_rate": 5.203252032520326e-06,
      "loss": 0.9179,
      "step": 16
    },
    {
      "epoch": 0.06886075949367089,
      "grad_norm": 3.0561612789391575,
      "learning_rate": 5.528455284552846e-06,
      "loss": 0.9366,
      "step": 17
    },
    {
      "epoch": 0.0729113924050633,
      "grad_norm": 2.7049915815157695,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.9135,
      "step": 18
    },
    {
      "epoch": 0.0769620253164557,
      "grad_norm": 2.2567805448057054,
      "learning_rate": 6.178861788617887e-06,
      "loss": 0.8938,
      "step": 19
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 1.9737960498610374,
      "learning_rate": 6.504065040650407e-06,
      "loss": 0.8827,
      "step": 20
    },
    {
      "epoch": 0.08506329113924051,
      "grad_norm": 1.716880813414956,
      "learning_rate": 6.829268292682928e-06,
      "loss": 0.8558,
      "step": 21
    },
    {
      "epoch": 0.08911392405063291,
      "grad_norm": 1.587599210286518,
      "learning_rate": 7.154471544715448e-06,
      "loss": 0.8344,
      "step": 22
    },
    {
      "epoch": 0.09316455696202532,
      "grad_norm": 1.7169036911608293,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 0.8494,
      "step": 23
    },
    {
      "epoch": 0.09721518987341772,
      "grad_norm": 1.6664797897391332,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.8411,
      "step": 24
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 1.4331342518785164,
      "learning_rate": 8.130081300813009e-06,
      "loss": 0.8324,
      "step": 25
    },
    {
      "epoch": 0.10531645569620253,
      "grad_norm": 1.2079182277448255,
      "learning_rate": 8.45528455284553e-06,
      "loss": 0.8579,
      "step": 26
    },
    {
      "epoch": 0.10936708860759493,
      "grad_norm": 1.3179998929818766,
      "learning_rate": 8.78048780487805e-06,
      "loss": 0.8339,
      "step": 27
    },
    {
      "epoch": 0.11341772151898734,
      "grad_norm": 1.1500168698539697,
      "learning_rate": 9.10569105691057e-06,
      "loss": 0.8394,
      "step": 28
    },
    {
      "epoch": 0.11746835443037974,
      "grad_norm": 0.9786854559880325,
      "learning_rate": 9.43089430894309e-06,
      "loss": 0.8292,
      "step": 29
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 1.0505359999578043,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.8196,
      "step": 30
    },
    {
      "epoch": 0.12556962025316457,
      "grad_norm": 1.097415513547258,
      "learning_rate": 1.008130081300813e-05,
      "loss": 0.8026,
      "step": 31
    },
    {
      "epoch": 0.12962025316455697,
      "grad_norm": 0.9099145245589982,
      "learning_rate": 1.0406504065040652e-05,
      "loss": 0.8039,
      "step": 32
    },
    {
      "epoch": 0.13367088607594937,
      "grad_norm": 0.8822321683062831,
      "learning_rate": 1.0731707317073172e-05,
      "loss": 0.8111,
      "step": 33
    },
    {
      "epoch": 0.13772151898734178,
      "grad_norm": 0.849027651961397,
      "learning_rate": 1.1056910569105692e-05,
      "loss": 0.8142,
      "step": 34
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 0.8118166679329141,
      "learning_rate": 1.1382113821138213e-05,
      "loss": 0.813,
      "step": 35
    },
    {
      "epoch": 0.1458227848101266,
      "grad_norm": 0.7260233924649556,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.818,
      "step": 36
    },
    {
      "epoch": 0.149873417721519,
      "grad_norm": 0.823289339733132,
      "learning_rate": 1.2032520325203254e-05,
      "loss": 0.7911,
      "step": 37
    },
    {
      "epoch": 0.1539240506329114,
      "grad_norm": 0.6916524900454462,
      "learning_rate": 1.2357723577235774e-05,
      "loss": 0.779,
      "step": 38
    },
    {
      "epoch": 0.1579746835443038,
      "grad_norm": 0.6972094724044967,
      "learning_rate": 1.2682926829268294e-05,
      "loss": 0.7978,
      "step": 39
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 0.7519259091590217,
      "learning_rate": 1.3008130081300815e-05,
      "loss": 0.785,
      "step": 40
    },
    {
      "epoch": 0.1660759493670886,
      "grad_norm": 0.677406835663151,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.8178,
      "step": 41
    },
    {
      "epoch": 0.17012658227848101,
      "grad_norm": 0.6593565905383925,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.7712,
      "step": 42
    },
    {
      "epoch": 0.17417721518987342,
      "grad_norm": 0.7332676009163879,
      "learning_rate": 1.3983739837398376e-05,
      "loss": 0.7861,
      "step": 43
    },
    {
      "epoch": 0.17822784810126582,
      "grad_norm": 0.7098514833794023,
      "learning_rate": 1.4308943089430896e-05,
      "loss": 0.7579,
      "step": 44
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 0.7016921081841316,
      "learning_rate": 1.4634146341463415e-05,
      "loss": 0.7892,
      "step": 45
    },
    {
      "epoch": 0.18632911392405063,
      "grad_norm": 0.6549242340177437,
      "learning_rate": 1.4959349593495935e-05,
      "loss": 0.7813,
      "step": 46
    },
    {
      "epoch": 0.19037974683544304,
      "grad_norm": 0.6448090305801469,
      "learning_rate": 1.528455284552846e-05,
      "loss": 0.7405,
      "step": 47
    },
    {
      "epoch": 0.19443037974683544,
      "grad_norm": 0.6393999473612035,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.749,
      "step": 48
    },
    {
      "epoch": 0.19848101265822785,
      "grad_norm": 0.694833909282302,
      "learning_rate": 1.5934959349593496e-05,
      "loss": 0.767,
      "step": 49
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 0.628034351690571,
      "learning_rate": 1.6260162601626018e-05,
      "loss": 0.7748,
      "step": 50
    },
    {
      "epoch": 0.20658227848101265,
      "grad_norm": 0.7175115765450881,
      "learning_rate": 1.6585365853658537e-05,
      "loss": 0.7945,
      "step": 51
    },
    {
      "epoch": 0.21063291139240506,
      "grad_norm": 0.5820676425766123,
      "learning_rate": 1.691056910569106e-05,
      "loss": 0.7512,
      "step": 52
    },
    {
      "epoch": 0.21468354430379746,
      "grad_norm": 0.636423018028593,
      "learning_rate": 1.7235772357723578e-05,
      "loss": 0.7537,
      "step": 53
    },
    {
      "epoch": 0.21873417721518987,
      "grad_norm": 0.6316378908182841,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.7561,
      "step": 54
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 0.6552895402753788,
      "learning_rate": 1.788617886178862e-05,
      "loss": 0.7601,
      "step": 55
    },
    {
      "epoch": 0.22683544303797468,
      "grad_norm": 0.6937232969003874,
      "learning_rate": 1.821138211382114e-05,
      "loss": 0.7484,
      "step": 56
    },
    {
      "epoch": 0.23088607594936708,
      "grad_norm": 0.7037602766098012,
      "learning_rate": 1.8536585365853663e-05,
      "loss": 0.7627,
      "step": 57
    },
    {
      "epoch": 0.23493670886075949,
      "grad_norm": 0.8356397736722753,
      "learning_rate": 1.886178861788618e-05,
      "loss": 0.7767,
      "step": 58
    },
    {
      "epoch": 0.2389873417721519,
      "grad_norm": 0.7380245978798398,
      "learning_rate": 1.91869918699187e-05,
      "loss": 0.7746,
      "step": 59
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 0.8162769512868107,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.751,
      "step": 60
    },
    {
      "epoch": 0.2470886075949367,
      "grad_norm": 0.7316342705939795,
      "learning_rate": 1.983739837398374e-05,
      "loss": 0.765,
      "step": 61
    },
    {
      "epoch": 0.25113924050632913,
      "grad_norm": 0.8665544083806537,
      "learning_rate": 2.016260162601626e-05,
      "loss": 0.7497,
      "step": 62
    },
    {
      "epoch": 0.25518987341772154,
      "grad_norm": 0.7196705311152518,
      "learning_rate": 2.048780487804878e-05,
      "loss": 0.7342,
      "step": 63
    },
    {
      "epoch": 0.25924050632911394,
      "grad_norm": 0.9398944164955052,
      "learning_rate": 2.0813008130081303e-05,
      "loss": 0.7702,
      "step": 64
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 0.7649893506255979,
      "learning_rate": 2.1138211382113822e-05,
      "loss": 0.7659,
      "step": 65
    },
    {
      "epoch": 0.26734177215189875,
      "grad_norm": 0.706320339432766,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.7569,
      "step": 66
    },
    {
      "epoch": 0.27139240506329115,
      "grad_norm": 0.8769122964925999,
      "learning_rate": 2.1788617886178863e-05,
      "loss": 0.7604,
      "step": 67
    },
    {
      "epoch": 0.27544303797468356,
      "grad_norm": 0.764834049223063,
      "learning_rate": 2.2113821138211385e-05,
      "loss": 0.7562,
      "step": 68
    },
    {
      "epoch": 0.27949367088607596,
      "grad_norm": 0.8074870677465491,
      "learning_rate": 2.2439024390243907e-05,
      "loss": 0.7742,
      "step": 69
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 0.8954714031237119,
      "learning_rate": 2.2764227642276426e-05,
      "loss": 0.7565,
      "step": 70
    },
    {
      "epoch": 0.28759493670886077,
      "grad_norm": 0.8360386243065755,
      "learning_rate": 2.3089430894308948e-05,
      "loss": 0.7557,
      "step": 71
    },
    {
      "epoch": 0.2916455696202532,
      "grad_norm": 0.7249811828725226,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.7569,
      "step": 72
    },
    {
      "epoch": 0.2956962025316456,
      "grad_norm": 0.8158866236178446,
      "learning_rate": 2.3739837398373985e-05,
      "loss": 0.7391,
      "step": 73
    },
    {
      "epoch": 0.299746835443038,
      "grad_norm": 0.6863321748086537,
      "learning_rate": 2.4065040650406507e-05,
      "loss": 0.7549,
      "step": 74
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 0.663681300294841,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.7153,
      "step": 75
    },
    {
      "epoch": 0.3078481012658228,
      "grad_norm": 0.8992429805174783,
      "learning_rate": 2.4715447154471548e-05,
      "loss": 0.777,
      "step": 76
    },
    {
      "epoch": 0.3118987341772152,
      "grad_norm": 0.6431840163457149,
      "learning_rate": 2.5040650406504066e-05,
      "loss": 0.7271,
      "step": 77
    },
    {
      "epoch": 0.3159493670886076,
      "grad_norm": 0.7651668651668866,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.7379,
      "step": 78
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.8415808905258515,
      "learning_rate": 2.569105691056911e-05,
      "loss": 0.7881,
      "step": 79
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 0.7987284432599308,
      "learning_rate": 2.601626016260163e-05,
      "loss": 0.7834,
      "step": 80
    },
    {
      "epoch": 0.3281012658227848,
      "grad_norm": 0.7392772784096134,
      "learning_rate": 2.634146341463415e-05,
      "loss": 0.7486,
      "step": 81
    },
    {
      "epoch": 0.3321518987341772,
      "grad_norm": 0.9124091739092063,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.7541,
      "step": 82
    },
    {
      "epoch": 0.3362025316455696,
      "grad_norm": 0.817398644933799,
      "learning_rate": 2.699186991869919e-05,
      "loss": 0.7305,
      "step": 83
    },
    {
      "epoch": 0.34025316455696203,
      "grad_norm": 0.8022667393875829,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.7298,
      "step": 84
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 0.7013469219053348,
      "learning_rate": 2.764227642276423e-05,
      "loss": 0.739,
      "step": 85
    },
    {
      "epoch": 0.34835443037974684,
      "grad_norm": 0.8355351589730851,
      "learning_rate": 2.796747967479675e-05,
      "loss": 0.7232,
      "step": 86
    },
    {
      "epoch": 0.35240506329113924,
      "grad_norm": 0.6651600765571432,
      "learning_rate": 2.829268292682927e-05,
      "loss": 0.7496,
      "step": 87
    },
    {
      "epoch": 0.35645569620253165,
      "grad_norm": 0.8527881535927727,
      "learning_rate": 2.8617886178861792e-05,
      "loss": 0.7434,
      "step": 88
    },
    {
      "epoch": 0.36050632911392405,
      "grad_norm": 0.6543554877202244,
      "learning_rate": 2.8943089430894314e-05,
      "loss": 0.7303,
      "step": 89
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 0.7926686720576096,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.7207,
      "step": 90
    },
    {
      "epoch": 0.36860759493670886,
      "grad_norm": 0.783000357371692,
      "learning_rate": 2.959349593495935e-05,
      "loss": 0.7598,
      "step": 91
    },
    {
      "epoch": 0.37265822784810126,
      "grad_norm": 0.8934341831487459,
      "learning_rate": 2.991869918699187e-05,
      "loss": 0.7461,
      "step": 92
    },
    {
      "epoch": 0.37670886075949367,
      "grad_norm": 0.7282186920022763,
      "learning_rate": 3.0243902439024392e-05,
      "loss": 0.7541,
      "step": 93
    },
    {
      "epoch": 0.3807594936708861,
      "grad_norm": 0.9794319804325968,
      "learning_rate": 3.056910569105692e-05,
      "loss": 0.7532,
      "step": 94
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 0.7774355025499959,
      "learning_rate": 3.089430894308943e-05,
      "loss": 0.7363,
      "step": 95
    },
    {
      "epoch": 0.3888607594936709,
      "grad_norm": 0.8382649593457463,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.7765,
      "step": 96
    },
    {
      "epoch": 0.3929113924050633,
      "grad_norm": 0.7426610591276028,
      "learning_rate": 3.154471544715447e-05,
      "loss": 0.7544,
      "step": 97
    },
    {
      "epoch": 0.3969620253164557,
      "grad_norm": 0.8287339690918574,
      "learning_rate": 3.186991869918699e-05,
      "loss": 0.7641,
      "step": 98
    },
    {
      "epoch": 0.4010126582278481,
      "grad_norm": 0.7656695649043026,
      "learning_rate": 3.2195121951219514e-05,
      "loss": 0.7245,
      "step": 99
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 0.7881197448714423,
      "learning_rate": 3.2520325203252037e-05,
      "loss": 0.7175,
      "step": 100
    },
    {
      "epoch": 0.4091139240506329,
      "grad_norm": 0.780186845771214,
      "learning_rate": 3.284552845528456e-05,
      "loss": 0.7359,
      "step": 101
    },
    {
      "epoch": 0.4131645569620253,
      "grad_norm": 0.7759538714714287,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.7142,
      "step": 102
    },
    {
      "epoch": 0.4172151898734177,
      "grad_norm": 0.8383885491416494,
      "learning_rate": 3.3495934959349596e-05,
      "loss": 0.7368,
      "step": 103
    },
    {
      "epoch": 0.4212658227848101,
      "grad_norm": 0.8033478824413708,
      "learning_rate": 3.382113821138212e-05,
      "loss": 0.7605,
      "step": 104
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 0.7852851671336001,
      "learning_rate": 3.414634146341463e-05,
      "loss": 0.7066,
      "step": 105
    },
    {
      "epoch": 0.4293670886075949,
      "grad_norm": 0.9235258773468378,
      "learning_rate": 3.4471544715447155e-05,
      "loss": 0.7429,
      "step": 106
    },
    {
      "epoch": 0.43341772151898733,
      "grad_norm": 0.667827271045595,
      "learning_rate": 3.479674796747968e-05,
      "loss": 0.7394,
      "step": 107
    },
    {
      "epoch": 0.43746835443037974,
      "grad_norm": 0.7470680672737354,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.73,
      "step": 108
    },
    {
      "epoch": 0.44151898734177214,
      "grad_norm": 0.870406579298031,
      "learning_rate": 3.544715447154472e-05,
      "loss": 0.7314,
      "step": 109
    },
    {
      "epoch": 0.44556962025316454,
      "grad_norm": 0.8014796793937949,
      "learning_rate": 3.577235772357724e-05,
      "loss": 0.7487,
      "step": 110
    },
    {
      "epoch": 0.44962025316455695,
      "grad_norm": 0.7325547697968101,
      "learning_rate": 3.609756097560976e-05,
      "loss": 0.7638,
      "step": 111
    },
    {
      "epoch": 0.45367088607594935,
      "grad_norm": 0.7797211355036209,
      "learning_rate": 3.642276422764228e-05,
      "loss": 0.7248,
      "step": 112
    },
    {
      "epoch": 0.45772151898734176,
      "grad_norm": 0.7625503985071023,
      "learning_rate": 3.67479674796748e-05,
      "loss": 0.7727,
      "step": 113
    },
    {
      "epoch": 0.46177215189873416,
      "grad_norm": 0.9940826526589976,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.7369,
      "step": 114
    },
    {
      "epoch": 0.46582278481012657,
      "grad_norm": 0.7391119951273213,
      "learning_rate": 3.739837398373984e-05,
      "loss": 0.7097,
      "step": 115
    },
    {
      "epoch": 0.46987341772151897,
      "grad_norm": 0.8053575061637791,
      "learning_rate": 3.772357723577236e-05,
      "loss": 0.7286,
      "step": 116
    },
    {
      "epoch": 0.4739240506329114,
      "grad_norm": 1.1035244754479745,
      "learning_rate": 3.804878048780488e-05,
      "loss": 0.7421,
      "step": 117
    },
    {
      "epoch": 0.4779746835443038,
      "grad_norm": 1.0837075567566241,
      "learning_rate": 3.83739837398374e-05,
      "loss": 0.7092,
      "step": 118
    },
    {
      "epoch": 0.4820253164556962,
      "grad_norm": 0.8683751097507507,
      "learning_rate": 3.869918699186992e-05,
      "loss": 0.7623,
      "step": 119
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 0.9070774147484988,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.7406,
      "step": 120
    },
    {
      "epoch": 0.490126582278481,
      "grad_norm": 0.840137647434597,
      "learning_rate": 3.9349593495934966e-05,
      "loss": 0.7503,
      "step": 121
    },
    {
      "epoch": 0.4941772151898734,
      "grad_norm": 0.8777777318651654,
      "learning_rate": 3.967479674796748e-05,
      "loss": 0.7134,
      "step": 122
    },
    {
      "epoch": 0.4982278481012658,
      "grad_norm": 0.7943247481246558,
      "learning_rate": 4e-05,
      "loss": 0.707,
      "step": 123
    },
    {
      "epoch": 0.5022784810126583,
      "grad_norm": 0.7360216467472152,
      "learning_rate": 3.999991946137476e-05,
      "loss": 0.7377,
      "step": 124
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 0.8899041748528635,
      "learning_rate": 3.999967784614766e-05,
      "loss": 0.7189,
      "step": 125
    },
    {
      "epoch": 0.5103797468354431,
      "grad_norm": 1.0250649745392326,
      "learning_rate": 3.9999275156264656e-05,
      "loss": 0.7411,
      "step": 126
    },
    {
      "epoch": 0.5144303797468355,
      "grad_norm": 0.9796067595578145,
      "learning_rate": 3.999871139496895e-05,
      "loss": 0.7528,
      "step": 127
    },
    {
      "epoch": 0.5184810126582279,
      "grad_norm": 0.8948607384212809,
      "learning_rate": 3.9997986566800995e-05,
      "loss": 0.7311,
      "step": 128
    },
    {
      "epoch": 0.5225316455696203,
      "grad_norm": 0.6561779816520469,
      "learning_rate": 3.999710067759846e-05,
      "loss": 0.756,
      "step": 129
    },
    {
      "epoch": 0.5265822784810127,
      "grad_norm": 0.7410174051331848,
      "learning_rate": 3.999605373449617e-05,
      "loss": 0.7312,
      "step": 130
    },
    {
      "epoch": 0.5306329113924051,
      "grad_norm": 0.739443763692555,
      "learning_rate": 3.9994845745926075e-05,
      "loss": 0.7361,
      "step": 131
    },
    {
      "epoch": 0.5346835443037975,
      "grad_norm": 0.8739602192902958,
      "learning_rate": 3.999347672161713e-05,
      "loss": 0.7282,
      "step": 132
    },
    {
      "epoch": 0.5387341772151899,
      "grad_norm": 0.6097888609874863,
      "learning_rate": 3.999194667259528e-05,
      "loss": 0.73,
      "step": 133
    },
    {
      "epoch": 0.5427848101265823,
      "grad_norm": 0.6511250874502622,
      "learning_rate": 3.999025561118334e-05,
      "loss": 0.722,
      "step": 134
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 0.6717804998972988,
      "learning_rate": 3.998840355100086e-05,
      "loss": 0.7115,
      "step": 135
    },
    {
      "epoch": 0.5508860759493671,
      "grad_norm": 0.8978474010316462,
      "learning_rate": 3.998639050696409e-05,
      "loss": 0.7507,
      "step": 136
    },
    {
      "epoch": 0.5549367088607595,
      "grad_norm": 0.7652634950930033,
      "learning_rate": 3.998421649528582e-05,
      "loss": 0.7379,
      "step": 137
    },
    {
      "epoch": 0.5589873417721519,
      "grad_norm": 0.8341938405239581,
      "learning_rate": 3.9981881533475234e-05,
      "loss": 0.7308,
      "step": 138
    },
    {
      "epoch": 0.5630379746835443,
      "grad_norm": 1.0130391915262815,
      "learning_rate": 3.997938564033779e-05,
      "loss": 0.7072,
      "step": 139
    },
    {
      "epoch": 0.5670886075949367,
      "grad_norm": 0.6388914275038264,
      "learning_rate": 3.9976728835975064e-05,
      "loss": 0.7502,
      "step": 140
    },
    {
      "epoch": 0.5711392405063291,
      "grad_norm": 0.7443454362771561,
      "learning_rate": 3.9973911141784605e-05,
      "loss": 0.7616,
      "step": 141
    },
    {
      "epoch": 0.5751898734177215,
      "grad_norm": 0.9992115877949723,
      "learning_rate": 3.997093258045973e-05,
      "loss": 0.7602,
      "step": 142
    },
    {
      "epoch": 0.579240506329114,
      "grad_norm": 0.8918016769511364,
      "learning_rate": 3.996779317598936e-05,
      "loss": 0.7582,
      "step": 143
    },
    {
      "epoch": 0.5832911392405064,
      "grad_norm": 0.8276473020165002,
      "learning_rate": 3.996449295365782e-05,
      "loss": 0.7333,
      "step": 144
    },
    {
      "epoch": 0.5873417721518988,
      "grad_norm": 0.6821333924790406,
      "learning_rate": 3.996103194004467e-05,
      "loss": 0.7605,
      "step": 145
    },
    {
      "epoch": 0.5913924050632912,
      "grad_norm": 0.8996435250099992,
      "learning_rate": 3.995741016302441e-05,
      "loss": 0.766,
      "step": 146
    },
    {
      "epoch": 0.5954430379746836,
      "grad_norm": 0.7639859267844576,
      "learning_rate": 3.9953627651766364e-05,
      "loss": 0.7122,
      "step": 147
    },
    {
      "epoch": 0.599493670886076,
      "grad_norm": 0.726229797304396,
      "learning_rate": 3.9949684436734325e-05,
      "loss": 0.726,
      "step": 148
    },
    {
      "epoch": 0.6035443037974684,
      "grad_norm": 0.7581627483688994,
      "learning_rate": 3.994558054968643e-05,
      "loss": 0.7616,
      "step": 149
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 0.771728654478235,
      "learning_rate": 3.994131602367481e-05,
      "loss": 0.7291,
      "step": 150
    },
    {
      "epoch": 0.6116455696202532,
      "grad_norm": 0.6167650669043562,
      "learning_rate": 3.9936890893045376e-05,
      "loss": 0.7245,
      "step": 151
    },
    {
      "epoch": 0.6156962025316456,
      "grad_norm": 0.7073903244613691,
      "learning_rate": 3.993230519343752e-05,
      "loss": 0.7668,
      "step": 152
    },
    {
      "epoch": 0.619746835443038,
      "grad_norm": 0.6588688057653092,
      "learning_rate": 3.992755896178383e-05,
      "loss": 0.749,
      "step": 153
    },
    {
      "epoch": 0.6237974683544304,
      "grad_norm": 0.6799756217090517,
      "learning_rate": 3.992265223630981e-05,
      "loss": 0.7335,
      "step": 154
    },
    {
      "epoch": 0.6278481012658228,
      "grad_norm": 0.6065243948342005,
      "learning_rate": 3.991758505653355e-05,
      "loss": 0.7222,
      "step": 155
    },
    {
      "epoch": 0.6318987341772152,
      "grad_norm": 0.6500712744605657,
      "learning_rate": 3.991235746326543e-05,
      "loss": 0.7249,
      "step": 156
    },
    {
      "epoch": 0.6359493670886076,
      "grad_norm": 0.7357653397833449,
      "learning_rate": 3.9906969498607745e-05,
      "loss": 0.74,
      "step": 157
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.6794045634607658,
      "learning_rate": 3.990142120595444e-05,
      "loss": 0.7415,
      "step": 158
    },
    {
      "epoch": 0.6440506329113924,
      "grad_norm": 0.5939261186945629,
      "learning_rate": 3.98957126299907e-05,
      "loss": 0.7406,
      "step": 159
    },
    {
      "epoch": 0.6481012658227848,
      "grad_norm": 0.6441179848324072,
      "learning_rate": 3.9889843816692596e-05,
      "loss": 0.7066,
      "step": 160
    },
    {
      "epoch": 0.6521518987341772,
      "grad_norm": 0.626246960289174,
      "learning_rate": 3.9883814813326766e-05,
      "loss": 0.7232,
      "step": 161
    },
    {
      "epoch": 0.6562025316455696,
      "grad_norm": 0.550071253832395,
      "learning_rate": 3.9877625668449956e-05,
      "loss": 0.6986,
      "step": 162
    },
    {
      "epoch": 0.660253164556962,
      "grad_norm": 0.7189689523974143,
      "learning_rate": 3.98712764319087e-05,
      "loss": 0.733,
      "step": 163
    },
    {
      "epoch": 0.6643037974683544,
      "grad_norm": 0.5803663818632857,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.7533,
      "step": 164
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 0.6397171126891444,
      "learning_rate": 3.9858097889665277e-05,
      "loss": 0.7365,
      "step": 165
    },
    {
      "epoch": 0.6724050632911392,
      "grad_norm": 0.6294095061152363,
      "learning_rate": 3.985126869010129e-05,
      "loss": 0.7444,
      "step": 166
    },
    {
      "epoch": 0.6764556962025317,
      "grad_norm": 0.6067333734351715,
      "learning_rate": 3.984427961114833e-05,
      "loss": 0.7244,
      "step": 167
    },
    {
      "epoch": 0.6805063291139241,
      "grad_norm": 0.6042543764329238,
      "learning_rate": 3.9837130709095475e-05,
      "loss": 0.7355,
      "step": 168
    },
    {
      "epoch": 0.6845569620253165,
      "grad_norm": 0.5920299644947065,
      "learning_rate": 3.982982204151901e-05,
      "loss": 0.7331,
      "step": 169
    },
    {
      "epoch": 0.6886075949367089,
      "grad_norm": 0.6995350448952782,
      "learning_rate": 3.982235366728193e-05,
      "loss": 0.7662,
      "step": 170
    },
    {
      "epoch": 0.6926582278481013,
      "grad_norm": 0.6993264151865388,
      "learning_rate": 3.9814725646533505e-05,
      "loss": 0.7319,
      "step": 171
    },
    {
      "epoch": 0.6967088607594937,
      "grad_norm": 0.6117393167876553,
      "learning_rate": 3.9806938040708746e-05,
      "loss": 0.7216,
      "step": 172
    },
    {
      "epoch": 0.7007594936708861,
      "grad_norm": 0.6396886154165112,
      "learning_rate": 3.9798990912527976e-05,
      "loss": 0.7149,
      "step": 173
    },
    {
      "epoch": 0.7048101265822785,
      "grad_norm": 0.7007358909717539,
      "learning_rate": 3.979088432599627e-05,
      "loss": 0.729,
      "step": 174
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 0.7789660869271491,
      "learning_rate": 3.9782618346402964e-05,
      "loss": 0.7377,
      "step": 175
    },
    {
      "epoch": 0.7129113924050633,
      "grad_norm": 0.9333648230800345,
      "learning_rate": 3.977419304032111e-05,
      "loss": 0.727,
      "step": 176
    },
    {
      "epoch": 0.7169620253164557,
      "grad_norm": 0.6580991876635299,
      "learning_rate": 3.976560847560697e-05,
      "loss": 0.7318,
      "step": 177
    },
    {
      "epoch": 0.7210126582278481,
      "grad_norm": 1.063063557404282,
      "learning_rate": 3.9756864721399456e-05,
      "loss": 0.7601,
      "step": 178
    },
    {
      "epoch": 0.7250632911392405,
      "grad_norm": 1.184876260885402,
      "learning_rate": 3.974796184811956e-05,
      "loss": 0.7428,
      "step": 179
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 0.7177734763384346,
      "learning_rate": 3.973889992746979e-05,
      "loss": 0.7316,
      "step": 180
    },
    {
      "epoch": 0.7331645569620253,
      "grad_norm": 1.0668309364461168,
      "learning_rate": 3.972967903243361e-05,
      "loss": 0.7334,
      "step": 181
    },
    {
      "epoch": 0.7372151898734177,
      "grad_norm": 0.6774333941067255,
      "learning_rate": 3.972029923727486e-05,
      "loss": 0.7082,
      "step": 182
    },
    {
      "epoch": 0.7412658227848101,
      "grad_norm": 0.6732114311155132,
      "learning_rate": 3.971076061753709e-05,
      "loss": 0.7223,
      "step": 183
    },
    {
      "epoch": 0.7453164556962025,
      "grad_norm": 0.7342083249705929,
      "learning_rate": 3.9701063250043066e-05,
      "loss": 0.7321,
      "step": 184
    },
    {
      "epoch": 0.7493670886075949,
      "grad_norm": 0.6154543622786308,
      "learning_rate": 3.969120721289402e-05,
      "loss": 0.7044,
      "step": 185
    },
    {
      "epoch": 0.7534177215189873,
      "grad_norm": 0.6956115408168532,
      "learning_rate": 3.9681192585469146e-05,
      "loss": 0.7357,
      "step": 186
    },
    {
      "epoch": 0.7574683544303797,
      "grad_norm": 0.6619847557306027,
      "learning_rate": 3.9671019448424865e-05,
      "loss": 0.7057,
      "step": 187
    },
    {
      "epoch": 0.7615189873417721,
      "grad_norm": 0.5878446802457633,
      "learning_rate": 3.966068788369422e-05,
      "loss": 0.7223,
      "step": 188
    },
    {
      "epoch": 0.7655696202531646,
      "grad_norm": 0.5991063363507843,
      "learning_rate": 3.965019797448622e-05,
      "loss": 0.7159,
      "step": 189
    },
    {
      "epoch": 0.769620253164557,
      "grad_norm": 0.5790637337717411,
      "learning_rate": 3.963954980528515e-05,
      "loss": 0.7343,
      "step": 190
    },
    {
      "epoch": 0.7736708860759494,
      "grad_norm": 0.6606855488040851,
      "learning_rate": 3.9628743461849905e-05,
      "loss": 0.7565,
      "step": 191
    },
    {
      "epoch": 0.7777215189873418,
      "grad_norm": 0.6219888660130205,
      "learning_rate": 3.961777903121329e-05,
      "loss": 0.7287,
      "step": 192
    },
    {
      "epoch": 0.7817721518987342,
      "grad_norm": 0.686669966748809,
      "learning_rate": 3.960665660168131e-05,
      "loss": 0.7178,
      "step": 193
    },
    {
      "epoch": 0.7858227848101266,
      "grad_norm": 0.6178766941764763,
      "learning_rate": 3.9595376262832485e-05,
      "loss": 0.7137,
      "step": 194
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.5816627227070387,
      "learning_rate": 3.9583938105517127e-05,
      "loss": 0.7461,
      "step": 195
    },
    {
      "epoch": 0.7939240506329114,
      "grad_norm": 0.6125108913941943,
      "learning_rate": 3.957234222185657e-05,
      "loss": 0.7344,
      "step": 196
    },
    {
      "epoch": 0.7979746835443038,
      "grad_norm": 0.6556587962455535,
      "learning_rate": 3.9560588705242474e-05,
      "loss": 0.7006,
      "step": 197
    },
    {
      "epoch": 0.8020253164556962,
      "grad_norm": 0.6586055490668822,
      "learning_rate": 3.954867765033605e-05,
      "loss": 0.7365,
      "step": 198
    },
    {
      "epoch": 0.8060759493670886,
      "grad_norm": 0.5870603028484443,
      "learning_rate": 3.953660915306728e-05,
      "loss": 0.7221,
      "step": 199
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 0.70136493190662,
      "learning_rate": 3.952438331063419e-05,
      "loss": 0.7307,
      "step": 200
    },
    {
      "epoch": 0.8141772151898734,
      "grad_norm": 0.5507015544250633,
      "learning_rate": 3.951200022150205e-05,
      "loss": 0.7339,
      "step": 201
    },
    {
      "epoch": 0.8182278481012658,
      "grad_norm": 0.7579411635031736,
      "learning_rate": 3.949945998540253e-05,
      "loss": 0.7133,
      "step": 202
    },
    {
      "epoch": 0.8222784810126582,
      "grad_norm": 0.6133079773881273,
      "learning_rate": 3.9486762703332993e-05,
      "loss": 0.7085,
      "step": 203
    },
    {
      "epoch": 0.8263291139240506,
      "grad_norm": 0.5684718041507141,
      "learning_rate": 3.947390847755559e-05,
      "loss": 0.7239,
      "step": 204
    },
    {
      "epoch": 0.830379746835443,
      "grad_norm": 0.631928668901881,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.7504,
      "step": 205
    },
    {
      "epoch": 0.8344303797468354,
      "grad_norm": 0.6336564914475888,
      "learning_rate": 3.944772961024501e-05,
      "loss": 0.6932,
      "step": 206
    },
    {
      "epoch": 0.8384810126582278,
      "grad_norm": 0.499946415677406,
      "learning_rate": 3.943440517955285e-05,
      "loss": 0.6797,
      "step": 207
    },
    {
      "epoch": 0.8425316455696202,
      "grad_norm": 0.7107672329125726,
      "learning_rate": 3.9420924226833126e-05,
      "loss": 0.7183,
      "step": 208
    },
    {
      "epoch": 0.8465822784810126,
      "grad_norm": 0.5547470634635769,
      "learning_rate": 3.9407286860659566e-05,
      "loss": 0.6964,
      "step": 209
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.6474238176361249,
      "learning_rate": 3.9393493190865657e-05,
      "loss": 0.6957,
      "step": 210
    },
    {
      "epoch": 0.8546835443037974,
      "grad_norm": 0.7062744964067227,
      "learning_rate": 3.937954332854371e-05,
      "loss": 0.7229,
      "step": 211
    },
    {
      "epoch": 0.8587341772151899,
      "grad_norm": 0.6241259744564678,
      "learning_rate": 3.9365437386044016e-05,
      "loss": 0.741,
      "step": 212
    },
    {
      "epoch": 0.8627848101265823,
      "grad_norm": 0.6515078221821883,
      "learning_rate": 3.935117547697387e-05,
      "loss": 0.7094,
      "step": 213
    },
    {
      "epoch": 0.8668354430379747,
      "grad_norm": 0.5394323606793987,
      "learning_rate": 3.933675771619675e-05,
      "loss": 0.7237,
      "step": 214
    },
    {
      "epoch": 0.8708860759493671,
      "grad_norm": 0.6095625425176738,
      "learning_rate": 3.932218421983131e-05,
      "loss": 0.7056,
      "step": 215
    },
    {
      "epoch": 0.8749367088607595,
      "grad_norm": 0.6172731007495936,
      "learning_rate": 3.9307455105250484e-05,
      "loss": 0.7392,
      "step": 216
    },
    {
      "epoch": 0.8789873417721519,
      "grad_norm": 0.5595711205920004,
      "learning_rate": 3.929257049108054e-05,
      "loss": 0.7014,
      "step": 217
    },
    {
      "epoch": 0.8830379746835443,
      "grad_norm": 0.6623168603638904,
      "learning_rate": 3.927753049720011e-05,
      "loss": 0.6945,
      "step": 218
    },
    {
      "epoch": 0.8870886075949367,
      "grad_norm": 0.5663884275832212,
      "learning_rate": 3.9262335244739234e-05,
      "loss": 0.7401,
      "step": 219
    },
    {
      "epoch": 0.8911392405063291,
      "grad_norm": 0.6868503050802344,
      "learning_rate": 3.92469848560784e-05,
      "loss": 0.7147,
      "step": 220
    },
    {
      "epoch": 0.8951898734177215,
      "grad_norm": 0.5702299989803181,
      "learning_rate": 3.923147945484751e-05,
      "loss": 0.732,
      "step": 221
    },
    {
      "epoch": 0.8992405063291139,
      "grad_norm": 0.5590381566120586,
      "learning_rate": 3.9215819165924956e-05,
      "loss": 0.7371,
      "step": 222
    },
    {
      "epoch": 0.9032911392405063,
      "grad_norm": 0.6055933898086717,
      "learning_rate": 3.920000411543654e-05,
      "loss": 0.7122,
      "step": 223
    },
    {
      "epoch": 0.9073417721518987,
      "grad_norm": 0.6416274980270033,
      "learning_rate": 3.9184034430754495e-05,
      "loss": 0.7326,
      "step": 224
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.5476747846573933,
      "learning_rate": 3.916791024049648e-05,
      "loss": 0.7066,
      "step": 225
    },
    {
      "epoch": 0.9154430379746835,
      "grad_norm": 0.5877574220877326,
      "learning_rate": 3.91516316745245e-05,
      "loss": 0.6939,
      "step": 226
    },
    {
      "epoch": 0.9194936708860759,
      "grad_norm": 0.523562713012341,
      "learning_rate": 3.913519886394389e-05,
      "loss": 0.7212,
      "step": 227
    },
    {
      "epoch": 0.9235443037974683,
      "grad_norm": 0.5502327939266493,
      "learning_rate": 3.911861194110225e-05,
      "loss": 0.7123,
      "step": 228
    },
    {
      "epoch": 0.9275949367088607,
      "grad_norm": 0.4791777702716554,
      "learning_rate": 3.910187103958837e-05,
      "loss": 0.6999,
      "step": 229
    },
    {
      "epoch": 0.9316455696202531,
      "grad_norm": 0.5782403490916153,
      "learning_rate": 3.908497629423117e-05,
      "loss": 0.7169,
      "step": 230
    },
    {
      "epoch": 0.9356962025316455,
      "grad_norm": 0.5297107713225085,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 0.7271,
      "step": 231
    },
    {
      "epoch": 0.9397468354430379,
      "grad_norm": 0.5182417252749495,
      "learning_rate": 3.9050725817496594e-05,
      "loss": 0.6967,
      "step": 232
    },
    {
      "epoch": 0.9437974683544303,
      "grad_norm": 0.5276897861941668,
      "learning_rate": 3.9033370361967844e-05,
      "loss": 0.7149,
      "step": 233
    },
    {
      "epoch": 0.9478481012658228,
      "grad_norm": 0.531723330349942,
      "learning_rate": 3.901586161429081e-05,
      "loss": 0.6711,
      "step": 234
    },
    {
      "epoch": 0.9518987341772152,
      "grad_norm": 0.5796042178013429,
      "learning_rate": 3.8998199715478545e-05,
      "loss": 0.718,
      "step": 235
    },
    {
      "epoch": 0.9559493670886076,
      "grad_norm": 0.5390299263901398,
      "learning_rate": 3.8980384807777564e-05,
      "loss": 0.7096,
      "step": 236
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.6028598723102316,
      "learning_rate": 3.896241703466667e-05,
      "loss": 0.7095,
      "step": 237
    },
    {
      "epoch": 0.9640506329113924,
      "grad_norm": 0.5571689119541224,
      "learning_rate": 3.894429654085585e-05,
      "loss": 0.706,
      "step": 238
    },
    {
      "epoch": 0.9681012658227848,
      "grad_norm": 0.5539995784967549,
      "learning_rate": 3.892602347228505e-05,
      "loss": 0.7003,
      "step": 239
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 0.6837171640245477,
      "learning_rate": 3.890759797612307e-05,
      "loss": 0.676,
      "step": 240
    },
    {
      "epoch": 0.9762025316455696,
      "grad_norm": 0.5538951068344181,
      "learning_rate": 3.888902020076632e-05,
      "loss": 0.6745,
      "step": 241
    },
    {
      "epoch": 0.980253164556962,
      "grad_norm": 0.6729689252704022,
      "learning_rate": 3.887029029583764e-05,
      "loss": 0.7096,
      "step": 242
    },
    {
      "epoch": 0.9843037974683544,
      "grad_norm": 0.5444358076131028,
      "learning_rate": 3.8851408412185125e-05,
      "loss": 0.7084,
      "step": 243
    },
    {
      "epoch": 0.9883544303797468,
      "grad_norm": 0.6595309782085004,
      "learning_rate": 3.8832374701880855e-05,
      "loss": 0.7055,
      "step": 244
    },
    {
      "epoch": 0.9924050632911392,
      "grad_norm": 0.5317078314207873,
      "learning_rate": 3.881318931821972e-05,
      "loss": 0.706,
      "step": 245
    },
    {
      "epoch": 0.9964556962025316,
      "grad_norm": 0.517911484006942,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.6872,
      "step": 246
    },
    {
      "epoch": 1.0020253164556963,
      "grad_norm": 0.616542593091935,
      "learning_rate": 3.8774364150112955e-05,
      "loss": 0.6667,
      "step": 247
    },
    {
      "epoch": 1.0060759493670886,
      "grad_norm": 0.5735637452580938,
      "learning_rate": 3.8754724678359884e-05,
      "loss": 0.5731,
      "step": 248
    },
    {
      "epoch": 1.010126582278481,
      "grad_norm": 0.6746694139642946,
      "learning_rate": 3.873493415863256e-05,
      "loss": 0.5794,
      "step": 249
    },
    {
      "epoch": 1.0141772151898734,
      "grad_norm": 0.6242722065161886,
      "learning_rate": 3.871499275032111e-05,
      "loss": 0.5591,
      "step": 250
    },
    {
      "epoch": 1.0182278481012659,
      "grad_norm": 0.7101909660989344,
      "learning_rate": 3.869490061403091e-05,
      "loss": 0.5507,
      "step": 251
    },
    {
      "epoch": 1.0222784810126582,
      "grad_norm": 0.6639512935348818,
      "learning_rate": 3.867465791158124e-05,
      "loss": 0.5624,
      "step": 252
    },
    {
      "epoch": 1.0263291139240507,
      "grad_norm": 0.6366666204975779,
      "learning_rate": 3.865426480600407e-05,
      "loss": 0.551,
      "step": 253
    },
    {
      "epoch": 1.030379746835443,
      "grad_norm": 0.6921403875800817,
      "learning_rate": 3.863372146154264e-05,
      "loss": 0.5762,
      "step": 254
    },
    {
      "epoch": 1.0344303797468355,
      "grad_norm": 0.6364373815002052,
      "learning_rate": 3.861302804365024e-05,
      "loss": 0.5877,
      "step": 255
    },
    {
      "epoch": 1.0384810126582278,
      "grad_norm": 3.826681643677377,
      "learning_rate": 3.85921847189888e-05,
      "loss": 0.5605,
      "step": 256
    },
    {
      "epoch": 1.0425316455696203,
      "grad_norm": 0.8116843150437565,
      "learning_rate": 3.85711916554276e-05,
      "loss": 0.5667,
      "step": 257
    },
    {
      "epoch": 1.0465822784810126,
      "grad_norm": 0.6411935615619888,
      "learning_rate": 3.85500490220419e-05,
      "loss": 0.5743,
      "step": 258
    },
    {
      "epoch": 1.0506329113924051,
      "grad_norm": 0.7582451162060722,
      "learning_rate": 3.852875698911154e-05,
      "loss": 0.5886,
      "step": 259
    },
    {
      "epoch": 1.0546835443037974,
      "grad_norm": 0.8019448787411588,
      "learning_rate": 3.850731572811963e-05,
      "loss": 0.593,
      "step": 260
    },
    {
      "epoch": 1.05873417721519,
      "grad_norm": 0.6506019577910462,
      "learning_rate": 3.848572541175116e-05,
      "loss": 0.5478,
      "step": 261
    },
    {
      "epoch": 1.0627848101265822,
      "grad_norm": 0.6448267826726746,
      "learning_rate": 3.846398621389154e-05,
      "loss": 0.5642,
      "step": 262
    },
    {
      "epoch": 1.0668354430379747,
      "grad_norm": 0.6238309385487583,
      "learning_rate": 3.84420983096253e-05,
      "loss": 0.5537,
      "step": 263
    },
    {
      "epoch": 1.070886075949367,
      "grad_norm": 0.7272129314519626,
      "learning_rate": 3.8420061875234606e-05,
      "loss": 0.5376,
      "step": 264
    },
    {
      "epoch": 1.0749367088607595,
      "grad_norm": 0.7098804415794944,
      "learning_rate": 3.839787708819787e-05,
      "loss": 0.5858,
      "step": 265
    },
    {
      "epoch": 1.0789873417721518,
      "grad_norm": 0.7890239011796226,
      "learning_rate": 3.8375544127188325e-05,
      "loss": 0.5719,
      "step": 266
    },
    {
      "epoch": 1.0830379746835443,
      "grad_norm": 0.6942469768358287,
      "learning_rate": 3.8353063172072564e-05,
      "loss": 0.5788,
      "step": 267
    },
    {
      "epoch": 1.0870886075949366,
      "grad_norm": 0.612112747165591,
      "learning_rate": 3.8330434403909105e-05,
      "loss": 0.5175,
      "step": 268
    },
    {
      "epoch": 1.0911392405063292,
      "grad_norm": 0.840404086178476,
      "learning_rate": 3.8307658004946934e-05,
      "loss": 0.579,
      "step": 269
    },
    {
      "epoch": 1.0951898734177214,
      "grad_norm": 0.6423078868165512,
      "learning_rate": 3.8284734158624046e-05,
      "loss": 0.5506,
      "step": 270
    },
    {
      "epoch": 1.099240506329114,
      "grad_norm": 0.7883048319631082,
      "learning_rate": 3.826166304956594e-05,
      "loss": 0.554,
      "step": 271
    },
    {
      "epoch": 1.1032911392405063,
      "grad_norm": 0.7702251335288742,
      "learning_rate": 3.8238444863584164e-05,
      "loss": 0.5555,
      "step": 272
    },
    {
      "epoch": 1.1073417721518988,
      "grad_norm": 0.7563274629821547,
      "learning_rate": 3.821507978767479e-05,
      "loss": 0.5495,
      "step": 273
    },
    {
      "epoch": 1.111392405063291,
      "grad_norm": 0.7013200843384998,
      "learning_rate": 3.819156801001693e-05,
      "loss": 0.5581,
      "step": 274
    },
    {
      "epoch": 1.1154430379746836,
      "grad_norm": 0.660619352555484,
      "learning_rate": 3.816790971997121e-05,
      "loss": 0.5612,
      "step": 275
    },
    {
      "epoch": 1.1194936708860759,
      "grad_norm": 0.7247187342084234,
      "learning_rate": 3.8144105108078246e-05,
      "loss": 0.5554,
      "step": 276
    },
    {
      "epoch": 1.1235443037974684,
      "grad_norm": 0.7999761391209388,
      "learning_rate": 3.81201543660571e-05,
      "loss": 0.533,
      "step": 277
    },
    {
      "epoch": 1.1275949367088607,
      "grad_norm": 0.6764357323937197,
      "learning_rate": 3.809605768680377e-05,
      "loss": 0.5579,
      "step": 278
    },
    {
      "epoch": 1.1316455696202532,
      "grad_norm": 0.8380511497180624,
      "learning_rate": 3.807181526438958e-05,
      "loss": 0.5485,
      "step": 279
    },
    {
      "epoch": 1.1356962025316455,
      "grad_norm": 0.7276480881073278,
      "learning_rate": 3.8047427294059697e-05,
      "loss": 0.5562,
      "step": 280
    },
    {
      "epoch": 1.139746835443038,
      "grad_norm": 0.6866653240399239,
      "learning_rate": 3.802289397223145e-05,
      "loss": 0.5454,
      "step": 281
    },
    {
      "epoch": 1.1437974683544303,
      "grad_norm": 0.8026062511833789,
      "learning_rate": 3.7998215496492854e-05,
      "loss": 0.5494,
      "step": 282
    },
    {
      "epoch": 1.1478481012658228,
      "grad_norm": 0.7171869596936482,
      "learning_rate": 3.797339206560096e-05,
      "loss": 0.5468,
      "step": 283
    },
    {
      "epoch": 1.1518987341772151,
      "grad_norm": 0.9135293425830762,
      "learning_rate": 3.794842387948027e-05,
      "loss": 0.5582,
      "step": 284
    },
    {
      "epoch": 1.1559493670886076,
      "grad_norm": 0.6786871245521915,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 0.5652,
      "step": 285
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.7381811723508628,
      "learning_rate": 3.7898054047078054e-05,
      "loss": 0.5893,
      "step": 286
    },
    {
      "epoch": 1.1640506329113924,
      "grad_norm": 0.7320949531670509,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.5534,
      "step": 287
    },
    {
      "epoch": 1.1681012658227847,
      "grad_norm": 0.6650500483768336,
      "learning_rate": 3.7847107621969786e-05,
      "loss": 0.5363,
      "step": 288
    },
    {
      "epoch": 1.1721518987341772,
      "grad_norm": 0.6527041194118853,
      "learning_rate": 3.7821418699320064e-05,
      "loss": 0.5644,
      "step": 289
    },
    {
      "epoch": 1.1762025316455695,
      "grad_norm": 0.6400964618176189,
      "learning_rate": 3.7795586245414145e-05,
      "loss": 0.5851,
      "step": 290
    },
    {
      "epoch": 1.180253164556962,
      "grad_norm": 0.5917570095987915,
      "learning_rate": 3.776961046830306e-05,
      "loss": 0.5586,
      "step": 291
    },
    {
      "epoch": 1.1843037974683543,
      "grad_norm": 0.5417310974946382,
      "learning_rate": 3.774349157719215e-05,
      "loss": 0.5726,
      "step": 292
    },
    {
      "epoch": 1.1883544303797469,
      "grad_norm": 0.5791574565324539,
      "learning_rate": 3.7717229782439365e-05,
      "loss": 0.5592,
      "step": 293
    },
    {
      "epoch": 1.1924050632911392,
      "grad_norm": 0.5466814394220549,
      "learning_rate": 3.769082529555359e-05,
      "loss": 0.5483,
      "step": 294
    },
    {
      "epoch": 1.1964556962025317,
      "grad_norm": 0.5594521861126135,
      "learning_rate": 3.766427832919294e-05,
      "loss": 0.5676,
      "step": 295
    },
    {
      "epoch": 1.200506329113924,
      "grad_norm": 0.5620715711055998,
      "learning_rate": 3.7637589097163024e-05,
      "loss": 0.5663,
      "step": 296
    },
    {
      "epoch": 1.2045569620253165,
      "grad_norm": 0.5566730258039203,
      "learning_rate": 3.761075781441526e-05,
      "loss": 0.5962,
      "step": 297
    },
    {
      "epoch": 1.2086075949367088,
      "grad_norm": 0.49323119915682934,
      "learning_rate": 3.75837846970451e-05,
      "loss": 0.5401,
      "step": 298
    },
    {
      "epoch": 1.2126582278481013,
      "grad_norm": 0.6630058036392946,
      "learning_rate": 3.755666996229032e-05,
      "loss": 0.5666,
      "step": 299
    },
    {
      "epoch": 1.2167088607594936,
      "grad_norm": 0.5699066453296573,
      "learning_rate": 3.752941382852927e-05,
      "loss": 0.5855,
      "step": 300
    },
    {
      "epoch": 1.220759493670886,
      "grad_norm": 0.5289703789161617,
      "learning_rate": 3.7502016515279115e-05,
      "loss": 0.554,
      "step": 301
    },
    {
      "epoch": 1.2248101265822784,
      "grad_norm": 0.5391442054377428,
      "learning_rate": 3.7474478243194043e-05,
      "loss": 0.5585,
      "step": 302
    },
    {
      "epoch": 1.228860759493671,
      "grad_norm": 0.535600282463096,
      "learning_rate": 3.744679923406351e-05,
      "loss": 0.5535,
      "step": 303
    },
    {
      "epoch": 1.2329113924050632,
      "grad_norm": 0.5465666658215828,
      "learning_rate": 3.741897971081043e-05,
      "loss": 0.5648,
      "step": 304
    },
    {
      "epoch": 1.2369620253164557,
      "grad_norm": 0.5089931797583095,
      "learning_rate": 3.739101989748946e-05,
      "loss": 0.5799,
      "step": 305
    },
    {
      "epoch": 1.241012658227848,
      "grad_norm": 0.6173030375394424,
      "learning_rate": 3.7362920019285066e-05,
      "loss": 0.5723,
      "step": 306
    },
    {
      "epoch": 1.2450632911392405,
      "grad_norm": 0.5768171198790069,
      "learning_rate": 3.73346803025098e-05,
      "loss": 0.5576,
      "step": 307
    },
    {
      "epoch": 1.2491139240506328,
      "grad_norm": 0.570763339643696,
      "learning_rate": 3.730630097460247e-05,
      "loss": 0.5337,
      "step": 308
    },
    {
      "epoch": 1.2531645569620253,
      "grad_norm": 0.5591765383015607,
      "learning_rate": 3.727778226412628e-05,
      "loss": 0.5806,
      "step": 309
    },
    {
      "epoch": 1.2572151898734178,
      "grad_norm": 0.5439187614108945,
      "learning_rate": 3.7249124400767006e-05,
      "loss": 0.5828,
      "step": 310
    },
    {
      "epoch": 1.2612658227848101,
      "grad_norm": 0.5270026387338699,
      "learning_rate": 3.722032761533114e-05,
      "loss": 0.5654,
      "step": 311
    },
    {
      "epoch": 1.2653164556962024,
      "grad_norm": 0.5092771864230822,
      "learning_rate": 3.719139213974403e-05,
      "loss": 0.5469,
      "step": 312
    },
    {
      "epoch": 1.269367088607595,
      "grad_norm": 0.5886422738491766,
      "learning_rate": 3.7162318207048006e-05,
      "loss": 0.5563,
      "step": 313
    },
    {
      "epoch": 1.2734177215189875,
      "grad_norm": 0.5595241852487232,
      "learning_rate": 3.713310605140055e-05,
      "loss": 0.5502,
      "step": 314
    },
    {
      "epoch": 1.2774683544303798,
      "grad_norm": 0.577631632658033,
      "learning_rate": 3.710375590807233e-05,
      "loss": 0.5658,
      "step": 315
    },
    {
      "epoch": 1.281518987341772,
      "grad_norm": 0.6506103642695971,
      "learning_rate": 3.7074268013445365e-05,
      "loss": 0.5851,
      "step": 316
    },
    {
      "epoch": 1.2855696202531646,
      "grad_norm": 0.5287995565293417,
      "learning_rate": 3.7044642605011114e-05,
      "loss": 0.5617,
      "step": 317
    },
    {
      "epoch": 1.289620253164557,
      "grad_norm": 0.6294587538519871,
      "learning_rate": 3.701487992136854e-05,
      "loss": 0.5759,
      "step": 318
    },
    {
      "epoch": 1.2936708860759494,
      "grad_norm": 0.5724220296326376,
      "learning_rate": 3.69849802022222e-05,
      "loss": 0.56,
      "step": 319
    },
    {
      "epoch": 1.2977215189873417,
      "grad_norm": 0.5777957366408841,
      "learning_rate": 3.6954943688380334e-05,
      "loss": 0.5418,
      "step": 320
    },
    {
      "epoch": 1.3017721518987342,
      "grad_norm": 0.5458690114020456,
      "learning_rate": 3.692477062175289e-05,
      "loss": 0.5737,
      "step": 321
    },
    {
      "epoch": 1.3058227848101267,
      "grad_norm": 0.5243788709608046,
      "learning_rate": 3.689446124534958e-05,
      "loss": 0.573,
      "step": 322
    },
    {
      "epoch": 1.309873417721519,
      "grad_norm": 0.6261408512204654,
      "learning_rate": 3.686401580327799e-05,
      "loss": 0.5593,
      "step": 323
    },
    {
      "epoch": 1.3139240506329113,
      "grad_norm": 0.5787289389136574,
      "learning_rate": 3.683343454074149e-05,
      "loss": 0.5329,
      "step": 324
    },
    {
      "epoch": 1.3179746835443038,
      "grad_norm": 0.5640096076099105,
      "learning_rate": 3.6802717704037386e-05,
      "loss": 0.5505,
      "step": 325
    },
    {
      "epoch": 1.3220253164556963,
      "grad_norm": 0.5961263264470968,
      "learning_rate": 3.6771865540554855e-05,
      "loss": 0.5826,
      "step": 326
    },
    {
      "epoch": 1.3260759493670886,
      "grad_norm": 0.6214225477923013,
      "learning_rate": 3.674087829877297e-05,
      "loss": 0.558,
      "step": 327
    },
    {
      "epoch": 1.330126582278481,
      "grad_norm": 0.5523716101822423,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.5423,
      "step": 328
    },
    {
      "epoch": 1.3341772151898734,
      "grad_norm": 0.6134148059011193,
      "learning_rate": 3.667849957966501e-05,
      "loss": 0.5527,
      "step": 329
    },
    {
      "epoch": 1.338227848101266,
      "grad_norm": 0.5823552732169466,
      "learning_rate": 3.6647108604728546e-05,
      "loss": 0.5849,
      "step": 330
    },
    {
      "epoch": 1.3422784810126582,
      "grad_norm": 0.6452104876129356,
      "learning_rate": 3.661558355626795e-05,
      "loss": 0.5688,
      "step": 331
    },
    {
      "epoch": 1.3463291139240505,
      "grad_norm": 0.5264755078806451,
      "learning_rate": 3.658392468818163e-05,
      "loss": 0.5543,
      "step": 332
    },
    {
      "epoch": 1.350379746835443,
      "grad_norm": 0.6321584310396682,
      "learning_rate": 3.655213225544574e-05,
      "loss": 0.5842,
      "step": 333
    },
    {
      "epoch": 1.3544303797468356,
      "grad_norm": 0.5485900270930143,
      "learning_rate": 3.652020651411218e-05,
      "loss": 0.5719,
      "step": 334
    },
    {
      "epoch": 1.3584810126582278,
      "grad_norm": 0.5617665574135458,
      "learning_rate": 3.6488147721306474e-05,
      "loss": 0.5477,
      "step": 335
    },
    {
      "epoch": 1.3625316455696201,
      "grad_norm": 0.5213204158079547,
      "learning_rate": 3.645595613522574e-05,
      "loss": 0.5511,
      "step": 336
    },
    {
      "epoch": 1.3665822784810127,
      "grad_norm": 0.5603127267172388,
      "learning_rate": 3.642363201513657e-05,
      "loss": 0.5655,
      "step": 337
    },
    {
      "epoch": 1.3706329113924052,
      "grad_norm": 0.5581021763626922,
      "learning_rate": 3.6391175621373006e-05,
      "loss": 0.5411,
      "step": 338
    },
    {
      "epoch": 1.3746835443037975,
      "grad_norm": 0.5718793695862455,
      "learning_rate": 3.6358587215334355e-05,
      "loss": 0.5802,
      "step": 339
    },
    {
      "epoch": 1.3787341772151898,
      "grad_norm": 0.6200353610197415,
      "learning_rate": 3.632586705948318e-05,
      "loss": 0.5603,
      "step": 340
    },
    {
      "epoch": 1.3827848101265823,
      "grad_norm": 0.5054617341205304,
      "learning_rate": 3.629301541734311e-05,
      "loss": 0.5537,
      "step": 341
    },
    {
      "epoch": 1.3868354430379748,
      "grad_norm": 0.5439147669540596,
      "learning_rate": 3.626003255349676e-05,
      "loss": 0.5473,
      "step": 342
    },
    {
      "epoch": 1.390886075949367,
      "grad_norm": 0.5369943131114929,
      "learning_rate": 3.622691873358357e-05,
      "loss": 0.5817,
      "step": 343
    },
    {
      "epoch": 1.3949367088607594,
      "grad_norm": 0.4866353559680135,
      "learning_rate": 3.61936742242977e-05,
      "loss": 0.5629,
      "step": 344
    },
    {
      "epoch": 1.398987341772152,
      "grad_norm": 0.5591552134122262,
      "learning_rate": 3.6160299293385864e-05,
      "loss": 0.5704,
      "step": 345
    },
    {
      "epoch": 1.4030379746835444,
      "grad_norm": 0.5265482804404384,
      "learning_rate": 3.612679420964516e-05,
      "loss": 0.5925,
      "step": 346
    },
    {
      "epoch": 1.4070886075949367,
      "grad_norm": 0.5476072629626323,
      "learning_rate": 3.609315924292092e-05,
      "loss": 0.5717,
      "step": 347
    },
    {
      "epoch": 1.411139240506329,
      "grad_norm": 0.5382885063335417,
      "learning_rate": 3.6059394664104554e-05,
      "loss": 0.5511,
      "step": 348
    },
    {
      "epoch": 1.4151898734177215,
      "grad_norm": 0.5985074024613601,
      "learning_rate": 3.602550074513133e-05,
      "loss": 0.5741,
      "step": 349
    },
    {
      "epoch": 1.419240506329114,
      "grad_norm": 0.5001811092312017,
      "learning_rate": 3.599147775897822e-05,
      "loss": 0.561,
      "step": 350
    },
    {
      "epoch": 1.4232911392405063,
      "grad_norm": 0.6266189290952527,
      "learning_rate": 3.595732597966167e-05,
      "loss": 0.5582,
      "step": 351
    },
    {
      "epoch": 1.4273417721518986,
      "grad_norm": 0.5254904390591227,
      "learning_rate": 3.592304568223542e-05,
      "loss": 0.5642,
      "step": 352
    },
    {
      "epoch": 1.4313924050632911,
      "grad_norm": 0.5275779903529928,
      "learning_rate": 3.588863714278826e-05,
      "loss": 0.5716,
      "step": 353
    },
    {
      "epoch": 1.4354430379746836,
      "grad_norm": 0.5861315821238481,
      "learning_rate": 3.585410063844186e-05,
      "loss": 0.5617,
      "step": 354
    },
    {
      "epoch": 1.439493670886076,
      "grad_norm": 0.517133102539322,
      "learning_rate": 3.581943644734846e-05,
      "loss": 0.5397,
      "step": 355
    },
    {
      "epoch": 1.4435443037974682,
      "grad_norm": 0.5953815150189339,
      "learning_rate": 3.578464484868869e-05,
      "loss": 0.5652,
      "step": 356
    },
    {
      "epoch": 1.4475949367088607,
      "grad_norm": 0.5676101173048175,
      "learning_rate": 3.5749726122669316e-05,
      "loss": 0.5843,
      "step": 357
    },
    {
      "epoch": 1.4516455696202533,
      "grad_norm": 0.564835835505198,
      "learning_rate": 3.5714680550520943e-05,
      "loss": 0.5802,
      "step": 358
    },
    {
      "epoch": 1.4556962025316456,
      "grad_norm": 0.5989294456169791,
      "learning_rate": 3.5679508414495794e-05,
      "loss": 0.5722,
      "step": 359
    },
    {
      "epoch": 1.4597468354430378,
      "grad_norm": 0.5351861803440486,
      "learning_rate": 3.564420999786543e-05,
      "loss": 0.5719,
      "step": 360
    },
    {
      "epoch": 1.4637974683544304,
      "grad_norm": 0.6850983865110504,
      "learning_rate": 3.560878558491842e-05,
      "loss": 0.5639,
      "step": 361
    },
    {
      "epoch": 1.4678481012658229,
      "grad_norm": 0.5118352672122104,
      "learning_rate": 3.5573235460958145e-05,
      "loss": 0.5642,
      "step": 362
    },
    {
      "epoch": 1.4718987341772152,
      "grad_norm": 0.6002919030667598,
      "learning_rate": 3.553755991230039e-05,
      "loss": 0.5407,
      "step": 363
    },
    {
      "epoch": 1.4759493670886075,
      "grad_norm": 0.4992891152870657,
      "learning_rate": 3.5501759226271144e-05,
      "loss": 0.5636,
      "step": 364
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.5292049130954618,
      "learning_rate": 3.546583369120419e-05,
      "loss": 0.5629,
      "step": 365
    },
    {
      "epoch": 1.4840506329113925,
      "grad_norm": 0.49384950398431143,
      "learning_rate": 3.5429783596438864e-05,
      "loss": 0.6028,
      "step": 366
    },
    {
      "epoch": 1.4881012658227848,
      "grad_norm": 0.534743933754103,
      "learning_rate": 3.539360923231766e-05,
      "loss": 0.5582,
      "step": 367
    },
    {
      "epoch": 1.492151898734177,
      "grad_norm": 0.5282198732101683,
      "learning_rate": 3.535731089018394e-05,
      "loss": 0.5547,
      "step": 368
    },
    {
      "epoch": 1.4962025316455696,
      "grad_norm": 0.5622449759377719,
      "learning_rate": 3.532088886237956e-05,
      "loss": 0.5358,
      "step": 369
    },
    {
      "epoch": 1.5002531645569621,
      "grad_norm": 0.582083831617939,
      "learning_rate": 3.528434344224253e-05,
      "loss": 0.5536,
      "step": 370
| }, |
| { |
| "epoch": 1.5043037974683544, |
| "grad_norm": 0.5059942159626537, |
| "learning_rate": 3.524767492410464e-05, |
| "loss": 0.5483, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.5083544303797467, |
| "grad_norm": 0.6025292993496548, |
| "learning_rate": 3.521088360328908e-05, |
| "loss": 0.5539, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.5124050632911392, |
| "grad_norm": 0.5210713078445697, |
| "learning_rate": 3.517396977610811e-05, |
| "loss": 0.5548, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.5164556962025317, |
| "grad_norm": 0.6556948141258002, |
| "learning_rate": 3.5136933739860595e-05, |
| "loss": 0.5795, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.520506329113924, |
| "grad_norm": 0.5068242288152303, |
| "learning_rate": 3.509977579282971e-05, |
| "loss": 0.5589, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.5245569620253163, |
| "grad_norm": 0.561890742952319, |
| "learning_rate": 3.5062496234280424e-05, |
| "loss": 0.5615, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.5286075949367088, |
| "grad_norm": 0.5180651088043136, |
| "learning_rate": 3.502509536445719e-05, |
| "loss": 0.5764, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.5326582278481014, |
| "grad_norm": 0.5558645037020938, |
| "learning_rate": 3.498757348458147e-05, |
| "loss": 0.5637, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.5367088607594936, |
| "grad_norm": 0.556180177428061, |
| "learning_rate": 3.4949930896849324e-05, |
| "loss": 0.5788, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.540759493670886, |
| "grad_norm": 0.620303106965505, |
| "learning_rate": 3.491216790442899e-05, |
| "loss": 0.575, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.5448101265822785, |
| "grad_norm": 0.5420809342060263, |
| "learning_rate": 3.487428481145839e-05, |
| "loss": 0.5613, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.548860759493671, |
| "grad_norm": 0.6852184942000812, |
| "learning_rate": 3.483628192304278e-05, |
| "loss": 0.5453, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.5529113924050633, |
| "grad_norm": 0.5464797013446153, |
| "learning_rate": 3.479815954525219e-05, |
| "loss": 0.552, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.5569620253164556, |
| "grad_norm": 0.6644124623746328, |
| "learning_rate": 3.475991798511899e-05, |
| "loss": 0.5576, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.561012658227848, |
| "grad_norm": 0.6125871540632157, |
| "learning_rate": 3.4721557550635464e-05, |
| "loss": 0.5562, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.5650632911392406, |
| "grad_norm": 0.5655663220881555, |
| "learning_rate": 3.468307855075128e-05, |
| "loss": 0.5518, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.5691139240506329, |
| "grad_norm": 0.7080046950141624, |
| "learning_rate": 3.4644481295371005e-05, |
| "loss": 0.5482, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.5731645569620252, |
| "grad_norm": 0.5636614125512427, |
| "learning_rate": 3.460576609535163e-05, |
| "loss": 0.5761, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.5772151898734177, |
| "grad_norm": 0.6505951351223528, |
| "learning_rate": 3.456693326250006e-05, |
| "loss": 0.5652, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.5812658227848102, |
| "grad_norm": 0.5632854421530793, |
| "learning_rate": 3.452798310957058e-05, |
| "loss": 0.5714, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.5853164556962025, |
| "grad_norm": 0.6697577422240389, |
| "learning_rate": 3.4488915950262386e-05, |
| "loss": 0.5611, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.5893670886075948, |
| "grad_norm": 0.5411960908973954, |
| "learning_rate": 3.4449732099216985e-05, |
| "loss": 0.5616, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.5934177215189873, |
| "grad_norm": 0.6539927267453391, |
| "learning_rate": 3.441043187201574e-05, |
| "loss": 0.5842, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.5974683544303798, |
| "grad_norm": 0.5416185040352602, |
| "learning_rate": 3.437101558517728e-05, |
| "loss": 0.5781, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.6015189873417721, |
| "grad_norm": 0.5199909055823728, |
| "learning_rate": 3.433148355615496e-05, |
| "loss": 0.5653, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.6055696202531644, |
| "grad_norm": 0.5711196945061209, |
| "learning_rate": 3.4291836103334294e-05, |
| "loss": 0.5822, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.609620253164557, |
| "grad_norm": 0.6131783865438132, |
| "learning_rate": 3.425207354603043e-05, |
| "loss": 0.5729, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.6136708860759494, |
| "grad_norm": 0.5712811792776132, |
| "learning_rate": 3.421219620448553e-05, |
| "loss": 0.5703, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.6177215189873417, |
| "grad_norm": 0.6200828658856946, |
| "learning_rate": 3.417220439986623e-05, |
| "loss": 0.5658, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.621772151898734, |
| "grad_norm": 0.49778296588628745, |
| "learning_rate": 3.4132098454261024e-05, |
| "loss": 0.5618, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.6258227848101265, |
| "grad_norm": 0.5657624910553918, |
| "learning_rate": 3.4091878690677676e-05, |
| "loss": 0.5815, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.629873417721519, |
| "grad_norm": 0.5878450855274067, |
| "learning_rate": 3.405154543304065e-05, |
| "loss": 0.5717, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.6339240506329114, |
| "grad_norm": 0.5334431736292033, |
| "learning_rate": 3.401109900618843e-05, |
| "loss": 0.5678, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.6379746835443036, |
| "grad_norm": 0.584413094503241, |
| "learning_rate": 3.3970539735870996e-05, |
| "loss": 0.5521, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.6420253164556962, |
| "grad_norm": 0.5179200790990399, |
| "learning_rate": 3.392986794874714e-05, |
| "loss": 0.569, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.6460759493670887, |
| "grad_norm": 0.5309456217745878, |
| "learning_rate": 3.388908397238184e-05, |
| "loss": 0.5603, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.650126582278481, |
| "grad_norm": 0.6699877782689999, |
| "learning_rate": 3.384818813524362e-05, |
| "loss": 0.5704, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.6541772151898733, |
| "grad_norm": 0.5569351181884489, |
| "learning_rate": 3.380718076670195e-05, |
| "loss": 0.5879, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.6582278481012658, |
| "grad_norm": 0.6013735766912347, |
| "learning_rate": 3.376606219702454e-05, |
| "loss": 0.568, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.6622784810126583, |
| "grad_norm": 0.5907195647270309, |
| "learning_rate": 3.372483275737468e-05, |
| "loss": 0.5699, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.6663291139240506, |
| "grad_norm": 0.5798999542704003, |
| "learning_rate": 3.368349277980861e-05, |
| "loss": 0.5619, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.6703797468354429, |
| "grad_norm": 0.5603361495478024, |
| "learning_rate": 3.3642042597272844e-05, |
| "loss": 0.5546, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.6744303797468354, |
| "grad_norm": 0.4980258423648179, |
| "learning_rate": 3.360048254360144e-05, |
| "loss": 0.5499, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.678481012658228, |
| "grad_norm": 0.5360984424878351, |
| "learning_rate": 3.355881295351336e-05, |
| "loss": 0.5585, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.6825316455696202, |
| "grad_norm": 0.5651102114619427, |
| "learning_rate": 3.351703416260975e-05, |
| "loss": 0.5785, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.6865822784810125, |
| "grad_norm": 0.49093889408070185, |
| "learning_rate": 3.347514650737126e-05, |
| "loss": 0.5683, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.690632911392405, |
| "grad_norm": 0.603023516838559, |
| "learning_rate": 3.3433150325155295e-05, |
| "loss": 0.6025, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.6946835443037975, |
| "grad_norm": 0.5127548587831127, |
| "learning_rate": 3.339104595419334e-05, |
| "loss": 0.5451, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.6987341772151898, |
| "grad_norm": 0.5668363205935075, |
| "learning_rate": 3.3348833733588204e-05, |
| "loss": 0.5738, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.7027848101265821, |
| "grad_norm": 0.5465360912034068, |
| "learning_rate": 3.3306514003311305e-05, |
| "loss": 0.5799, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.7068354430379746, |
| "grad_norm": 0.542363003707889, |
| "learning_rate": 3.326408710419996e-05, |
| "loss": 0.5655, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.7108860759493671, |
| "grad_norm": 0.59048689366975, |
| "learning_rate": 3.322155337795454e-05, |
| "loss": 0.5598, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.7149367088607594, |
| "grad_norm": 0.5631754771276604, |
| "learning_rate": 3.317891316713587e-05, |
| "loss": 0.5742, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.7189873417721517, |
| "grad_norm": 0.5284358515604691, |
| "learning_rate": 3.313616681516231e-05, |
| "loss": 0.5623, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.7230379746835442, |
| "grad_norm": 0.6264832875549537, |
| "learning_rate": 3.309331466630713e-05, |
| "loss": 0.5693, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.7270886075949368, |
| "grad_norm": 0.5441883760270967, |
| "learning_rate": 3.305035706569563e-05, |
| "loss": 0.5731, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.7311392405063293, |
| "grad_norm": 0.5805645514103459, |
| "learning_rate": 3.3007294359302433e-05, |
| "loss": 0.5803, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.7351898734177216, |
| "grad_norm": 0.5295494793312746, |
| "learning_rate": 3.296412689394864e-05, |
| "loss": 0.5758, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.7392405063291139, |
| "grad_norm": 0.584098328263877, |
| "learning_rate": 3.292085501729909e-05, |
| "loss": 0.5843, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.7432911392405064, |
| "grad_norm": 0.5889181866342664, |
| "learning_rate": 3.2877479077859534e-05, |
| "loss": 0.5704, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.747341772151899, |
| "grad_norm": 0.621605450580845, |
| "learning_rate": 3.283399942497381e-05, |
| "loss": 0.568, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.7513924050632912, |
| "grad_norm": 0.5793831299607144, |
| "learning_rate": 3.279041640882108e-05, |
| "loss": 0.5574, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.7554430379746835, |
| "grad_norm": 0.5813361257052053, |
| "learning_rate": 3.2746730380412964e-05, |
| "loss": 0.5567, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.759493670886076, |
| "grad_norm": 0.6326583711008914, |
| "learning_rate": 3.2702941691590726e-05, |
| "loss": 0.5566, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.7635443037974685, |
| "grad_norm": 0.5761251924716608, |
| "learning_rate": 3.265905069502244e-05, |
| "loss": 0.55, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.7675949367088608, |
| "grad_norm": 0.5135174708949741, |
| "learning_rate": 3.261505774420016e-05, |
| "loss": 0.5579, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.771645569620253, |
| "grad_norm": 0.6993640341210392, |
| "learning_rate": 3.257096319343707e-05, |
| "loss": 0.5716, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.7756962025316456, |
| "grad_norm": 0.604332692939568, |
| "learning_rate": 3.2526767397864614e-05, |
| "loss": 0.5701, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.7797468354430381, |
| "grad_norm": 0.5308453303224744, |
| "learning_rate": 3.248247071342966e-05, |
| "loss": 0.5652, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.7837974683544304, |
| "grad_norm": 0.6220665741227448, |
| "learning_rate": 3.243807349689161e-05, |
| "loss": 0.5613, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.7878481012658227, |
| "grad_norm": 0.5128332296581554, |
| "learning_rate": 3.2393576105819544e-05, |
| "loss": 0.5606, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.7918987341772152, |
| "grad_norm": 0.5215734426093811, |
| "learning_rate": 3.2348978898589333e-05, |
| "loss": 0.5699, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.7959493670886078, |
| "grad_norm": 0.5029939853844808, |
| "learning_rate": 3.230428223438075e-05, |
| "loss": 0.5475, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.5256021286621395, |
| "learning_rate": 3.225948647317459e-05, |
| "loss": 0.5606, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.8040506329113923, |
| "grad_norm": 0.49297983052648237, |
| "learning_rate": 3.2214591975749745e-05, |
| "loss": 0.582, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.8081012658227849, |
| "grad_norm": 0.533634014311471, |
| "learning_rate": 3.216959910368034e-05, |
| "loss": 0.5817, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.8121518987341774, |
| "grad_norm": 0.49422942930335784, |
| "learning_rate": 3.212450821933277e-05, |
| "loss": 0.5671, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.8162025316455697, |
| "grad_norm": 0.4778814080287163, |
| "learning_rate": 3.207931968586281e-05, |
| "loss": 0.5511, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.820253164556962, |
| "grad_norm": 0.4837307727328528, |
| "learning_rate": 3.203403386721272e-05, |
| "loss": 0.585, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.8243037974683545, |
| "grad_norm": 0.5500334254216247, |
| "learning_rate": 3.1988651128108245e-05, |
| "loss": 0.5414, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.828354430379747, |
| "grad_norm": 0.5849289852654976, |
| "learning_rate": 3.194317183405573e-05, |
| "loss": 0.5834, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.8324050632911393, |
| "grad_norm": 0.6851441005524191, |
| "learning_rate": 3.189759635133914e-05, |
| "loss": 0.5687, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.8364556962025316, |
| "grad_norm": 0.5189215797336858, |
| "learning_rate": 3.185192504701718e-05, |
| "loss": 0.5835, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.840506329113924, |
| "grad_norm": 0.5417395855672873, |
| "learning_rate": 3.1806158288920234e-05, |
| "loss": 0.5636, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.8445569620253166, |
| "grad_norm": 0.5375128932183207, |
| "learning_rate": 3.1760296445647477e-05, |
| "loss": 0.5909, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.848607594936709, |
| "grad_norm": 0.5234687199223308, |
| "learning_rate": 3.1714339886563896e-05, |
| "loss": 0.5589, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.8526582278481012, |
| "grad_norm": 0.5492798148376636, |
| "learning_rate": 3.166828898179731e-05, |
| "loss": 0.5801, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.8567088607594937, |
| "grad_norm": 0.5216064665460479, |
| "learning_rate": 3.162214410223536e-05, |
| "loss": 0.5755, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.8607594936708862, |
| "grad_norm": 0.5595995263539512, |
| "learning_rate": 3.157590561952257e-05, |
| "loss": 0.568, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.8648101265822785, |
| "grad_norm": 0.5787068210236129, |
| "learning_rate": 3.152957390605732e-05, |
| "loss": 0.5733, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.8688607594936708, |
| "grad_norm": 0.5156361442059805, |
| "learning_rate": 3.148314933498886e-05, |
| "loss": 0.5774, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.8729113924050633, |
| "grad_norm": 0.5517322274176978, |
| "learning_rate": 3.143663228021431e-05, |
| "loss": 0.5573, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.8769620253164558, |
| "grad_norm": 0.5116958522428543, |
| "learning_rate": 3.1390023116375624e-05, |
| "loss": 0.5793, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.8810126582278481, |
| "grad_norm": 0.4374843553859036, |
| "learning_rate": 3.134332221885661e-05, |
| "loss": 0.5606, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.8850632911392404, |
| "grad_norm": 0.5107860606830668, |
| "learning_rate": 3.129652996377987e-05, |
| "loss": 0.5664, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.889113924050633, |
| "grad_norm": 0.5517011075394017, |
| "learning_rate": 3.12496467280038e-05, |
| "loss": 0.5416, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.8931645569620255, |
| "grad_norm": 0.4938613418874763, |
| "learning_rate": 3.120267288911952e-05, |
| "loss": 0.5786, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.8972151898734178, |
| "grad_norm": 0.6002097621485626, |
| "learning_rate": 3.11556088254479e-05, |
| "loss": 0.5612, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.90126582278481, |
| "grad_norm": 0.4668511065361712, |
| "learning_rate": 3.11084549160364e-05, |
| "loss": 0.539, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.9053164556962026, |
| "grad_norm": 0.5446809002765682, |
| "learning_rate": 3.106121154065615e-05, |
| "loss": 0.5441, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.909367088607595, |
| "grad_norm": 0.513871314037061, |
| "learning_rate": 3.1013879079798805e-05, |
| "loss": 0.5623, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.9134177215189874, |
| "grad_norm": 0.6309549073142006, |
| "learning_rate": 3.096645791467348e-05, |
| "loss": 0.5892, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.9174683544303797, |
| "grad_norm": 0.5251738497408172, |
| "learning_rate": 3.091894842720373e-05, |
| "loss": 0.5598, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.9215189873417722, |
| "grad_norm": 0.5356732350823828, |
| "learning_rate": 3.0871351000024425e-05, |
| "loss": 0.5913, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.9255696202531647, |
| "grad_norm": 0.49957411875354996, |
| "learning_rate": 3.0823666016478716e-05, |
| "loss": 0.5617, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.929620253164557, |
| "grad_norm": 0.5053236582967193, |
| "learning_rate": 3.0775893860614896e-05, |
| "loss": 0.5813, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.9336708860759493, |
| "grad_norm": 0.5376632808172528, |
| "learning_rate": 3.0728034917183336e-05, |
| "loss": 0.5711, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.9377215189873418, |
| "grad_norm": 0.5264738873562244, |
| "learning_rate": 3.06800895716334e-05, |
| "loss": 0.5604, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.9417721518987343, |
| "grad_norm": 0.5058250577427734, |
| "learning_rate": 3.063205821011029e-05, |
| "loss": 0.5622, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.9458227848101266, |
| "grad_norm": 0.5619328434134006, |
| "learning_rate": 3.0583941219452016e-05, |
| "loss": 0.5734, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.949873417721519, |
| "grad_norm": 0.504117507554053, |
| "learning_rate": 3.053573898718618e-05, |
| "loss": 0.5578, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.9539240506329114, |
| "grad_norm": 0.5344232205022844, |
| "learning_rate": 3.0487451901526956e-05, |
| "loss": 0.5444, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.957974683544304, |
| "grad_norm": 0.542897169249038, |
| "learning_rate": 3.0439080351371875e-05, |
| "loss": 0.5779, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.9620253164556962, |
| "grad_norm": 0.5446527187364558, |
| "learning_rate": 3.0390624726298764e-05, |
| "loss": 0.5646, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.9660759493670885, |
| "grad_norm": 0.6563590681895782, |
| "learning_rate": 3.034208541656255e-05, |
| "loss": 0.5495, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.970126582278481, |
| "grad_norm": 0.458377203466753, |
| "learning_rate": 3.029346281309218e-05, |
| "loss": 0.5748, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.9741772151898735, |
| "grad_norm": 0.589142467063088, |
| "learning_rate": 3.0244757307487415e-05, |
| "loss": 0.5517, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.9782278481012658, |
| "grad_norm": 0.5047439076367575, |
| "learning_rate": 3.019596929201569e-05, |
| "loss": 0.5683, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.9822784810126581, |
| "grad_norm": 0.5219554397319108, |
| "learning_rate": 3.0147099159608985e-05, |
| "loss": 0.5938, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.9863291139240506, |
| "grad_norm": 0.5610614696756229, |
| "learning_rate": 3.0098147303860616e-05, |
| "loss": 0.5584, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.9903797468354432, |
| "grad_norm": 0.4768747817360498, |
| "learning_rate": 3.0049114119022117e-05, |
| "loss": 0.5832, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.9944303797468355, |
| "grad_norm": 0.5524499227272227, |
| "learning_rate": 3.0000000000000004e-05, |
| "loss": 0.5619, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.9984810126582278, |
| "grad_norm": 0.48976163475619316, |
| "learning_rate": 2.995080534235264e-05, |
| "loss": 0.5459, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.0040506329113925, |
| "grad_norm": 0.9637096513249046, |
| "learning_rate": 2.9901530542287044e-05, |
| "loss": 0.3982, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.008101265822785, |
| "grad_norm": 0.6784088925714888, |
| "learning_rate": 2.9852175996655676e-05, |
| "loss": 0.3797, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.012151898734177, |
| "grad_norm": 1.1211284469671667, |
| "learning_rate": 2.980274210295326e-05, |
| "loss": 0.3808, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.0162025316455696, |
| "grad_norm": 0.7341042896773003, |
| "learning_rate": 2.9753229259313578e-05, |
| "loss": 0.372, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.020253164556962, |
| "grad_norm": 0.6250716646523884, |
| "learning_rate": 2.9703637864506274e-05, |
| "loss": 0.3993, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.0243037974683546, |
| "grad_norm": 0.7041332087934312, |
| "learning_rate": 2.965396831793362e-05, |
| "loss": 0.3668, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.0283544303797467, |
| "grad_norm": 0.5374319045681779, |
| "learning_rate": 2.9604221019627316e-05, |
| "loss": 0.3778, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.0324050632911392, |
| "grad_norm": 0.6747468633166239, |
| "learning_rate": 2.955439637024526e-05, |
| "loss": 0.3694, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.0364556962025318, |
| "grad_norm": 0.5796378706655496, |
| "learning_rate": 2.9504494771068334e-05, |
| "loss": 0.3716, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.0405063291139243, |
| "grad_norm": 0.7169252121484504, |
| "learning_rate": 2.9454516623997156e-05, |
| "loss": 0.387, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.0445569620253163, |
| "grad_norm": 0.5923455179637341, |
| "learning_rate": 2.9404462331548847e-05, |
| "loss": 0.3712, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.048607594936709, |
| "grad_norm": 0.7191855197186855, |
| "learning_rate": 2.93543322968538e-05, |
| "loss": 0.383, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.0526582278481014, |
| "grad_norm": 0.561015234512655, |
| "learning_rate": 2.9304126923652428e-05, |
| "loss": 0.364, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.056708860759494, |
| "grad_norm": 0.6218243233014099, |
| "learning_rate": 2.9253846616291896e-05, |
| "loss": 0.3915, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.060759493670886, |
| "grad_norm": 0.5661357886088846, |
| "learning_rate": 2.9203491779722896e-05, |
| "loss": 0.3814, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.0648101265822785, |
| "grad_norm": 0.6613016694129545, |
| "learning_rate": 2.9153062819496357e-05, |
| "loss": 0.3741, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.068860759493671, |
| "grad_norm": 0.5092402990824846, |
| "learning_rate": 2.9102560141760178e-05, |
| "loss": 0.3743, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.0729113924050635, |
| "grad_norm": 0.643867696108929, |
| "learning_rate": 2.9051984153256004e-05, |
| "loss": 0.3701, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.0769620253164556, |
| "grad_norm": 0.5821910795154506, |
| "learning_rate": 2.900133526131588e-05, |
| "loss": 0.3919, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.081012658227848, |
| "grad_norm": 0.5604263008463369, |
| "learning_rate": 2.8950613873859025e-05, |
| "loss": 0.3629, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.0850632911392406, |
| "grad_norm": 0.556627941515685, |
| "learning_rate": 2.8899820399388515e-05, |
| "loss": 0.3686, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.089113924050633, |
| "grad_norm": 0.5487303276027243, |
| "learning_rate": 2.8848955246988012e-05, |
| "loss": 0.3788, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.093164556962025, |
| "grad_norm": 0.5531435327878613, |
| "learning_rate": 2.879801882631847e-05, |
| "loss": 0.3646, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.0972151898734177, |
| "grad_norm": 0.5119487618591579, |
| "learning_rate": 2.8747011547614808e-05, |
| "loss": 0.3601, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.1012658227848102, |
| "grad_norm": 0.47385108061587916, |
| "learning_rate": 2.8695933821682635e-05, |
| "loss": 0.3697, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.1053164556962027, |
| "grad_norm": 0.46636033552064693, |
| "learning_rate": 2.864478605989494e-05, |
| "loss": 0.395, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.109367088607595, |
| "grad_norm": 0.5164568963586864, |
| "learning_rate": 2.8593568674188765e-05, |
| "loss": 0.3723, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.1134177215189873, |
| "grad_norm": 0.4632398487049904, |
| "learning_rate": 2.8542282077061892e-05, |
| "loss": 0.3476, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.11746835443038, |
| "grad_norm": 0.5414993587489378, |
| "learning_rate": 2.8490926681569523e-05, |
| "loss": 0.3882, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.1215189873417724, |
| "grad_norm": 0.5106417529490032, |
| "learning_rate": 2.8439502901320956e-05, |
| "loss": 0.3526, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.1255696202531644, |
| "grad_norm": 0.4935023766001845, |
| "learning_rate": 2.8388011150476237e-05, |
| "loss": 0.4003, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.129620253164557, |
| "grad_norm": 0.4962737363109778, |
| "learning_rate": 2.8336451843742866e-05, |
| "loss": 0.3764, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.1336708860759495, |
| "grad_norm": 0.5215470140009274, |
| "learning_rate": 2.8284825396372387e-05, |
| "loss": 0.3657, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.137721518987342, |
| "grad_norm": 0.5091065918734566, |
| "learning_rate": 2.8233132224157132e-05, |
| "loss": 0.3809, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.141772151898734, |
| "grad_norm": 0.5116740243363739, |
| "learning_rate": 2.8181372743426805e-05, |
| "loss": 0.3745, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.1458227848101266, |
| "grad_norm": 0.5809853542581201, |
| "learning_rate": 2.8129547371045128e-05, |
| "loss": 0.3774, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.149873417721519, |
| "grad_norm": 0.5083997052542013, |
| "learning_rate": 2.8077656524406534e-05, |
| "loss": 0.3717, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.1539240506329116, |
| "grad_norm": 0.5684361811395124, |
| "learning_rate": 2.802570062143278e-05, |
| "loss": 0.3907, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.1579746835443037, |
| "grad_norm": 0.49159889350931424, |
| "learning_rate": 2.7973680080569555e-05, |
| "loss": 0.3714, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.162025316455696, |
| "grad_norm": 0.516541629153083, |
| "learning_rate": 2.792159532078314e-05, |
| "loss": 0.361, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.1660759493670887, |
| "grad_norm": 0.5264248985280915, |
| "learning_rate": 2.7869446761557033e-05, |
| "loss": 0.3501, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.170126582278481, |
| "grad_norm": 0.4896135743972396, |
| "learning_rate": 2.781723482288857e-05, |
| "loss": 0.3916, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.1741772151898733, |
| "grad_norm": 0.4948607532820462, |
| "learning_rate": 2.7764959925285517e-05, |
| "loss": 0.3651, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.178227848101266, |
| "grad_norm": 0.5300389840191382, |
| "learning_rate": 2.771262248976272e-05, |
| "loss": 0.3908, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.1822784810126583, |
| "grad_norm": 0.5422498632155157, |
| "learning_rate": 2.7660222937838677e-05, |
| "loss": 0.3878, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.186329113924051, |
| "grad_norm": 0.4859533659199391, |
| "learning_rate": 2.7607761691532186e-05, |
| "loss": 0.3828, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.190379746835443, |
| "grad_norm": 0.5180874988994738, |
| "learning_rate": 2.7555239173358916e-05, |
| "loss": 0.3525, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.1944303797468354, |
| "grad_norm": 0.5600417286299402, |
| "learning_rate": 2.7502655806328e-05, |
| "loss": 0.37, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.198481012658228, |
| "grad_norm": 0.4786697663526683, |
| "learning_rate": 2.7450012013938648e-05, |
| "loss": 0.3691, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.2025316455696204, |
| "grad_norm": 0.5855520748672085, |
| "learning_rate": 2.739730822017673e-05, |
| "loss": 0.3869, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.2065822784810125, |
| "grad_norm": 0.47973043035897395, |
| "learning_rate": 2.7344544849511355e-05, |
| "loss": 0.3703, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.210632911392405, |
| "grad_norm": 0.4830445826890563, |
| "learning_rate": 2.7291722326891456e-05, |
| "loss": 0.3909, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.2146835443037975, |
| "grad_norm": 0.4944475263150758, |
| "learning_rate": 2.723884107774236e-05, |
| "loss": 0.382, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.21873417721519, |
| "grad_norm": 0.494549166298531, |
| "learning_rate": 2.718590152796239e-05, |
| "loss": 0.3869, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.222784810126582, |
| "grad_norm": 0.48317548669490096, |
| "learning_rate": 2.71329041039194e-05, |
| "loss": 0.3779, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.2268354430379746, |
| "grad_norm": 0.5137299790307739, |
| "learning_rate": 2.7079849232447357e-05, |
| "loss": 0.376, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.230886075949367, |
| "grad_norm": 0.5263533697113645, |
| "learning_rate": 2.7026737340842895e-05, |
| "loss": 0.3876, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.2349367088607597, |
| "grad_norm": 0.5080565428094569, |
| "learning_rate": 2.697356885686189e-05, |
| "loss": 0.3794, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.2389873417721518, |
| "grad_norm": 0.5209845321040182, |
| "learning_rate": 2.6920344208716014e-05, |
| "loss": 0.3779, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.2430379746835443, |
| "grad_norm": 0.5006422796540798, |
| "learning_rate": 2.6867063825069252e-05, |
| "loss": 0.3936, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.247088607594937, |
| "grad_norm": 0.4871508684162012, |
| "learning_rate": 2.6813728135034494e-05, |
| "loss": 0.3809, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.2511392405063293, |
| "grad_norm": 0.5048571381659989, |
| "learning_rate": 2.6760337568170056e-05, |
| "loss": 0.3563, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.2551898734177214, |
| "grad_norm": 0.5045764775256539, |
| "learning_rate": 2.6706892554476226e-05, |
| "loss": 0.3659, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.259240506329114, |
| "grad_norm": 0.5489574168120245, |
| "learning_rate": 2.6653393524391795e-05, |
| "loss": 0.3799, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.2632911392405064, |
| "grad_norm": 0.5264355041290949, |
| "learning_rate": 2.6599840908790592e-05, |
| "loss": 0.3626, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.267341772151899, |
| "grad_norm": 0.49818156806871744, |
| "learning_rate": 2.6546235138978028e-05, |
| "loss": 0.3747, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.271392405063291, |
| "grad_norm": 0.4846642635688406, |
| "learning_rate": 2.6492576646687597e-05, |
| "loss": 0.373, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.2754430379746835, |
| "grad_norm": 0.4933413311407337, |
| "learning_rate": 2.6438865864077425e-05, |
| "loss": 0.3801, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.279493670886076, |
| "grad_norm": 0.48614236219612356, |
| "learning_rate": 2.6385103223726766e-05, |
| "loss": 0.3783, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.2835443037974685, |
| "grad_norm": 0.4626689981244063, |
| "learning_rate": 2.6331289158632537e-05, |
| "loss": 0.367, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.2875949367088606, |
| "grad_norm": 0.49472063956912343, |
| "learning_rate": 2.6277424102205817e-05, |
| "loss": 0.3874, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.291645569620253, |
| "grad_norm": 0.49891347391466423, |
| "learning_rate": 2.6223508488268374e-05, |
| "loss": 0.3872, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.2956962025316456, |
| "grad_norm": 0.49162064191563726, |
| "learning_rate": 2.6169542751049148e-05, |
| "loss": 0.3763, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.299746835443038, |
| "grad_norm": 0.535817745463568, |
| "learning_rate": 2.6115527325180754e-05, |
| "loss": 0.3786, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.3037974683544302, |
| "grad_norm": 0.48363116707760806, |
| "learning_rate": 2.606146264569603e-05, |
| "loss": 0.3935, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.3078481012658227, |
| "grad_norm": 0.46621437282626493, |
| "learning_rate": 2.6007349148024447e-05, |
| "loss": 0.3711, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.3118987341772153, |
| "grad_norm": 0.45366947556239157, |
| "learning_rate": 2.5953187267988694e-05, |
| "loss": 0.3506, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.3159493670886078, |
| "grad_norm": 0.4982295832820097, |
| "learning_rate": 2.5898977441801097e-05, |
| "loss": 0.369, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.32, |
| "grad_norm": 0.505021500343266, |
| "learning_rate": 2.584472010606015e-05, |
| "loss": 0.3549, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.3240506329113924, |
| "grad_norm": 0.5067020239638199, |
| "learning_rate": 2.5790415697746976e-05, |
| "loss": 0.4017, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.328101265822785, |
| "grad_norm": 0.48577969318615377, |
| "learning_rate": 2.5736064654221808e-05, |
| "loss": 0.3692, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.3321518987341774, |
| "grad_norm": 0.4993138735138936, |
| "learning_rate": 2.568166741322048e-05, |
| "loss": 0.4023, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.3362025316455695, |
| "grad_norm": 0.4713858767516867, |
| "learning_rate": 2.56272244128509e-05, |
| "loss": 0.3559, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.340253164556962, |
| "grad_norm": 0.49611353900642297, |
| "learning_rate": 2.55727360915895e-05, |
| "loss": 0.3783, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.3443037974683545, |
| "grad_norm": 0.4926070009004814, |
| "learning_rate": 2.5518202888277734e-05, |
| "loss": 0.3695, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.348354430379747, |
| "grad_norm": 0.5127146635260782, |
| "learning_rate": 2.5463625242118523e-05, |
| "loss": 0.3701, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.352405063291139, |
| "grad_norm": 0.439900602010005, |
| "learning_rate": 2.5409003592672723e-05, |
| "loss": 0.381, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.3564556962025316, |
| "grad_norm": 0.46014092848022314, |
| "learning_rate": 2.535433837985559e-05, |
| "loss": 0.3647, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.360506329113924, |
| "grad_norm": 0.5006932385954939, |
| "learning_rate": 2.529963004393324e-05, |
| "loss": 0.3922, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.3645569620253166, |
| "grad_norm": 0.45555776102365086, |
| "learning_rate": 2.524487902551908e-05, |
| "loss": 0.3744, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.3686075949367087, |
| "grad_norm": 0.4802799710743835, |
| "learning_rate": 2.519008576557029e-05, |
| "loss": 0.3785, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.372658227848101, |
| "grad_norm": 0.4399812834075029, |
| "learning_rate": 2.5135250705384254e-05, |
| "loss": 0.3731, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.3767088607594937, |
| "grad_norm": 0.5280483125931545, |
| "learning_rate": 2.5080374286595007e-05, |
| "loss": 0.3866, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.3807594936708862, |
| "grad_norm": 0.452866322187474, |
| "learning_rate": 2.5025456951169677e-05, |
| "loss": 0.3832, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.3848101265822783, |
| "grad_norm": 0.471727964356464, |
| "learning_rate": 2.4970499141404942e-05, |
| "loss": 0.3771, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.388860759493671, |
| "grad_norm": 0.45484382439749665, |
| "learning_rate": 2.491550129992345e-05, |
| "loss": 0.4068, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.3929113924050633, |
| "grad_norm": 0.45587172002295046, |
| "learning_rate": 2.486046386967024e-05, |
| "loss": 0.3889, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.396962025316456, |
| "grad_norm": 0.43428892518840406, |
| "learning_rate": 2.4805387293909214e-05, |
| "loss": 0.3704, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.401012658227848, |
| "grad_norm": 0.4792483230198426, |
| "learning_rate": 2.4750272016219552e-05, |
| "loss": 0.3912, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.4050632911392404, |
| "grad_norm": 0.4498413816011751, |
| "learning_rate": 2.4695118480492114e-05, |
| "loss": 0.3599, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.409113924050633, |
| "grad_norm": 0.45810226932109926, |
| "learning_rate": 2.4639927130925898e-05, |
| "loss": 0.3824, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.4131645569620255, |
| "grad_norm": 0.47665839149519085, |
| "learning_rate": 2.458469841202444e-05, |
| "loss": 0.4088, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.4172151898734175, |
| "grad_norm": 0.43950963113618496, |
| "learning_rate": 2.452943276859226e-05, |
| "loss": 0.3813, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.42126582278481, |
| "grad_norm": 0.459589931549135, |
| "learning_rate": 2.447413064573125e-05, |
| "loss": 0.3941, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.4253164556962026, |
| "grad_norm": 0.44990611396238034, |
| "learning_rate": 2.4418792488837095e-05, |
| "loss": 0.3817, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.429367088607595, |
| "grad_norm": 0.4866485269724401, |
| "learning_rate": 2.4363418743595713e-05, |
| "loss": 0.3885, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.433417721518987, |
| "grad_norm": 0.4588947990038194, |
| "learning_rate": 2.430800985597963e-05, |
| "loss": 0.3828, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.4374683544303797, |
| "grad_norm": 0.4861321898761674, |
| "learning_rate": 2.4252566272244415e-05, |
| "loss": 0.3895, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.441518987341772, |
| "grad_norm": 0.46872877858564904, |
| "learning_rate": 2.4197088438925063e-05, |
| "loss": 0.3659, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.4455696202531647, |
| "grad_norm": 0.4639506113241653, |
| "learning_rate": 2.4141576802832417e-05, |
| "loss": 0.3833, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.449620253164557, |
| "grad_norm": 0.45464259671732216, |
| "learning_rate": 2.408603181104957e-05, |
| "loss": 0.3921, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.4536708860759493, |
| "grad_norm": 0.43674674600928076, |
| "learning_rate": 2.4030453910928245e-05, |
| "loss": 0.3794, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.457721518987342, |
| "grad_norm": 0.46267841396224724, |
| "learning_rate": 2.397484355008521e-05, |
| "loss": 0.392, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.4617721518987343, |
| "grad_norm": 0.44731055538160264, |
| "learning_rate": 2.3919201176398662e-05, |
| "loss": 0.3631, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.4658227848101264, |
| "grad_norm": 0.4412543689825874, |
| "learning_rate": 2.3863527238004633e-05, |
| "loss": 0.3782, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.469873417721519, |
| "grad_norm": 0.42747364308054003, |
| "learning_rate": 2.380782218329337e-05, |
| "loss": 0.3492, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.4739240506329114, |
| "grad_norm": 0.4882090774966562, |
| "learning_rate": 2.3752086460905725e-05, |
| "loss": 0.4076, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.477974683544304, |
| "grad_norm": 0.44100834795277016, |
| "learning_rate": 2.3696320519729544e-05, |
| "loss": 0.3721, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.482025316455696, |
| "grad_norm": 0.4182795722919671, |
| "learning_rate": 2.3640524808896045e-05, |
| "loss": 0.4076, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.4860759493670885, |
| "grad_norm": 0.44229573543816914, |
| "learning_rate": 2.3584699777776222e-05, |
| "loss": 0.3642, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.490126582278481, |
| "grad_norm": 0.4539214140597022, |
| "learning_rate": 2.3528845875977195e-05, |
| "loss": 0.3598, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.4941772151898736, |
| "grad_norm": 0.4505472289539449, |
| "learning_rate": 2.3472963553338614e-05, |
| "loss": 0.373, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.4982278481012656, |
| "grad_norm": 0.4459279184920543, |
| "learning_rate": 2.341705325992901e-05, |
| "loss": 0.3813, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.502278481012658, |
| "grad_norm": 0.43955258142756876, |
| "learning_rate": 2.336111544604222e-05, |
| "loss": 0.3789, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.5063291139240507, |
| "grad_norm": 0.48273036512534023, |
| "learning_rate": 2.33051505621937e-05, |
| "loss": 0.3764, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.510379746835443, |
| "grad_norm": 0.4623190171915114, |
| "learning_rate": 2.324915905911693e-05, |
| "loss": 0.3959, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.5144303797468357, |
| "grad_norm": 0.4297725872102259, |
| "learning_rate": 2.319314138775977e-05, |
| "loss": 0.3804, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.5184810126582278, |
| "grad_norm": 0.47915984824744884, |
| "learning_rate": 2.3137097999280856e-05, |
| "loss": 0.3703, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.5225316455696203, |
| "grad_norm": 0.4481147850764211, |
| "learning_rate": 2.308102934504593e-05, |
| "loss": 0.3914, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.526582278481013, |
| "grad_norm": 0.4445224062951568, |
| "learning_rate": 2.3024935876624222e-05, |
| "loss": 0.3968, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.530632911392405, |
| "grad_norm": 0.44106474960715497, |
| "learning_rate": 2.2968818045784813e-05, |
| "loss": 0.3759, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.5346835443037974, |
| "grad_norm": 0.4496087602520478, |
| "learning_rate": 2.2912676304493006e-05, |
| "loss": 0.3874, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.53873417721519, |
| "grad_norm": 0.4169412566432786, |
| "learning_rate": 2.2856511104906668e-05, |
| "loss": 0.3838, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.5427848101265824, |
| "grad_norm": 0.4393683746294493, |
| "learning_rate": 2.2800322899372586e-05, |
| "loss": 0.3985, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.546835443037975, |
| "grad_norm": 0.4536234731271243, |
| "learning_rate": 2.2744112140422844e-05, |
| "loss": 0.3575, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.550886075949367, |
| "grad_norm": 0.43249724371159254, |
| "learning_rate": 2.2687879280771177e-05, |
| "loss": 0.3803, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.5549367088607595, |
| "grad_norm": 0.4455456224923025, |
| "learning_rate": 2.26316247733093e-05, |
| "loss": 0.3343, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.558987341772152, |
| "grad_norm": 0.48693455029830657, |
| "learning_rate": 2.257534907110328e-05, |
| "loss": 0.3768, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.563037974683544, |
| "grad_norm": 0.4737319170337223, |
| "learning_rate": 2.2519052627389882e-05, |
| "loss": 0.3546, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.5670886075949366, |
| "grad_norm": 0.43844251345570706, |
| "learning_rate": 2.246273589557294e-05, |
| "loss": 0.4066, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.571139240506329, |
| "grad_norm": 0.43387836754508985, |
| "learning_rate": 2.240639932921966e-05, |
| "loss": 0.3778, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.5751898734177217, |
| "grad_norm": 0.45478159541473934, |
| "learning_rate": 2.2350043382056995e-05, |
| "loss": 0.3742, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.579240506329114, |
| "grad_norm": 0.429227575536342, |
| "learning_rate": 2.2293668507968015e-05, |
| "loss": 0.3671, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.5832911392405062, |
| "grad_norm": 0.44019252824460353, |
| "learning_rate": 2.2237275160988186e-05, |
| "loss": 0.3795, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.5873417721518988, |
| "grad_norm": 0.4307945280172593, |
| "learning_rate": 2.2180863795301787e-05, |
| "loss": 0.3873, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.5913924050632913, |
| "grad_norm": 0.4334408935524527, |
| "learning_rate": 2.212443486523819e-05, |
| "loss": 0.4102, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.5954430379746833, |
| "grad_norm": 0.43041001926827577, |
| "learning_rate": 2.2067988825268243e-05, |
| "loss": 0.3908, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.599493670886076, |
| "grad_norm": 0.4465353167225165, |
| "learning_rate": 2.2011526130000596e-05, |
| "loss": 0.3861, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.6035443037974684, |
| "grad_norm": 0.4610918424282069, |
| "learning_rate": 2.1955047234178038e-05, |
| "loss": 0.3835, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.607594936708861, |
| "grad_norm": 0.45135454029604377, |
| "learning_rate": 2.1898552592673825e-05, |
| "loss": 0.3516, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.6116455696202534, |
| "grad_norm": 0.46353046285071625, |
| "learning_rate": 2.184204266048803e-05, |
| "loss": 0.3878, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.6156962025316455, |
| "grad_norm": 0.4962048029254238, |
| "learning_rate": 2.1785517892743887e-05, |
| "loss": 0.3785, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.619746835443038, |
| "grad_norm": 0.43484519494615126, |
| "learning_rate": 2.17289787446841e-05, |
| "loss": 0.3813, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.6237974683544305, |
| "grad_norm": 0.4318658787472834, |
| "learning_rate": 2.1672425671667198e-05, |
| "loss": 0.3803, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.6278481012658226, |
| "grad_norm": 0.4525706830681805, |
| "learning_rate": 2.161585912916385e-05, |
| "loss": 0.3906, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.631898734177215, |
| "grad_norm": 0.4870941088441526, |
| "learning_rate": 2.1559279572753214e-05, |
| "loss": 0.3563, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.6359493670886076, |
| "grad_norm": 0.4431208612601278, |
| "learning_rate": 2.1502687458119268e-05, |
| "loss": 0.3607, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.64, |
| "grad_norm": 0.44811316994325123, |
| "learning_rate": 2.1446083241047116e-05, |
| "loss": 0.394, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.6440506329113926, |
| "grad_norm": 0.43132295851062724, |
| "learning_rate": 2.1389467377419333e-05, |
| "loss": 0.3727, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.6481012658227847, |
| "grad_norm": 0.4180888264034889, |
| "learning_rate": 2.133284032321232e-05, |
| "loss": 0.3649, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.6521518987341772, |
| "grad_norm": 0.44388053368540653, |
| "learning_rate": 2.1276202534492566e-05, |
| "loss": 0.3828, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.6562025316455697, |
| "grad_norm": 0.4189860602301019, |
| "learning_rate": 2.121955446741306e-05, |
| "loss": 0.3823, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.660253164556962, |
| "grad_norm": 0.41351649179546596, |
| "learning_rate": 2.1162896578209517e-05, |
| "loss": 0.3647, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.6643037974683543, |
| "grad_norm": 0.43787112635667697, |
| "learning_rate": 2.1106229323196813e-05, |
| "loss": 0.3754, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.668354430379747, |
| "grad_norm": 0.42432578579784197, |
| "learning_rate": 2.1049553158765214e-05, |
| "loss": 0.3815, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.6724050632911394, |
| "grad_norm": 0.4160717414871724, |
| "learning_rate": 2.0992868541376764e-05, |
| "loss": 0.3784, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.676455696202532, |
| "grad_norm": 0.4337575403060488, |
| "learning_rate": 2.093617592756158e-05, |
| "loss": 0.3848, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.680506329113924, |
| "grad_norm": 0.4516733768234508, |
| "learning_rate": 2.0879475773914167e-05, |
| "loss": 0.3745, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.6845569620253165, |
| "grad_norm": 0.4137848502900846, |
| "learning_rate": 2.082276853708978e-05, |
| "loss": 0.3717, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.688607594936709, |
| "grad_norm": 0.44073957150031523, |
| "learning_rate": 2.076605467380071e-05, |
| "loss": 0.3954, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.692658227848101, |
| "grad_norm": 0.4237346187227689, |
| "learning_rate": 2.0709334640812613e-05, |
| "loss": 0.3879, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.6967088607594936, |
| "grad_norm": 0.43325847194937095, |
| "learning_rate": 2.0652608894940824e-05, |
| "loss": 0.3723, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.700759493670886, |
| "grad_norm": 0.4506545001455313, |
| "learning_rate": 2.0595877893046722e-05, |
| "loss": 0.4073, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.7048101265822786, |
| "grad_norm": 0.4347006099588823, |
| "learning_rate": 2.0539142092033985e-05, |
| "loss": 0.3751, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.708860759493671, |
| "grad_norm": 0.4345505164884983, |
| "learning_rate": 2.048240194884496e-05, |
| "loss": 0.3805, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.712911392405063, |
| "grad_norm": 0.4452345094901938, |
| "learning_rate": 2.042565792045695e-05, |
| "loss": 0.3733, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.7169620253164557, |
| "grad_norm": 0.4175318294031625, |
| "learning_rate": 2.036891046387857e-05, |
| "loss": 0.3744, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.721012658227848, |
| "grad_norm": 0.45481399860615157, |
| "learning_rate": 2.0312160036146036e-05, |
| "loss": 0.3667, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.7250632911392403, |
| "grad_norm": 0.4505795724352439, |
| "learning_rate": 2.025540709431948e-05, |
| "loss": 0.3747, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.729113924050633, |
| "grad_norm": 0.47412129027342137, |
| "learning_rate": 2.0198652095479298e-05, |
| "loss": 0.3912, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.7331645569620253, |
| "grad_norm": 0.4350052243907834, |
| "learning_rate": 2.014189549672245e-05, |
| "loss": 0.3907, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.737215189873418, |
| "grad_norm": 0.4151741748886296, |
| "learning_rate": 2.0085137755158776e-05, |
| "loss": 0.3729, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.7412658227848103, |
| "grad_norm": 0.4678484830651789, |
| "learning_rate": 2.0028379327907327e-05, |
| "loss": 0.3851, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.7453164556962024, |
| "grad_norm": 0.41427139376165145, |
| "learning_rate": 1.9971620672092676e-05, |
| "loss": 0.3894, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.749367088607595, |
| "grad_norm": 0.455668902633862, |
| "learning_rate": 1.991486224484123e-05, |
| "loss": 0.3765, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.7534177215189874, |
| "grad_norm": 0.4536903122836467, |
| "learning_rate": 1.985810450327756e-05, |
| "loss": 0.3776, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.7574683544303795, |
| "grad_norm": 0.42428346364618863, |
| "learning_rate": 1.9801347904520706e-05, |
| "loss": 0.3532, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.761518987341772, |
| "grad_norm": 0.4882832960072155, |
| "learning_rate": 1.974459290568053e-05, |
| "loss": 0.3896, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.7655696202531646, |
| "grad_norm": 0.42473995627750316, |
| "learning_rate": 1.968783996385397e-05, |
| "loss": 0.3573, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.769620253164557, |
| "grad_norm": 0.444775989082852, |
| "learning_rate": 1.963108953612143e-05, |
| "loss": 0.3733, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.7736708860759496, |
| "grad_norm": 0.43376639236258485, |
| "learning_rate": 1.9574342079543056e-05, |
| "loss": 0.3872, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.7777215189873417, |
| "grad_norm": 0.4309873891508825, |
| "learning_rate": 1.9517598051155046e-05, |
| "loss": 0.401, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.781772151898734, |
| "grad_norm": 0.43060396458198724, |
| "learning_rate": 1.9460857907966025e-05, |
| "loss": 0.3729, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.7858227848101267, |
| "grad_norm": 0.4074422641695221, |
| "learning_rate": 1.9404122106953285e-05, |
| "loss": 0.4042, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.7898734177215188, |
| "grad_norm": 0.4622599057492962, |
| "learning_rate": 1.9347391105059176e-05, |
| "loss": 0.3548, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.7939240506329113, |
| "grad_norm": 0.4432671873312026, |
| "learning_rate": 1.92906653591874e-05, |
| "loss": 0.3905, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.797974683544304, |
| "grad_norm": 0.44388161350095556, |
| "learning_rate": 1.9233945326199295e-05, |
| "loss": 0.4002, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.8020253164556963, |
| "grad_norm": 0.42372986857950207, |
| "learning_rate": 1.917723146291022e-05, |
| "loss": 0.3845, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.806075949367089, |
| "grad_norm": 0.4531028269740001, |
| "learning_rate": 1.912052422608584e-05, |
| "loss": 0.3663, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.810126582278481, |
| "grad_norm": 0.46479959256672476, |
| "learning_rate": 1.9063824072438428e-05, |
| "loss": 0.3854, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.8141772151898734, |
| "grad_norm": 0.4458617973886558, |
| "learning_rate": 1.9007131458623246e-05, |
| "loss": 0.3689, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.818227848101266, |
| "grad_norm": 0.4258610489316151, |
| "learning_rate": 1.895044684123479e-05, |
| "loss": 0.3773, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.822278481012658, |
| "grad_norm": 0.4215890754563039, |
| "learning_rate": 1.8893770676803194e-05, |
| "loss": 0.3795, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.8263291139240505, |
| "grad_norm": 0.43033738558357226, |
| "learning_rate": 1.8837103421790486e-05, |
| "loss": 0.3872, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.830379746835443, |
| "grad_norm": 0.4358432443333096, |
| "learning_rate": 1.8780445532586952e-05, |
| "loss": 0.3813, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.8344303797468355, |
| "grad_norm": 0.41503243846559007, |
| "learning_rate": 1.872379746550743e-05, |
| "loss": 0.3807, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.838481012658228, |
| "grad_norm": 0.43455856275717814, |
| "learning_rate": 1.866715967678769e-05, |
| "loss": 0.3827, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.84253164556962, |
| "grad_norm": 0.4376556953562068, |
| "learning_rate": 1.861053262258067e-05, |
| "loss": 0.381, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.8465822784810126, |
| "grad_norm": 0.4580047796383711, |
| "learning_rate": 1.8553916758952897e-05, |
| "loss": 0.3635, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.850632911392405, |
| "grad_norm": 0.4256932949079572, |
| "learning_rate": 1.8497312541880735e-05, |
| "loss": 0.3734, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.8546835443037972, |
| "grad_norm": 0.4253751549408488, |
| "learning_rate": 1.8440720427246786e-05, |
| "loss": 0.3869, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.8587341772151897, |
| "grad_norm": 0.4264394596961722, |
| "learning_rate": 1.8384140870836157e-05, |
| "loss": 0.3593, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.8627848101265823, |
| "grad_norm": 0.43259058321159727, |
| "learning_rate": 1.8327574328332806e-05, |
| "loss": 0.3889, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.8668354430379748, |
| "grad_norm": 0.41393510476507794, |
| "learning_rate": 1.8271021255315906e-05, |
| "loss": 0.375, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.8708860759493673, |
| "grad_norm": 0.4085456133995289, |
| "learning_rate": 1.8214482107256117e-05, |
| "loss": 0.3894, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.8749367088607594, |
| "grad_norm": 0.40762588158841584, |
| "learning_rate": 1.8157957339511968e-05, |
| "loss": 0.365, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.878987341772152, |
| "grad_norm": 0.42410295178513735, |
| "learning_rate": 1.8101447407326182e-05, |
| "loss": 0.3887, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.8830379746835444, |
| "grad_norm": 0.4333907735116901, |
| "learning_rate": 1.8044952765821966e-05, |
| "loss": 0.376, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.8870886075949365, |
| "grad_norm": 0.41197919446286635, |
| "learning_rate": 1.7988473869999407e-05, |
| "loss": 0.3796, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.891139240506329, |
| "grad_norm": 0.417992300147192, |
| "learning_rate": 1.7932011174731764e-05, |
| "loss": 0.3753, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.8951898734177215, |
| "grad_norm": 0.4237499490481243, |
| "learning_rate": 1.7875565134761817e-05, |
| "loss": 0.384, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.899240506329114, |
| "grad_norm": 0.4237777714755167, |
| "learning_rate": 1.7819136204698226e-05, |
| "loss": 0.3976, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.9032911392405065, |
| "grad_norm": 0.43233185344798275, |
| "learning_rate": 1.776272483901182e-05, |
| "loss": 0.3657, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.9073417721518986, |
| "grad_norm": 0.436681855840781, |
| "learning_rate": 1.7706331492031995e-05, |
| "loss": 0.3807, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.911392405063291, |
| "grad_norm": 0.4237179563310809, |
| "learning_rate": 1.764995661794301e-05, |
| "loss": 0.3851, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.9154430379746836, |
| "grad_norm": 0.4234437140811551, |
| "learning_rate": 1.759360067078035e-05, |
| "loss": 0.3783, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.9194936708860757, |
| "grad_norm": 0.42866920061086083, |
| "learning_rate": 1.7537264104427064e-05, |
| "loss": 0.4017, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.923544303797468, |
| "grad_norm": 0.42720649161521274, |
| "learning_rate": 1.748094737261012e-05, |
| "loss": 0.3911, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.9275949367088607, |
| "grad_norm": 0.4445389561091741, |
| "learning_rate": 1.7424650928896726e-05, |
| "loss": 0.3648, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.9316455696202532, |
| "grad_norm": 0.4468194323832039, |
| "learning_rate": 1.7368375226690712e-05, |
| "loss": 0.3729, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.9356962025316458, |
| "grad_norm": 0.4252248926456682, |
| "learning_rate": 1.731212071922883e-05, |
| "loss": 0.3912, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.939746835443038, |
| "grad_norm": 0.41473146055442733, |
| "learning_rate": 1.7255887859577156e-05, |
| "loss": 0.3844, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.9437974683544303, |
| "grad_norm": 0.4404499221556875, |
| "learning_rate": 1.7199677100627427e-05, |
| "loss": 0.3623, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.947848101265823, |
| "grad_norm": 0.4150515447859639, |
| "learning_rate": 1.7143488895093343e-05, |
| "loss": 0.3983, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.951898734177215, |
| "grad_norm": 0.4282595764545946, |
| "learning_rate": 1.7087323695506994e-05, |
| "loss": 0.3729, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.9559493670886074, |
| "grad_norm": 0.4233086568343235, |
| "learning_rate": 1.7031181954215194e-05, |
| "loss": 0.3797, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.96, |
| "grad_norm": 0.42749779588620845, |
| "learning_rate": 1.6975064123375788e-05, |
| "loss": 0.3715, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.9640506329113925, |
| "grad_norm": 0.4341048039222509, |
| "learning_rate": 1.6918970654954084e-05, |
| "loss": 0.4073, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.968101265822785, |
| "grad_norm": 0.4405692629220085, |
| "learning_rate": 1.686290200071915e-05, |
| "loss": 0.3535, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.972151898734177, |
| "grad_norm": 0.41819834768473063, |
| "learning_rate": 1.6806858612240234e-05, |
| "loss": 0.3645, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.9762025316455696, |
| "grad_norm": 0.4482313628058863, |
| "learning_rate": 1.6750840940883078e-05, |
| "loss": 0.366, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.980253164556962, |
| "grad_norm": 0.45555348584764255, |
| "learning_rate": 1.6694849437806305e-05, |
| "loss": 0.3763, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.984303797468354, |
| "grad_norm": 0.4118242103030459, |
| "learning_rate": 1.663888455395778e-05, |
| "loss": 0.3731, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.9883544303797467, |
| "grad_norm": 0.4198377285299964, |
| "learning_rate": 1.6582946740070995e-05, |
| "loss": 0.367, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.992405063291139, |
| "grad_norm": 0.454284285685915, |
| "learning_rate": 1.6527036446661396e-05, |
| "loss": 0.3641, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.9964556962025317, |
| "grad_norm": 0.3935147644678996, |
| "learning_rate": 1.6471154124022818e-05, |
| "loss": 0.3786, |
| "step": 739 |
| }, |
| { |
| "epoch": 3.002025316455696, |
| "grad_norm": 0.49827090041795863, |
| "learning_rate": 1.6415300222223788e-05, |
| "loss": 0.3084, |
| "step": 740 |
| }, |
| { |
| "epoch": 3.0060759493670886, |
| "grad_norm": 0.5570821521121857, |
| "learning_rate": 1.6359475191103958e-05, |
| "loss": 0.2607, |
| "step": 741 |
| }, |
| { |
| "epoch": 3.010126582278481, |
| "grad_norm": 0.429446041878255, |
| "learning_rate": 1.6303679480270466e-05, |
| "loss": 0.239, |
| "step": 742 |
| }, |
| { |
| "epoch": 3.0141772151898736, |
| "grad_norm": 0.6863850206090812, |
| "learning_rate": 1.624791353909428e-05, |
| "loss": 0.2554, |
| "step": 743 |
| }, |
| { |
| "epoch": 3.0182278481012657, |
| "grad_norm": 0.6784381748678522, |
| "learning_rate": 1.619217781670663e-05, |
| "loss": 0.2412, |
| "step": 744 |
| }, |
| { |
| "epoch": 3.022278481012658, |
| "grad_norm": 0.4780430219569316, |
| "learning_rate": 1.6136472761995373e-05, |
| "loss": 0.2628, |
| "step": 745 |
| }, |
| { |
| "epoch": 3.0263291139240507, |
| "grad_norm": 0.5535168710898904, |
| "learning_rate": 1.608079882360134e-05, |
| "loss": 0.2722, |
| "step": 746 |
| }, |
| { |
| "epoch": 3.030379746835443, |
| "grad_norm": 0.5130995015330254, |
| "learning_rate": 1.60251564499148e-05, |
| "loss": 0.257, |
| "step": 747 |
| }, |
| { |
| "epoch": 3.0344303797468353, |
| "grad_norm": 0.4799694025465978, |
| "learning_rate": 1.596954608907176e-05, |
| "loss": 0.2284, |
| "step": 748 |
| }, |
| { |
| "epoch": 3.038481012658228, |
| "grad_norm": 0.4104062031923791, |
| "learning_rate": 1.591396818895043e-05, |
| "loss": 0.2481, |
| "step": 749 |
| }, |
| { |
| "epoch": 3.0425316455696203, |
| "grad_norm": 0.4751658307608332, |
| "learning_rate": 1.585842319716759e-05, |
| "loss": 0.2444, |
| "step": 750 |
| }, |
| { |
| "epoch": 3.046582278481013, |
| "grad_norm": 0.4930817371771081, |
| "learning_rate": 1.5802911561074944e-05, |
| "loss": 0.2331, |
| "step": 751 |
| }, |
| { |
| "epoch": 3.050632911392405, |
| "grad_norm": 0.42216540038906814, |
| "learning_rate": 1.5747433727755595e-05, |
| "loss": 0.2197, |
| "step": 752 |
| }, |
| { |
| "epoch": 3.0546835443037974, |
| "grad_norm": 0.423960657645564, |
| "learning_rate": 1.5691990144020376e-05, |
| "loss": 0.243, |
| "step": 753 |
| }, |
| { |
| "epoch": 3.05873417721519, |
| "grad_norm": 0.40838388079220367, |
| "learning_rate": 1.5636581256404297e-05, |
| "loss": 0.2262, |
| "step": 754 |
| }, |
| { |
| "epoch": 3.0627848101265824, |
| "grad_norm": 0.428046009764779, |
| "learning_rate": 1.558120751116291e-05, |
| "loss": 0.2527, |
| "step": 755 |
| }, |
| { |
| "epoch": 3.0668354430379745, |
| "grad_norm": 0.4612745385014847, |
| "learning_rate": 1.552586935426876e-05, |
| "loss": 0.2525, |
| "step": 756 |
| }, |
| { |
| "epoch": 3.070886075949367, |
| "grad_norm": 0.3765483652362484, |
| "learning_rate": 1.547056723140774e-05, |
| "loss": 0.2173, |
| "step": 757 |
| }, |
| { |
| "epoch": 3.0749367088607595, |
| "grad_norm": 0.4059287430934035, |
| "learning_rate": 1.5415301587975565e-05, |
| "loss": 0.2577, |
| "step": 758 |
| }, |
| { |
| "epoch": 3.078987341772152, |
| "grad_norm": 0.43997178922860963, |
| "learning_rate": 1.536007286907411e-05, |
| "loss": 0.2465, |
| "step": 759 |
| }, |
| { |
| "epoch": 3.083037974683544, |
| "grad_norm": 0.36835490308381386, |
| "learning_rate": 1.5304881519507896e-05, |
| "loss": 0.24, |
| "step": 760 |
| }, |
| { |
| "epoch": 3.0870886075949366, |
| "grad_norm": 0.39442759628397567, |
| "learning_rate": 1.5249727983780453e-05, |
| "loss": 0.2585, |
| "step": 761 |
| }, |
| { |
| "epoch": 3.091139240506329, |
| "grad_norm": 0.3652795422889439, |
| "learning_rate": 1.5194612706090786e-05, |
| "loss": 0.2505, |
| "step": 762 |
| }, |
| { |
| "epoch": 3.0951898734177217, |
| "grad_norm": 0.3698912972267679, |
| "learning_rate": 1.5139536130329771e-05, |
| "loss": 0.2501, |
| "step": 763 |
| }, |
| { |
| "epoch": 3.0992405063291137, |
| "grad_norm": 0.38244172986412495, |
| "learning_rate": 1.508449870007656e-05, |
| "loss": 0.245, |
| "step": 764 |
| }, |
| { |
| "epoch": 3.1032911392405063, |
| "grad_norm": 0.36616223124654396, |
| "learning_rate": 1.5029500858595056e-05, |
| "loss": 0.2507, |
| "step": 765 |
| }, |
| { |
| "epoch": 3.1073417721518988, |
| "grad_norm": 0.3660051274502978, |
| "learning_rate": 1.4974543048830328e-05, |
| "loss": 0.2534, |
| "step": 766 |
| }, |
| { |
| "epoch": 3.1113924050632913, |
| "grad_norm": 0.394140529317022, |
| "learning_rate": 1.4919625713405e-05, |
| "loss": 0.2425, |
| "step": 767 |
| }, |
| { |
| "epoch": 3.1154430379746834, |
| "grad_norm": 0.38088824982053454, |
| "learning_rate": 1.4864749294615756e-05, |
| "loss": 0.2607, |
| "step": 768 |
| }, |
| { |
| "epoch": 3.119493670886076, |
| "grad_norm": 0.38202506096924077, |
| "learning_rate": 1.4809914234429716e-05, |
| "loss": 0.2439, |
| "step": 769 |
| }, |
| { |
| "epoch": 3.1235443037974684, |
| "grad_norm": 0.37854017647136284, |
| "learning_rate": 1.4755120974480923e-05, |
| "loss": 0.224, |
| "step": 770 |
| }, |
| { |
| "epoch": 3.127594936708861, |
| "grad_norm": 0.3786524568135752, |
| "learning_rate": 1.4700369956066771e-05, |
| "loss": 0.2188, |
| "step": 771 |
| }, |
| { |
| "epoch": 3.131645569620253, |
| "grad_norm": 0.3706936019045695, |
| "learning_rate": 1.4645661620144413e-05, |
| "loss": 0.2526, |
| "step": 772 |
| }, |
| { |
| "epoch": 3.1356962025316455, |
| "grad_norm": 0.37029481830414046, |
| "learning_rate": 1.4590996407327284e-05, |
| "loss": 0.2082, |
| "step": 773 |
| }, |
| { |
| "epoch": 3.139746835443038, |
| "grad_norm": 0.3847368165382648, |
| "learning_rate": 1.4536374757881487e-05, |
| "loss": 0.2615, |
| "step": 774 |
| }, |
| { |
| "epoch": 3.1437974683544305, |
| "grad_norm": 0.36486334217896205, |
| "learning_rate": 1.4481797111722271e-05, |
| "loss": 0.2509, |
| "step": 775 |
| }, |
| { |
| "epoch": 3.1478481012658226, |
| "grad_norm": 0.3903652008149532, |
| "learning_rate": 1.4427263908410507e-05, |
| "loss": 0.2233, |
| "step": 776 |
| }, |
| { |
| "epoch": 3.151898734177215, |
| "grad_norm": 0.38566131267964304, |
| "learning_rate": 1.4372775587149108e-05, |
| "loss": 0.2379, |
| "step": 777 |
| }, |
| { |
| "epoch": 3.1559493670886076, |
| "grad_norm": 0.36658630224169164, |
| "learning_rate": 1.4318332586779522e-05, |
| "loss": 0.249, |
| "step": 778 |
| }, |
| { |
| "epoch": 3.16, |
| "grad_norm": 0.37201407269135556, |
| "learning_rate": 1.4263935345778202e-05, |
| "loss": 0.246, |
| "step": 779 |
| }, |
| { |
| "epoch": 3.164050632911392, |
| "grad_norm": 0.3747484420571046, |
| "learning_rate": 1.420958430225303e-05, |
| "loss": 0.2409, |
| "step": 780 |
| }, |
| { |
| "epoch": 3.1681012658227847, |
| "grad_norm": 0.3710243007477183, |
| "learning_rate": 1.415527989393985e-05, |
| "loss": 0.2167, |
| "step": 781 |
| }, |
| { |
| "epoch": 3.1721518987341772, |
| "grad_norm": 0.3670259505783207, |
| "learning_rate": 1.410102255819891e-05, |
| "loss": 0.2211, |
| "step": 782 |
| }, |
| { |
| "epoch": 3.1762025316455698, |
| "grad_norm": 0.35972571227757055, |
| "learning_rate": 1.404681273201131e-05, |
| "loss": 0.238, |
| "step": 783 |
| }, |
| { |
| "epoch": 3.180253164556962, |
| "grad_norm": 0.36803586202105765, |
| "learning_rate": 1.399265085197556e-05, |
| "loss": 0.2685, |
| "step": 784 |
| }, |
| { |
| "epoch": 3.1843037974683543, |
| "grad_norm": 0.38740193641928755, |
| "learning_rate": 1.393853735430398e-05, |
| "loss": 0.2255, |
| "step": 785 |
| }, |
| { |
| "epoch": 3.188354430379747, |
| "grad_norm": 0.3511641639517541, |
| "learning_rate": 1.3884472674819246e-05, |
| "loss": 0.2631, |
| "step": 786 |
| }, |
| { |
| "epoch": 3.1924050632911394, |
| "grad_norm": 0.38777707845558546, |
| "learning_rate": 1.3830457248950864e-05, |
| "loss": 0.2291, |
| "step": 787 |
| }, |
| { |
| "epoch": 3.1964556962025314, |
| "grad_norm": 0.3857991049857386, |
| "learning_rate": 1.377649151173163e-05, |
| "loss": 0.2192, |
| "step": 788 |
| }, |
| { |
| "epoch": 3.200506329113924, |
| "grad_norm": 0.358426667909315, |
| "learning_rate": 1.3722575897794181e-05, |
| "loss": 0.2459, |
| "step": 789 |
| }, |
| { |
| "epoch": 3.2045569620253165, |
| "grad_norm": 0.35735821288829067, |
| "learning_rate": 1.3668710841367472e-05, |
| "loss": 0.2593, |
| "step": 790 |
| }, |
| { |
| "epoch": 3.208607594936709, |
| "grad_norm": 0.3882676679693686, |
| "learning_rate": 1.361489677627324e-05, |
| "loss": 0.2277, |
| "step": 791 |
| }, |
| { |
| "epoch": 3.212658227848101, |
| "grad_norm": 0.35170335939794645, |
| "learning_rate": 1.3561134135922585e-05, |
| "loss": 0.2474, |
| "step": 792 |
| }, |
| { |
| "epoch": 3.2167088607594936, |
| "grad_norm": 0.38078054002878947, |
| "learning_rate": 1.350742335331241e-05, |
| "loss": 0.2566, |
| "step": 793 |
| }, |
| { |
| "epoch": 3.220759493670886, |
| "grad_norm": 0.3864337954337756, |
| "learning_rate": 1.345376486102198e-05, |
| "loss": 0.2277, |
| "step": 794 |
| }, |
| { |
| "epoch": 3.2248101265822786, |
| "grad_norm": 0.3681622336990674, |
| "learning_rate": 1.3400159091209414e-05, |
| "loss": 0.2358, |
| "step": 795 |
| }, |
| { |
| "epoch": 3.2288607594936707, |
| "grad_norm": 0.36892047444651627, |
| "learning_rate": 1.3346606475608216e-05, |
| "loss": 0.2499, |
| "step": 796 |
| }, |
| { |
| "epoch": 3.232911392405063, |
| "grad_norm": 0.3528121705483557, |
| "learning_rate": 1.3293107445523781e-05, |
| "loss": 0.255, |
| "step": 797 |
| }, |
| { |
| "epoch": 3.2369620253164557, |
| "grad_norm": 0.381680591367992, |
| "learning_rate": 1.3239662431829949e-05, |
| "loss": 0.2448, |
| "step": 798 |
| }, |
| { |
| "epoch": 3.2410126582278482, |
| "grad_norm": 0.3556303132481771, |
| "learning_rate": 1.3186271864965509e-05, |
| "loss": 0.2408, |
| "step": 799 |
| }, |
| { |
| "epoch": 3.2450632911392403, |
| "grad_norm": 0.3502116470319604, |
| "learning_rate": 1.3132936174930756e-05, |
| "loss": 0.2561, |
| "step": 800 |
| }, |
| { |
| "epoch": 3.249113924050633, |
| "grad_norm": 0.36675412000222224, |
| "learning_rate": 1.3079655791283995e-05, |
| "loss": 0.2456, |
| "step": 801 |
| }, |
| { |
| "epoch": 3.2531645569620253, |
| "grad_norm": 0.3533155671997567, |
| "learning_rate": 1.3026431143138108e-05, |
| "loss": 0.2715, |
| "step": 802 |
| }, |
| { |
| "epoch": 3.257215189873418, |
| "grad_norm": 0.3780210097972922, |
| "learning_rate": 1.2973262659157114e-05, |
| "loss": 0.2342, |
| "step": 803 |
| }, |
| { |
| "epoch": 3.26126582278481, |
| "grad_norm": 0.376689364400828, |
| "learning_rate": 1.2920150767552651e-05, |
| "loss": 0.2026, |
| "step": 804 |
| }, |
| { |
| "epoch": 3.2653164556962024, |
| "grad_norm": 0.36953504816107097, |
| "learning_rate": 1.2867095896080607e-05, |
| "loss": 0.2204, |
| "step": 805 |
| }, |
| { |
| "epoch": 3.269367088607595, |
| "grad_norm": 0.3768747520061229, |
| "learning_rate": 1.2814098472037612e-05, |
| "loss": 0.2493, |
| "step": 806 |
| }, |
| { |
| "epoch": 3.2734177215189875, |
| "grad_norm": 0.37109349995224783, |
| "learning_rate": 1.276115892225764e-05, |
| "loss": 0.2242, |
| "step": 807 |
| }, |
| { |
| "epoch": 3.27746835443038, |
| "grad_norm": 0.36411455274601, |
| "learning_rate": 1.2708277673108555e-05, |
| "loss": 0.2252, |
| "step": 808 |
| }, |
| { |
| "epoch": 3.281518987341772, |
| "grad_norm": 0.3771874014619997, |
| "learning_rate": 1.2655455150488649e-05, |
| "loss": 0.2237, |
| "step": 809 |
| }, |
| { |
| "epoch": 3.2855696202531646, |
| "grad_norm": 0.36208146515091877, |
| "learning_rate": 1.2602691779823272e-05, |
| "loss": 0.2412, |
| "step": 810 |
| }, |
| { |
| "epoch": 3.289620253164557, |
| "grad_norm": 0.36977123661210365, |
| "learning_rate": 1.2549987986061355e-05, |
| "loss": 0.2575, |
| "step": 811 |
| }, |
| { |
| "epoch": 3.293670886075949, |
| "grad_norm": 0.36759092563536394, |
| "learning_rate": 1.2497344193672005e-05, |
| "loss": 0.2661, |
| "step": 812 |
| }, |
| { |
| "epoch": 3.2977215189873417, |
| "grad_norm": 0.37062543222407396, |
| "learning_rate": 1.2444760826641092e-05, |
| "loss": 0.2037, |
| "step": 813 |
| }, |
| { |
| "epoch": 3.301772151898734, |
| "grad_norm": 0.36328712913902295, |
| "learning_rate": 1.2392238308467817e-05, |
| "loss": 0.2479, |
| "step": 814 |
| }, |
| { |
| "epoch": 3.3058227848101267, |
| "grad_norm": 0.3627622436621377, |
| "learning_rate": 1.2339777062161326e-05, |
| "loss": 0.2337, |
| "step": 815 |
| }, |
| { |
| "epoch": 3.309873417721519, |
| "grad_norm": 0.37920378679420386, |
| "learning_rate": 1.2287377510237293e-05, |
| "loss": 0.2269, |
| "step": 816 |
| }, |
| { |
| "epoch": 3.3139240506329113, |
| "grad_norm": 0.35961396698889664, |
| "learning_rate": 1.2235040074714488e-05, |
| "loss": 0.2439, |
| "step": 817 |
| }, |
| { |
| "epoch": 3.317974683544304, |
| "grad_norm": 0.38551848660543353, |
| "learning_rate": 1.2182765177111434e-05, |
| "loss": 0.2316, |
| "step": 818 |
| }, |
| { |
| "epoch": 3.3220253164556963, |
| "grad_norm": 0.3690741723292421, |
| "learning_rate": 1.213055323844297e-05, |
| "loss": 0.2323, |
| "step": 819 |
| }, |
| { |
| "epoch": 3.3260759493670884, |
| "grad_norm": 0.34716669573764974, |
| "learning_rate": 1.2078404679216864e-05, |
| "loss": 0.2292, |
| "step": 820 |
| }, |
| { |
| "epoch": 3.330126582278481, |
| "grad_norm": 0.3789257664735025, |
| "learning_rate": 1.2026319919430458e-05, |
| "loss": 0.2402, |
| "step": 821 |
| }, |
| { |
| "epoch": 3.3341772151898734, |
| "grad_norm": 0.3510599250914695, |
| "learning_rate": 1.1974299378567227e-05, |
| "loss": 0.2587, |
| "step": 822 |
| }, |
| { |
| "epoch": 3.338227848101266, |
| "grad_norm": 0.35659092965831746, |
| "learning_rate": 1.1922343475593462e-05, |
| "loss": 0.231, |
| "step": 823 |
| }, |
| { |
| "epoch": 3.3422784810126585, |
| "grad_norm": 0.35904604514641647, |
| "learning_rate": 1.187045262895488e-05, |
| "loss": 0.2201, |
| "step": 824 |
| }, |
| { |
| "epoch": 3.3463291139240505, |
| "grad_norm": 0.3637628431596781, |
| "learning_rate": 1.1818627256573203e-05, |
| "loss": 0.2358, |
| "step": 825 |
| }, |
| { |
| "epoch": 3.350379746835443, |
| "grad_norm": 0.37560807050048783, |
| "learning_rate": 1.1766867775842864e-05, |
| "loss": 0.2406, |
| "step": 826 |
| }, |
| { |
| "epoch": 3.3544303797468356, |
| "grad_norm": 0.38209008943166844, |
| "learning_rate": 1.1715174603627615e-05, |
| "loss": 0.2585, |
| "step": 827 |
| }, |
| { |
| "epoch": 3.3584810126582276, |
| "grad_norm": 0.36220645456599354, |
| "learning_rate": 1.1663548156257147e-05, |
| "loss": 0.2431, |
| "step": 828 |
| }, |
| { |
| "epoch": 3.36253164556962, |
| "grad_norm": 0.38959651244596466, |
| "learning_rate": 1.161198884952377e-05, |
| "loss": 0.2281, |
| "step": 829 |
| }, |
| { |
| "epoch": 3.3665822784810127, |
| "grad_norm": 0.39037047594015756, |
| "learning_rate": 1.1560497098679056e-05, |
| "loss": 0.2405, |
| "step": 830 |
| }, |
| { |
| "epoch": 3.370632911392405, |
| "grad_norm": 0.35922969489390033, |
| "learning_rate": 1.1509073318430479e-05, |
| "loss": 0.2236, |
| "step": 831 |
| }, |
| { |
| "epoch": 3.3746835443037977, |
| "grad_norm": 0.3538140719378987, |
| "learning_rate": 1.1457717922938116e-05, |
| "loss": 0.2283, |
| "step": 832 |
| }, |
| { |
| "epoch": 3.3787341772151898, |
| "grad_norm": 0.36402598636316025, |
| "learning_rate": 1.1406431325811233e-05, |
| "loss": 0.2393, |
| "step": 833 |
| }, |
| { |
| "epoch": 3.3827848101265823, |
| "grad_norm": 0.35437169097639837, |
| "learning_rate": 1.135521394010506e-05, |
| "loss": 0.2498, |
| "step": 834 |
| }, |
| { |
| "epoch": 3.386835443037975, |
| "grad_norm": 0.3604116768061497, |
| "learning_rate": 1.1304066178317367e-05, |
| "loss": 0.2362, |
| "step": 835 |
| }, |
| { |
| "epoch": 3.390886075949367, |
| "grad_norm": 0.377009154334376, |
| "learning_rate": 1.1252988452385199e-05, |
| "loss": 0.2211, |
| "step": 836 |
| }, |
| { |
| "epoch": 3.3949367088607594, |
| "grad_norm": 0.34374054406045684, |
| "learning_rate": 1.1201981173681536e-05, |
| "loss": 0.2281, |
| "step": 837 |
| }, |
| { |
| "epoch": 3.398987341772152, |
| "grad_norm": 0.3554532796519636, |
| "learning_rate": 1.1151044753011991e-05, |
| "loss": 0.233, |
| "step": 838 |
| }, |
| { |
| "epoch": 3.4030379746835444, |
| "grad_norm": 0.35764987100966716, |
| "learning_rate": 1.1100179600611491e-05, |
| "loss": 0.2219, |
| "step": 839 |
| }, |
| { |
| "epoch": 3.407088607594937, |
| "grad_norm": 0.3455758710619347, |
| "learning_rate": 1.1049386126140985e-05, |
| "loss": 0.2363, |
| "step": 840 |
| }, |
| { |
| "epoch": 3.411139240506329, |
| "grad_norm": 0.349284225187927, |
| "learning_rate": 1.0998664738684128e-05, |
| "loss": 0.2718, |
| "step": 841 |
| }, |
| { |
| "epoch": 3.4151898734177215, |
| "grad_norm": 0.35980183063167676, |
| "learning_rate": 1.0948015846744e-05, |
| "loss": 0.2163, |
| "step": 842 |
| }, |
| { |
| "epoch": 3.419240506329114, |
| "grad_norm": 0.37094867506904483, |
| "learning_rate": 1.0897439858239832e-05, |
| "loss": 0.2331, |
| "step": 843 |
| }, |
| { |
| "epoch": 3.423291139240506, |
| "grad_norm": 0.3582364676165723, |
| "learning_rate": 1.0846937180503652e-05, |
| "loss": 0.2193, |
| "step": 844 |
| }, |
| { |
| "epoch": 3.4273417721518986, |
| "grad_norm": 0.3729203123919673, |
| "learning_rate": 1.0796508220277117e-05, |
| "loss": 0.2184, |
| "step": 845 |
| }, |
| { |
| "epoch": 3.431392405063291, |
| "grad_norm": 0.36148495442870365, |
| "learning_rate": 1.0746153383708107e-05, |
| "loss": 0.2248, |
| "step": 846 |
| }, |
| { |
| "epoch": 3.4354430379746836, |
| "grad_norm": 0.34899492813895694, |
| "learning_rate": 1.0695873076347579e-05, |
| "loss": 0.2271, |
| "step": 847 |
| }, |
| { |
| "epoch": 3.439493670886076, |
| "grad_norm": 0.3866207860041546, |
| "learning_rate": 1.0645667703146205e-05, |
| "loss": 0.2338, |
| "step": 848 |
| }, |
| { |
| "epoch": 3.4435443037974682, |
| "grad_norm": 0.35208718242690173, |
| "learning_rate": 1.0595537668451161e-05, |
| "loss": 0.2644, |
| "step": 849 |
| }, |
| { |
| "epoch": 3.4475949367088607, |
| "grad_norm": 0.3688733339720294, |
| "learning_rate": 1.0545483376002854e-05, |
| "loss": 0.2639, |
| "step": 850 |
| }, |
| { |
| "epoch": 3.4516455696202533, |
| "grad_norm": 0.39006940619178004, |
| "learning_rate": 1.0495505228931676e-05, |
| "loss": 0.245, |
| "step": 851 |
| }, |
| { |
| "epoch": 3.4556962025316453, |
| "grad_norm": 0.36208467910033526, |
| "learning_rate": 1.044560362975474e-05, |
| "loss": 0.2359, |
| "step": 852 |
| }, |
| { |
| "epoch": 3.459746835443038, |
| "grad_norm": 0.3585074137894943, |
| "learning_rate": 1.0395778980372695e-05, |
| "loss": 0.232, |
| "step": 853 |
| }, |
| { |
| "epoch": 3.4637974683544304, |
| "grad_norm": 0.34953078291827416, |
| "learning_rate": 1.0346031682066381e-05, |
| "loss": 0.2345, |
| "step": 854 |
| }, |
| { |
| "epoch": 3.467848101265823, |
| "grad_norm": 0.35119671542224906, |
| "learning_rate": 1.0296362135493724e-05, |
| "loss": 0.2305, |
| "step": 855 |
| }, |
| { |
| "epoch": 3.4718987341772154, |
| "grad_norm": 0.34126078220923317, |
| "learning_rate": 1.0246770740686422e-05, |
| "loss": 0.2374, |
| "step": 856 |
| }, |
| { |
| "epoch": 3.4759493670886075, |
| "grad_norm": 0.3602346415572256, |
| "learning_rate": 1.0197257897046743e-05, |
| "loss": 0.2517, |
| "step": 857 |
| }, |
| { |
| "epoch": 3.48, |
| "grad_norm": 0.3628681707636662, |
| "learning_rate": 1.014782400334433e-05, |
| "loss": 0.2221, |
| "step": 858 |
| }, |
| { |
| "epoch": 3.4840506329113925, |
| "grad_norm": 0.3614850107235767, |
| "learning_rate": 1.009846945771296e-05, |
| "loss": 0.2548, |
| "step": 859 |
| }, |
| { |
| "epoch": 3.4881012658227846, |
| "grad_norm": 0.35086595196033543, |
| "learning_rate": 1.0049194657647363e-05, |
| "loss": 0.2297, |
| "step": 860 |
| }, |
| { |
| "epoch": 3.492151898734177, |
| "grad_norm": 0.3464055571730444, |
| "learning_rate": 1.0000000000000006e-05, |
| "loss": 0.2509, |
| "step": 861 |
| }, |
| { |
| "epoch": 3.4962025316455696, |
| "grad_norm": 0.3649434227014736, |
| "learning_rate": 9.950885880977891e-06, |
| "loss": 0.2281, |
| "step": 862 |
| }, |
| { |
| "epoch": 3.500253164556962, |
| "grad_norm": 0.35418908098307056, |
| "learning_rate": 9.901852696139382e-06, |
| "loss": 0.2179, |
| "step": 863 |
| }, |
| { |
| "epoch": 3.5043037974683546, |
| "grad_norm": 0.34620612744415824, |
| "learning_rate": 9.852900840391027e-06, |
| "loss": 0.2446, |
| "step": 864 |
| }, |
| { |
| "epoch": 3.5083544303797467, |
| "grad_norm": 0.3640229960698763, |
| "learning_rate": 9.804030707984313e-06, |
| "loss": 0.2477, |
| "step": 865 |
| }, |
| { |
| "epoch": 3.512405063291139, |
| "grad_norm": 0.33831168874490586, |
| "learning_rate": 9.755242692512599e-06, |
| "loss": 0.2365, |
| "step": 866 |
| }, |
| { |
| "epoch": 3.5164556962025317, |
| "grad_norm": 0.3568429881108622, |
| "learning_rate": 9.70653718690782e-06, |
| "loss": 0.2421, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.520506329113924, |
| "grad_norm": 0.3426841687902959, |
| "learning_rate": 9.657914583437454e-06, |
| "loss": 0.2112, |
| "step": 868 |
| }, |
| { |
| "epoch": 3.5245569620253163, |
| "grad_norm": 0.37676760434834033, |
| "learning_rate": 9.609375273701246e-06, |
| "loss": 0.2425, |
| "step": 869 |
| }, |
| { |
| "epoch": 3.528607594936709, |
| "grad_norm": 0.3491725043330073, |
| "learning_rate": 9.560919648628133e-06, |
| "loss": 0.2343, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.5326582278481014, |
| "grad_norm": 0.33972042517973555, |
| "learning_rate": 9.512548098473047e-06, |
| "loss": 0.2327, |
| "step": 871 |
| }, |
| { |
| "epoch": 3.536708860759494, |
| "grad_norm": 0.35862015362549887, |
| "learning_rate": 9.464261012813825e-06, |
| "loss": 0.2563, |
| "step": 872 |
| }, |
| { |
| "epoch": 3.540759493670886, |
| "grad_norm": 0.36887694303662694, |
| "learning_rate": 9.416058780547987e-06, |
| "loss": 0.2481, |
| "step": 873 |
| }, |
| { |
| "epoch": 3.5448101265822785, |
| "grad_norm": 0.3668330577163821, |
| "learning_rate": 9.367941789889714e-06, |
| "loss": 0.2408, |
| "step": 874 |
| }, |
| { |
| "epoch": 3.548860759493671, |
| "grad_norm": 0.3629634501411095, |
| "learning_rate": 9.319910428366607e-06, |
| "loss": 0.2416, |
| "step": 875 |
| }, |
| { |
| "epoch": 3.552911392405063, |
| "grad_norm": 0.3693454561691696, |
| "learning_rate": 9.271965082816667e-06, |
| "loss": 0.2258, |
| "step": 876 |
| }, |
| { |
| "epoch": 3.5569620253164556, |
| "grad_norm": 0.373389323716455, |
| "learning_rate": 9.224106139385111e-06, |
| "loss": 0.2395, |
| "step": 877 |
| }, |
| { |
| "epoch": 3.561012658227848, |
| "grad_norm": 0.3439747632535296, |
| "learning_rate": 9.176333983521291e-06, |
| "loss": 0.2452, |
| "step": 878 |
| }, |
| { |
| "epoch": 3.5650632911392406, |
| "grad_norm": 0.3397306019977132, |
| "learning_rate": 9.12864899997558e-06, |
| "loss": 0.2517, |
| "step": 879 |
| }, |
| { |
| "epoch": 3.569113924050633, |
| "grad_norm": 0.3693913417517541, |
| "learning_rate": 9.08105157279628e-06, |
| "loss": 0.2329, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.573164556962025, |
| "grad_norm": 0.363712603886341, |
| "learning_rate": 9.03354208532653e-06, |
| "loss": 0.23, |
| "step": 881 |
| }, |
| { |
| "epoch": 3.5772151898734177, |
| "grad_norm": 0.336121707235696, |
| "learning_rate": 8.986120920201205e-06, |
| "loss": 0.2411, |
| "step": 882 |
| }, |
| { |
| "epoch": 3.58126582278481, |
| "grad_norm": 0.3663818484701192, |
| "learning_rate": 8.938788459343852e-06, |
| "loss": 0.2372, |
| "step": 883 |
| }, |
| { |
| "epoch": 3.5853164556962023, |
| "grad_norm": 0.3537359830424942, |
| "learning_rate": 8.8915450839636e-06, |
| "loss": 0.2256, |
| "step": 884 |
| }, |
| { |
| "epoch": 3.589367088607595, |
| "grad_norm": 0.3597961378945756, |
| "learning_rate": 8.844391174552116e-06, |
| "loss": 0.2363, |
| "step": 885 |
| }, |
| { |
| "epoch": 3.5934177215189873, |
| "grad_norm": 0.36728805904248596, |
| "learning_rate": 8.797327110880479e-06, |
| "loss": 0.2516, |
| "step": 886 |
| }, |
| { |
| "epoch": 3.59746835443038, |
| "grad_norm": 0.3466095816583572, |
| "learning_rate": 8.750353271996206e-06, |
| "loss": 0.2129, |
| "step": 887 |
| }, |
| { |
| "epoch": 3.6015189873417723, |
| "grad_norm": 0.3712152735615179, |
| "learning_rate": 8.703470036220132e-06, |
| "loss": 0.241, |
| "step": 888 |
| }, |
| { |
| "epoch": 3.6055696202531644, |
| "grad_norm": 0.35999645875697844, |
| "learning_rate": 8.656677781143394e-06, |
| "loss": 0.2548, |
| "step": 889 |
| }, |
| { |
| "epoch": 3.609620253164557, |
| "grad_norm": 0.35236787499382943, |
| "learning_rate": 8.609976883624377e-06, |
| "loss": 0.2393, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.6136708860759494, |
| "grad_norm": 0.3527265847875911, |
| "learning_rate": 8.563367719785698e-06, |
| "loss": 0.2383, |
| "step": 891 |
| }, |
| { |
| "epoch": 3.6177215189873415, |
| "grad_norm": 0.35194448694034725, |
| "learning_rate": 8.516850665011138e-06, |
| "loss": 0.2256, |
| "step": 892 |
| }, |
| { |
| "epoch": 3.621772151898734, |
| "grad_norm": 0.35652011175567366, |
| "learning_rate": 8.47042609394269e-06, |
| "loss": 0.2254, |
| "step": 893 |
| }, |
| { |
| "epoch": 3.6258227848101265, |
| "grad_norm": 0.35745218871946427, |
| "learning_rate": 8.424094380477432e-06, |
| "loss": 0.2301, |
| "step": 894 |
| }, |
| { |
| "epoch": 3.629873417721519, |
| "grad_norm": 0.34988123025335327, |
| "learning_rate": 8.37785589776465e-06, |
| "loss": 0.2433, |
| "step": 895 |
| }, |
| { |
| "epoch": 3.6339240506329116, |
| "grad_norm": 0.3611449461901992, |
| "learning_rate": 8.331711018202694e-06, |
| "loss": 0.2283, |
| "step": 896 |
| }, |
| { |
| "epoch": 3.6379746835443036, |
| "grad_norm": 0.34481087209785677, |
| "learning_rate": 8.285660113436104e-06, |
| "loss": 0.2273, |
| "step": 897 |
| }, |
| { |
| "epoch": 3.642025316455696, |
| "grad_norm": 0.3585916898842242, |
| "learning_rate": 8.239703554352527e-06, |
| "loss": 0.2299, |
| "step": 898 |
| }, |
| { |
| "epoch": 3.6460759493670887, |
| "grad_norm": 0.3479775294882408, |
| "learning_rate": 8.193841711079775e-06, |
| "loss": 0.2421, |
| "step": 899 |
| }, |
| { |
| "epoch": 3.6501265822784807, |
| "grad_norm": 0.33398542895731176, |
| "learning_rate": 8.148074952982828e-06, |
| "loss": 0.2277, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.6541772151898733, |
| "grad_norm": 0.34505947974399986, |
| "learning_rate": 8.102403648660859e-06, |
| "loss": 0.2082, |
| "step": 901 |
| }, |
| { |
| "epoch": 3.6582278481012658, |
| "grad_norm": 0.36199106304117956, |
| "learning_rate": 8.056828165944282e-06, |
| "loss": 0.2342, |
| "step": 902 |
| }, |
| { |
| "epoch": 3.6622784810126583, |
| "grad_norm": 0.3667399772816841, |
| "learning_rate": 8.011348871891762e-06, |
| "loss": 0.2346, |
| "step": 903 |
| }, |
| { |
| "epoch": 3.666329113924051, |
| "grad_norm": 0.3638523980170062, |
| "learning_rate": 7.965966132787287e-06, |
| "loss": 0.2318, |
| "step": 904 |
| }, |
| { |
| "epoch": 3.670379746835443, |
| "grad_norm": 0.34698310868114474, |
| "learning_rate": 7.920680314137189e-06, |
| "loss": 0.2103, |
| "step": 905 |
| }, |
| { |
| "epoch": 3.6744303797468354, |
| "grad_norm": 0.3478841033717775, |
| "learning_rate": 7.875491780667246e-06, |
| "loss": 0.2207, |
| "step": 906 |
| }, |
| { |
| "epoch": 3.678481012658228, |
| "grad_norm": 0.3558141420780378, |
| "learning_rate": 7.830400896319667e-06, |
| "loss": 0.2478, |
| "step": 907 |
| }, |
| { |
| "epoch": 3.68253164556962, |
| "grad_norm": 0.35842568657508944, |
| "learning_rate": 7.785408024250259e-06, |
| "loss": 0.2212, |
| "step": 908 |
| }, |
| { |
| "epoch": 3.6865822784810125, |
| "grad_norm": 0.3305307397998585, |
| "learning_rate": 7.74051352682542e-06, |
| "loss": 0.2338, |
| "step": 909 |
| }, |
| { |
| "epoch": 3.690632911392405, |
| "grad_norm": 0.34939598392627275, |
| "learning_rate": 7.695717765619257e-06, |
| "loss": 0.2324, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.6946835443037975, |
| "grad_norm": 0.3342825066081085, |
| "learning_rate": 7.651021101410673e-06, |
| "loss": 0.2399, |
| "step": 911 |
| }, |
| { |
| "epoch": 3.69873417721519, |
| "grad_norm": 0.3397052758060018, |
| "learning_rate": 7.606423894180464e-06, |
| "loss": 0.2228, |
| "step": 912 |
| }, |
| { |
| "epoch": 3.702784810126582, |
| "grad_norm": 0.3568688329698101, |
| "learning_rate": 7.56192650310839e-06, |
| "loss": 0.2404, |
| "step": 913 |
| }, |
| { |
| "epoch": 3.7068354430379746, |
| "grad_norm": 0.35631057186988235, |
| "learning_rate": 7.517529286570349e-06, |
| "loss": 0.2248, |
| "step": 914 |
| }, |
| { |
| "epoch": 3.710886075949367, |
| "grad_norm": 0.34118608534376116, |
| "learning_rate": 7.473232602135387e-06, |
| "loss": 0.2454, |
| "step": 915 |
| }, |
| { |
| "epoch": 3.714936708860759, |
| "grad_norm": 0.34470546029797716, |
| "learning_rate": 7.429036806562935e-06, |
| "loss": 0.2688, |
| "step": 916 |
| }, |
| { |
| "epoch": 3.7189873417721517, |
| "grad_norm": 0.3507330238439248, |
| "learning_rate": 7.3849422557998455e-06, |
| "loss": 0.2352, |
| "step": 917 |
| }, |
| { |
| "epoch": 3.7230379746835442, |
| "grad_norm": 0.3456200597246797, |
| "learning_rate": 7.340949304977567e-06, |
| "loss": 0.2564, |
| "step": 918 |
| }, |
| { |
| "epoch": 3.7270886075949368, |
| "grad_norm": 0.34730397575718125, |
| "learning_rate": 7.297058308409282e-06, |
| "loss": 0.25, |
| "step": 919 |
| }, |
| { |
| "epoch": 3.7311392405063293, |
| "grad_norm": 0.3459425109588164, |
| "learning_rate": 7.25326961958704e-06, |
| "loss": 0.2291, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.7351898734177214, |
| "grad_norm": 0.3425306458152667, |
| "learning_rate": 7.209583591178921e-06, |
| "loss": 0.2286, |
| "step": 921 |
| }, |
| { |
| "epoch": 3.739240506329114, |
| "grad_norm": 0.3469946966278309, |
| "learning_rate": 7.1660005750261925e-06, |
| "loss": 0.2352, |
| "step": 922 |
| }, |
| { |
| "epoch": 3.7432911392405064, |
| "grad_norm": 0.34883864740216525, |
| "learning_rate": 7.1225209221404765e-06, |
| "loss": 0.2263, |
| "step": 923 |
| }, |
| { |
| "epoch": 3.747341772151899, |
| "grad_norm": 0.3434824724033804, |
| "learning_rate": 7.079144982700909e-06, |
| "loss": 0.2099, |
| "step": 924 |
| }, |
| { |
| "epoch": 3.7513924050632914, |
| "grad_norm": 0.3453823541966748, |
| "learning_rate": 7.0358731060513695e-06, |
| "loss": 0.2368, |
| "step": 925 |
| }, |
| { |
| "epoch": 3.7554430379746835, |
| "grad_norm": 0.34795476082564203, |
| "learning_rate": 6.99270564069757e-06, |
| "loss": 0.2253, |
| "step": 926 |
| }, |
| { |
| "epoch": 3.759493670886076, |
| "grad_norm": 0.3409587501704064, |
| "learning_rate": 6.949642934304375e-06, |
| "loss": 0.2529, |
| "step": 927 |
| }, |
| { |
| "epoch": 3.7635443037974685, |
| "grad_norm": 0.3456507537266118, |
| "learning_rate": 6.906685333692871e-06, |
| "loss": 0.258, |
| "step": 928 |
| }, |
| { |
| "epoch": 3.7675949367088606, |
| "grad_norm": 0.34136787067522867, |
| "learning_rate": 6.86383318483769e-06, |
| "loss": 0.236, |
| "step": 929 |
| }, |
| { |
| "epoch": 3.771645569620253, |
| "grad_norm": 0.3670288459370995, |
| "learning_rate": 6.821086832864139e-06, |
| "loss": 0.2283, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.7756962025316456, |
| "grad_norm": 0.3510630501621811, |
| "learning_rate": 6.77844662204546e-06, |
| "loss": 0.2331, |
| "step": 931 |
| }, |
| { |
| "epoch": 3.779746835443038, |
| "grad_norm": 0.3467680405444758, |
| "learning_rate": 6.7359128958000455e-06, |
| "loss": 0.2472, |
| "step": 932 |
| }, |
| { |
| "epoch": 3.7837974683544306, |
| "grad_norm": 0.35306347901924673, |
| "learning_rate": 6.693485996688695e-06, |
| "loss": 0.2154, |
| "step": 933 |
| }, |
| { |
| "epoch": 3.7878481012658227, |
| "grad_norm": 0.3594580981225422, |
| "learning_rate": 6.651166266411801e-06, |
| "loss": 0.2565, |
| "step": 934 |
| }, |
| { |
| "epoch": 3.7918987341772152, |
| "grad_norm": 0.3361545736413372, |
| "learning_rate": 6.6089540458066725e-06, |
| "loss": 0.2059, |
| "step": 935 |
| }, |
| { |
| "epoch": 3.7959493670886078, |
| "grad_norm": 0.3504317044612151, |
| "learning_rate": 6.566849674844711e-06, |
| "loss": 0.2379, |
| "step": 936 |
| }, |
| { |
| "epoch": 3.8, |
| "grad_norm": 0.34350162172918586, |
| "learning_rate": 6.524853492628747e-06, |
| "loss": 0.2459, |
| "step": 937 |
| }, |
| { |
| "epoch": 3.8040506329113923, |
| "grad_norm": 0.3466120309300032, |
| "learning_rate": 6.4829658373902536e-06, |
| "loss": 0.2424, |
| "step": 938 |
| }, |
| { |
| "epoch": 3.808101265822785, |
| "grad_norm": 0.3440131188508643, |
| "learning_rate": 6.441187046486648e-06, |
| "loss": 0.2345, |
| "step": 939 |
| }, |
| { |
| "epoch": 3.8121518987341774, |
| "grad_norm": 0.3627048790885728, |
| "learning_rate": 6.399517456398567e-06, |
| "loss": 0.2216, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.81620253164557, |
| "grad_norm": 0.3221543157438691, |
| "learning_rate": 6.357957402727164e-06, |
| "loss": 0.2481, |
| "step": 941 |
| }, |
| { |
| "epoch": 3.820253164556962, |
| "grad_norm": 0.3419418445644442, |
| "learning_rate": 6.316507220191395e-06, |
| "loss": 0.2334, |
| "step": 942 |
| }, |
| { |
| "epoch": 3.8243037974683545, |
| "grad_norm": 0.3544621594471216, |
| "learning_rate": 6.275167242625331e-06, |
| "loss": 0.2305, |
| "step": 943 |
| }, |
| { |
| "epoch": 3.828354430379747, |
| "grad_norm": 0.33628262143393134, |
| "learning_rate": 6.233937802975471e-06, |
| "loss": 0.2445, |
| "step": 944 |
| }, |
| { |
| "epoch": 3.832405063291139, |
| "grad_norm": 0.3393491946497233, |
| "learning_rate": 6.192819233298046e-06, |
| "loss": 0.1919, |
| "step": 945 |
| }, |
| { |
| "epoch": 3.8364556962025316, |
| "grad_norm": 0.33537485137577694, |
| "learning_rate": 6.151811864756383e-06, |
| "loss": 0.2129, |
| "step": 946 |
| }, |
| { |
| "epoch": 3.840506329113924, |
| "grad_norm": 0.36651532303584006, |
| "learning_rate": 6.1109160276181655e-06, |
| "loss": 0.2182, |
| "step": 947 |
| }, |
| { |
| "epoch": 3.8445569620253166, |
| "grad_norm": 0.3550475917497399, |
| "learning_rate": 6.070132051252868e-06, |
| "loss": 0.2269, |
| "step": 948 |
| }, |
| { |
| "epoch": 3.848607594936709, |
| "grad_norm": 0.3312980988315202, |
| "learning_rate": 6.0294602641290034e-06, |
| "loss": 0.2595, |
| "step": 949 |
| }, |
| { |
| "epoch": 3.852658227848101, |
| "grad_norm": 0.3307025436009807, |
| "learning_rate": 5.988900993811575e-06, |
| "loss": 0.2543, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.8567088607594937, |
| "grad_norm": 0.3518762551545779, |
| "learning_rate": 5.948454566959363e-06, |
| "loss": 0.2346, |
| "step": 951 |
| }, |
| { |
| "epoch": 3.8607594936708862, |
| "grad_norm": 0.3410730086064957, |
| "learning_rate": 5.908121309322328e-06, |
| "loss": 0.2267, |
| "step": 952 |
| }, |
| { |
| "epoch": 3.8648101265822783, |
| "grad_norm": 0.33778789243239876, |
| "learning_rate": 5.867901545738976e-06, |
| "loss": 0.2542, |
| "step": 953 |
| }, |
| { |
| "epoch": 3.868860759493671, |
| "grad_norm": 0.35766976840277154, |
| "learning_rate": 5.827795600133774e-06, |
| "loss": 0.2325, |
| "step": 954 |
| }, |
| { |
| "epoch": 3.8729113924050633, |
| "grad_norm": 0.3494362397797124, |
| "learning_rate": 5.787803795514466e-06, |
| "loss": 0.2326, |
| "step": 955 |
| }, |
| { |
| "epoch": 3.876962025316456, |
| "grad_norm": 0.354874005693334, |
| "learning_rate": 5.747926453969576e-06, |
| "loss": 0.2528, |
| "step": 956 |
| }, |
| { |
| "epoch": 3.8810126582278484, |
| "grad_norm": 0.34066932371021597, |
| "learning_rate": 5.708163896665708e-06, |
| "loss": 0.2434, |
| "step": 957 |
| }, |
| { |
| "epoch": 3.8850632911392404, |
| "grad_norm": 0.33571325618613684, |
| "learning_rate": 5.668516443845047e-06, |
| "loss": 0.2289, |
| "step": 958 |
| }, |
| { |
| "epoch": 3.889113924050633, |
| "grad_norm": 0.34884257531957436, |
| "learning_rate": 5.6289844148227225e-06, |
| "loss": 0.2109, |
| "step": 959 |
| }, |
| { |
| "epoch": 3.8931645569620255, |
| "grad_norm": 0.3413650524726005, |
| "learning_rate": 5.5895681279842615e-06, |
| "loss": 0.2546, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.8972151898734175, |
| "grad_norm": 0.3399442331098133, |
| "learning_rate": 5.550267900783019e-06, |
| "loss": 0.2392, |
| "step": 961 |
| }, |
| { |
| "epoch": 3.90126582278481, |
| "grad_norm": 0.3278565372206573, |
| "learning_rate": 5.511084049737623e-06, |
| "loss": 0.2467, |
| "step": 962 |
| }, |
| { |
| "epoch": 3.9053164556962026, |
| "grad_norm": 0.34816661479408706, |
| "learning_rate": 5.4720168904294215e-06, |
| "loss": 0.2077, |
| "step": 963 |
| }, |
| { |
| "epoch": 3.909367088607595, |
| "grad_norm": 0.3450381801767028, |
| "learning_rate": 5.433066737499948e-06, |
| "loss": 0.2424, |
| "step": 964 |
| }, |
| { |
| "epoch": 3.9134177215189876, |
| "grad_norm": 0.3377844557144171, |
| "learning_rate": 5.394233904648376e-06, |
| "loss": 0.2487, |
| "step": 965 |
| }, |
| { |
| "epoch": 3.9174683544303797, |
| "grad_norm": 0.33194128606518714, |
| "learning_rate": 5.355518704628997e-06, |
| "loss": 0.236, |
| "step": 966 |
| }, |
| { |
| "epoch": 3.921518987341772, |
| "grad_norm": 0.34815731496376845, |
| "learning_rate": 5.316921449248731e-06, |
| "loss": 0.2116, |
| "step": 967 |
| }, |
| { |
| "epoch": 3.9255696202531647, |
| "grad_norm": 0.3481636989407873, |
| "learning_rate": 5.278442449364538e-06, |
| "loss": 0.219, |
| "step": 968 |
| }, |
| { |
| "epoch": 3.9296202531645568, |
| "grad_norm": 0.34751111032255205, |
| "learning_rate": 5.240082014881016e-06, |
| "loss": 0.22, |
| "step": 969 |
| }, |
| { |
| "epoch": 3.9336708860759493, |
| "grad_norm": 0.35692818581573316, |
| "learning_rate": 5.201840454747822e-06, |
| "loss": 0.24, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.937721518987342, |
| "grad_norm": 0.3340126635046371, |
| "learning_rate": 5.163718076957223e-06, |
| "loss": 0.2333, |
| "step": 971 |
| }, |
| { |
| "epoch": 3.9417721518987343, |
| "grad_norm": 0.33586100370625976, |
| "learning_rate": 5.125715188541609e-06, |
| "loss": 0.232, |
| "step": 972 |
| }, |
| { |
| "epoch": 3.945822784810127, |
| "grad_norm": 0.34541541371569817, |
| "learning_rate": 5.087832095571021e-06, |
| "loss": 0.2491, |
| "step": 973 |
| }, |
| { |
| "epoch": 3.949873417721519, |
| "grad_norm": 0.3343779856570918, |
| "learning_rate": 5.0500691031506766e-06, |
| "loss": 0.2508, |
| "step": 974 |
| }, |
| { |
| "epoch": 3.9539240506329114, |
| "grad_norm": 0.3406883643147221, |
| "learning_rate": 5.01242651541854e-06, |
| "loss": 0.2529, |
| "step": 975 |
| }, |
| { |
| "epoch": 3.957974683544304, |
| "grad_norm": 0.32988755437379325, |
| "learning_rate": 4.974904635542815e-06, |
| "loss": 0.2423, |
| "step": 976 |
| }, |
| { |
| "epoch": 3.962025316455696, |
| "grad_norm": 0.35751028622148373, |
| "learning_rate": 4.937503765719582e-06, |
| "loss": 0.2161, |
| "step": 977 |
| }, |
| { |
| "epoch": 3.9660759493670885, |
| "grad_norm": 0.35021645157033676, |
| "learning_rate": 4.900224207170299e-06, |
| "loss": 0.2371, |
| "step": 978 |
| }, |
| { |
| "epoch": 3.970126582278481, |
| "grad_norm": 0.33515209571073024, |
| "learning_rate": 4.8630662601394065e-06, |
| "loss": 0.2407, |
| "step": 979 |
| }, |
| { |
| "epoch": 3.9741772151898735, |
| "grad_norm": 0.3316231007130163, |
| "learning_rate": 4.8260302238918995e-06, |
| "loss": 0.2496, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.978227848101266, |
| "grad_norm": 0.338003431658423, |
| "learning_rate": 4.789116396710924e-06, |
| "loss": 0.2444, |
| "step": 981 |
| }, |
| { |
| "epoch": 3.982278481012658, |
| "grad_norm": 0.3428176406264192, |
| "learning_rate": 4.752325075895368e-06, |
| "loss": 0.2546, |
| "step": 982 |
| }, |
| { |
| "epoch": 3.9863291139240506, |
| "grad_norm": 0.3290076454339016, |
| "learning_rate": 4.715656557757473e-06, |
| "loss": 0.2347, |
| "step": 983 |
| }, |
| { |
| "epoch": 3.990379746835443, |
| "grad_norm": 0.34708990813086216, |
| "learning_rate": 4.679111137620442e-06, |
| "loss": 0.2313, |
| "step": 984 |
| }, |
| { |
| "epoch": 3.9944303797468352, |
| "grad_norm": 0.3343478708173886, |
| "learning_rate": 4.6426891098160585e-06, |
| "loss": 0.2431, |
| "step": 985 |
| }, |
| { |
| "epoch": 3.9984810126582278, |
| "grad_norm": 0.34148378086907427, |
| "learning_rate": 4.6063907676823474e-06, |
| "loss": 0.2405, |
| "step": 986 |
| }, |
| { |
| "epoch": 4.004050632911392, |
| "grad_norm": 0.49478389557046715, |
| "learning_rate": 4.570216403561141e-06, |
| "loss": 0.1751, |
| "step": 987 |
| }, |
| { |
| "epoch": 4.008101265822785, |
| "grad_norm": 0.39509479047480284, |
| "learning_rate": 4.534166308795815e-06, |
| "loss": 0.1812, |
| "step": 988 |
| }, |
| { |
| "epoch": 4.012151898734177, |
| "grad_norm": 0.3676547794531944, |
| "learning_rate": 4.498240773728859e-06, |
| "loss": 0.1877, |
| "step": 989 |
| }, |
| { |
| "epoch": 4.01620253164557, |
| "grad_norm": 0.2883728465025368, |
| "learning_rate": 4.462440087699609e-06, |
| "loss": 0.1769, |
| "step": 990 |
| }, |
| { |
| "epoch": 4.020253164556962, |
| "grad_norm": 0.28947420759301495, |
| "learning_rate": 4.426764539041861e-06, |
| "loss": 0.1633, |
| "step": 991 |
| }, |
| { |
| "epoch": 4.024303797468354, |
| "grad_norm": 0.4174396800510771, |
| "learning_rate": 4.391214415081582e-06, |
| "loss": 0.188, |
| "step": 992 |
| }, |
| { |
| "epoch": 4.028354430379747, |
| "grad_norm": 0.44838109760432593, |
| "learning_rate": 4.355790002134579e-06, |
| "loss": 0.1789, |
| "step": 993 |
| }, |
| { |
| "epoch": 4.032405063291139, |
| "grad_norm": 0.47555320284164043, |
| "learning_rate": 4.320491585504207e-06, |
| "loss": 0.1907, |
| "step": 994 |
| }, |
| { |
| "epoch": 4.036455696202531, |
| "grad_norm": 0.3667258278485744, |
| "learning_rate": 4.2853194494790615e-06, |
| "loss": 0.1712, |
| "step": 995 |
| }, |
| { |
| "epoch": 4.040506329113924, |
| "grad_norm": 0.30616434621313693, |
| "learning_rate": 4.250273877330691e-06, |
| "loss": 0.163, |
| "step": 996 |
| }, |
| { |
| "epoch": 4.044556962025316, |
| "grad_norm": 0.33198594809689547, |
| "learning_rate": 4.215355151311313e-06, |
| "loss": 0.1512, |
| "step": 997 |
| }, |
| { |
| "epoch": 4.048607594936709, |
| "grad_norm": 0.3541823969811357, |
| "learning_rate": 4.180563552651542e-06, |
| "loss": 0.1662, |
| "step": 998 |
| }, |
| { |
| "epoch": 4.052658227848101, |
| "grad_norm": 0.3499499485361766, |
| "learning_rate": 4.145899361558147e-06, |
| "loss": 0.1849, |
| "step": 999 |
| }, |
| { |
| "epoch": 4.056708860759493, |
| "grad_norm": 0.32897253798471604, |
| "learning_rate": 4.111362857211738e-06, |
| "loss": 0.1878, |
| "step": 1000 |
| }, |
| { |
| "epoch": 4.060759493670886, |
| "grad_norm": 0.3238530112407101, |
| "learning_rate": 4.076954317764592e-06, |
| "loss": 0.1931, |
| "step": 1001 |
| }, |
| { |
| "epoch": 4.0648101265822785, |
| "grad_norm": 0.2799072959470771, |
| "learning_rate": 4.042674020338335e-06, |
| "loss": 0.1819, |
| "step": 1002 |
| }, |
| { |
| "epoch": 4.0688607594936705, |
| "grad_norm": 0.2888832214919998, |
| "learning_rate": 4.0085222410217835e-06, |
| "loss": 0.1565, |
| "step": 1003 |
| }, |
| { |
| "epoch": 4.0729113924050635, |
| "grad_norm": 0.32708276672021824, |
| "learning_rate": 3.974499254868674e-06, |
| "loss": 0.1728, |
| "step": 1004 |
| }, |
| { |
| "epoch": 4.076962025316456, |
| "grad_norm": 0.3144302386212357, |
| "learning_rate": 3.940605335895451e-06, |
| "loss": 0.1902, |
| "step": 1005 |
| }, |
| { |
| "epoch": 4.0810126582278485, |
| "grad_norm": 0.32864443031886986, |
| "learning_rate": 3.90684075707908e-06, |
| "loss": 0.1828, |
| "step": 1006 |
| }, |
| { |
| "epoch": 4.085063291139241, |
| "grad_norm": 0.298666669702849, |
| "learning_rate": 3.8732057903548505e-06, |
| "loss": 0.1582, |
| "step": 1007 |
| }, |
| { |
| "epoch": 4.089113924050633, |
| "grad_norm": 0.29440440980317556, |
| "learning_rate": 3.8397007066141375e-06, |
| "loss": 0.1732, |
| "step": 1008 |
| }, |
| { |
| "epoch": 4.093164556962026, |
| "grad_norm": 0.2814080643936291, |
| "learning_rate": 3.806325775702304e-06, |
| "loss": 0.1677, |
| "step": 1009 |
| }, |
| { |
| "epoch": 4.097215189873418, |
| "grad_norm": 0.2710758863516794, |
| "learning_rate": 3.773081266416434e-06, |
| "loss": 0.1528, |
| "step": 1010 |
| }, |
| { |
| "epoch": 4.10126582278481, |
| "grad_norm": 0.2773386502377593, |
| "learning_rate": 3.739967446503245e-06, |
| "loss": 0.1542, |
| "step": 1011 |
| }, |
| { |
| "epoch": 4.105316455696203, |
| "grad_norm": 0.2935569792091573, |
| "learning_rate": 3.706984582656894e-06, |
| "loss": 0.1507, |
| "step": 1012 |
| }, |
| { |
| "epoch": 4.109367088607595, |
| "grad_norm": 0.28974412053400395, |
| "learning_rate": 3.6741329405168237e-06, |
| "loss": 0.1775, |
| "step": 1013 |
| }, |
| { |
| "epoch": 4.113417721518988, |
| "grad_norm": 0.2735130341936111, |
| "learning_rate": 3.641412784665648e-06, |
| "loss": 0.1529, |
| "step": 1014 |
| }, |
| { |
| "epoch": 4.11746835443038, |
| "grad_norm": 0.281770196640326, |
| "learning_rate": 3.608824378627005e-06, |
| "loss": 0.1872, |
| "step": 1015 |
| }, |
| { |
| "epoch": 4.121518987341772, |
| "grad_norm": 0.2698237082007911, |
| "learning_rate": 3.5763679848634337e-06, |
| "loss": 0.1652, |
| "step": 1016 |
| }, |
| { |
| "epoch": 4.125569620253165, |
| "grad_norm": 0.2781860725906129, |
| "learning_rate": 3.544043864774269e-06, |
| "loss": 0.1747, |
| "step": 1017 |
| }, |
| { |
| "epoch": 4.129620253164557, |
| "grad_norm": 0.2724834789169913, |
| "learning_rate": 3.5118522786935282e-06, |
| "loss": 0.1716, |
| "step": 1018 |
| }, |
| { |
| "epoch": 4.133670886075949, |
| "grad_norm": 0.27592262098143305, |
| "learning_rate": 3.479793485887819e-06, |
| "loss": 0.1613, |
| "step": 1019 |
| }, |
| { |
| "epoch": 4.137721518987342, |
| "grad_norm": 0.26712407777203434, |
| "learning_rate": 3.4478677445542653e-06, |
| "loss": 0.1675, |
| "step": 1020 |
| }, |
| { |
| "epoch": 4.141772151898734, |
| "grad_norm": 0.2780202071998212, |
| "learning_rate": 3.4160753118183767e-06, |
| "loss": 0.1751, |
| "step": 1021 |
| }, |
| { |
| "epoch": 4.145822784810127, |
| "grad_norm": 0.26458256710660294, |
| "learning_rate": 3.3844164437320527e-06, |
| "loss": 0.1634, |
| "step": 1022 |
| }, |
| { |
| "epoch": 4.149873417721519, |
| "grad_norm": 0.2670476635266229, |
| "learning_rate": 3.3528913952714558e-06, |
| "loss": 0.1645, |
| "step": 1023 |
| }, |
| { |
| "epoch": 4.153924050632911, |
| "grad_norm": 0.2677198000143161, |
| "learning_rate": 3.321500420335e-06, |
| "loss": 0.1994, |
| "step": 1024 |
| }, |
| { |
| "epoch": 4.157974683544304, |
| "grad_norm": 0.2662111846911306, |
| "learning_rate": 3.290243771741275e-06, |
| "loss": 0.1537, |
| "step": 1025 |
| }, |
| { |
| "epoch": 4.162025316455696, |
| "grad_norm": 0.2743163011108146, |
| "learning_rate": 3.2591217012270325e-06, |
| "loss": 0.1836, |
| "step": 1026 |
| }, |
| { |
| "epoch": 4.166075949367088, |
| "grad_norm": 0.2627591441546112, |
| "learning_rate": 3.228134459445149e-06, |
| "loss": 0.1662, |
| "step": 1027 |
| }, |
| { |
| "epoch": 4.170126582278481, |
| "grad_norm": 0.271445765612377, |
| "learning_rate": 3.1972822959626205e-06, |
| "loss": 0.1796, |
| "step": 1028 |
| }, |
| { |
| "epoch": 4.174177215189873, |
| "grad_norm": 0.27534757022435163, |
| "learning_rate": 3.166565459258513e-06, |
| "loss": 0.1693, |
| "step": 1029 |
| }, |
| { |
| "epoch": 4.178227848101266, |
| "grad_norm": 0.2772465609122766, |
| "learning_rate": 3.1359841967220193e-06, |
| "loss": 0.1734, |
| "step": 1030 |
| }, |
| { |
| "epoch": 4.182278481012658, |
| "grad_norm": 0.2582779651719193, |
| "learning_rate": 3.105538754650419e-06, |
| "loss": 0.1554, |
| "step": 1031 |
| }, |
| { |
| "epoch": 4.18632911392405, |
| "grad_norm": 0.272094345356668, |
| "learning_rate": 3.07522937824712e-06, |
| "loss": 0.1651, |
| "step": 1032 |
| }, |
| { |
| "epoch": 4.190379746835443, |
| "grad_norm": 0.2840066811534647, |
| "learning_rate": 3.0450563116196697e-06, |
| "loss": 0.1857, |
| "step": 1033 |
| }, |
| { |
| "epoch": 4.194430379746835, |
| "grad_norm": 0.2569797633268909, |
| "learning_rate": 3.0150197977778008e-06, |
| "loss": 0.1578, |
| "step": 1034 |
| }, |
| { |
| "epoch": 4.1984810126582275, |
| "grad_norm": 0.2649770818395828, |
| "learning_rate": 2.985120078631465e-06, |
| "loss": 0.1695, |
| "step": 1035 |
| }, |
| { |
| "epoch": 4.2025316455696204, |
| "grad_norm": 0.26457098401397844, |
| "learning_rate": 2.9553573949888893e-06, |
| "loss": 0.1642, |
| "step": 1036 |
| }, |
| { |
| "epoch": 4.2065822784810125, |
| "grad_norm": 0.26751735766007256, |
| "learning_rate": 2.9257319865546384e-06, |
| "loss": 0.1595, |
| "step": 1037 |
| }, |
| { |
| "epoch": 4.2106329113924055, |
| "grad_norm": 0.2494693010391481, |
| "learning_rate": 2.896244091927678e-06, |
| "loss": 0.1863, |
| "step": 1038 |
| }, |
| { |
| "epoch": 4.2146835443037975, |
| "grad_norm": 0.25996152544787887, |
| "learning_rate": 2.8668939485994584e-06, |
| "loss": 0.1611, |
| "step": 1039 |
| }, |
| { |
| "epoch": 4.21873417721519, |
| "grad_norm": 0.2971659369087782, |
| "learning_rate": 2.837681792951994e-06, |
| "loss": 0.1519, |
| "step": 1040 |
| }, |
| { |
| "epoch": 4.222784810126583, |
| "grad_norm": 0.2615860731202491, |
| "learning_rate": 2.808607860255981e-06, |
| "loss": 0.1556, |
| "step": 1041 |
| }, |
| { |
| "epoch": 4.226835443037975, |
| "grad_norm": 0.26402241677147503, |
| "learning_rate": 2.7796723846688634e-06, |
| "loss": 0.1485, |
| "step": 1042 |
| }, |
| { |
| "epoch": 4.230886075949367, |
| "grad_norm": 0.27801093675350974, |
| "learning_rate": 2.7508755992329937e-06, |
| "loss": 0.1935, |
| "step": 1043 |
| }, |
| { |
| "epoch": 4.23493670886076, |
| "grad_norm": 0.26344255737526695, |
| "learning_rate": 2.722217735873718e-06, |
| "loss": 0.1779, |
| "step": 1044 |
| }, |
| { |
| "epoch": 4.238987341772152, |
| "grad_norm": 0.27532996579081076, |
| "learning_rate": 2.6936990253975315e-06, |
| "loss": 0.1839, |
| "step": 1045 |
| }, |
| { |
| "epoch": 4.243037974683545, |
| "grad_norm": 0.24946074283022668, |
| "learning_rate": 2.665319697490205e-06, |
| "loss": 0.1457, |
| "step": 1046 |
| }, |
| { |
| "epoch": 4.247088607594937, |
| "grad_norm": 0.26301539130124424, |
| "learning_rate": 2.637079980714945e-06, |
| "loss": 0.1748, |
| "step": 1047 |
| }, |
| { |
| "epoch": 4.251139240506329, |
| "grad_norm": 0.2617008144422121, |
| "learning_rate": 2.6089801025105453e-06, |
| "loss": 0.1849, |
| "step": 1048 |
| }, |
| { |
| "epoch": 4.255189873417722, |
| "grad_norm": 0.26157492709381786, |
| "learning_rate": 2.581020289189571e-06, |
| "loss": 0.1536, |
| "step": 1049 |
| }, |
| { |
| "epoch": 4.259240506329114, |
| "grad_norm": 0.2696715604083065, |
| "learning_rate": 2.553200765936501e-06, |
| "loss": 0.1558, |
| "step": 1050 |
| }, |
| { |
| "epoch": 4.263291139240506, |
| "grad_norm": 0.26214254375650725, |
| "learning_rate": 2.525521756805962e-06, |
| "loss": 0.1827, |
| "step": 1051 |
| }, |
| { |
| "epoch": 4.267341772151899, |
| "grad_norm": 0.2637156419348116, |
| "learning_rate": 2.497983484720885e-06, |
| "loss": 0.1626, |
| "step": 1052 |
| }, |
| { |
| "epoch": 4.271392405063291, |
| "grad_norm": 0.2696072142336843, |
| "learning_rate": 2.470586171470728e-06, |
| "loss": 0.1744, |
| "step": 1053 |
| }, |
| { |
| "epoch": 4.275443037974684, |
| "grad_norm": 0.2688453424179305, |
| "learning_rate": 2.4433300377096836e-06, |
| "loss": 0.1732, |
| "step": 1054 |
| }, |
| { |
| "epoch": 4.279493670886076, |
| "grad_norm": 0.26352793170325667, |
| "learning_rate": 2.4162153029549073e-06, |
| "loss": 0.1651, |
| "step": 1055 |
| }, |
| { |
| "epoch": 4.283544303797468, |
| "grad_norm": 0.26107235134694434, |
| "learning_rate": 2.3892421855847458e-06, |
| "loss": 0.1731, |
| "step": 1056 |
| }, |
| { |
| "epoch": 4.287594936708861, |
| "grad_norm": 0.2576025404995936, |
| "learning_rate": 2.362410902836978e-06, |
| "loss": 0.169, |
| "step": 1057 |
| }, |
| { |
| "epoch": 4.291645569620253, |
| "grad_norm": 0.2728589001925702, |
| "learning_rate": 2.3357216708070653e-06, |
| "loss": 0.1694, |
| "step": 1058 |
| }, |
| { |
| "epoch": 4.295696202531645, |
| "grad_norm": 0.2647459864839984, |
| "learning_rate": 2.309174704446411e-06, |
| "loss": 0.1709, |
| "step": 1059 |
| }, |
| { |
| "epoch": 4.299746835443038, |
| "grad_norm": 0.2694704708337589, |
| "learning_rate": 2.2827702175606437e-06, |
| "loss": 0.1742, |
| "step": 1060 |
| }, |
| { |
| "epoch": 4.30379746835443, |
| "grad_norm": 0.2552370051048809, |
| "learning_rate": 2.256508422807855e-06, |
| "loss": 0.1747, |
| "step": 1061 |
| }, |
| { |
| "epoch": 4.307848101265823, |
| "grad_norm": 0.27514248455056767, |
| "learning_rate": 2.230389531696946e-06, |
| "loss": 0.185, |
| "step": 1062 |
| }, |
| { |
| "epoch": 4.311898734177215, |
| "grad_norm": 0.27720459274913495, |
| "learning_rate": 2.204413754585857e-06, |
| "loss": 0.1641, |
| "step": 1063 |
| }, |
| { |
| "epoch": 4.315949367088607, |
| "grad_norm": 0.2599187008039048, |
| "learning_rate": 2.1785813006799406e-06, |
| "loss": 0.1864, |
| "step": 1064 |
| }, |
| { |
| "epoch": 4.32, |
| "grad_norm": 0.26832345504989197, |
| "learning_rate": 2.1528923780302224e-06, |
| "loss": 0.1716, |
| "step": 1065 |
| }, |
| { |
| "epoch": 4.324050632911392, |
| "grad_norm": 0.2572351276377806, |
| "learning_rate": 2.127347193531757e-06, |
| "loss": 0.1471, |
| "step": 1066 |
| }, |
| { |
| "epoch": 4.328101265822784, |
| "grad_norm": 0.267637175730133, |
| "learning_rate": 2.101945952921942e-06, |
| "loss": 0.1816, |
| "step": 1067 |
| }, |
| { |
| "epoch": 4.332151898734177, |
| "grad_norm": 0.26845224014301966, |
| "learning_rate": 2.0766888607788906e-06, |
| "loss": 0.1582, |
| "step": 1068 |
| }, |
| { |
| "epoch": 4.3362025316455695, |
| "grad_norm": 0.2558701013350939, |
| "learning_rate": 2.0515761205197337e-06, |
| "loss": 0.1487, |
| "step": 1069 |
| }, |
| { |
| "epoch": 4.340253164556962, |
| "grad_norm": 0.2743645427006471, |
| "learning_rate": 2.0266079343990453e-06, |
| "loss": 0.1625, |
| "step": 1070 |
| }, |
| { |
| "epoch": 4.3443037974683545, |
| "grad_norm": 0.2557778251012477, |
| "learning_rate": 2.0017845035071494e-06, |
| "loss": 0.1702, |
| "step": 1071 |
| }, |
| { |
| "epoch": 4.348354430379747, |
| "grad_norm": 0.2564936980643495, |
| "learning_rate": 1.9771060277685537e-06, |
| "loss": 0.1769, |
| "step": 1072 |
| }, |
| { |
| "epoch": 4.3524050632911395, |
| "grad_norm": 0.27439646339722296, |
| "learning_rate": 1.95257270594031e-06, |
| "loss": 0.1658, |
| "step": 1073 |
| }, |
| { |
| "epoch": 4.356455696202532, |
| "grad_norm": 0.2587362553466058, |
| "learning_rate": 1.9281847356104188e-06, |
| "loss": 0.1654, |
| "step": 1074 |
| }, |
| { |
| "epoch": 4.360506329113924, |
| "grad_norm": 0.27250058301359037, |
| "learning_rate": 1.9039423131962365e-06, |
| "loss": 0.1905, |
| "step": 1075 |
| }, |
| { |
| "epoch": 4.364556962025317, |
| "grad_norm": 0.2728440093802848, |
| "learning_rate": 1.8798456339429027e-06, |
| "loss": 0.1619, |
| "step": 1076 |
| }, |
| { |
| "epoch": 4.368607594936709, |
| "grad_norm": 0.2704741143525539, |
| "learning_rate": 1.8558948919217612e-06, |
| "loss": 0.1539, |
| "step": 1077 |
| }, |
| { |
| "epoch": 4.372658227848102, |
| "grad_norm": 0.2658244517468011, |
| "learning_rate": 1.8320902800287954e-06, |
| "loss": 0.1686, |
| "step": 1078 |
| }, |
| { |
| "epoch": 4.376708860759494, |
| "grad_norm": 0.2698457274214847, |
| "learning_rate": 1.8084319899830726e-06, |
| "loss": 0.1813, |
| "step": 1079 |
| }, |
| { |
| "epoch": 4.380759493670886, |
| "grad_norm": 0.2811662252506199, |
| "learning_rate": 1.7849202123252097e-06, |
| "loss": 0.1754, |
| "step": 1080 |
| }, |
| { |
| "epoch": 4.384810126582279, |
| "grad_norm": 0.2611876041562969, |
| "learning_rate": 1.7615551364158401e-06, |
| "loss": 0.1572, |
| "step": 1081 |
| }, |
| { |
| "epoch": 4.388860759493671, |
| "grad_norm": 0.26742483844971904, |
| "learning_rate": 1.738336950434061e-06, |
| "loss": 0.1633, |
| "step": 1082 |
| }, |
| { |
| "epoch": 4.392911392405063, |
| "grad_norm": 0.2675926962794175, |
| "learning_rate": 1.715265841375957e-06, |
| "loss": 0.1586, |
| "step": 1083 |
| }, |
| { |
| "epoch": 4.396962025316456, |
| "grad_norm": 0.2748167226993404, |
| "learning_rate": 1.6923419950530684e-06, |
| "loss": 0.1315, |
| "step": 1084 |
| }, |
| { |
| "epoch": 4.401012658227848, |
| "grad_norm": 0.2662054197790258, |
| "learning_rate": 1.6695655960909008e-06, |
| "loss": 0.1531, |
| "step": 1085 |
| }, |
| { |
| "epoch": 4.405063291139241, |
| "grad_norm": 0.24811323160035825, |
| "learning_rate": 1.646936827927441e-06, |
| "loss": 0.1545, |
| "step": 1086 |
| }, |
| { |
| "epoch": 4.409113924050633, |
| "grad_norm": 0.269053135573093, |
| "learning_rate": 1.6244558728116766e-06, |
| "loss": 0.1599, |
| "step": 1087 |
| }, |
| { |
| "epoch": 4.413164556962025, |
| "grad_norm": 0.2682565535309368, |
| "learning_rate": 1.6021229118021265e-06, |
| "loss": 0.1698, |
| "step": 1088 |
| }, |
| { |
| "epoch": 4.417215189873418, |
| "grad_norm": 0.2593357931834693, |
| "learning_rate": 1.5799381247653967e-06, |
| "loss": 0.1506, |
| "step": 1089 |
| }, |
| { |
| "epoch": 4.42126582278481, |
| "grad_norm": 0.2580106371093722, |
| "learning_rate": 1.5579016903747013e-06, |
| "loss": 0.1672, |
| "step": 1090 |
| }, |
| { |
| "epoch": 4.425316455696202, |
| "grad_norm": 0.26361749263121653, |
| "learning_rate": 1.5360137861084656e-06, |
| "loss": 0.1764, |
| "step": 1091 |
| }, |
| { |
| "epoch": 4.429367088607595, |
| "grad_norm": 0.2675018442300554, |
| "learning_rate": 1.5142745882488475e-06, |
| "loss": 0.1563, |
| "step": 1092 |
| }, |
| { |
| "epoch": 4.433417721518987, |
| "grad_norm": 0.2776318793798391, |
| "learning_rate": 1.4926842718803691e-06, |
| "loss": 0.1654, |
| "step": 1093 |
| }, |
| { |
| "epoch": 4.43746835443038, |
| "grad_norm": 0.25523257490718565, |
| "learning_rate": 1.4712430108884657e-06, |
| "loss": 0.1849, |
| "step": 1094 |
| }, |
| { |
| "epoch": 4.441518987341772, |
| "grad_norm": 0.27340525376072977, |
| "learning_rate": 1.4499509779581078e-06, |
| "loss": 0.1636, |
| "step": 1095 |
| }, |
| { |
| "epoch": 4.445569620253164, |
| "grad_norm": 0.2689654797302995, |
| "learning_rate": 1.4288083445723988e-06, |
| "loss": 0.1664, |
| "step": 1096 |
| }, |
| { |
| "epoch": 4.449620253164557, |
| "grad_norm": 0.2657226120499649, |
| "learning_rate": 1.4078152810112045e-06, |
| "loss": 0.187, |
| "step": 1097 |
| }, |
| { |
| "epoch": 4.453670886075949, |
| "grad_norm": 0.25434846797552113, |
| "learning_rate": 1.3869719563497697e-06, |
| "loss": 0.1491, |
| "step": 1098 |
| }, |
| { |
| "epoch": 4.457721518987341, |
| "grad_norm": 0.2623498019533576, |
| "learning_rate": 1.3662785384573663e-06, |
| "loss": 0.1755, |
| "step": 1099 |
| }, |
| { |
| "epoch": 4.461772151898734, |
| "grad_norm": 0.26492407294706866, |
| "learning_rate": 1.3457351939959383e-06, |
| "loss": 0.162, |
| "step": 1100 |
| }, |
| { |
| "epoch": 4.465822784810126, |
| "grad_norm": 0.277354971275243, |
| "learning_rate": 1.3253420884187551e-06, |
| "loss": 0.1674, |
| "step": 1101 |
| }, |
| { |
| "epoch": 4.469873417721519, |
| "grad_norm": 0.260681902985479, |
| "learning_rate": 1.3050993859690953e-06, |
| "loss": 0.1714, |
| "step": 1102 |
| }, |
| { |
| "epoch": 4.473924050632911, |
| "grad_norm": 0.2807714864442416, |
| "learning_rate": 1.2850072496788869e-06, |
| "loss": 0.1549, |
| "step": 1103 |
| }, |
| { |
| "epoch": 4.4779746835443035, |
| "grad_norm": 0.26980005508090027, |
| "learning_rate": 1.2650658413674434e-06, |
| "loss": 0.1893, |
| "step": 1104 |
| }, |
| { |
| "epoch": 4.4820253164556965, |
| "grad_norm": 0.2711074595547318, |
| "learning_rate": 1.2452753216401226e-06, |
| "loss": 0.1762, |
| "step": 1105 |
| }, |
| { |
| "epoch": 4.4860759493670885, |
| "grad_norm": 0.26174007859984516, |
| "learning_rate": 1.2256358498870503e-06, |
| "loss": 0.17, |
| "step": 1106 |
| }, |
| { |
| "epoch": 4.490126582278481, |
| "grad_norm": 0.2689308924072279, |
| "learning_rate": 1.2061475842818337e-06, |
| "loss": 0.1961, |
| "step": 1107 |
| }, |
| { |
| "epoch": 4.494177215189874, |
| "grad_norm": 0.2558147355460892, |
| "learning_rate": 1.1868106817802816e-06, |
| "loss": 0.1769, |
| "step": 1108 |
| }, |
| { |
| "epoch": 4.498227848101266, |
| "grad_norm": 0.24899487830490946, |
| "learning_rate": 1.1676252981191482e-06, |
| "loss": 0.1436, |
| "step": 1109 |
| }, |
| { |
| "epoch": 4.502278481012659, |
| "grad_norm": 0.2675193384344742, |
| "learning_rate": 1.1485915878148823e-06, |
| "loss": 0.1539, |
| "step": 1110 |
| }, |
| { |
| "epoch": 4.506329113924051, |
| "grad_norm": 0.2659005148679619, |
| "learning_rate": 1.1297097041623584e-06, |
| "loss": 0.1637, |
| "step": 1111 |
| }, |
| { |
| "epoch": 4.510379746835443, |
| "grad_norm": 0.26322920250742143, |
| "learning_rate": 1.1109797992336847e-06, |
| "loss": 0.1782, |
| "step": 1112 |
| }, |
| { |
| "epoch": 4.514430379746836, |
| "grad_norm": 0.26446474420129645, |
| "learning_rate": 1.092402023876933e-06, |
| "loss": 0.1651, |
| "step": 1113 |
| }, |
| { |
| "epoch": 4.518481012658228, |
| "grad_norm": 0.25809794532112207, |
| "learning_rate": 1.0739765277149527e-06, |
| "loss": 0.189, |
| "step": 1114 |
| }, |
| { |
| "epoch": 4.52253164556962, |
| "grad_norm": 0.27074321194153256, |
| "learning_rate": 1.0557034591441596e-06, |
| "loss": 0.159, |
| "step": 1115 |
| }, |
| { |
| "epoch": 4.526582278481013, |
| "grad_norm": 0.26122252411392183, |
| "learning_rate": 1.0375829653333324e-06, |
| "loss": 0.1879, |
| "step": 1116 |
| }, |
| { |
| "epoch": 4.530632911392405, |
| "grad_norm": 0.25633972812015343, |
| "learning_rate": 1.0196151922224385e-06, |
| "loss": 0.1736, |
| "step": 1117 |
| }, |
| { |
| "epoch": 4.534683544303798, |
| "grad_norm": 0.2739831725778443, |
| "learning_rate": 1.0018002845214526e-06, |
| "loss": 0.1752, |
| "step": 1118 |
| }, |
| { |
| "epoch": 4.53873417721519, |
| "grad_norm": 0.26258567476788414, |
| "learning_rate": 9.841383857091947e-07, |
| "loss": 0.1505, |
| "step": 1119 |
| }, |
| { |
| "epoch": 4.542784810126582, |
| "grad_norm": 0.2703493795995899, |
| "learning_rate": 9.666296380321616e-07, |
| "loss": 0.1685, |
| "step": 1120 |
| }, |
| { |
| "epoch": 4.546835443037975, |
| "grad_norm": 0.26205198464296187, |
| "learning_rate": 9.492741825034124e-07, |
| "loss": 0.1478, |
| "step": 1121 |
| }, |
| { |
| "epoch": 4.550886075949367, |
| "grad_norm": 0.26465934440646993, |
| "learning_rate": 9.320721589013892e-07, |
| "loss": 0.1743, |
| "step": 1122 |
| }, |
| { |
| "epoch": 4.55493670886076, |
| "grad_norm": 0.2651769973374547, |
| "learning_rate": 9.150237057688339e-07, |
| "loss": 0.1863, |
| "step": 1123 |
| }, |
| { |
| "epoch": 4.558987341772152, |
| "grad_norm": 0.26830554933937456, |
| "learning_rate": 8.981289604116328e-07, |
| "loss": 0.1688, |
| "step": 1124 |
| }, |
| { |
| "epoch": 4.563037974683544, |
| "grad_norm": 0.268011857362914, |
| "learning_rate": 8.813880588977542e-07, |
| "loss": 0.1671, |
| "step": 1125 |
| }, |
| { |
| "epoch": 4.567088607594937, |
| "grad_norm": 0.26764240633798625, |
| "learning_rate": 8.648011360561126e-07, |
| "loss": 0.1546, |
| "step": 1126 |
| }, |
| { |
| "epoch": 4.571139240506329, |
| "grad_norm": 0.26443322721987833, |
| "learning_rate": 8.483683254755037e-07, |
| "loss": 0.1691, |
| "step": 1127 |
| }, |
| { |
| "epoch": 4.575189873417721, |
| "grad_norm": 0.2564004744779444, |
| "learning_rate": 8.320897595035227e-07, |
| "loss": 0.1656, |
| "step": 1128 |
| }, |
| { |
| "epoch": 4.579240506329114, |
| "grad_norm": 0.2707511529184889, |
| "learning_rate": 8.159655692455093e-07, |
| "loss": 0.1442, |
| "step": 1129 |
| }, |
| { |
| "epoch": 4.583291139240506, |
| "grad_norm": 0.26075933195458456, |
| "learning_rate": 7.999958845634648e-07, |
| "loss": 0.1908, |
| "step": 1130 |
| }, |
| { |
| "epoch": 4.587341772151898, |
| "grad_norm": 0.26406762360634406, |
| "learning_rate": 7.841808340750478e-07, |
| "loss": 0.1613, |
| "step": 1131 |
| }, |
| { |
| "epoch": 4.591392405063291, |
| "grad_norm": 0.25492006378860776, |
| "learning_rate": 7.685205451524869e-07, |
| "loss": 0.1796, |
| "step": 1132 |
| }, |
| { |
| "epoch": 4.595443037974683, |
| "grad_norm": 0.26208516007467964, |
| "learning_rate": 7.530151439216027e-07, |
| "loss": 0.2042, |
| "step": 1133 |
| }, |
| { |
| "epoch": 4.599493670886076, |
| "grad_norm": 0.2840123638708133, |
| "learning_rate": 7.376647552607675e-07, |
| "loss": 0.1754, |
| "step": 1134 |
| }, |
| { |
| "epoch": 4.603544303797468, |
| "grad_norm": 0.2608478110556892, |
| "learning_rate": 7.224695027998963e-07, |
| "loss": 0.1504, |
| "step": 1135 |
| }, |
| { |
| "epoch": 4.6075949367088604, |
| "grad_norm": 0.24623866608530517, |
| "learning_rate": 7.07429508919466e-07, |
| "loss": 0.187, |
| "step": 1136 |
| }, |
| { |
| "epoch": 4.611645569620253, |
| "grad_norm": 0.27240157210081284, |
| "learning_rate": 6.925448947495206e-07, |
| "loss": 0.1766, |
| "step": 1137 |
| }, |
| { |
| "epoch": 4.6156962025316455, |
| "grad_norm": 0.27323543806137557, |
| "learning_rate": 6.778157801686936e-07, |
| "loss": 0.1734, |
| "step": 1138 |
| }, |
| { |
| "epoch": 4.619746835443038, |
| "grad_norm": 0.269327567226204, |
| "learning_rate": 6.632422838032515e-07, |
| "loss": 0.1504, |
| "step": 1139 |
| }, |
| { |
| "epoch": 4.6237974683544305, |
| "grad_norm": 0.2671861544947398, |
| "learning_rate": 6.488245230261281e-07, |
| "loss": 0.1727, |
| "step": 1140 |
| }, |
| { |
| "epoch": 4.627848101265823, |
| "grad_norm": 0.2494385183337245, |
| "learning_rate": 6.345626139559868e-07, |
| "loss": 0.1468, |
| "step": 1141 |
| }, |
| { |
| "epoch": 4.6318987341772155, |
| "grad_norm": 0.26242942467417146, |
| "learning_rate": 6.204566714562866e-07, |
| "loss": 0.1618, |
| "step": 1142 |
| }, |
| { |
| "epoch": 4.635949367088608, |
| "grad_norm": 0.2624756784680058, |
| "learning_rate": 6.06506809134344e-07, |
| "loss": 0.1459, |
| "step": 1143 |
| }, |
| { |
| "epoch": 4.64, |
| "grad_norm": 0.2684685433403018, |
| "learning_rate": 5.927131393404373e-07, |
| "loss": 0.1639, |
| "step": 1144 |
| }, |
| { |
| "epoch": 4.644050632911393, |
| "grad_norm": 0.2554651749016498, |
| "learning_rate": 5.790757731668817e-07, |
| "loss": 0.1604, |
| "step": 1145 |
| }, |
| { |
| "epoch": 4.648101265822785, |
| "grad_norm": 0.27032446162801654, |
| "learning_rate": 5.655948204471507e-07, |
| "loss": 0.1567, |
| "step": 1146 |
| }, |
| { |
| "epoch": 4.652151898734177, |
| "grad_norm": 0.2541898547966036, |
| "learning_rate": 5.522703897549875e-07, |
| "loss": 0.1875, |
| "step": 1147 |
| }, |
| { |
| "epoch": 4.65620253164557, |
| "grad_norm": 0.2650892834328462, |
| "learning_rate": 5.391025884035239e-07, |
| "loss": 0.1853, |
| "step": 1148 |
| }, |
| { |
| "epoch": 4.660253164556962, |
| "grad_norm": 0.248931354205672, |
| "learning_rate": 5.260915224444207e-07, |
| "loss": 0.1566, |
| "step": 1149 |
| }, |
| { |
| "epoch": 4.664303797468355, |
| "grad_norm": 0.26386727905196217, |
| "learning_rate": 5.132372966670129e-07, |
| "loss": 0.1545, |
| "step": 1150 |
| }, |
| { |
| "epoch": 4.668354430379747, |
| "grad_norm": 0.26900910666242306, |
| "learning_rate": 5.005400145974704e-07, |
| "loss": 0.1682, |
| "step": 1151 |
| }, |
| { |
| "epoch": 4.672405063291139, |
| "grad_norm": 0.26123915624338995, |
| "learning_rate": 4.879997784979562e-07, |
| "loss": 0.1648, |
| "step": 1152 |
| }, |
| { |
| "epoch": 4.676455696202532, |
| "grad_norm": 0.26476602400472377, |
| "learning_rate": 4.7561668936580984e-07, |
| "loss": 0.1835, |
| "step": 1153 |
| }, |
| { |
| "epoch": 4.680506329113924, |
| "grad_norm": 0.25086711775858517, |
| "learning_rate": 4.6339084693272306e-07, |
| "loss": 0.1644, |
| "step": 1154 |
| }, |
| { |
| "epoch": 4.684556962025317, |
| "grad_norm": 0.2642564196871151, |
| "learning_rate": 4.5132234966395847e-07, |
| "loss": 0.1929, |
| "step": 1155 |
| }, |
| { |
| "epoch": 4.688607594936709, |
| "grad_norm": 0.27411406062334387, |
| "learning_rate": 4.3941129475752795e-07, |
| "loss": 0.1709, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.692658227848101, |
| "grad_norm": 0.2686532701282629, |
| "learning_rate": 4.27657778143431e-07, |
| "loss": 0.1713, |
| "step": 1157 |
| }, |
| { |
| "epoch": 4.696708860759494, |
| "grad_norm": 0.25793649824424214, |
| "learning_rate": 4.1606189448287757e-07, |
| "loss": 0.1813, |
| "step": 1158 |
| }, |
| { |
| "epoch": 4.700759493670886, |
| "grad_norm": 0.253281275669205, |
| "learning_rate": 4.046237371675177e-07, |
| "loss": 0.1401, |
| "step": 1159 |
| }, |
| { |
| "epoch": 4.704810126582278, |
| "grad_norm": 0.25638276879021243, |
| "learning_rate": 3.9334339831869963e-07, |
| "loss": 0.1434, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.708860759493671, |
| "grad_norm": 0.26755855258425587, |
| "learning_rate": 3.8222096878671955e-07, |
| "loss": 0.1481, |
| "step": 1161 |
| }, |
| { |
| "epoch": 4.712911392405063, |
| "grad_norm": 0.27344425334121464, |
| "learning_rate": 3.7125653815009545e-07, |
| "loss": 0.1805, |
| "step": 1162 |
| }, |
| { |
| "epoch": 4.716962025316455, |
| "grad_norm": 0.27173543955462975, |
| "learning_rate": 3.6045019471484974e-07, |
| "loss": 0.1592, |
| "step": 1163 |
| }, |
| { |
| "epoch": 4.721012658227848, |
| "grad_norm": 0.26543561105954205, |
| "learning_rate": 3.498020255137813e-07, |
| "loss": 0.1621, |
| "step": 1164 |
| }, |
| { |
| "epoch": 4.72506329113924, |
| "grad_norm": 0.2535096807318926, |
| "learning_rate": 3.393121163057811e-07, |
| "loss": 0.1533, |
| "step": 1165 |
| }, |
| { |
| "epoch": 4.729113924050633, |
| "grad_norm": 0.263958240901493, |
| "learning_rate": 3.289805515751399e-07, |
| "loss": 0.1996, |
| "step": 1166 |
| }, |
| { |
| "epoch": 4.733164556962025, |
| "grad_norm": 0.2535013176474823, |
| "learning_rate": 3.188074145308573e-07, |
| "loss": 0.1772, |
| "step": 1167 |
| }, |
| { |
| "epoch": 4.737215189873417, |
| "grad_norm": 0.2659058133634098, |
| "learning_rate": 3.087927871059804e-07, |
| "loss": 0.1794, |
| "step": 1168 |
| }, |
| { |
| "epoch": 4.74126582278481, |
| "grad_norm": 0.2571448433915386, |
| "learning_rate": 2.989367499569418e-07, |
| "loss": 0.1596, |
| "step": 1169 |
| }, |
| { |
| "epoch": 4.745316455696202, |
| "grad_norm": 0.27104686416503093, |
| "learning_rate": 2.8923938246290917e-07, |
| "loss": 0.1665, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.749367088607595, |
| "grad_norm": 0.26574394096558107, |
| "learning_rate": 2.7970076272514804e-07, |
| "loss": 0.1629, |
| "step": 1171 |
| }, |
| { |
| "epoch": 4.7534177215189874, |
| "grad_norm": 0.2624791783876033, |
| "learning_rate": 2.703209675663887e-07, |
| "loss": 0.1458, |
| "step": 1172 |
| }, |
| { |
| "epoch": 4.7574683544303795, |
| "grad_norm": 0.26682033524691773, |
| "learning_rate": 2.6110007253021374e-07, |
| "loss": 0.1729, |
| "step": 1173 |
| }, |
| { |
| "epoch": 4.7615189873417725, |
| "grad_norm": 0.263983578237984, |
| "learning_rate": 2.520381518804471e-07, |
| "loss": 0.1617, |
| "step": 1174 |
| }, |
| { |
| "epoch": 4.7655696202531646, |
| "grad_norm": 0.2699068583378983, |
| "learning_rate": 2.4313527860054585e-07, |
| "loss": 0.1627, |
| "step": 1175 |
| }, |
| { |
| "epoch": 4.769620253164557, |
| "grad_norm": 0.2585431118568067, |
| "learning_rate": 2.343915243930317e-07, |
| "loss": 0.1591, |
| "step": 1176 |
| }, |
| { |
| "epoch": 4.77367088607595, |
| "grad_norm": 0.261892821949762, |
| "learning_rate": 2.2580695967889367e-07, |
| "loss": 0.1561, |
| "step": 1177 |
| }, |
| { |
| "epoch": 4.777721518987342, |
| "grad_norm": 0.2604876136985379, |
| "learning_rate": 2.1738165359704189e-07, |
| "loss": 0.1783, |
| "step": 1178 |
| }, |
| { |
| "epoch": 4.781772151898734, |
| "grad_norm": 0.25078012548771633, |
| "learning_rate": 2.0911567400373257e-07, |
| "loss": 0.1558, |
| "step": 1179 |
| }, |
| { |
| "epoch": 4.785822784810127, |
| "grad_norm": 0.2586097674578887, |
| "learning_rate": 2.0100908747202607e-07, |
| "loss": 0.1964, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.789873417721519, |
| "grad_norm": 0.2737702741766769, |
| "learning_rate": 1.9306195929125638e-07, |
| "loss": 0.1968, |
| "step": 1181 |
| }, |
| { |
| "epoch": 4.793924050632912, |
| "grad_norm": 0.26858207911690535, |
| "learning_rate": 1.8527435346650247e-07, |
| "loss": 0.1563, |
| "step": 1182 |
| }, |
| { |
| "epoch": 4.797974683544304, |
| "grad_norm": 0.2573242839772949, |
| "learning_rate": 1.7764633271807108e-07, |
| "loss": 0.1478, |
| "step": 1183 |
| }, |
| { |
| "epoch": 4.802025316455696, |
| "grad_norm": 0.2660377221616584, |
| "learning_rate": 1.7017795848099262e-07, |
| "loss": 0.1333, |
| "step": 1184 |
| }, |
| { |
| "epoch": 4.806075949367089, |
| "grad_norm": 0.25552961261654883, |
| "learning_rate": 1.6286929090452596e-07, |
| "loss": 0.1724, |
| "step": 1185 |
| }, |
| { |
| "epoch": 4.810126582278481, |
| "grad_norm": 0.25754289686374404, |
| "learning_rate": 1.557203888516745e-07, |
| "loss": 0.1334, |
| "step": 1186 |
| }, |
| { |
| "epoch": 4.814177215189874, |
| "grad_norm": 0.2690096077155245, |
| "learning_rate": 1.487313098987131e-07, |
| "loss": 0.1756, |
| "step": 1187 |
| }, |
| { |
| "epoch": 4.818227848101266, |
| "grad_norm": 0.2674507565673869, |
| "learning_rate": 1.4190211033472402e-07, |
| "loss": 0.1774, |
| "step": 1188 |
| }, |
| { |
| "epoch": 4.822278481012658, |
| "grad_norm": 0.2564121912323577, |
| "learning_rate": 1.3523284516113955e-07, |
| "loss": 0.1748, |
| "step": 1189 |
| }, |
| { |
| "epoch": 4.826329113924051, |
| "grad_norm": 0.26509412351038975, |
| "learning_rate": 1.2872356809130682e-07, |
| "loss": 0.2, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.830379746835443, |
| "grad_norm": 0.2645249646445966, |
| "learning_rate": 1.2237433155004807e-07, |
| "loss": 0.1481, |
| "step": 1191 |
| }, |
| { |
| "epoch": 4.834430379746835, |
| "grad_norm": 0.26981064386576065, |
| "learning_rate": 1.1618518667323886e-07, |
| "loss": 0.1687, |
| "step": 1192 |
| }, |
| { |
| "epoch": 4.838481012658228, |
| "grad_norm": 0.26782116245749465, |
| "learning_rate": 1.1015618330740385e-07, |
| "loss": 0.1884, |
| "step": 1193 |
| }, |
| { |
| "epoch": 4.84253164556962, |
| "grad_norm": 0.2514024102268895, |
| "learning_rate": 1.042873700093061e-07, |
| "loss": 0.1578, |
| "step": 1194 |
| }, |
| { |
| "epoch": 4.846582278481012, |
| "grad_norm": 0.2753429290093358, |
| "learning_rate": 9.857879404556291e-08, |
| "loss": 0.1452, |
| "step": 1195 |
| }, |
| { |
| "epoch": 4.850632911392405, |
| "grad_norm": 0.23701454113236958, |
| "learning_rate": 9.303050139225722e-08, |
| "loss": 0.1582, |
| "step": 1196 |
| }, |
| { |
| "epoch": 4.854683544303797, |
| "grad_norm": 0.26276166534114004, |
| "learning_rate": 8.76425367345779e-08, |
| "loss": 0.1631, |
| "step": 1197 |
| }, |
| { |
| "epoch": 4.85873417721519, |
| "grad_norm": 0.2684990721535729, |
| "learning_rate": 8.241494346644897e-08, |
| "loss": 0.1679, |
| "step": 1198 |
| }, |
| { |
| "epoch": 4.862784810126582, |
| "grad_norm": 0.25790993659023015, |
| "learning_rate": 7.734776369019204e-08, |
| "loss": 0.1601, |
| "step": 1199 |
| }, |
| { |
| "epoch": 4.866835443037974, |
| "grad_norm": 0.25603315530584453, |
| "learning_rate": 7.244103821617332e-08, |
| "loss": 0.174, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.870886075949367, |
| "grad_norm": 0.25777661381204153, |
| "learning_rate": 6.769480656248606e-08, |
| "loss": 0.176, |
| "step": 1201 |
| }, |
| { |
| "epoch": 4.874936708860759, |
| "grad_norm": 0.27077553694979056, |
| "learning_rate": 6.310910695462635e-08, |
| "loss": 0.1673, |
| "step": 1202 |
| }, |
| { |
| "epoch": 4.878987341772152, |
| "grad_norm": 0.2608617591444112, |
| "learning_rate": 5.8683976325191185e-08, |
| "loss": 0.1434, |
| "step": 1203 |
| }, |
| { |
| "epoch": 4.883037974683544, |
| "grad_norm": 0.2554304354593244, |
| "learning_rate": 5.4419450313571984e-08, |
| "loss": 0.1755, |
| "step": 1204 |
| }, |
| { |
| "epoch": 4.8870886075949365, |
| "grad_norm": 0.25967802577506915, |
| "learning_rate": 5.031556326567488e-08, |
| "loss": 0.2016, |
| "step": 1205 |
| }, |
| { |
| "epoch": 4.891139240506329, |
| "grad_norm": 0.2760692261055218, |
| "learning_rate": 4.637234823364312e-08, |
| "loss": 0.1641, |
| "step": 1206 |
| }, |
| { |
| "epoch": 4.8951898734177215, |
| "grad_norm": 0.2639131073660296, |
| "learning_rate": 4.258983697558838e-08, |
| "loss": 0.1607, |
| "step": 1207 |
| }, |
| { |
| "epoch": 4.899240506329114, |
| "grad_norm": 0.2678188779459348, |
| "learning_rate": 3.896805995533548e-08, |
| "loss": 0.1794, |
| "step": 1208 |
| }, |
| { |
| "epoch": 4.9032911392405065, |
| "grad_norm": 0.2563692107054463, |
| "learning_rate": 3.550704634218028e-08, |
| "loss": 0.1602, |
| "step": 1209 |
| }, |
| { |
| "epoch": 4.907341772151899, |
| "grad_norm": 0.2709785015743075, |
| "learning_rate": 3.2206824010647676e-08, |
| "loss": 0.1623, |
| "step": 1210 |
| }, |
| { |
| "epoch": 4.911392405063291, |
| "grad_norm": 0.26099997141621345, |
| "learning_rate": 2.9067419540278476e-08, |
| "loss": 0.1885, |
| "step": 1211 |
| }, |
| { |
| "epoch": 4.915443037974684, |
| "grad_norm": 0.2645757824631569, |
| "learning_rate": 2.6088858215400638e-08, |
| "loss": 0.1891, |
| "step": 1212 |
| }, |
| { |
| "epoch": 4.919493670886076, |
| "grad_norm": 0.25947618098034825, |
| "learning_rate": 2.3271164024940564e-08, |
| "loss": 0.1704, |
| "step": 1213 |
| }, |
| { |
| "epoch": 4.923544303797469, |
| "grad_norm": 0.26501637031177094, |
| "learning_rate": 2.061435966221881e-08, |
| "loss": 0.1808, |
| "step": 1214 |
| }, |
| { |
| "epoch": 4.927594936708861, |
| "grad_norm": 0.2661891635099563, |
| "learning_rate": 1.811846652477245e-08, |
| "loss": 0.1788, |
| "step": 1215 |
| }, |
| { |
| "epoch": 4.931645569620253, |
| "grad_norm": 0.26040408618766353, |
| "learning_rate": 1.5783504714184106e-08, |
| "loss": 0.1603, |
| "step": 1216 |
| }, |
| { |
| "epoch": 4.935696202531646, |
| "grad_norm": 0.2666521347929813, |
| "learning_rate": 1.360949303591097e-08, |
| "loss": 0.1601, |
| "step": 1217 |
| }, |
| { |
| "epoch": 4.939746835443038, |
| "grad_norm": 0.275122912083145, |
| "learning_rate": 1.1596448999144916e-08, |
| "loss": 0.1892, |
| "step": 1218 |
| }, |
| { |
| "epoch": 4.943797468354431, |
| "grad_norm": 0.2698195550962356, |
| "learning_rate": 9.744388816668172e-09, |
| "loss": 0.1776, |
| "step": 1219 |
| }, |
| { |
| "epoch": 4.947848101265823, |
| "grad_norm": 0.26051283549308396, |
| "learning_rate": 8.05332740472009e-09, |
| "loss": 0.1381, |
| "step": 1220 |
| }, |
| { |
| "epoch": 4.951898734177215, |
| "grad_norm": 0.2731372263982851, |
| "learning_rate": 6.523278382872811e-09, |
| "loss": 0.1821, |
| "step": 1221 |
| }, |
| { |
| "epoch": 4.955949367088608, |
| "grad_norm": 0.26752330107722505, |
| "learning_rate": 5.15425407393133e-09, |
| "loss": 0.1666, |
| "step": 1222 |
| }, |
| { |
| "epoch": 4.96, |
| "grad_norm": 0.2577065884920341, |
| "learning_rate": 3.94626550383137e-09, |
| "loss": 0.1614, |
| "step": 1223 |
| }, |
| { |
| "epoch": 4.964050632911392, |
| "grad_norm": 0.2736598953539788, |
| "learning_rate": 2.899322401546112e-09, |
| "loss": 0.1587, |
| "step": 1224 |
| }, |
| { |
| "epoch": 4.968101265822785, |
| "grad_norm": 0.26320274095288565, |
| "learning_rate": 2.013433199010706e-09, |
| "loss": 0.1884, |
| "step": 1225 |
| }, |
| { |
| "epoch": 4.972151898734177, |
| "grad_norm": 0.25841368892509026, |
| "learning_rate": 1.2886050310556563e-09, |
| "loss": 0.1765, |
| "step": 1226 |
| }, |
| { |
| "epoch": 4.976202531645569, |
| "grad_norm": 0.2545444414757548, |
| "learning_rate": 7.248437353468695e-10, |
| "loss": 0.1495, |
| "step": 1227 |
| }, |
| { |
| "epoch": 4.980253164556962, |
| "grad_norm": 0.2565064312660701, |
| "learning_rate": 3.221538523412449e-10, |
| "loss": 0.1926, |
| "step": 1228 |
| }, |
| { |
| "epoch": 4.984303797468354, |
| "grad_norm": 0.25697630418562484, |
| "learning_rate": 8.053862524670663e-11, |
| "loss": 0.169, |
| "step": 1229 |
| }, |
| { |
| "epoch": 4.988354430379747, |
| "grad_norm": 0.2610147404934558, |
| "learning_rate": 0.0, |
| "loss": 0.1747, |
| "step": 1230 |
| }, |
| { |
| "epoch": 4.988354430379747, |
| "step": 1230, |
| "total_flos": 2.1322399681253212e+18, |
| "train_loss": 0.42330787913586065, |
| "train_runtime": 80213.6281, |
| "train_samples_per_second": 1.97, |
| "train_steps_per_second": 0.015 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1230, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.1322399681253212e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |