| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 6.259780907668231, |
| "global_step": 500, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.01, |
| "learning_rate": 9.992088607594937e-05, |
| "loss": 3.0081, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 9.984177215189874e-05, |
| "loss": 2.8615, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 9.97626582278481e-05, |
| "loss": 2.8922, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 9.968354430379747e-05, |
| "loss": 2.7182, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 9.960443037974683e-05, |
| "loss": 2.8407, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 9.952531645569621e-05, |
| "loss": 2.7972, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 9.944620253164557e-05, |
| "loss": 2.7558, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 9.936708860759493e-05, |
| "loss": 2.8176, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 9.928797468354431e-05, |
| "loss": 2.7444, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 9.920886075949367e-05, |
| "loss": 2.8396, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 9.912974683544304e-05, |
| "loss": 2.7709, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 9.90506329113924e-05, |
| "loss": 2.9788, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 9.897151898734177e-05, |
| "loss": 2.8087, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 9.889240506329115e-05, |
| "loss": 2.6894, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 9.881329113924051e-05, |
| "loss": 2.7998, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 9.873417721518988e-05, |
| "loss": 2.747, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 9.865506329113925e-05, |
| "loss": 2.788, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 9.857594936708862e-05, |
| "loss": 2.7536, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 9.849683544303798e-05, |
| "loss": 2.7604, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 9.841772151898735e-05, |
| "loss": 2.6864, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 9.833860759493672e-05, |
| "loss": 2.7934, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 9.825949367088608e-05, |
| "loss": 2.6659, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 9.818037974683544e-05, |
| "loss": 2.7234, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 9.810126582278482e-05, |
| "loss": 2.7003, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 9.802215189873418e-05, |
| "loss": 2.828, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 9.794303797468355e-05, |
| "loss": 2.668, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 9.786392405063292e-05, |
| "loss": 2.7377, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 9.778481012658228e-05, |
| "loss": 2.7635, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 9.770569620253165e-05, |
| "loss": 2.7134, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 9.762658227848101e-05, |
| "loss": 2.6454, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 9.754746835443039e-05, |
| "loss": 2.6397, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 9.746835443037975e-05, |
| "loss": 2.7034, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 9.738924050632911e-05, |
| "loss": 2.7257, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 9.731012658227849e-05, |
| "loss": 2.6564, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 9.723101265822785e-05, |
| "loss": 2.7504, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 9.715189873417721e-05, |
| "loss": 2.597, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 9.707278481012658e-05, |
| "loss": 2.6232, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 9.699367088607595e-05, |
| "loss": 2.6682, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 9.691455696202532e-05, |
| "loss": 2.6603, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 9.683544303797469e-05, |
| "loss": 2.6109, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 9.675632911392405e-05, |
| "loss": 2.6726, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 9.667721518987343e-05, |
| "loss": 2.7147, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 9.65981012658228e-05, |
| "loss": 2.6925, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 9.651898734177216e-05, |
| "loss": 2.7408, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 9.643987341772153e-05, |
| "loss": 2.7109, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 9.63607594936709e-05, |
| "loss": 2.732, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 9.628164556962026e-05, |
| "loss": 2.7063, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 9.620253164556962e-05, |
| "loss": 2.7112, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 9.6123417721519e-05, |
| "loss": 2.5878, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 9.604430379746836e-05, |
| "loss": 2.6371, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 9.596518987341772e-05, |
| "loss": 2.6441, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 9.58860759493671e-05, |
| "loss": 2.6282, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 9.580696202531646e-05, |
| "loss": 2.608, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.572784810126582e-05, |
| "loss": 2.6978, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.564873417721519e-05, |
| "loss": 2.7785, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 9.556962025316456e-05, |
| "loss": 2.6797, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 9.549050632911393e-05, |
| "loss": 2.7262, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 9.541139240506329e-05, |
| "loss": 2.6339, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 9.533227848101267e-05, |
| "loss": 2.6378, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 9.525316455696203e-05, |
| "loss": 2.7182, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 9.517405063291139e-05, |
| "loss": 2.6166, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 9.509493670886075e-05, |
| "loss": 2.7077, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 9.501582278481013e-05, |
| "loss": 2.6853, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 9.493670886075949e-05, |
| "loss": 2.7928, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 9.485759493670886e-05, |
| "loss": 2.7323, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 9.477848101265823e-05, |
| "loss": 2.6468, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 9.469936708860761e-05, |
| "loss": 2.6338, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 9.462025316455697e-05, |
| "loss": 2.6622, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 9.454113924050633e-05, |
| "loss": 2.679, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 9.446202531645571e-05, |
| "loss": 2.7418, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 9.438291139240507e-05, |
| "loss": 2.6329, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 9.430379746835444e-05, |
| "loss": 2.7766, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.42246835443038e-05, |
| "loss": 2.6382, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 9.414556962025317e-05, |
| "loss": 2.7261, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 9.406645569620254e-05, |
| "loss": 2.732, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 9.39873417721519e-05, |
| "loss": 2.7585, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 9.390822784810128e-05, |
| "loss": 2.7085, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 9.382911392405064e-05, |
| "loss": 2.7088, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 9.375e-05, |
| "loss": 2.6059, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 9.367088607594936e-05, |
| "loss": 2.6854, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 9.359177215189874e-05, |
| "loss": 2.5747, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 9.35126582278481e-05, |
| "loss": 2.695, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 9.343354430379747e-05, |
| "loss": 2.6193, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 9.335443037974684e-05, |
| "loss": 2.5892, |
| "step": 84 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 9.32753164556962e-05, |
| "loss": 2.5214, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 9.319620253164557e-05, |
| "loss": 2.6025, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 9.311708860759493e-05, |
| "loss": 2.6703, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 9.303797468354431e-05, |
| "loss": 2.6365, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 9.295886075949367e-05, |
| "loss": 2.5981, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 9.287974683544303e-05, |
| "loss": 2.6546, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 9.280063291139241e-05, |
| "loss": 2.6226, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 9.272151898734177e-05, |
| "loss": 2.5896, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 9.264240506329115e-05, |
| "loss": 2.6591, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 9.256329113924051e-05, |
| "loss": 2.5707, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 9.248417721518989e-05, |
| "loss": 2.781, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 9.240506329113925e-05, |
| "loss": 2.5795, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 9.232594936708861e-05, |
| "loss": 2.6313, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 9.224683544303798e-05, |
| "loss": 2.683, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 9.216772151898735e-05, |
| "loss": 2.6623, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 9.208860759493671e-05, |
| "loss": 2.7411, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 9.200949367088608e-05, |
| "loss": 2.6245, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 9.193037974683545e-05, |
| "loss": 2.6614, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 9.185126582278482e-05, |
| "loss": 2.6445, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 9.177215189873418e-05, |
| "loss": 2.5834, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 9.169303797468354e-05, |
| "loss": 2.6261, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 9.161392405063292e-05, |
| "loss": 2.6625, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 9.153481012658228e-05, |
| "loss": 2.6882, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 9.145569620253164e-05, |
| "loss": 2.6707, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 9.137658227848102e-05, |
| "loss": 2.6243, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 9.129746835443038e-05, |
| "loss": 2.6536, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 9.121835443037975e-05, |
| "loss": 2.6165, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 9.113924050632912e-05, |
| "loss": 2.6707, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 9.106012658227849e-05, |
| "loss": 2.5652, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 9.098101265822785e-05, |
| "loss": 2.6854, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 9.090189873417721e-05, |
| "loss": 2.6967, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 9.082278481012659e-05, |
| "loss": 2.609, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 9.074367088607595e-05, |
| "loss": 2.6294, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 9.066455696202531e-05, |
| "loss": 2.6093, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 9.058544303797469e-05, |
| "loss": 2.6536, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 9.050632911392407e-05, |
| "loss": 2.6064, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 9.042721518987343e-05, |
| "loss": 2.5646, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 9.034810126582279e-05, |
| "loss": 2.68, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 9.026898734177215e-05, |
| "loss": 2.6302, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 9.018987341772153e-05, |
| "loss": 2.6241, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 9.011075949367089e-05, |
| "loss": 2.5816, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 9.003164556962026e-05, |
| "loss": 2.6094, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 8.995253164556963e-05, |
| "loss": 2.5592, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 8.9873417721519e-05, |
| "loss": 2.7118, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 8.979430379746836e-05, |
| "loss": 2.6414, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 8.971518987341772e-05, |
| "loss": 2.644, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 8.96360759493671e-05, |
| "loss": 2.6476, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 8.955696202531646e-05, |
| "loss": 2.6763, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 8.947784810126582e-05, |
| "loss": 2.6349, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 8.93987341772152e-05, |
| "loss": 2.6594, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 8.931962025316456e-05, |
| "loss": 2.7204, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 8.924050632911392e-05, |
| "loss": 2.6037, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 8.91613924050633e-05, |
| "loss": 2.5948, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 8.908227848101266e-05, |
| "loss": 2.653, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 8.900316455696203e-05, |
| "loss": 2.6549, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 8.892405063291139e-05, |
| "loss": 2.602, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 8.884493670886076e-05, |
| "loss": 2.5651, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 8.876582278481013e-05, |
| "loss": 2.6517, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 8.868670886075949e-05, |
| "loss": 2.6373, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 8.860759493670887e-05, |
| "loss": 2.6907, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 8.852848101265824e-05, |
| "loss": 2.5885, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 8.84493670886076e-05, |
| "loss": 2.579, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 8.837025316455697e-05, |
| "loss": 2.7174, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 8.829113924050633e-05, |
| "loss": 2.6545, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 8.821202531645571e-05, |
| "loss": 2.659, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 8.813291139240507e-05, |
| "loss": 2.6381, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 8.805379746835443e-05, |
| "loss": 2.595, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 8.797468354430381e-05, |
| "loss": 2.6264, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 8.789556962025317e-05, |
| "loss": 2.6035, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 8.781645569620253e-05, |
| "loss": 2.6478, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 8.773734177215191e-05, |
| "loss": 2.6257, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 8.765822784810127e-05, |
| "loss": 2.5814, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 8.757911392405064e-05, |
| "loss": 2.585, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 8.75e-05, |
| "loss": 2.6877, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 8.742088607594938e-05, |
| "loss": 2.5594, |
| "step": 159 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 8.734177215189874e-05, |
| "loss": 2.665, |
| "step": 160 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 8.72626582278481e-05, |
| "loss": 2.6302, |
| "step": 161 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 8.718354430379748e-05, |
| "loss": 2.5499, |
| "step": 162 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 8.710443037974684e-05, |
| "loss": 2.5304, |
| "step": 163 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 8.70253164556962e-05, |
| "loss": 2.5099, |
| "step": 164 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 8.694620253164557e-05, |
| "loss": 2.5894, |
| "step": 165 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 8.686708860759494e-05, |
| "loss": 2.5542, |
| "step": 166 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 8.67879746835443e-05, |
| "loss": 2.5983, |
| "step": 167 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 8.670886075949367e-05, |
| "loss": 2.6068, |
| "step": 168 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 8.662974683544304e-05, |
| "loss": 2.6554, |
| "step": 169 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 8.65506329113924e-05, |
| "loss": 2.6135, |
| "step": 170 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 8.647151898734177e-05, |
| "loss": 2.5338, |
| "step": 171 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 8.639240506329115e-05, |
| "loss": 2.5368, |
| "step": 172 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 8.631329113924052e-05, |
| "loss": 2.5368, |
| "step": 173 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 8.623417721518988e-05, |
| "loss": 2.5696, |
| "step": 174 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 8.615506329113925e-05, |
| "loss": 2.5253, |
| "step": 175 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 8.607594936708861e-05, |
| "loss": 2.4822, |
| "step": 176 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 8.599683544303799e-05, |
| "loss": 2.5394, |
| "step": 177 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 8.591772151898735e-05, |
| "loss": 2.6708, |
| "step": 178 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 8.583860759493671e-05, |
| "loss": 2.5803, |
| "step": 179 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 8.575949367088609e-05, |
| "loss": 2.5357, |
| "step": 180 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 8.568037974683545e-05, |
| "loss": 2.5887, |
| "step": 181 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 8.560126582278481e-05, |
| "loss": 2.6092, |
| "step": 182 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 8.552215189873418e-05, |
| "loss": 2.5351, |
| "step": 183 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 8.544303797468355e-05, |
| "loss": 2.518, |
| "step": 184 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 8.536392405063292e-05, |
| "loss": 2.6434, |
| "step": 185 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 8.528481012658228e-05, |
| "loss": 2.5951, |
| "step": 186 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 8.520569620253165e-05, |
| "loss": 2.6177, |
| "step": 187 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 8.512658227848102e-05, |
| "loss": 2.5993, |
| "step": 188 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 8.504746835443038e-05, |
| "loss": 2.487, |
| "step": 189 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 8.496835443037974e-05, |
| "loss": 2.5761, |
| "step": 190 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 8.488924050632912e-05, |
| "loss": 2.599, |
| "step": 191 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 8.481012658227848e-05, |
| "loss": 2.5772, |
| "step": 192 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 8.473101265822784e-05, |
| "loss": 2.5415, |
| "step": 193 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 8.465189873417722e-05, |
| "loss": 2.5598, |
| "step": 194 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 8.457278481012658e-05, |
| "loss": 2.5532, |
| "step": 195 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 8.449367088607595e-05, |
| "loss": 2.539, |
| "step": 196 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 8.441455696202531e-05, |
| "loss": 2.5363, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 8.43354430379747e-05, |
| "loss": 2.5749, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 8.425632911392406e-05, |
| "loss": 2.5694, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 8.417721518987342e-05, |
| "loss": 2.5436, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 8.409810126582279e-05, |
| "loss": 2.5848, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 8.401898734177216e-05, |
| "loss": 2.6092, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 8.393987341772153e-05, |
| "loss": 2.6622, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 8.386075949367089e-05, |
| "loss": 2.5721, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 8.378164556962027e-05, |
| "loss": 2.6728, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 8.370253164556963e-05, |
| "loss": 2.6081, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 8.362341772151899e-05, |
| "loss": 2.6626, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 8.354430379746835e-05, |
| "loss": 2.6292, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 8.346518987341773e-05, |
| "loss": 2.5524, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 8.33860759493671e-05, |
| "loss": 2.5729, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 8.330696202531646e-05, |
| "loss": 2.5565, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 8.322784810126583e-05, |
| "loss": 2.6039, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 8.31487341772152e-05, |
| "loss": 2.5557, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 8.306962025316456e-05, |
| "loss": 2.5331, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 8.299050632911392e-05, |
| "loss": 2.6137, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 8.29113924050633e-05, |
| "loss": 2.5428, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 8.283227848101266e-05, |
| "loss": 2.5655, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 8.275316455696202e-05, |
| "loss": 2.5869, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 8.26740506329114e-05, |
| "loss": 2.6131, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.259493670886076e-05, |
| "loss": 2.5545, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.251582278481012e-05, |
| "loss": 2.6437, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.243670886075949e-05, |
| "loss": 2.5611, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.235759493670886e-05, |
| "loss": 2.5757, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.227848101265824e-05, |
| "loss": 2.6145, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.21993670886076e-05, |
| "loss": 2.573, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.212025316455697e-05, |
| "loss": 2.6449, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.204113924050634e-05, |
| "loss": 2.5274, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.19620253164557e-05, |
| "loss": 2.5667, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.188291139240507e-05, |
| "loss": 2.5355, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.180379746835444e-05, |
| "loss": 2.58, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.17246835443038e-05, |
| "loss": 2.6295, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.164556962025317e-05, |
| "loss": 2.5535, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.156645569620253e-05, |
| "loss": 2.6072, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.148734177215191e-05, |
| "loss": 2.5914, |
| "step": 234 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.140822784810127e-05, |
| "loss": 2.5315, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.132911392405063e-05, |
| "loss": 2.5623, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.125000000000001e-05, |
| "loss": 2.5596, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.117088607594937e-05, |
| "loss": 2.563, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.109177215189874e-05, |
| "loss": 2.5054, |
| "step": 239 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 8.10126582278481e-05, |
| "loss": 2.5953, |
| "step": 240 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 8.093354430379747e-05, |
| "loss": 2.4561, |
| "step": 241 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 8.085443037974684e-05, |
| "loss": 2.5268, |
| "step": 242 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 8.07753164556962e-05, |
| "loss": 2.5851, |
| "step": 243 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 8.069620253164558e-05, |
| "loss": 2.5286, |
| "step": 244 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 8.061708860759494e-05, |
| "loss": 2.5408, |
| "step": 245 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 8.05379746835443e-05, |
| "loss": 2.5685, |
| "step": 246 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 8.045886075949366e-05, |
| "loss": 2.453, |
| "step": 247 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 8.037974683544304e-05, |
| "loss": 2.5055, |
| "step": 248 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 8.03006329113924e-05, |
| "loss": 2.4933, |
| "step": 249 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 8.022151898734177e-05, |
| "loss": 2.5857, |
| "step": 250 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 8.014240506329114e-05, |
| "loss": 2.4926, |
| "step": 251 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 8.006329113924052e-05, |
| "loss": 2.4991, |
| "step": 252 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.998417721518988e-05, |
| "loss": 2.492, |
| "step": 253 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.990506329113924e-05, |
| "loss": 2.5084, |
| "step": 254 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.982594936708862e-05, |
| "loss": 2.5279, |
| "step": 255 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.974683544303798e-05, |
| "loss": 2.4119, |
| "step": 256 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.966772151898735e-05, |
| "loss": 2.496, |
| "step": 257 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.958860759493671e-05, |
| "loss": 2.5748, |
| "step": 258 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.950949367088609e-05, |
| "loss": 2.4665, |
| "step": 259 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 7.943037974683545e-05, |
| "loss": 2.4504, |
| "step": 260 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 7.935126582278481e-05, |
| "loss": 2.5703, |
| "step": 261 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 7.927215189873419e-05, |
| "loss": 2.5075, |
| "step": 262 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 7.919303797468355e-05, |
| "loss": 2.5279, |
| "step": 263 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 7.911392405063291e-05, |
| "loss": 2.4899, |
| "step": 264 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 7.903481012658228e-05, |
| "loss": 2.4601, |
| "step": 265 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 7.895569620253165e-05, |
| "loss": 2.6405, |
| "step": 266 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 7.887658227848101e-05, |
| "loss": 2.4824, |
| "step": 267 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 7.879746835443038e-05, |
| "loss": 2.5388, |
| "step": 268 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 7.871835443037975e-05, |
| "loss": 2.5799, |
| "step": 269 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 7.863924050632912e-05, |
| "loss": 2.5451, |
| "step": 270 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 7.856012658227848e-05, |
| "loss": 2.4391, |
| "step": 271 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 7.848101265822784e-05, |
| "loss": 2.4616, |
| "step": 272 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 7.840189873417722e-05, |
| "loss": 2.5493, |
| "step": 273 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 7.832278481012658e-05, |
| "loss": 2.5831, |
| "step": 274 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 7.824367088607594e-05, |
| "loss": 2.5137, |
| "step": 275 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 7.816455696202532e-05, |
| "loss": 2.4994, |
| "step": 276 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 7.80854430379747e-05, |
| "loss": 2.5692, |
| "step": 277 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 7.800632911392406e-05, |
| "loss": 2.5474, |
| "step": 278 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 7.792721518987342e-05, |
| "loss": 2.551, |
| "step": 279 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 7.78481012658228e-05, |
| "loss": 2.5591, |
| "step": 280 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 7.776898734177216e-05, |
| "loss": 2.5553, |
| "step": 281 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 7.768987341772152e-05, |
| "loss": 2.4976, |
| "step": 282 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 7.761075949367089e-05, |
| "loss": 2.4964, |
| "step": 283 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 7.753164556962026e-05, |
| "loss": 2.4731, |
| "step": 284 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 7.745253164556963e-05, |
| "loss": 2.5036, |
| "step": 285 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 7.737341772151899e-05, |
| "loss": 2.5074, |
| "step": 286 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 7.729430379746836e-05, |
| "loss": 2.5307, |
| "step": 287 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 7.721518987341773e-05, |
| "loss": 2.5479, |
| "step": 288 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 7.713607594936709e-05, |
| "loss": 2.5085, |
| "step": 289 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 7.705696202531645e-05, |
| "loss": 2.5537, |
| "step": 290 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 7.697784810126583e-05, |
| "loss": 2.6198, |
| "step": 291 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 7.689873417721519e-05, |
| "loss": 2.5547, |
| "step": 292 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 7.681962025316455e-05, |
| "loss": 2.4875, |
| "step": 293 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 7.674050632911393e-05, |
| "loss": 2.4859, |
| "step": 294 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 7.66613924050633e-05, |
| "loss": 2.5, |
| "step": 295 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 7.658227848101266e-05, |
| "loss": 2.4782, |
| "step": 296 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 7.650316455696202e-05, |
| "loss": 2.5015, |
| "step": 297 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 7.64240506329114e-05, |
| "loss": 2.4838, |
| "step": 298 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 7.634493670886076e-05, |
| "loss": 2.4972, |
| "step": 299 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 7.626582278481012e-05, |
| "loss": 2.5914, |
| "step": 300 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 7.61867088607595e-05, |
| "loss": 2.6018, |
| "step": 301 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 7.610759493670886e-05, |
| "loss": 2.5574, |
| "step": 302 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 7.602848101265824e-05, |
| "loss": 2.5567, |
| "step": 303 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 7.59493670886076e-05, |
| "loss": 2.519, |
| "step": 304 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 7.587025316455698e-05, |
| "loss": 2.6013, |
| "step": 305 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 7.579113924050634e-05, |
| "loss": 2.5606, |
| "step": 306 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 7.57120253164557e-05, |
| "loss": 2.5331, |
| "step": 307 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 7.563291139240506e-05, |
| "loss": 2.5034, |
| "step": 308 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 7.555379746835444e-05, |
| "loss": 2.5237, |
| "step": 309 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 7.54746835443038e-05, |
| "loss": 2.4697, |
| "step": 310 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 7.539556962025317e-05, |
| "loss": 2.5559, |
| "step": 311 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 7.531645569620254e-05, |
| "loss": 2.5485, |
| "step": 312 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 7.52373417721519e-05, |
| "loss": 2.5404, |
| "step": 313 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 7.515822784810127e-05, |
| "loss": 2.5336, |
| "step": 314 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 7.507911392405063e-05, |
| "loss": 2.4819, |
| "step": 315 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 7.500000000000001e-05, |
| "loss": 2.5207, |
| "step": 316 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 7.492088607594937e-05, |
| "loss": 2.5366, |
| "step": 317 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 7.484177215189873e-05, |
| "loss": 2.5074, |
| "step": 318 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 7.476265822784811e-05, |
| "loss": 2.5523, |
| "step": 319 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 7.468354430379747e-05, |
| "loss": 2.5471, |
| "step": 320 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 7.460443037974683e-05, |
| "loss": 2.4375, |
| "step": 321 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 7.45253164556962e-05, |
| "loss": 2.5254, |
| "step": 322 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 7.444620253164557e-05, |
| "loss": 2.4685, |
| "step": 323 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 7.436708860759494e-05, |
| "loss": 2.3927, |
| "step": 324 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 7.42879746835443e-05, |
| "loss": 2.467, |
| "step": 325 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 7.420886075949368e-05, |
| "loss": 2.4507, |
| "step": 326 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 7.412974683544304e-05, |
| "loss": 2.5232, |
| "step": 327 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 7.40506329113924e-05, |
| "loss": 2.5789, |
| "step": 328 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 7.397151898734176e-05, |
| "loss": 2.3973, |
| "step": 329 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 7.389240506329115e-05, |
| "loss": 2.5065, |
| "step": 330 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 7.381329113924052e-05, |
| "loss": 2.4783, |
| "step": 331 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 7.373417721518988e-05, |
| "loss": 2.4642, |
| "step": 332 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 7.365506329113924e-05, |
| "loss": 2.4137, |
| "step": 333 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 7.357594936708862e-05, |
| "loss": 2.4881, |
| "step": 334 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 7.349683544303798e-05, |
| "loss": 2.5382, |
| "step": 335 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 7.341772151898734e-05, |
| "loss": 2.4962, |
| "step": 336 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 7.333860759493672e-05, |
| "loss": 2.4236, |
| "step": 337 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 7.325949367088608e-05, |
| "loss": 2.4238, |
| "step": 338 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 7.318037974683545e-05, |
| "loss": 2.4651, |
| "step": 339 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 7.310126582278481e-05, |
| "loss": 2.434, |
| "step": 340 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 7.302215189873418e-05, |
| "loss": 2.4445, |
| "step": 341 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 7.294303797468355e-05, |
| "loss": 2.4645, |
| "step": 342 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 7.286392405063291e-05, |
| "loss": 2.4203, |
| "step": 343 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 7.278481012658229e-05, |
| "loss": 2.4255, |
| "step": 344 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 7.270569620253165e-05, |
| "loss": 2.5024, |
| "step": 345 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 7.262658227848101e-05, |
| "loss": 2.4631, |
| "step": 346 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 7.254746835443037e-05, |
| "loss": 2.4118, |
| "step": 347 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 7.246835443037975e-05, |
| "loss": 2.4434, |
| "step": 348 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 7.238924050632911e-05, |
| "loss": 2.4452, |
| "step": 349 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 7.231012658227848e-05, |
| "loss": 2.4091, |
| "step": 350 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 7.223101265822785e-05, |
| "loss": 2.4788, |
| "step": 351 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 7.215189873417722e-05, |
| "loss": 2.4385, |
| "step": 352 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 7.207278481012658e-05, |
| "loss": 2.4585, |
| "step": 353 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 7.199367088607595e-05, |
| "loss": 2.5625, |
| "step": 354 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 7.191455696202532e-05, |
| "loss": 2.4731, |
| "step": 355 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 7.18354430379747e-05, |
| "loss": 2.5434, |
| "step": 356 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 7.175632911392406e-05, |
| "loss": 2.4586, |
| "step": 357 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 7.167721518987342e-05, |
| "loss": 2.4995, |
| "step": 358 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 7.15981012658228e-05, |
| "loss": 2.4497, |
| "step": 359 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 7.151898734177216e-05, |
| "loss": 2.4311, |
| "step": 360 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 7.143987341772152e-05, |
| "loss": 2.5488, |
| "step": 361 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 7.13607594936709e-05, |
| "loss": 2.4446, |
| "step": 362 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 7.128164556962026e-05, |
| "loss": 2.5131, |
| "step": 363 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 7.120253164556962e-05, |
| "loss": 2.4211, |
| "step": 364 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 7.112341772151899e-05, |
| "loss": 2.5215, |
| "step": 365 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 7.104430379746836e-05, |
| "loss": 2.3838, |
| "step": 366 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 7.096518987341772e-05, |
| "loss": 2.4798, |
| "step": 367 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 7.088607594936709e-05, |
| "loss": 2.464, |
| "step": 368 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 7.080696202531646e-05, |
| "loss": 2.4536, |
| "step": 369 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 7.072784810126583e-05, |
| "loss": 2.5556, |
| "step": 370 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 7.064873417721519e-05, |
| "loss": 2.5225, |
| "step": 371 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 7.056962025316457e-05, |
| "loss": 2.5068, |
| "step": 372 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 7.049050632911393e-05, |
| "loss": 2.5099, |
| "step": 373 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 7.041139240506329e-05, |
| "loss": 2.4494, |
| "step": 374 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 7.033227848101265e-05, |
| "loss": 2.4185, |
| "step": 375 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 7.025316455696203e-05, |
| "loss": 2.4423, |
| "step": 376 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 7.017405063291139e-05, |
| "loss": 2.534, |
| "step": 377 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 7.009493670886076e-05, |
| "loss": 2.4981, |
| "step": 378 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 7.001582278481013e-05, |
| "loss": 2.4773, |
| "step": 379 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 6.99367088607595e-05, |
| "loss": 2.4654, |
| "step": 380 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 6.985759493670886e-05, |
| "loss": 2.5398, |
| "step": 381 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 6.977848101265823e-05, |
| "loss": 2.5319, |
| "step": 382 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 6.96993670886076e-05, |
| "loss": 2.4655, |
| "step": 383 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 6.962025316455697e-05, |
| "loss": 2.4246, |
| "step": 384 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 6.954113924050634e-05, |
| "loss": 2.4681, |
| "step": 385 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.94620253164557e-05, |
| "loss": 2.5336, |
| "step": 386 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 6.938291139240507e-05, |
| "loss": 2.4236, |
| "step": 387 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 6.930379746835444e-05, |
| "loss": 2.4617, |
| "step": 388 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 6.92246835443038e-05, |
| "loss": 2.4018, |
| "step": 389 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 6.914556962025316e-05, |
| "loss": 2.4973, |
| "step": 390 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 6.906645569620254e-05, |
| "loss": 2.511, |
| "step": 391 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 6.89873417721519e-05, |
| "loss": 2.5253, |
| "step": 392 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 6.890822784810127e-05, |
| "loss": 2.3835, |
| "step": 393 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 6.882911392405064e-05, |
| "loss": 2.4304, |
| "step": 394 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 6.875e-05, |
| "loss": 2.5319, |
| "step": 395 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 6.867088607594937e-05, |
| "loss": 2.4936, |
| "step": 396 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 6.859177215189874e-05, |
| "loss": 2.4887, |
| "step": 397 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 6.85126582278481e-05, |
| "loss": 2.5686, |
| "step": 398 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 6.843354430379747e-05, |
| "loss": 2.3999, |
| "step": 399 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 6.835443037974683e-05, |
| "loss": 2.3567, |
| "step": 400 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 6.827531645569621e-05, |
| "loss": 2.3757, |
| "step": 401 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 6.819620253164557e-05, |
| "loss": 2.3118, |
| "step": 402 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 6.811708860759493e-05, |
| "loss": 2.4044, |
| "step": 403 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 6.803797468354431e-05, |
| "loss": 2.5191, |
| "step": 404 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 6.795886075949367e-05, |
| "loss": 2.3796, |
| "step": 405 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 6.787974683544304e-05, |
| "loss": 2.3092, |
| "step": 406 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 6.78006329113924e-05, |
| "loss": 2.4262, |
| "step": 407 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 6.772151898734177e-05, |
| "loss": 2.4117, |
| "step": 408 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 6.764240506329115e-05, |
| "loss": 2.4613, |
| "step": 409 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 6.756329113924051e-05, |
| "loss": 2.4062, |
| "step": 410 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 6.748417721518988e-05, |
| "loss": 2.4383, |
| "step": 411 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 6.740506329113925e-05, |
| "loss": 2.4523, |
| "step": 412 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 6.732594936708862e-05, |
| "loss": 2.4081, |
| "step": 413 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 6.724683544303798e-05, |
| "loss": 2.4348, |
| "step": 414 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 6.716772151898735e-05, |
| "loss": 2.4608, |
| "step": 415 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 6.708860759493672e-05, |
| "loss": 2.4999, |
| "step": 416 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 6.700949367088608e-05, |
| "loss": 2.3906, |
| "step": 417 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 6.693037974683544e-05, |
| "loss": 2.3757, |
| "step": 418 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 6.685126582278482e-05, |
| "loss": 2.4686, |
| "step": 419 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 6.677215189873418e-05, |
| "loss": 2.4821, |
| "step": 420 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 6.669303797468354e-05, |
| "loss": 2.423, |
| "step": 421 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 6.661392405063292e-05, |
| "loss": 2.3645, |
| "step": 422 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 6.653481012658228e-05, |
| "loss": 2.452, |
| "step": 423 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 6.645569620253165e-05, |
| "loss": 2.3124, |
| "step": 424 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 6.637658227848101e-05, |
| "loss": 2.4703, |
| "step": 425 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 6.629746835443039e-05, |
| "loss": 2.3629, |
| "step": 426 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 6.621835443037975e-05, |
| "loss": 2.433, |
| "step": 427 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 6.613924050632911e-05, |
| "loss": 2.3075, |
| "step": 428 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 6.606012658227849e-05, |
| "loss": 2.4005, |
| "step": 429 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 6.598101265822785e-05, |
| "loss": 2.3672, |
| "step": 430 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 6.590189873417721e-05, |
| "loss": 2.4682, |
| "step": 431 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 6.582278481012658e-05, |
| "loss": 2.4498, |
| "step": 432 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 6.574367088607595e-05, |
| "loss": 2.4776, |
| "step": 433 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 6.566455696202531e-05, |
| "loss": 2.387, |
| "step": 434 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 6.558544303797469e-05, |
| "loss": 2.4102, |
| "step": 435 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 6.550632911392405e-05, |
| "loss": 2.4719, |
| "step": 436 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 6.542721518987343e-05, |
| "loss": 2.4796, |
| "step": 437 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 6.534810126582279e-05, |
| "loss": 2.3951, |
| "step": 438 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 6.526898734177216e-05, |
| "loss": 2.4206, |
| "step": 439 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 6.518987341772153e-05, |
| "loss": 2.4221, |
| "step": 440 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 6.51107594936709e-05, |
| "loss": 2.4705, |
| "step": 441 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 6.503164556962026e-05, |
| "loss": 2.4581, |
| "step": 442 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 6.495253164556962e-05, |
| "loss": 2.35, |
| "step": 443 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 6.4873417721519e-05, |
| "loss": 2.3569, |
| "step": 444 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 6.479430379746836e-05, |
| "loss": 2.4375, |
| "step": 445 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 6.471518987341772e-05, |
| "loss": 2.4266, |
| "step": 446 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 6.46360759493671e-05, |
| "loss": 2.5028, |
| "step": 447 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 6.455696202531646e-05, |
| "loss": 2.3958, |
| "step": 448 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 6.447784810126582e-05, |
| "loss": 2.3978, |
| "step": 449 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 6.439873417721519e-05, |
| "loss": 2.4629, |
| "step": 450 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 6.431962025316456e-05, |
| "loss": 2.4269, |
| "step": 451 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 6.424050632911393e-05, |
| "loss": 2.3565, |
| "step": 452 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 6.416139240506329e-05, |
| "loss": 2.4155, |
| "step": 453 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 6.408227848101266e-05, |
| "loss": 2.4556, |
| "step": 454 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 6.400316455696203e-05, |
| "loss": 2.4244, |
| "step": 455 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 6.392405063291139e-05, |
| "loss": 2.2978, |
| "step": 456 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 6.384493670886075e-05, |
| "loss": 2.4742, |
| "step": 457 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 6.376582278481013e-05, |
| "loss": 2.3629, |
| "step": 458 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 6.368670886075949e-05, |
| "loss": 2.4468, |
| "step": 459 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 6.360759493670885e-05, |
| "loss": 2.4993, |
| "step": 460 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 6.352848101265823e-05, |
| "loss": 2.4546, |
| "step": 461 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 6.344936708860761e-05, |
| "loss": 2.442, |
| "step": 462 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 6.337025316455697e-05, |
| "loss": 2.4744, |
| "step": 463 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 6.329113924050633e-05, |
| "loss": 2.5044, |
| "step": 464 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 6.321202531645571e-05, |
| "loss": 2.4399, |
| "step": 465 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 6.313291139240507e-05, |
| "loss": 2.3402, |
| "step": 466 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 6.305379746835443e-05, |
| "loss": 2.432, |
| "step": 467 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 6.29746835443038e-05, |
| "loss": 2.3745, |
| "step": 468 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 6.289556962025317e-05, |
| "loss": 2.4026, |
| "step": 469 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 6.281645569620254e-05, |
| "loss": 2.3891, |
| "step": 470 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 6.27373417721519e-05, |
| "loss": 2.4781, |
| "step": 471 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 6.265822784810128e-05, |
| "loss": 2.468, |
| "step": 472 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 6.257911392405064e-05, |
| "loss": 2.4899, |
| "step": 473 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 6.25e-05, |
| "loss": 2.4882, |
| "step": 474 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 6.242088607594936e-05, |
| "loss": 2.3793, |
| "step": 475 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 6.234177215189874e-05, |
| "loss": 2.4381, |
| "step": 476 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 6.22626582278481e-05, |
| "loss": 2.4239, |
| "step": 477 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 6.218354430379747e-05, |
| "loss": 2.4914, |
| "step": 478 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 6.210443037974684e-05, |
| "loss": 2.4158, |
| "step": 479 |
| }, |
| { |
| "epoch": 6.01, |
| "learning_rate": 6.20253164556962e-05, |
| "loss": 2.3808, |
| "step": 480 |
| }, |
| { |
| "epoch": 6.02, |
| "learning_rate": 6.194620253164557e-05, |
| "loss": 2.3322, |
| "step": 481 |
| }, |
| { |
| "epoch": 6.03, |
| "learning_rate": 6.186708860759493e-05, |
| "loss": 2.3791, |
| "step": 482 |
| }, |
| { |
| "epoch": 6.05, |
| "learning_rate": 6.178797468354431e-05, |
| "loss": 2.3428, |
| "step": 483 |
| }, |
| { |
| "epoch": 6.06, |
| "learning_rate": 6.170886075949367e-05, |
| "loss": 2.3487, |
| "step": 484 |
| }, |
| { |
| "epoch": 6.07, |
| "learning_rate": 6.162974683544303e-05, |
| "loss": 2.324, |
| "step": 485 |
| }, |
| { |
| "epoch": 6.08, |
| "learning_rate": 6.155063291139241e-05, |
| "loss": 2.439, |
| "step": 486 |
| }, |
| { |
| "epoch": 6.1, |
| "learning_rate": 6.147151898734177e-05, |
| "loss": 2.4255, |
| "step": 487 |
| }, |
| { |
| "epoch": 6.11, |
| "learning_rate": 6.139240506329115e-05, |
| "loss": 2.3432, |
| "step": 488 |
| }, |
| { |
| "epoch": 6.12, |
| "learning_rate": 6.131329113924051e-05, |
| "loss": 2.391, |
| "step": 489 |
| }, |
| { |
| "epoch": 6.13, |
| "learning_rate": 6.123417721518989e-05, |
| "loss": 2.3704, |
| "step": 490 |
| }, |
| { |
| "epoch": 6.15, |
| "learning_rate": 6.115506329113925e-05, |
| "loss": 2.2892, |
| "step": 491 |
| }, |
| { |
| "epoch": 6.16, |
| "learning_rate": 6.107594936708861e-05, |
| "loss": 2.3995, |
| "step": 492 |
| }, |
| { |
| "epoch": 6.17, |
| "learning_rate": 6.099683544303798e-05, |
| "loss": 2.3633, |
| "step": 493 |
| }, |
| { |
| "epoch": 6.18, |
| "learning_rate": 6.0917721518987345e-05, |
| "loss": 2.3074, |
| "step": 494 |
| }, |
| { |
| "epoch": 6.2, |
| "learning_rate": 6.0838607594936714e-05, |
| "loss": 2.3454, |
| "step": 495 |
| }, |
| { |
| "epoch": 6.21, |
| "learning_rate": 6.0759493670886084e-05, |
| "loss": 2.3856, |
| "step": 496 |
| }, |
| { |
| "epoch": 6.22, |
| "learning_rate": 6.0680379746835447e-05, |
| "loss": 2.3991, |
| "step": 497 |
| }, |
| { |
| "epoch": 6.23, |
| "learning_rate": 6.0601265822784816e-05, |
| "loss": 2.3348, |
| "step": 498 |
| }, |
| { |
| "epoch": 6.25, |
| "learning_rate": 6.052215189873418e-05, |
| "loss": 2.3506, |
| "step": 499 |
| }, |
| { |
| "epoch": 6.26, |
| "learning_rate": 6.044303797468355e-05, |
| "loss": 2.3444, |
| "step": 500 |
| } |
| ], |
| "max_steps": 1264, |
| "num_train_epochs": 16, |
| "total_flos": 1.3096315313163264e+17, |
| "trial_name": null, |
| "trial_params": null |
| } |