| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.996455696202531, |
| "eval_steps": 500, |
| "global_step": 1230, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004050632911392405, |
| "grad_norm": 6.738287542991625, |
| "learning_rate": 3.2520325203252037e-07, |
| "loss": 1.0806, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.00810126582278481, |
| "grad_norm": 6.542608559747063, |
| "learning_rate": 6.504065040650407e-07, |
| "loss": 1.0661, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.012151898734177215, |
| "grad_norm": 6.849511736909724, |
| "learning_rate": 9.75609756097561e-07, |
| "loss": 1.0719, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01620253164556962, |
| "grad_norm": 6.5977412268017375, |
| "learning_rate": 1.3008130081300815e-06, |
| "loss": 1.0781, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.020253164556962026, |
| "grad_norm": 6.263618019686107, |
| "learning_rate": 1.6260162601626018e-06, |
| "loss": 1.0626, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.02430379746835443, |
| "grad_norm": 6.032334944539777, |
| "learning_rate": 1.951219512195122e-06, |
| "loss": 1.0495, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.028354430379746835, |
| "grad_norm": 4.776175902053029, |
| "learning_rate": 2.2764227642276426e-06, |
| "loss": 1.0204, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.03240506329113924, |
| "grad_norm": 4.3922250895688215, |
| "learning_rate": 2.601626016260163e-06, |
| "loss": 0.9892, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.03645569620253165, |
| "grad_norm": 2.8742026793118804, |
| "learning_rate": 2.926829268292683e-06, |
| "loss": 0.9774, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.04050632911392405, |
| "grad_norm": 2.6130857953506834, |
| "learning_rate": 3.2520325203252037e-06, |
| "loss": 0.949, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.044556962025316456, |
| "grad_norm": 2.49296143052197, |
| "learning_rate": 3.577235772357724e-06, |
| "loss": 0.9399, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.04860759493670886, |
| "grad_norm": 4.029758883824027, |
| "learning_rate": 3.902439024390244e-06, |
| "loss": 0.9256, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.052658227848101265, |
| "grad_norm": 4.234670076909704, |
| "learning_rate": 4.227642276422765e-06, |
| "loss": 0.9457, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.05670886075949367, |
| "grad_norm": 4.017648020579245, |
| "learning_rate": 4.552845528455285e-06, |
| "loss": 0.9329, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.060759493670886074, |
| "grad_norm": 3.764562461788489, |
| "learning_rate": 4.8780487804878055e-06, |
| "loss": 0.9284, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.06481012658227848, |
| "grad_norm": 2.6325849071267404, |
| "learning_rate": 5.203252032520326e-06, |
| "loss": 0.8658, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.06886075949367089, |
| "grad_norm": 2.428609725531793, |
| "learning_rate": 5.528455284552846e-06, |
| "loss": 0.8589, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.0729113924050633, |
| "grad_norm": 2.0615523494199985, |
| "learning_rate": 5.853658536585366e-06, |
| "loss": 0.8494, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.0769620253164557, |
| "grad_norm": 1.5575108417499004, |
| "learning_rate": 6.178861788617887e-06, |
| "loss": 0.8331, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.0810126582278481, |
| "grad_norm": 1.2186724873428703, |
| "learning_rate": 6.504065040650407e-06, |
| "loss": 0.8115, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.08506329113924051, |
| "grad_norm": 1.2045366760059986, |
| "learning_rate": 6.829268292682928e-06, |
| "loss": 0.8061, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.08911392405063291, |
| "grad_norm": 1.2986035356703982, |
| "learning_rate": 7.154471544715448e-06, |
| "loss": 0.8068, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.09316455696202532, |
| "grad_norm": 1.0827354172912425, |
| "learning_rate": 7.4796747967479676e-06, |
| "loss": 0.7939, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.09721518987341772, |
| "grad_norm": 1.07595550643684, |
| "learning_rate": 7.804878048780489e-06, |
| "loss": 0.7628, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.10126582278481013, |
| "grad_norm": 0.921739799416865, |
| "learning_rate": 8.130081300813009e-06, |
| "loss": 0.7701, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.10531645569620253, |
| "grad_norm": 0.8913377658112851, |
| "learning_rate": 8.45528455284553e-06, |
| "loss": 0.7489, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.10936708860759493, |
| "grad_norm": 0.8740402299249534, |
| "learning_rate": 8.78048780487805e-06, |
| "loss": 0.7452, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.11341772151898734, |
| "grad_norm": 0.7181797967090451, |
| "learning_rate": 9.10569105691057e-06, |
| "loss": 0.7513, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.11746835443037974, |
| "grad_norm": 0.707043498369653, |
| "learning_rate": 9.43089430894309e-06, |
| "loss": 0.7426, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.12151898734177215, |
| "grad_norm": 0.7795075813118547, |
| "learning_rate": 9.756097560975611e-06, |
| "loss": 0.7458, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.12556962025316457, |
| "grad_norm": 0.7810740346700721, |
| "learning_rate": 1.008130081300813e-05, |
| "loss": 0.7124, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.12962025316455697, |
| "grad_norm": 0.6172063345243425, |
| "learning_rate": 1.0406504065040652e-05, |
| "loss": 0.7238, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.13367088607594937, |
| "grad_norm": 0.6425004905980444, |
| "learning_rate": 1.0731707317073172e-05, |
| "loss": 0.7271, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.13772151898734178, |
| "grad_norm": 0.7165347276544026, |
| "learning_rate": 1.1056910569105692e-05, |
| "loss": 0.7056, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.14177215189873418, |
| "grad_norm": 0.6097827791463603, |
| "learning_rate": 1.1382113821138213e-05, |
| "loss": 0.7213, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.1458227848101266, |
| "grad_norm": 0.605062322454569, |
| "learning_rate": 1.1707317073170731e-05, |
| "loss": 0.7057, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.149873417721519, |
| "grad_norm": 0.7460007400603945, |
| "learning_rate": 1.2032520325203254e-05, |
| "loss": 0.6943, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.1539240506329114, |
| "grad_norm": 0.7256310255941819, |
| "learning_rate": 1.2357723577235774e-05, |
| "loss": 0.7, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.1579746835443038, |
| "grad_norm": 0.6820598805760124, |
| "learning_rate": 1.2682926829268294e-05, |
| "loss": 0.6986, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.1620253164556962, |
| "grad_norm": 0.7328735649129293, |
| "learning_rate": 1.3008130081300815e-05, |
| "loss": 0.6809, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.1660759493670886, |
| "grad_norm": 0.6165706964216056, |
| "learning_rate": 1.3333333333333333e-05, |
| "loss": 0.6974, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.17012658227848101, |
| "grad_norm": 0.7094471943637539, |
| "learning_rate": 1.3658536585365855e-05, |
| "loss": 0.6835, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.17417721518987342, |
| "grad_norm": 0.6226239494461768, |
| "learning_rate": 1.3983739837398376e-05, |
| "loss": 0.69, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.17822784810126582, |
| "grad_norm": 0.740257135286521, |
| "learning_rate": 1.4308943089430896e-05, |
| "loss": 0.6991, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.18227848101265823, |
| "grad_norm": 0.5395674229923124, |
| "learning_rate": 1.4634146341463415e-05, |
| "loss": 0.6817, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.18632911392405063, |
| "grad_norm": 0.47712308592567626, |
| "learning_rate": 1.4959349593495935e-05, |
| "loss": 0.6649, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.19037974683544304, |
| "grad_norm": 0.5875877196983049, |
| "learning_rate": 1.528455284552846e-05, |
| "loss": 0.683, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.19443037974683544, |
| "grad_norm": 0.5768677954308158, |
| "learning_rate": 1.5609756097560978e-05, |
| "loss": 0.6637, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.19848101265822785, |
| "grad_norm": 0.4870277977051461, |
| "learning_rate": 1.5934959349593496e-05, |
| "loss": 0.6914, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.20253164556962025, |
| "grad_norm": 0.5999438342082523, |
| "learning_rate": 1.6260162601626018e-05, |
| "loss": 0.6606, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.20658227848101265, |
| "grad_norm": 0.6275672779094137, |
| "learning_rate": 1.6585365853658537e-05, |
| "loss": 0.6655, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.21063291139240506, |
| "grad_norm": 0.4916121532186434, |
| "learning_rate": 1.691056910569106e-05, |
| "loss": 0.6762, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.21468354430379746, |
| "grad_norm": 0.6275643590273882, |
| "learning_rate": 1.7235772357723578e-05, |
| "loss": 0.6519, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.21873417721518987, |
| "grad_norm": 0.5421684830145347, |
| "learning_rate": 1.75609756097561e-05, |
| "loss": 0.6575, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.22278481012658227, |
| "grad_norm": 0.5857658555354628, |
| "learning_rate": 1.788617886178862e-05, |
| "loss": 0.6721, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.22683544303797468, |
| "grad_norm": 0.7369822115864848, |
| "learning_rate": 1.821138211382114e-05, |
| "loss": 0.6632, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.23088607594936708, |
| "grad_norm": 0.5140594945833844, |
| "learning_rate": 1.8536585365853663e-05, |
| "loss": 0.671, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.23493670886075949, |
| "grad_norm": 0.7264624171820753, |
| "learning_rate": 1.886178861788618e-05, |
| "loss": 0.6639, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.2389873417721519, |
| "grad_norm": 0.6988087822604373, |
| "learning_rate": 1.91869918699187e-05, |
| "loss": 0.6471, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.2430379746835443, |
| "grad_norm": 0.644423274463255, |
| "learning_rate": 1.9512195121951222e-05, |
| "loss": 0.6619, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.2470886075949367, |
| "grad_norm": 0.6555403913994627, |
| "learning_rate": 1.983739837398374e-05, |
| "loss": 0.6439, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.25113924050632913, |
| "grad_norm": 0.6769496454357151, |
| "learning_rate": 2.016260162601626e-05, |
| "loss": 0.6382, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.25518987341772154, |
| "grad_norm": 0.5598560459012197, |
| "learning_rate": 2.048780487804878e-05, |
| "loss": 0.6372, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.25924050632911394, |
| "grad_norm": 0.6661264335933762, |
| "learning_rate": 2.0813008130081303e-05, |
| "loss": 0.6549, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.26329113924050634, |
| "grad_norm": 0.6124846877847517, |
| "learning_rate": 2.1138211382113822e-05, |
| "loss": 0.6367, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.26734177215189875, |
| "grad_norm": 0.6064264983692613, |
| "learning_rate": 2.1463414634146344e-05, |
| "loss": 0.6469, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.27139240506329115, |
| "grad_norm": 0.6557728470259327, |
| "learning_rate": 2.1788617886178863e-05, |
| "loss": 0.6454, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.27544303797468356, |
| "grad_norm": 0.9300706772077086, |
| "learning_rate": 2.2113821138211385e-05, |
| "loss": 0.6363, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.27949367088607596, |
| "grad_norm": 1.0492281307221136, |
| "learning_rate": 2.2439024390243907e-05, |
| "loss": 0.6568, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.28354430379746837, |
| "grad_norm": 0.8958220686180282, |
| "learning_rate": 2.2764227642276426e-05, |
| "loss": 0.6343, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.28759493670886077, |
| "grad_norm": 0.7792352981445946, |
| "learning_rate": 2.3089430894308948e-05, |
| "loss": 0.6574, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.2916455696202532, |
| "grad_norm": 0.9538112181944111, |
| "learning_rate": 2.3414634146341463e-05, |
| "loss": 0.6484, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.2956962025316456, |
| "grad_norm": 1.1048206263158775, |
| "learning_rate": 2.3739837398373985e-05, |
| "loss": 0.622, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.299746835443038, |
| "grad_norm": 0.7139235587685786, |
| "learning_rate": 2.4065040650406507e-05, |
| "loss": 0.6418, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.3037974683544304, |
| "grad_norm": 1.0085966346214033, |
| "learning_rate": 2.4390243902439026e-05, |
| "loss": 0.646, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.3078481012658228, |
| "grad_norm": 1.1224307094587818, |
| "learning_rate": 2.4715447154471548e-05, |
| "loss": 0.6366, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.3118987341772152, |
| "grad_norm": 0.8071547676116162, |
| "learning_rate": 2.5040650406504066e-05, |
| "loss": 0.6411, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.3159493670886076, |
| "grad_norm": 0.7220666918289218, |
| "learning_rate": 2.536585365853659e-05, |
| "loss": 0.6263, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 1.15206519778764, |
| "learning_rate": 2.569105691056911e-05, |
| "loss": 0.6371, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.3240506329113924, |
| "grad_norm": 0.8427451751239377, |
| "learning_rate": 2.601626016260163e-05, |
| "loss": 0.6429, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.3281012658227848, |
| "grad_norm": 1.335159073116733, |
| "learning_rate": 2.634146341463415e-05, |
| "loss": 0.6439, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.3321518987341772, |
| "grad_norm": 1.2279985352749798, |
| "learning_rate": 2.6666666666666667e-05, |
| "loss": 0.6292, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.3362025316455696, |
| "grad_norm": 0.9968651862184996, |
| "learning_rate": 2.699186991869919e-05, |
| "loss": 0.6248, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.34025316455696203, |
| "grad_norm": 0.92432074161681, |
| "learning_rate": 2.731707317073171e-05, |
| "loss": 0.6259, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.34430379746835443, |
| "grad_norm": 0.9152694053383925, |
| "learning_rate": 2.764227642276423e-05, |
| "loss": 0.6299, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.34835443037974684, |
| "grad_norm": 1.0342263674035677, |
| "learning_rate": 2.796747967479675e-05, |
| "loss": 0.6455, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.35240506329113924, |
| "grad_norm": 1.3566149171552424, |
| "learning_rate": 2.829268292682927e-05, |
| "loss": 0.6525, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.35645569620253165, |
| "grad_norm": 1.05112394930475, |
| "learning_rate": 2.8617886178861792e-05, |
| "loss": 0.6277, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.36050632911392405, |
| "grad_norm": 0.8820265237873344, |
| "learning_rate": 2.8943089430894314e-05, |
| "loss": 0.6098, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.36455696202531646, |
| "grad_norm": 1.1907471833005645, |
| "learning_rate": 2.926829268292683e-05, |
| "loss": 0.6361, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.36860759493670886, |
| "grad_norm": 0.9028375530137814, |
| "learning_rate": 2.959349593495935e-05, |
| "loss": 0.6368, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.37265822784810126, |
| "grad_norm": 0.9914876129804059, |
| "learning_rate": 2.991869918699187e-05, |
| "loss": 0.6362, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.37670886075949367, |
| "grad_norm": 1.1285922478398236, |
| "learning_rate": 3.0243902439024392e-05, |
| "loss": 0.6225, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.3807594936708861, |
| "grad_norm": 1.1241415918310629, |
| "learning_rate": 3.056910569105692e-05, |
| "loss": 0.6357, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.3848101265822785, |
| "grad_norm": 1.0604781664159002, |
| "learning_rate": 3.089430894308943e-05, |
| "loss": 0.5917, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.3888607594936709, |
| "grad_norm": 0.8013453010277558, |
| "learning_rate": 3.1219512195121955e-05, |
| "loss": 0.609, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.3929113924050633, |
| "grad_norm": 0.9956706764644188, |
| "learning_rate": 3.154471544715447e-05, |
| "loss": 0.623, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.3969620253164557, |
| "grad_norm": 1.1011405835438484, |
| "learning_rate": 3.186991869918699e-05, |
| "loss": 0.6196, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.4010126582278481, |
| "grad_norm": 0.9416786691666534, |
| "learning_rate": 3.2195121951219514e-05, |
| "loss": 0.6376, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.4050632911392405, |
| "grad_norm": 1.1733826889626178, |
| "learning_rate": 3.2520325203252037e-05, |
| "loss": 0.6325, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.4091139240506329, |
| "grad_norm": 1.0068366992005267, |
| "learning_rate": 3.284552845528456e-05, |
| "loss": 0.6215, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.4131645569620253, |
| "grad_norm": 1.2510286035647835, |
| "learning_rate": 3.3170731707317074e-05, |
| "loss": 0.6325, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.4172151898734177, |
| "grad_norm": 1.0462826859430412, |
| "learning_rate": 3.3495934959349596e-05, |
| "loss": 0.6096, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.4212658227848101, |
| "grad_norm": 0.7970921663419797, |
| "learning_rate": 3.382113821138212e-05, |
| "loss": 0.5945, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.4253164556962025, |
| "grad_norm": 0.8882229949241042, |
| "learning_rate": 3.414634146341463e-05, |
| "loss": 0.6142, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.4293670886075949, |
| "grad_norm": 1.3830423563579617, |
| "learning_rate": 3.4471544715447155e-05, |
| "loss": 0.6307, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.43341772151898733, |
| "grad_norm": 0.9182060399433707, |
| "learning_rate": 3.479674796747968e-05, |
| "loss": 0.6041, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.43746835443037974, |
| "grad_norm": 1.0532235785533424, |
| "learning_rate": 3.51219512195122e-05, |
| "loss": 0.628, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.44151898734177214, |
| "grad_norm": 0.7858735846858491, |
| "learning_rate": 3.544715447154472e-05, |
| "loss": 0.6222, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.44556962025316454, |
| "grad_norm": 1.2044145024734438, |
| "learning_rate": 3.577235772357724e-05, |
| "loss": 0.6117, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.44962025316455695, |
| "grad_norm": 1.082843807507481, |
| "learning_rate": 3.609756097560976e-05, |
| "loss": 0.6194, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.45367088607594935, |
| "grad_norm": 0.867543457888737, |
| "learning_rate": 3.642276422764228e-05, |
| "loss": 0.6136, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.45772151898734176, |
| "grad_norm": 1.0067868146869017, |
| "learning_rate": 3.67479674796748e-05, |
| "loss": 0.6245, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.46177215189873416, |
| "grad_norm": 1.0529926694001814, |
| "learning_rate": 3.7073170731707325e-05, |
| "loss": 0.6278, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.46582278481012657, |
| "grad_norm": 0.6379571113800462, |
| "learning_rate": 3.739837398373984e-05, |
| "loss": 0.6375, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.46987341772151897, |
| "grad_norm": 1.2911538950758965, |
| "learning_rate": 3.772357723577236e-05, |
| "loss": 0.6192, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.4739240506329114, |
| "grad_norm": 0.7287062188412803, |
| "learning_rate": 3.804878048780488e-05, |
| "loss": 0.6177, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.4779746835443038, |
| "grad_norm": 0.9301539008329622, |
| "learning_rate": 3.83739837398374e-05, |
| "loss": 0.5998, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.4820253164556962, |
| "grad_norm": 0.9273456019166456, |
| "learning_rate": 3.869918699186992e-05, |
| "loss": 0.6021, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.4860759493670886, |
| "grad_norm": 0.7608867724201146, |
| "learning_rate": 3.9024390243902444e-05, |
| "loss": 0.6198, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.490126582278481, |
| "grad_norm": 0.7468635562971582, |
| "learning_rate": 3.9349593495934966e-05, |
| "loss": 0.6182, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.4941772151898734, |
| "grad_norm": 0.9545424225876119, |
| "learning_rate": 3.967479674796748e-05, |
| "loss": 0.6278, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.4982278481012658, |
| "grad_norm": 1.153880910810473, |
| "learning_rate": 4e-05, |
| "loss": 0.6064, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.5022784810126583, |
| "grad_norm": 1.2857290537684014, |
| "learning_rate": 3.999991946137476e-05, |
| "loss": 0.6026, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.5063291139240507, |
| "grad_norm": 0.7924274410600898, |
| "learning_rate": 3.999967784614766e-05, |
| "loss": 0.6053, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.5103797468354431, |
| "grad_norm": 0.8018380535692923, |
| "learning_rate": 3.9999275156264656e-05, |
| "loss": 0.6078, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.5144303797468355, |
| "grad_norm": 1.1089443716203504, |
| "learning_rate": 3.999871139496895e-05, |
| "loss": 0.6021, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.5184810126582279, |
| "grad_norm": 0.9117369316029953, |
| "learning_rate": 3.9997986566800995e-05, |
| "loss": 0.6021, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.5225316455696203, |
| "grad_norm": 0.8205036742404014, |
| "learning_rate": 3.999710067759846e-05, |
| "loss": 0.6123, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.5265822784810127, |
| "grad_norm": 0.8818353860880652, |
| "learning_rate": 3.999605373449617e-05, |
| "loss": 0.6192, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.5306329113924051, |
| "grad_norm": 1.0571149587637743, |
| "learning_rate": 3.9994845745926075e-05, |
| "loss": 0.6094, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.5346835443037975, |
| "grad_norm": 0.7847326783145264, |
| "learning_rate": 3.999347672161713e-05, |
| "loss": 0.6147, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.5387341772151899, |
| "grad_norm": 0.9412779017795371, |
| "learning_rate": 3.999194667259528e-05, |
| "loss": 0.6149, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.5427848101265823, |
| "grad_norm": 1.3413235321153902, |
| "learning_rate": 3.999025561118334e-05, |
| "loss": 0.5987, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.5468354430379747, |
| "grad_norm": 0.7961374938819796, |
| "learning_rate": 3.998840355100086e-05, |
| "loss": 0.601, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.5508860759493671, |
| "grad_norm": 0.6919020580536248, |
| "learning_rate": 3.998639050696409e-05, |
| "loss": 0.6023, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.5549367088607595, |
| "grad_norm": 0.8504593985554899, |
| "learning_rate": 3.998421649528582e-05, |
| "loss": 0.6242, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.5589873417721519, |
| "grad_norm": 0.6370465183029322, |
| "learning_rate": 3.9981881533475234e-05, |
| "loss": 0.5921, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.5630379746835443, |
| "grad_norm": 0.7687406402938488, |
| "learning_rate": 3.997938564033779e-05, |
| "loss": 0.5945, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.5670886075949367, |
| "grad_norm": 0.7556626859984393, |
| "learning_rate": 3.9976728835975064e-05, |
| "loss": 0.6214, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.5711392405063291, |
| "grad_norm": 1.0069395822768785, |
| "learning_rate": 3.9973911141784605e-05, |
| "loss": 0.6136, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.5751898734177215, |
| "grad_norm": 0.8314580764461865, |
| "learning_rate": 3.997093258045973e-05, |
| "loss": 0.6064, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.579240506329114, |
| "grad_norm": 0.6546494718297714, |
| "learning_rate": 3.996779317598936e-05, |
| "loss": 0.5902, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.5832911392405064, |
| "grad_norm": 0.7609689362630502, |
| "learning_rate": 3.996449295365782e-05, |
| "loss": 0.5973, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.5873417721518988, |
| "grad_norm": 0.6673299846824245, |
| "learning_rate": 3.996103194004467e-05, |
| "loss": 0.5926, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.5913924050632912, |
| "grad_norm": 0.7012689867689355, |
| "learning_rate": 3.995741016302441e-05, |
| "loss": 0.6068, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.5954430379746836, |
| "grad_norm": 0.8964608572934819, |
| "learning_rate": 3.9953627651766364e-05, |
| "loss": 0.5958, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.599493670886076, |
| "grad_norm": 0.8816542580908874, |
| "learning_rate": 3.9949684436734325e-05, |
| "loss": 0.617, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.6035443037974684, |
| "grad_norm": 0.8108374233765954, |
| "learning_rate": 3.994558054968643e-05, |
| "loss": 0.5905, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.6075949367088608, |
| "grad_norm": 0.6963818141570929, |
| "learning_rate": 3.994131602367481e-05, |
| "loss": 0.6128, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.6116455696202532, |
| "grad_norm": 0.5805120153563735, |
| "learning_rate": 3.9936890893045376e-05, |
| "loss": 0.6077, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.6156962025316456, |
| "grad_norm": 0.6649228562254051, |
| "learning_rate": 3.993230519343752e-05, |
| "loss": 0.5857, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.619746835443038, |
| "grad_norm": 0.751129399106458, |
| "learning_rate": 3.992755896178383e-05, |
| "loss": 0.5911, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.6237974683544304, |
| "grad_norm": 0.7051528562000836, |
| "learning_rate": 3.992265223630981e-05, |
| "loss": 0.6021, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.6278481012658228, |
| "grad_norm": 0.9151121202737386, |
| "learning_rate": 3.991758505653355e-05, |
| "loss": 0.6207, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.6318987341772152, |
| "grad_norm": 0.9610705277681216, |
| "learning_rate": 3.991235746326543e-05, |
| "loss": 0.5947, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.6359493670886076, |
| "grad_norm": 1.1322819196863696, |
| "learning_rate": 3.9906969498607745e-05, |
| "loss": 0.6332, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 0.7814944509528635, |
| "learning_rate": 3.990142120595444e-05, |
| "loss": 0.591, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.6440506329113924, |
| "grad_norm": 0.8300452154950092, |
| "learning_rate": 3.98957126299907e-05, |
| "loss": 0.5968, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.6481012658227848, |
| "grad_norm": 0.5536084320506756, |
| "learning_rate": 3.9889843816692596e-05, |
| "loss": 0.5948, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.6521518987341772, |
| "grad_norm": 0.9732524004411103, |
| "learning_rate": 3.9883814813326766e-05, |
| "loss": 0.597, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.6562025316455696, |
| "grad_norm": 0.946981560894278, |
| "learning_rate": 3.9877625668449956e-05, |
| "loss": 0.6009, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.660253164556962, |
| "grad_norm": 0.8271362577005459, |
| "learning_rate": 3.98712764319087e-05, |
| "loss": 0.6161, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.6643037974683544, |
| "grad_norm": 0.8333846209604925, |
| "learning_rate": 3.9864767154838864e-05, |
| "loss": 0.6083, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.6683544303797468, |
| "grad_norm": 0.5578710088428925, |
| "learning_rate": 3.9858097889665277e-05, |
| "loss": 0.6002, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.6724050632911392, |
| "grad_norm": 0.7085237498750874, |
| "learning_rate": 3.985126869010129e-05, |
| "loss": 0.6127, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.6764556962025317, |
| "grad_norm": 0.6282757529817404, |
| "learning_rate": 3.984427961114833e-05, |
| "loss": 0.6075, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.6805063291139241, |
| "grad_norm": 0.5771798826859482, |
| "learning_rate": 3.9837130709095475e-05, |
| "loss": 0.6098, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.6845569620253165, |
| "grad_norm": 0.5425180995879001, |
| "learning_rate": 3.982982204151901e-05, |
| "loss": 0.5858, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.6886075949367089, |
| "grad_norm": 0.5994316713186891, |
| "learning_rate": 3.982235366728193e-05, |
| "loss": 0.5801, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.6926582278481013, |
| "grad_norm": 0.5750402747636504, |
| "learning_rate": 3.9814725646533505e-05, |
| "loss": 0.5982, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.6967088607594937, |
| "grad_norm": 0.49554660148252216, |
| "learning_rate": 3.9806938040708746e-05, |
| "loss": 0.5933, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.7007594936708861, |
| "grad_norm": 0.5749656462812941, |
| "learning_rate": 3.9798990912527976e-05, |
| "loss": 0.5874, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.7048101265822785, |
| "grad_norm": 0.5415539020777699, |
| "learning_rate": 3.979088432599627e-05, |
| "loss": 0.6039, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.7088607594936709, |
| "grad_norm": 0.5581190799490662, |
| "learning_rate": 3.9782618346402964e-05, |
| "loss": 0.5958, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.7129113924050633, |
| "grad_norm": 0.710364229552805, |
| "learning_rate": 3.977419304032111e-05, |
| "loss": 0.5843, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.7169620253164557, |
| "grad_norm": 0.6192098486359041, |
| "learning_rate": 3.976560847560697e-05, |
| "loss": 0.593, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.7210126582278481, |
| "grad_norm": 0.5959496988053944, |
| "learning_rate": 3.9756864721399456e-05, |
| "loss": 0.6108, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.7250632911392405, |
| "grad_norm": 0.6381047205996736, |
| "learning_rate": 3.974796184811956e-05, |
| "loss": 0.5821, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.7291139240506329, |
| "grad_norm": 0.7382255538103657, |
| "learning_rate": 3.973889992746979e-05, |
| "loss": 0.589, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.7331645569620253, |
| "grad_norm": 0.550490950164061, |
| "learning_rate": 3.972967903243361e-05, |
| "loss": 0.5978, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.7372151898734177, |
| "grad_norm": 0.9095825511380173, |
| "learning_rate": 3.972029923727486e-05, |
| "loss": 0.6002, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.7412658227848101, |
| "grad_norm": 0.8370833711912574, |
| "learning_rate": 3.971076061753709e-05, |
| "loss": 0.5809, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.7453164556962025, |
| "grad_norm": 0.5827636420158379, |
| "learning_rate": 3.9701063250043066e-05, |
| "loss": 0.5741, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.7493670886075949, |
| "grad_norm": 0.7519483882841465, |
| "learning_rate": 3.969120721289402e-05, |
| "loss": 0.5747, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.7534177215189873, |
| "grad_norm": 0.7114321678994879, |
| "learning_rate": 3.9681192585469146e-05, |
| "loss": 0.6016, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.7574683544303797, |
| "grad_norm": 0.793932336630299, |
| "learning_rate": 3.9671019448424865e-05, |
| "loss": 0.5833, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.7615189873417721, |
| "grad_norm": 0.5534385462907034, |
| "learning_rate": 3.966068788369422e-05, |
| "loss": 0.6006, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.7655696202531646, |
| "grad_norm": 0.6954464881286568, |
| "learning_rate": 3.965019797448622e-05, |
| "loss": 0.5859, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.769620253164557, |
| "grad_norm": 0.6030744816056058, |
| "learning_rate": 3.963954980528515e-05, |
| "loss": 0.5821, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.7736708860759494, |
| "grad_norm": 0.5318938304075698, |
| "learning_rate": 3.9628743461849905e-05, |
| "loss": 0.5828, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.7777215189873418, |
| "grad_norm": 0.5673938641303236, |
| "learning_rate": 3.961777903121329e-05, |
| "loss": 0.5948, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.7817721518987342, |
| "grad_norm": 0.49209406982845333, |
| "learning_rate": 3.960665660168131e-05, |
| "loss": 0.5966, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.7858227848101266, |
| "grad_norm": 0.6495088749880803, |
| "learning_rate": 3.9595376262832485e-05, |
| "loss": 0.5912, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.789873417721519, |
| "grad_norm": 0.635831620116715, |
| "learning_rate": 3.9583938105517127e-05, |
| "loss": 0.5842, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.7939240506329114, |
| "grad_norm": 0.5956256540885025, |
| "learning_rate": 3.957234222185657e-05, |
| "loss": 0.5852, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.7979746835443038, |
| "grad_norm": 0.6585537300599066, |
| "learning_rate": 3.9560588705242474e-05, |
| "loss": 0.6038, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.8020253164556962, |
| "grad_norm": 0.7003649074922432, |
| "learning_rate": 3.954867765033605e-05, |
| "loss": 0.5907, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.8060759493670886, |
| "grad_norm": 0.8167590338559435, |
| "learning_rate": 3.953660915306728e-05, |
| "loss": 0.571, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.810126582278481, |
| "grad_norm": 0.7744044369657477, |
| "learning_rate": 3.952438331063419e-05, |
| "loss": 0.5977, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.8141772151898734, |
| "grad_norm": 0.7497573280473149, |
| "learning_rate": 3.951200022150205e-05, |
| "loss": 0.591, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.8182278481012658, |
| "grad_norm": 0.5760277833693489, |
| "learning_rate": 3.949945998540253e-05, |
| "loss": 0.5829, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.8222784810126582, |
| "grad_norm": 0.5832154288018033, |
| "learning_rate": 3.9486762703332993e-05, |
| "loss": 0.6067, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.8263291139240506, |
| "grad_norm": 0.6872999268364194, |
| "learning_rate": 3.947390847755559e-05, |
| "loss": 0.586, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.830379746835443, |
| "grad_norm": 0.6847875306950293, |
| "learning_rate": 3.946089741159648e-05, |
| "loss": 0.5943, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.8344303797468354, |
| "grad_norm": 0.6915669130089204, |
| "learning_rate": 3.944772961024501e-05, |
| "loss": 0.564, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.8384810126582278, |
| "grad_norm": 0.6193622176081065, |
| "learning_rate": 3.943440517955285e-05, |
| "loss": 0.5737, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.8425316455696202, |
| "grad_norm": 0.6919387489287836, |
| "learning_rate": 3.9420924226833126e-05, |
| "loss": 0.5943, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.8465822784810126, |
| "grad_norm": 0.6292996694330613, |
| "learning_rate": 3.9407286860659566e-05, |
| "loss": 0.5836, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.850632911392405, |
| "grad_norm": 0.5952800711346196, |
| "learning_rate": 3.9393493190865657e-05, |
| "loss": 0.5888, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.8546835443037974, |
| "grad_norm": 0.5819548515143846, |
| "learning_rate": 3.937954332854371e-05, |
| "loss": 0.5924, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.8587341772151899, |
| "grad_norm": 0.5254322045657019, |
| "learning_rate": 3.9365437386044016e-05, |
| "loss": 0.5971, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.8627848101265823, |
| "grad_norm": 0.58187990818179, |
| "learning_rate": 3.935117547697387e-05, |
| "loss": 0.572, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.8668354430379747, |
| "grad_norm": 0.48465047345149165, |
| "learning_rate": 3.933675771619675e-05, |
| "loss": 0.5873, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.8708860759493671, |
| "grad_norm": 0.5879327869655029, |
| "learning_rate": 3.932218421983131e-05, |
| "loss": 0.5815, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.8749367088607595, |
| "grad_norm": 0.6759655220932984, |
| "learning_rate": 3.9307455105250484e-05, |
| "loss": 0.5774, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.8789873417721519, |
| "grad_norm": 0.403152683672324, |
| "learning_rate": 3.929257049108054e-05, |
| "loss": 0.5732, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.8830379746835443, |
| "grad_norm": 0.5759068851838417, |
| "learning_rate": 3.927753049720011e-05, |
| "loss": 0.603, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.8870886075949367, |
| "grad_norm": 0.6315358530698811, |
| "learning_rate": 3.9262335244739234e-05, |
| "loss": 0.5909, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.8911392405063291, |
| "grad_norm": 0.5202436233496539, |
| "learning_rate": 3.92469848560784e-05, |
| "loss": 0.5882, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.8951898734177215, |
| "grad_norm": 0.545218400701768, |
| "learning_rate": 3.923147945484751e-05, |
| "loss": 0.5839, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.8992405063291139, |
| "grad_norm": 0.4465423541262704, |
| "learning_rate": 3.9215819165924956e-05, |
| "loss": 0.5768, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.9032911392405063, |
| "grad_norm": 0.591444164076622, |
| "learning_rate": 3.920000411543654e-05, |
| "loss": 0.5779, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.9073417721518987, |
| "grad_norm": 0.5067647322970245, |
| "learning_rate": 3.9184034430754495e-05, |
| "loss": 0.5761, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.9113924050632911, |
| "grad_norm": 0.667973085642129, |
| "learning_rate": 3.916791024049648e-05, |
| "loss": 0.5795, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.9154430379746835, |
| "grad_norm": 0.6587836362991222, |
| "learning_rate": 3.91516316745245e-05, |
| "loss": 0.5965, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.9194936708860759, |
| "grad_norm": 0.5978900765105984, |
| "learning_rate": 3.913519886394389e-05, |
| "loss": 0.5828, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.9235443037974683, |
| "grad_norm": 0.5269556146741639, |
| "learning_rate": 3.911861194110225e-05, |
| "loss": 0.5773, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.9275949367088607, |
| "grad_norm": 0.5805420328487702, |
| "learning_rate": 3.910187103958837e-05, |
| "loss": 0.5726, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.9316455696202531, |
| "grad_norm": 0.4599491842374781, |
| "learning_rate": 3.908497629423117e-05, |
| "loss": 0.5839, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.9356962025316455, |
| "grad_norm": 0.5232169116831269, |
| "learning_rate": 3.9067927841098614e-05, |
| "loss": 0.5831, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.9397468354430379, |
| "grad_norm": 0.5356721030610526, |
| "learning_rate": 3.9050725817496594e-05, |
| "loss": 0.5744, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.9437974683544303, |
| "grad_norm": 0.4996786359983786, |
| "learning_rate": 3.9033370361967844e-05, |
| "loss": 0.5901, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.9478481012658228, |
| "grad_norm": 0.5779090332509196, |
| "learning_rate": 3.901586161429081e-05, |
| "loss": 0.5848, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.9518987341772152, |
| "grad_norm": 0.5328027398285838, |
| "learning_rate": 3.8998199715478545e-05, |
| "loss": 0.5773, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.9559493670886076, |
| "grad_norm": 0.46897215926836844, |
| "learning_rate": 3.8980384807777564e-05, |
| "loss": 0.5752, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.4524798826522803, |
| "learning_rate": 3.896241703466667e-05, |
| "loss": 0.6031, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.9640506329113924, |
| "grad_norm": 0.5935230314332655, |
| "learning_rate": 3.894429654085585e-05, |
| "loss": 0.574, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.9681012658227848, |
| "grad_norm": 0.565714551068876, |
| "learning_rate": 3.892602347228505e-05, |
| "loss": 0.5722, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.9721518987341772, |
| "grad_norm": 0.5377980260312725, |
| "learning_rate": 3.890759797612307e-05, |
| "loss": 0.5789, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.9762025316455696, |
| "grad_norm": 0.4477418159548943, |
| "learning_rate": 3.888902020076632e-05, |
| "loss": 0.584, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.980253164556962, |
| "grad_norm": 0.5180767886338304, |
| "learning_rate": 3.887029029583764e-05, |
| "loss": 0.584, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.9843037974683544, |
| "grad_norm": 0.4997935668569391, |
| "learning_rate": 3.8851408412185125e-05, |
| "loss": 0.5583, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.9883544303797468, |
| "grad_norm": 0.49460314026892577, |
| "learning_rate": 3.8832374701880855e-05, |
| "loss": 0.5781, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.9924050632911392, |
| "grad_norm": 0.5267511130886267, |
| "learning_rate": 3.881318931821972e-05, |
| "loss": 0.5862, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.9964556962025316, |
| "grad_norm": 0.4797726876110699, |
| "learning_rate": 3.879385241571817e-05, |
| "loss": 0.5678, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.000506329113924, |
| "grad_norm": 0.44015171169980577, |
| "learning_rate": 3.8774364150112955e-05, |
| "loss": 0.5746, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.0045569620253165, |
| "grad_norm": 0.5763751545191491, |
| "learning_rate": 3.8754724678359884e-05, |
| "loss": 0.5419, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.0086075949367088, |
| "grad_norm": 0.4366512569043157, |
| "learning_rate": 3.873493415863256e-05, |
| "loss": 0.5237, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.0126582278481013, |
| "grad_norm": 0.516553485437505, |
| "learning_rate": 3.871499275032111e-05, |
| "loss": 0.5368, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.0167088607594936, |
| "grad_norm": 0.45717417896506085, |
| "learning_rate": 3.869490061403091e-05, |
| "loss": 0.5042, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.0207594936708861, |
| "grad_norm": 0.45550272064477465, |
| "learning_rate": 3.867465791158124e-05, |
| "loss": 0.5236, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.0248101265822784, |
| "grad_norm": 0.4699199184668589, |
| "learning_rate": 3.865426480600407e-05, |
| "loss": 0.5087, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.028860759493671, |
| "grad_norm": 0.5448904816072212, |
| "learning_rate": 3.863372146154264e-05, |
| "loss": 0.5238, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.0329113924050632, |
| "grad_norm": 0.614102408256393, |
| "learning_rate": 3.861302804365024e-05, |
| "loss": 0.5285, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.0369620253164558, |
| "grad_norm": 0.5932475078005108, |
| "learning_rate": 3.85921847189888e-05, |
| "loss": 0.5387, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.041012658227848, |
| "grad_norm": 0.5439538574429171, |
| "learning_rate": 3.85711916554276e-05, |
| "loss": 0.5317, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.0450632911392406, |
| "grad_norm": 0.7427858214262804, |
| "learning_rate": 3.85500490220419e-05, |
| "loss": 0.5267, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.0491139240506329, |
| "grad_norm": 0.6686302392902618, |
| "learning_rate": 3.852875698911154e-05, |
| "loss": 0.5306, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.0531645569620254, |
| "grad_norm": 0.5702186896972319, |
| "learning_rate": 3.850731572811963e-05, |
| "loss": 0.5185, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.0572151898734177, |
| "grad_norm": 0.5612929209378398, |
| "learning_rate": 3.848572541175116e-05, |
| "loss": 0.5333, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.0612658227848102, |
| "grad_norm": 0.5569490779397605, |
| "learning_rate": 3.846398621389154e-05, |
| "loss": 0.5122, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.0653164556962025, |
| "grad_norm": 0.6668989323150166, |
| "learning_rate": 3.84420983096253e-05, |
| "loss": 0.5313, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.069367088607595, |
| "grad_norm": 0.5270660428525326, |
| "learning_rate": 3.8420061875234606e-05, |
| "loss": 0.5359, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.0734177215189873, |
| "grad_norm": 0.5654248561294556, |
| "learning_rate": 3.839787708819787e-05, |
| "loss": 0.521, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.0774683544303798, |
| "grad_norm": 0.536846877525359, |
| "learning_rate": 3.8375544127188325e-05, |
| "loss": 0.5345, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.081518987341772, |
| "grad_norm": 0.5550268957907518, |
| "learning_rate": 3.8353063172072564e-05, |
| "loss": 0.5382, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.0855696202531646, |
| "grad_norm": 0.4858203120685367, |
| "learning_rate": 3.8330434403909105e-05, |
| "loss": 0.5268, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.089620253164557, |
| "grad_norm": 0.757267701499995, |
| "learning_rate": 3.8307658004946934e-05, |
| "loss": 0.5337, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.0936708860759494, |
| "grad_norm": 0.4874291683820026, |
| "learning_rate": 3.8284734158624046e-05, |
| "loss": 0.5176, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.0977215189873417, |
| "grad_norm": 0.5737464255954232, |
| "learning_rate": 3.826166304956594e-05, |
| "loss": 0.5148, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.1017721518987342, |
| "grad_norm": 0.5303228841884151, |
| "learning_rate": 3.8238444863584164e-05, |
| "loss": 0.5369, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.1058227848101265, |
| "grad_norm": 0.5339534931941811, |
| "learning_rate": 3.821507978767479e-05, |
| "loss": 0.5037, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.109873417721519, |
| "grad_norm": 0.5381353890505471, |
| "learning_rate": 3.819156801001693e-05, |
| "loss": 0.514, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.1139240506329113, |
| "grad_norm": 0.6036920009865743, |
| "learning_rate": 3.816790971997121e-05, |
| "loss": 0.5447, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.1179746835443038, |
| "grad_norm": 0.5421609028653582, |
| "learning_rate": 3.8144105108078246e-05, |
| "loss": 0.5119, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.1220253164556961, |
| "grad_norm": 0.6903401930341807, |
| "learning_rate": 3.81201543660571e-05, |
| "loss": 0.5349, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.1260759493670887, |
| "grad_norm": 0.5007330844179998, |
| "learning_rate": 3.809605768680377e-05, |
| "loss": 0.5286, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.130126582278481, |
| "grad_norm": 0.5394097749473281, |
| "learning_rate": 3.807181526438958e-05, |
| "loss": 0.5401, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.1341772151898735, |
| "grad_norm": 0.5199334980571413, |
| "learning_rate": 3.8047427294059697e-05, |
| "loss": 0.5213, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.1382278481012658, |
| "grad_norm": 0.5077047174318814, |
| "learning_rate": 3.802289397223145e-05, |
| "loss": 0.5115, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.1422784810126583, |
| "grad_norm": 0.6765742531737086, |
| "learning_rate": 3.7998215496492854e-05, |
| "loss": 0.5458, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.1463291139240506, |
| "grad_norm": 0.457252425912824, |
| "learning_rate": 3.797339206560096e-05, |
| "loss": 0.5313, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.150379746835443, |
| "grad_norm": 0.5948876928653285, |
| "learning_rate": 3.794842387948027e-05, |
| "loss": 0.5278, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.1544303797468354, |
| "grad_norm": 0.5652372094368342, |
| "learning_rate": 3.7923311139221114e-05, |
| "loss": 0.5362, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.158481012658228, |
| "grad_norm": 0.4080822817606289, |
| "learning_rate": 3.7898054047078054e-05, |
| "loss": 0.5033, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.1625316455696202, |
| "grad_norm": 0.48232307480618153, |
| "learning_rate": 3.787265280646825e-05, |
| "loss": 0.5282, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.1665822784810127, |
| "grad_norm": 0.43750220041145793, |
| "learning_rate": 3.7847107621969786e-05, |
| "loss": 0.534, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.170632911392405, |
| "grad_norm": 0.4796541665144287, |
| "learning_rate": 3.7821418699320064e-05, |
| "loss": 0.5261, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.1746835443037975, |
| "grad_norm": 0.4138383753321028, |
| "learning_rate": 3.7795586245414145e-05, |
| "loss": 0.5306, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.1787341772151898, |
| "grad_norm": 0.49333157852010345, |
| "learning_rate": 3.776961046830306e-05, |
| "loss": 0.5192, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.1827848101265823, |
| "grad_norm": 0.4923627584947353, |
| "learning_rate": 3.774349157719215e-05, |
| "loss": 0.5266, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.1868354430379746, |
| "grad_norm": 0.4307585918518453, |
| "learning_rate": 3.7717229782439365e-05, |
| "loss": 0.5125, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.1908860759493671, |
| "grad_norm": 0.41904269066020877, |
| "learning_rate": 3.769082529555359e-05, |
| "loss": 0.5205, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.1949367088607594, |
| "grad_norm": 0.5762755393170474, |
| "learning_rate": 3.766427832919294e-05, |
| "loss": 0.5243, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.198987341772152, |
| "grad_norm": 0.5859285388121581, |
| "learning_rate": 3.7637589097163024e-05, |
| "loss": 0.5265, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.2030379746835442, |
| "grad_norm": 0.4207332122002054, |
| "learning_rate": 3.761075781441526e-05, |
| "loss": 0.5303, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.2070886075949367, |
| "grad_norm": 0.5482074449767633, |
| "learning_rate": 3.75837846970451e-05, |
| "loss": 0.5165, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.211139240506329, |
| "grad_norm": 0.5758140519358926, |
| "learning_rate": 3.755666996229032e-05, |
| "loss": 0.5224, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.2151898734177216, |
| "grad_norm": 0.500636439391135, |
| "learning_rate": 3.752941382852927e-05, |
| "loss": 0.5476, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.2192405063291138, |
| "grad_norm": 0.5216445568090008, |
| "learning_rate": 3.7502016515279115e-05, |
| "loss": 0.5351, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.2232911392405064, |
| "grad_norm": 0.5387472268530127, |
| "learning_rate": 3.7474478243194043e-05, |
| "loss": 0.517, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.2273417721518987, |
| "grad_norm": 0.4722742119247957, |
| "learning_rate": 3.744679923406351e-05, |
| "loss": 0.5361, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.2313924050632912, |
| "grad_norm": 0.6096892022198732, |
| "learning_rate": 3.741897971081043e-05, |
| "loss": 0.5169, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.2354430379746835, |
| "grad_norm": 0.4418087099172121, |
| "learning_rate": 3.739101989748946e-05, |
| "loss": 0.5247, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.239493670886076, |
| "grad_norm": 0.4625823326919767, |
| "learning_rate": 3.7362920019285066e-05, |
| "loss": 0.5342, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.2435443037974683, |
| "grad_norm": 0.486617533356407, |
| "learning_rate": 3.73346803025098e-05, |
| "loss": 0.5243, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.2475949367088608, |
| "grad_norm": 0.5040719087583494, |
| "learning_rate": 3.730630097460247e-05, |
| "loss": 0.5305, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.251645569620253, |
| "grad_norm": 0.49238049678839496, |
| "learning_rate": 3.727778226412628e-05, |
| "loss": 0.5185, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.2556962025316456, |
| "grad_norm": 0.4950317897604242, |
| "learning_rate": 3.7249124400767006e-05, |
| "loss": 0.525, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.259746835443038, |
| "grad_norm": 0.5178938171621305, |
| "learning_rate": 3.722032761533114e-05, |
| "loss": 0.5422, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.2637974683544304, |
| "grad_norm": 0.4334893253763877, |
| "learning_rate": 3.719139213974403e-05, |
| "loss": 0.5357, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.2678481012658227, |
| "grad_norm": 0.6372078985814459, |
| "learning_rate": 3.7162318207048006e-05, |
| "loss": 0.5342, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.2718987341772152, |
| "grad_norm": 0.46554381873549805, |
| "learning_rate": 3.713310605140055e-05, |
| "loss": 0.5426, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.2759493670886077, |
| "grad_norm": 0.4283373015579432, |
| "learning_rate": 3.710375590807233e-05, |
| "loss": 0.5318, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.28, |
| "grad_norm": 0.4353378681045531, |
| "learning_rate": 3.7074268013445365e-05, |
| "loss": 0.5136, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.2840506329113923, |
| "grad_norm": 0.4819911884546959, |
| "learning_rate": 3.7044642605011114e-05, |
| "loss": 0.5179, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.2881012658227848, |
| "grad_norm": 0.3750576339783036, |
| "learning_rate": 3.701487992136854e-05, |
| "loss": 0.5378, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.2921518987341774, |
| "grad_norm": 0.4937441538880353, |
| "learning_rate": 3.69849802022222e-05, |
| "loss": 0.5248, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.2962025316455696, |
| "grad_norm": 0.38509234328369685, |
| "learning_rate": 3.6954943688380334e-05, |
| "loss": 0.5295, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.300253164556962, |
| "grad_norm": 0.5263626922285329, |
| "learning_rate": 3.692477062175289e-05, |
| "loss": 0.5302, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.3043037974683545, |
| "grad_norm": 0.39784999868810006, |
| "learning_rate": 3.689446124534958e-05, |
| "loss": 0.526, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.308354430379747, |
| "grad_norm": 0.4723177948859221, |
| "learning_rate": 3.686401580327799e-05, |
| "loss": 0.5153, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.3124050632911393, |
| "grad_norm": 0.3962795899793681, |
| "learning_rate": 3.683343454074149e-05, |
| "loss": 0.5161, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.3164556962025316, |
| "grad_norm": 0.5537780354864236, |
| "learning_rate": 3.6802717704037386e-05, |
| "loss": 0.5169, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.320506329113924, |
| "grad_norm": 0.42949036056011114, |
| "learning_rate": 3.6771865540554855e-05, |
| "loss": 0.5295, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.3245569620253166, |
| "grad_norm": 0.4345537015686956, |
| "learning_rate": 3.674087829877297e-05, |
| "loss": 0.548, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.3286075949367089, |
| "grad_norm": 0.4260594728383931, |
| "learning_rate": 3.6709756228258735e-05, |
| "loss": 0.5262, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.3326582278481012, |
| "grad_norm": 0.39876309940275534, |
| "learning_rate": 3.667849957966501e-05, |
| "loss": 0.5296, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.3367088607594937, |
| "grad_norm": 0.7380267685900821, |
| "learning_rate": 3.6647108604728546e-05, |
| "loss": 0.5326, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.3407594936708862, |
| "grad_norm": 0.40574487546659843, |
| "learning_rate": 3.661558355626795e-05, |
| "loss": 0.5441, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.3448101265822785, |
| "grad_norm": 0.4561318227935842, |
| "learning_rate": 3.658392468818163e-05, |
| "loss": 0.5217, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.3488607594936708, |
| "grad_norm": 0.49655020404342737, |
| "learning_rate": 3.655213225544574e-05, |
| "loss": 0.5258, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.3529113924050633, |
| "grad_norm": 0.43699964624606696, |
| "learning_rate": 3.652020651411218e-05, |
| "loss": 0.5104, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.3569620253164558, |
| "grad_norm": 0.44762687204428964, |
| "learning_rate": 3.6488147721306474e-05, |
| "loss": 0.5398, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.3610126582278481, |
| "grad_norm": 0.47643034446384264, |
| "learning_rate": 3.645595613522574e-05, |
| "loss": 0.5468, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.3650632911392404, |
| "grad_norm": 0.5333223608941613, |
| "learning_rate": 3.642363201513657e-05, |
| "loss": 0.5287, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.369113924050633, |
| "grad_norm": 0.46829975127751383, |
| "learning_rate": 3.6391175621373006e-05, |
| "loss": 0.5213, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.3731645569620254, |
| "grad_norm": 0.6191676091833654, |
| "learning_rate": 3.6358587215334355e-05, |
| "loss": 0.5131, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.3772151898734177, |
| "grad_norm": 0.629848679453359, |
| "learning_rate": 3.632586705948318e-05, |
| "loss": 0.5163, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.38126582278481, |
| "grad_norm": 0.5124154585446715, |
| "learning_rate": 3.629301541734311e-05, |
| "loss": 0.512, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.3853164556962025, |
| "grad_norm": 0.5018636314058451, |
| "learning_rate": 3.626003255349676e-05, |
| "loss": 0.5317, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.389367088607595, |
| "grad_norm": 0.5307220677962601, |
| "learning_rate": 3.622691873358357e-05, |
| "loss": 0.5336, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.3934177215189874, |
| "grad_norm": 0.4677306959497668, |
| "learning_rate": 3.61936742242977e-05, |
| "loss": 0.5193, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.3974683544303796, |
| "grad_norm": 0.4473949041191856, |
| "learning_rate": 3.6160299293385864e-05, |
| "loss": 0.5107, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.4015189873417722, |
| "grad_norm": 0.5113610225396805, |
| "learning_rate": 3.612679420964516e-05, |
| "loss": 0.5203, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.4055696202531647, |
| "grad_norm": 0.5401540836792521, |
| "learning_rate": 3.609315924292092e-05, |
| "loss": 0.5342, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.409620253164557, |
| "grad_norm": 0.44851639763465456, |
| "learning_rate": 3.6059394664104554e-05, |
| "loss": 0.5249, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.4136708860759493, |
| "grad_norm": 0.44327313633548104, |
| "learning_rate": 3.602550074513133e-05, |
| "loss": 0.5163, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.4177215189873418, |
| "grad_norm": 0.5072157879463485, |
| "learning_rate": 3.599147775897822e-05, |
| "loss": 0.5158, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.4217721518987343, |
| "grad_norm": 0.4551548588389448, |
| "learning_rate": 3.595732597966167e-05, |
| "loss": 0.5242, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.4258227848101266, |
| "grad_norm": 0.4533666122568206, |
| "learning_rate": 3.592304568223542e-05, |
| "loss": 0.5241, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.4298734177215189, |
| "grad_norm": 0.43433752051375524, |
| "learning_rate": 3.588863714278826e-05, |
| "loss": 0.53, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.4339240506329114, |
| "grad_norm": 0.37479344328774516, |
| "learning_rate": 3.585410063844186e-05, |
| "loss": 0.5215, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.437974683544304, |
| "grad_norm": 0.5115765373404997, |
| "learning_rate": 3.581943644734846e-05, |
| "loss": 0.5183, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.4420253164556962, |
| "grad_norm": 0.41267290231226844, |
| "learning_rate": 3.578464484868869e-05, |
| "loss": 0.5389, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.4460759493670885, |
| "grad_norm": 0.4340108503380144, |
| "learning_rate": 3.5749726122669316e-05, |
| "loss": 0.5355, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.450126582278481, |
| "grad_norm": 0.4161801621560667, |
| "learning_rate": 3.5714680550520943e-05, |
| "loss": 0.519, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.4541772151898735, |
| "grad_norm": 0.4331850041089976, |
| "learning_rate": 3.5679508414495794e-05, |
| "loss": 0.5189, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.4582278481012658, |
| "grad_norm": 0.602233790199386, |
| "learning_rate": 3.564420999786543e-05, |
| "loss": 0.5242, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.4622784810126581, |
| "grad_norm": 0.465284984824843, |
| "learning_rate": 3.560878558491842e-05, |
| "loss": 0.5164, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.4663291139240506, |
| "grad_norm": 0.448302817054784, |
| "learning_rate": 3.5573235460958145e-05, |
| "loss": 0.5169, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.4703797468354431, |
| "grad_norm": 0.4470911996678425, |
| "learning_rate": 3.553755991230039e-05, |
| "loss": 0.5235, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.4744303797468354, |
| "grad_norm": 0.40022422690888654, |
| "learning_rate": 3.5501759226271144e-05, |
| "loss": 0.5195, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.4784810126582277, |
| "grad_norm": 0.42044021977289875, |
| "learning_rate": 3.546583369120419e-05, |
| "loss": 0.5211, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.4825316455696202, |
| "grad_norm": 0.3958140737274046, |
| "learning_rate": 3.5429783596438864e-05, |
| "loss": 0.5179, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.4865822784810128, |
| "grad_norm": 0.44007289287624285, |
| "learning_rate": 3.539360923231766e-05, |
| "loss": 0.5285, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.490632911392405, |
| "grad_norm": 0.5078026351586084, |
| "learning_rate": 3.535731089018394e-05, |
| "loss": 0.5192, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.4946835443037974, |
| "grad_norm": 0.38756032602716145, |
| "learning_rate": 3.532088886237956e-05, |
| "loss": 0.5266, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.4987341772151899, |
| "grad_norm": 0.40482551346285706, |
| "learning_rate": 3.528434344224253e-05, |
| "loss": 0.5326, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.5027848101265824, |
| "grad_norm": 0.46846601812604854, |
| "learning_rate": 3.524767492410464e-05, |
| "loss": 0.5177, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.5068354430379747, |
| "grad_norm": 0.4921057393032905, |
| "learning_rate": 3.521088360328908e-05, |
| "loss": 0.5032, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.510886075949367, |
| "grad_norm": 0.40472349974983735, |
| "learning_rate": 3.517396977610811e-05, |
| "loss": 0.5246, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.5149367088607595, |
| "grad_norm": 0.4983090025755157, |
| "learning_rate": 3.5136933739860595e-05, |
| "loss": 0.5225, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.518987341772152, |
| "grad_norm": 0.560351569525954, |
| "learning_rate": 3.509977579282971e-05, |
| "loss": 0.5233, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.5230379746835443, |
| "grad_norm": 0.45132853132084555, |
| "learning_rate": 3.5062496234280424e-05, |
| "loss": 0.5248, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.5270886075949366, |
| "grad_norm": 0.3937130823951717, |
| "learning_rate": 3.502509536445719e-05, |
| "loss": 0.5256, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.531139240506329, |
| "grad_norm": 0.4677989647647185, |
| "learning_rate": 3.498757348458147e-05, |
| "loss": 0.524, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.5351898734177216, |
| "grad_norm": 0.3936052651142823, |
| "learning_rate": 3.4949930896849324e-05, |
| "loss": 0.532, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.539240506329114, |
| "grad_norm": 0.5111631305640455, |
| "learning_rate": 3.491216790442899e-05, |
| "loss": 0.5213, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.5432911392405062, |
| "grad_norm": 0.4813834445062157, |
| "learning_rate": 3.487428481145839e-05, |
| "loss": 0.523, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.5473417721518987, |
| "grad_norm": 0.5023978038624926, |
| "learning_rate": 3.483628192304278e-05, |
| "loss": 0.5215, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.5513924050632912, |
| "grad_norm": 0.4428344567339314, |
| "learning_rate": 3.479815954525219e-05, |
| "loss": 0.5315, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.5554430379746835, |
| "grad_norm": 0.4201355815296683, |
| "learning_rate": 3.475991798511899e-05, |
| "loss": 0.5256, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.5594936708860758, |
| "grad_norm": 0.42267468467455693, |
| "learning_rate": 3.4721557550635464e-05, |
| "loss": 0.5341, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.5635443037974683, |
| "grad_norm": 0.4781679282694953, |
| "learning_rate": 3.468307855075128e-05, |
| "loss": 0.5261, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.5675949367088609, |
| "grad_norm": 0.37207857751449497, |
| "learning_rate": 3.4644481295371005e-05, |
| "loss": 0.5191, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.5716455696202531, |
| "grad_norm": 0.42645996290429844, |
| "learning_rate": 3.460576609535163e-05, |
| "loss": 0.5159, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.5756962025316454, |
| "grad_norm": 0.4457339866358204, |
| "learning_rate": 3.456693326250006e-05, |
| "loss": 0.5189, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.579746835443038, |
| "grad_norm": 0.4185595421478181, |
| "learning_rate": 3.452798310957058e-05, |
| "loss": 0.5232, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.5837974683544305, |
| "grad_norm": 0.4378488961475587, |
| "learning_rate": 3.4488915950262386e-05, |
| "loss": 0.512, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.5878481012658228, |
| "grad_norm": 0.38943877215436995, |
| "learning_rate": 3.4449732099216985e-05, |
| "loss": 0.5172, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.591898734177215, |
| "grad_norm": 0.3972478950907895, |
| "learning_rate": 3.441043187201574e-05, |
| "loss": 0.5146, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.5959493670886076, |
| "grad_norm": 0.41867165068526085, |
| "learning_rate": 3.437101558517728e-05, |
| "loss": 0.5199, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.4553782007772934, |
| "learning_rate": 3.433148355615496e-05, |
| "loss": 0.5324, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.6040506329113924, |
| "grad_norm": 0.45869644686998556, |
| "learning_rate": 3.4291836103334294e-05, |
| "loss": 0.5352, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.6081012658227847, |
| "grad_norm": 0.44331652313665404, |
| "learning_rate": 3.425207354603043e-05, |
| "loss": 0.5292, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.6121518987341772, |
| "grad_norm": 0.4126961489615901, |
| "learning_rate": 3.421219620448553e-05, |
| "loss": 0.5139, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.6162025316455697, |
| "grad_norm": 0.414024297179943, |
| "learning_rate": 3.417220439986623e-05, |
| "loss": 0.5207, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.620253164556962, |
| "grad_norm": 0.3887850270008592, |
| "learning_rate": 3.4132098454261024e-05, |
| "loss": 0.4996, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.6243037974683543, |
| "grad_norm": 0.4966589317274025, |
| "learning_rate": 3.4091878690677676e-05, |
| "loss": 0.5391, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.6283544303797468, |
| "grad_norm": 0.4760292586869162, |
| "learning_rate": 3.405154543304065e-05, |
| "loss": 0.5215, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.6324050632911393, |
| "grad_norm": 0.3531307688976516, |
| "learning_rate": 3.401109900618843e-05, |
| "loss": 0.5186, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.6364556962025316, |
| "grad_norm": 0.5114384800129358, |
| "learning_rate": 3.3970539735870996e-05, |
| "loss": 0.5221, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.640506329113924, |
| "grad_norm": 0.4601655038100024, |
| "learning_rate": 3.392986794874714e-05, |
| "loss": 0.5196, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.6445569620253164, |
| "grad_norm": 0.425178784403157, |
| "learning_rate": 3.388908397238184e-05, |
| "loss": 0.5161, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.648607594936709, |
| "grad_norm": 0.44216170051278814, |
| "learning_rate": 3.384818813524362e-05, |
| "loss": 0.5191, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.6526582278481012, |
| "grad_norm": 0.41158518557212415, |
| "learning_rate": 3.380718076670195e-05, |
| "loss": 0.5368, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.6567088607594935, |
| "grad_norm": 0.3946501012791333, |
| "learning_rate": 3.376606219702454e-05, |
| "loss": 0.5305, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.660759493670886, |
| "grad_norm": 0.43953830306786895, |
| "learning_rate": 3.372483275737468e-05, |
| "loss": 0.5089, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.6648101265822786, |
| "grad_norm": 0.4221373658766732, |
| "learning_rate": 3.368349277980861e-05, |
| "loss": 0.5223, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.6688607594936709, |
| "grad_norm": 0.3892613587574305, |
| "learning_rate": 3.3642042597272844e-05, |
| "loss": 0.5252, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.6729113924050631, |
| "grad_norm": 0.38835969150553357, |
| "learning_rate": 3.360048254360144e-05, |
| "loss": 0.5012, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.6769620253164557, |
| "grad_norm": 0.4189422946435912, |
| "learning_rate": 3.355881295351336e-05, |
| "loss": 0.5167, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.6810126582278482, |
| "grad_norm": 0.4528539681838692, |
| "learning_rate": 3.351703416260975e-05, |
| "loss": 0.5214, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.6850632911392405, |
| "grad_norm": 0.3913270099351062, |
| "learning_rate": 3.347514650737126e-05, |
| "loss": 0.5253, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.6891139240506328, |
| "grad_norm": 0.41909065937608647, |
| "learning_rate": 3.3433150325155295e-05, |
| "loss": 0.5105, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.6931645569620253, |
| "grad_norm": 0.4630154896440805, |
| "learning_rate": 3.339104595419334e-05, |
| "loss": 0.5226, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.6972151898734178, |
| "grad_norm": 0.4565405906329168, |
| "learning_rate": 3.3348833733588204e-05, |
| "loss": 0.5372, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.70126582278481, |
| "grad_norm": 0.4418430433184848, |
| "learning_rate": 3.3306514003311305e-05, |
| "loss": 0.5168, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.7053164556962024, |
| "grad_norm": 0.5275810473084175, |
| "learning_rate": 3.326408710419996e-05, |
| "loss": 0.513, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.709367088607595, |
| "grad_norm": 0.43982408400692696, |
| "learning_rate": 3.322155337795454e-05, |
| "loss": 0.5166, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.7134177215189874, |
| "grad_norm": 0.5241360948078384, |
| "learning_rate": 3.317891316713587e-05, |
| "loss": 0.5152, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.7174683544303797, |
| "grad_norm": 0.36569836613978246, |
| "learning_rate": 3.313616681516231e-05, |
| "loss": 0.5198, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.721518987341772, |
| "grad_norm": 0.5154180478979966, |
| "learning_rate": 3.309331466630713e-05, |
| "loss": 0.512, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.7255696202531645, |
| "grad_norm": 0.3842535390365219, |
| "learning_rate": 3.305035706569563e-05, |
| "loss": 0.539, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.729620253164557, |
| "grad_norm": 0.4133869850466135, |
| "learning_rate": 3.3007294359302433e-05, |
| "loss": 0.5221, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.7336708860759493, |
| "grad_norm": 0.3748021165902318, |
| "learning_rate": 3.296412689394864e-05, |
| "loss": 0.5138, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.7377215189873416, |
| "grad_norm": 0.3918730019701728, |
| "learning_rate": 3.292085501729909e-05, |
| "loss": 0.5268, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.7417721518987341, |
| "grad_norm": 0.40681152137672705, |
| "learning_rate": 3.2877479077859534e-05, |
| "loss": 0.5084, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.7458227848101266, |
| "grad_norm": 0.39279415589295147, |
| "learning_rate": 3.283399942497381e-05, |
| "loss": 0.5434, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.749873417721519, |
| "grad_norm": 0.34247475969306557, |
| "learning_rate": 3.279041640882108e-05, |
| "loss": 0.5056, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.7539240506329112, |
| "grad_norm": 0.35417098024998256, |
| "learning_rate": 3.2746730380412964e-05, |
| "loss": 0.5245, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.7579746835443038, |
| "grad_norm": 0.3300904490913442, |
| "learning_rate": 3.2702941691590726e-05, |
| "loss": 0.5119, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.7620253164556963, |
| "grad_norm": 0.3449299694261345, |
| "learning_rate": 3.265905069502244e-05, |
| "loss": 0.5346, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.7660759493670886, |
| "grad_norm": 0.37668034187052946, |
| "learning_rate": 3.261505774420016e-05, |
| "loss": 0.5229, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.7701265822784809, |
| "grad_norm": 0.3316439324959394, |
| "learning_rate": 3.257096319343707e-05, |
| "loss": 0.5173, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.7741772151898734, |
| "grad_norm": 0.3418155732302691, |
| "learning_rate": 3.2526767397864614e-05, |
| "loss": 0.5289, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.7782278481012659, |
| "grad_norm": 0.37309477671485036, |
| "learning_rate": 3.248247071342966e-05, |
| "loss": 0.5074, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.7822784810126582, |
| "grad_norm": 0.36266706646072977, |
| "learning_rate": 3.243807349689161e-05, |
| "loss": 0.5247, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.7863291139240505, |
| "grad_norm": 0.4593260650554597, |
| "learning_rate": 3.2393576105819544e-05, |
| "loss": 0.5271, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.790379746835443, |
| "grad_norm": 0.3629926468651907, |
| "learning_rate": 3.2348978898589333e-05, |
| "loss": 0.5318, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.7944303797468355, |
| "grad_norm": 0.36797711667010596, |
| "learning_rate": 3.230428223438075e-05, |
| "loss": 0.4893, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.7984810126582278, |
| "grad_norm": 0.31627554638063876, |
| "learning_rate": 3.225948647317459e-05, |
| "loss": 0.5014, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.80253164556962, |
| "grad_norm": 0.35735580957754787, |
| "learning_rate": 3.2214591975749745e-05, |
| "loss": 0.5083, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.8065822784810126, |
| "grad_norm": 0.4071422806600287, |
| "learning_rate": 3.216959910368034e-05, |
| "loss": 0.5288, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.8106329113924051, |
| "grad_norm": 0.36932504334410726, |
| "learning_rate": 3.212450821933277e-05, |
| "loss": 0.5167, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.8146835443037974, |
| "grad_norm": 0.3798934784120581, |
| "learning_rate": 3.207931968586281e-05, |
| "loss": 0.5306, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.81873417721519, |
| "grad_norm": 0.41064615846605496, |
| "learning_rate": 3.203403386721272e-05, |
| "loss": 0.5133, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.8227848101265822, |
| "grad_norm": 0.37057422747799906, |
| "learning_rate": 3.1988651128108245e-05, |
| "loss": 0.5163, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.8268354430379747, |
| "grad_norm": 0.4380056407532793, |
| "learning_rate": 3.194317183405573e-05, |
| "loss": 0.5033, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.8308860759493673, |
| "grad_norm": 0.3849898045400577, |
| "learning_rate": 3.189759635133914e-05, |
| "loss": 0.5276, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.8349367088607595, |
| "grad_norm": 0.3449260884016905, |
| "learning_rate": 3.185192504701718e-05, |
| "loss": 0.518, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.8389873417721518, |
| "grad_norm": 0.46886903060053686, |
| "learning_rate": 3.1806158288920234e-05, |
| "loss": 0.5251, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.8430379746835444, |
| "grad_norm": 0.3437676612917836, |
| "learning_rate": 3.1760296445647477e-05, |
| "loss": 0.522, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.8470886075949369, |
| "grad_norm": 0.4694304905759313, |
| "learning_rate": 3.1714339886563896e-05, |
| "loss": 0.5417, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.8511392405063292, |
| "grad_norm": 0.3545362137534388, |
| "learning_rate": 3.166828898179731e-05, |
| "loss": 0.5292, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.8551898734177215, |
| "grad_norm": 0.38958349750474475, |
| "learning_rate": 3.162214410223536e-05, |
| "loss": 0.5127, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.859240506329114, |
| "grad_norm": 0.4069254663175485, |
| "learning_rate": 3.157590561952257e-05, |
| "loss": 0.5196, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.8632911392405065, |
| "grad_norm": 0.4281534979811216, |
| "learning_rate": 3.152957390605732e-05, |
| "loss": 0.5355, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.8673417721518988, |
| "grad_norm": 0.31621296053520315, |
| "learning_rate": 3.148314933498886e-05, |
| "loss": 0.5062, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.871392405063291, |
| "grad_norm": 0.3855577749026557, |
| "learning_rate": 3.143663228021431e-05, |
| "loss": 0.534, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.8754430379746836, |
| "grad_norm": 0.3514560680900497, |
| "learning_rate": 3.1390023116375624e-05, |
| "loss": 0.5202, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.879493670886076, |
| "grad_norm": 0.4330512286045491, |
| "learning_rate": 3.134332221885661e-05, |
| "loss": 0.508, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.8835443037974684, |
| "grad_norm": 0.41771387961430934, |
| "learning_rate": 3.129652996377987e-05, |
| "loss": 0.5078, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.8875949367088607, |
| "grad_norm": 0.3730325483390968, |
| "learning_rate": 3.12496467280038e-05, |
| "loss": 0.5347, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.8916455696202532, |
| "grad_norm": 0.4582155883213999, |
| "learning_rate": 3.120267288911952e-05, |
| "loss": 0.5266, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.8956962025316457, |
| "grad_norm": 0.37239364295738103, |
| "learning_rate": 3.11556088254479e-05, |
| "loss": 0.4984, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.899746835443038, |
| "grad_norm": 0.3964472528326846, |
| "learning_rate": 3.11084549160364e-05, |
| "loss": 0.5005, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.9037974683544303, |
| "grad_norm": 0.45967343560386503, |
| "learning_rate": 3.106121154065615e-05, |
| "loss": 0.5008, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.9078481012658228, |
| "grad_norm": 0.33825695946229706, |
| "learning_rate": 3.1013879079798805e-05, |
| "loss": 0.5268, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.9118987341772153, |
| "grad_norm": 0.4239785833477351, |
| "learning_rate": 3.096645791467348e-05, |
| "loss": 0.5251, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.9159493670886076, |
| "grad_norm": 0.49053770338550023, |
| "learning_rate": 3.091894842720373e-05, |
| "loss": 0.5214, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.92, |
| "grad_norm": 0.37145843919247507, |
| "learning_rate": 3.0871351000024425e-05, |
| "loss": 0.5169, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.9240506329113924, |
| "grad_norm": 0.42298253149645976, |
| "learning_rate": 3.0823666016478716e-05, |
| "loss": 0.5174, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.928101265822785, |
| "grad_norm": 0.441879167587309, |
| "learning_rate": 3.0775893860614896e-05, |
| "loss": 0.5105, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.9321518987341773, |
| "grad_norm": 0.43264299542842927, |
| "learning_rate": 3.0728034917183336e-05, |
| "loss": 0.5172, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.9362025316455695, |
| "grad_norm": 0.4165645060851816, |
| "learning_rate": 3.06800895716334e-05, |
| "loss": 0.5064, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.940253164556962, |
| "grad_norm": 0.42626753854240856, |
| "learning_rate": 3.063205821011029e-05, |
| "loss": 0.5258, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.9443037974683546, |
| "grad_norm": 0.3997392762518282, |
| "learning_rate": 3.0583941219452016e-05, |
| "loss": 0.5221, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.9483544303797469, |
| "grad_norm": 0.38617812104066007, |
| "learning_rate": 3.053573898718618e-05, |
| "loss": 0.5129, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.9524050632911392, |
| "grad_norm": 0.4311832496644064, |
| "learning_rate": 3.0487451901526956e-05, |
| "loss": 0.528, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.9564556962025317, |
| "grad_norm": 0.361665718822663, |
| "learning_rate": 3.0439080351371875e-05, |
| "loss": 0.519, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.9605063291139242, |
| "grad_norm": 0.4189778515299404, |
| "learning_rate": 3.0390624726298764e-05, |
| "loss": 0.5139, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.9645569620253165, |
| "grad_norm": 0.3694751248453239, |
| "learning_rate": 3.034208541656255e-05, |
| "loss": 0.5187, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.9686075949367088, |
| "grad_norm": 0.38111119049435027, |
| "learning_rate": 3.029346281309218e-05, |
| "loss": 0.5157, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.9726582278481013, |
| "grad_norm": 0.40899552391278465, |
| "learning_rate": 3.0244757307487415e-05, |
| "loss": 0.5151, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.9767088607594938, |
| "grad_norm": 0.40311796258582216, |
| "learning_rate": 3.019596929201569e-05, |
| "loss": 0.5319, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.980759493670886, |
| "grad_norm": 0.36722201846641245, |
| "learning_rate": 3.0147099159608985e-05, |
| "loss": 0.5237, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.9848101265822784, |
| "grad_norm": 0.4145965030646606, |
| "learning_rate": 3.0098147303860616e-05, |
| "loss": 0.5013, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.988860759493671, |
| "grad_norm": 0.3825806447643572, |
| "learning_rate": 3.0049114119022117e-05, |
| "loss": 0.5129, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.9929113924050634, |
| "grad_norm": 0.3715885361557003, |
| "learning_rate": 3.0000000000000004e-05, |
| "loss": 0.5157, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.9969620253164557, |
| "grad_norm": 0.4201131503181122, |
| "learning_rate": 2.995080534235264e-05, |
| "loss": 0.5089, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.001012658227848, |
| "grad_norm": 0.3355863843156142, |
| "learning_rate": 2.9901530542287044e-05, |
| "loss": 0.5045, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.0050632911392405, |
| "grad_norm": 0.4974028393685011, |
| "learning_rate": 2.9852175996655676e-05, |
| "loss": 0.4542, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.009113924050633, |
| "grad_norm": 0.4707741774750515, |
| "learning_rate": 2.980274210295326e-05, |
| "loss": 0.4683, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.013164556962025, |
| "grad_norm": 0.43922251613048435, |
| "learning_rate": 2.9753229259313578e-05, |
| "loss": 0.4439, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.0172151898734176, |
| "grad_norm": 0.47039519016745407, |
| "learning_rate": 2.9703637864506274e-05, |
| "loss": 0.4649, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.02126582278481, |
| "grad_norm": 0.43822555332137125, |
| "learning_rate": 2.965396831793362e-05, |
| "loss": 0.4415, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.0253164556962027, |
| "grad_norm": 0.40228727373046647, |
| "learning_rate": 2.9604221019627316e-05, |
| "loss": 0.4348, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.0293670886075947, |
| "grad_norm": 0.5456009954038668, |
| "learning_rate": 2.955439637024526e-05, |
| "loss": 0.4451, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.0334177215189873, |
| "grad_norm": 0.40618356819091983, |
| "learning_rate": 2.9504494771068334e-05, |
| "loss": 0.4378, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.0374683544303798, |
| "grad_norm": 0.4974968382489763, |
| "learning_rate": 2.9454516623997156e-05, |
| "loss": 0.4513, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.0415189873417723, |
| "grad_norm": 0.4021486337002783, |
| "learning_rate": 2.9404462331548847e-05, |
| "loss": 0.4511, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.0455696202531644, |
| "grad_norm": 0.7677301926029314, |
| "learning_rate": 2.93543322968538e-05, |
| "loss": 0.4588, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.049620253164557, |
| "grad_norm": 0.4377842312672235, |
| "learning_rate": 2.9304126923652428e-05, |
| "loss": 0.4355, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.0536708860759494, |
| "grad_norm": 0.4224768141975555, |
| "learning_rate": 2.9253846616291896e-05, |
| "loss": 0.4554, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.057721518987342, |
| "grad_norm": 0.4499938433878014, |
| "learning_rate": 2.9203491779722896e-05, |
| "loss": 0.4441, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.061772151898734, |
| "grad_norm": 0.4088659112727946, |
| "learning_rate": 2.9153062819496357e-05, |
| "loss": 0.4518, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.0658227848101265, |
| "grad_norm": 1.0829700250218213, |
| "learning_rate": 2.9102560141760178e-05, |
| "loss": 0.4637, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.069873417721519, |
| "grad_norm": 0.3958618008851208, |
| "learning_rate": 2.9051984153256004e-05, |
| "loss": 0.4513, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.0739240506329115, |
| "grad_norm": 0.45970586203425257, |
| "learning_rate": 2.900133526131588e-05, |
| "loss": 0.4607, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.0779746835443036, |
| "grad_norm": 0.3781635759784678, |
| "learning_rate": 2.8950613873859025e-05, |
| "loss": 0.4575, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.082025316455696, |
| "grad_norm": 0.40858390841491427, |
| "learning_rate": 2.8899820399388515e-05, |
| "loss": 0.4159, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.0860759493670886, |
| "grad_norm": 0.3684568274457562, |
| "learning_rate": 2.8848955246988012e-05, |
| "loss": 0.4472, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.090126582278481, |
| "grad_norm": 0.4324983507095325, |
| "learning_rate": 2.879801882631847e-05, |
| "loss": 0.4493, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.094177215189873, |
| "grad_norm": 0.34891103184984346, |
| "learning_rate": 2.8747011547614808e-05, |
| "loss": 0.4521, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.0982278481012657, |
| "grad_norm": 0.4097853308471671, |
| "learning_rate": 2.8695933821682635e-05, |
| "loss": 0.4387, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.1022784810126582, |
| "grad_norm": 0.4222884894170493, |
| "learning_rate": 2.864478605989494e-05, |
| "loss": 0.4399, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.1063291139240508, |
| "grad_norm": 0.35776335607688486, |
| "learning_rate": 2.8593568674188765e-05, |
| "loss": 0.4567, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.110379746835443, |
| "grad_norm": 0.478816368372734, |
| "learning_rate": 2.8542282077061892e-05, |
| "loss": 0.4556, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.1144303797468353, |
| "grad_norm": 1.0418199702669468, |
| "learning_rate": 2.8490926681569523e-05, |
| "loss": 0.4624, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.118481012658228, |
| "grad_norm": 0.4491872443726266, |
| "learning_rate": 2.8439502901320956e-05, |
| "loss": 0.4593, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.1225316455696204, |
| "grad_norm": 0.5512465289527541, |
| "learning_rate": 2.8388011150476237e-05, |
| "loss": 0.4553, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.1265822784810124, |
| "grad_norm": 0.4307789289747691, |
| "learning_rate": 2.8336451843742866e-05, |
| "loss": 0.4511, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.130632911392405, |
| "grad_norm": 0.4694366315395625, |
| "learning_rate": 2.8284825396372387e-05, |
| "loss": 0.4659, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.1346835443037975, |
| "grad_norm": 0.3885568324077637, |
| "learning_rate": 2.8233132224157132e-05, |
| "loss": 0.4439, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.13873417721519, |
| "grad_norm": 0.3378139042324306, |
| "learning_rate": 2.8181372743426805e-05, |
| "loss": 0.4557, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.1427848101265825, |
| "grad_norm": 0.42310699555892084, |
| "learning_rate": 2.8129547371045128e-05, |
| "loss": 0.4563, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.1468354430379746, |
| "grad_norm": 0.47589774606900853, |
| "learning_rate": 2.8077656524406534e-05, |
| "loss": 0.4381, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.150886075949367, |
| "grad_norm": 0.37720764383279487, |
| "learning_rate": 2.802570062143278e-05, |
| "loss": 0.4494, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.1549367088607596, |
| "grad_norm": 0.5222494199435063, |
| "learning_rate": 2.7973680080569555e-05, |
| "loss": 0.4606, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.1589873417721517, |
| "grad_norm": 0.35330970289056896, |
| "learning_rate": 2.792159532078314e-05, |
| "loss": 0.4405, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.163037974683544, |
| "grad_norm": 0.5409072342548263, |
| "learning_rate": 2.7869446761557033e-05, |
| "loss": 0.4568, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.1670886075949367, |
| "grad_norm": 0.483828147860248, |
| "learning_rate": 2.781723482288857e-05, |
| "loss": 0.4423, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.1711392405063292, |
| "grad_norm": 0.37881184021619135, |
| "learning_rate": 2.7764959925285517e-05, |
| "loss": 0.4585, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.1751898734177217, |
| "grad_norm": 0.5280448317773953, |
| "learning_rate": 2.771262248976272e-05, |
| "loss": 0.4562, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.179240506329114, |
| "grad_norm": 0.3512693298829198, |
| "learning_rate": 2.7660222937838677e-05, |
| "loss": 0.4657, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.1832911392405063, |
| "grad_norm": 0.49274876396794187, |
| "learning_rate": 2.7607761691532186e-05, |
| "loss": 0.4559, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.187341772151899, |
| "grad_norm": 0.3964993354460611, |
| "learning_rate": 2.7555239173358916e-05, |
| "loss": 0.4316, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.191392405063291, |
| "grad_norm": 0.4097727206423144, |
| "learning_rate": 2.7502655806328e-05, |
| "loss": 0.4434, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.1954430379746834, |
| "grad_norm": 0.41292362151917744, |
| "learning_rate": 2.7450012013938648e-05, |
| "loss": 0.4591, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.199493670886076, |
| "grad_norm": 0.3828764695791823, |
| "learning_rate": 2.739730822017673e-05, |
| "loss": 0.444, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.2035443037974685, |
| "grad_norm": 0.3694958973744744, |
| "learning_rate": 2.7344544849511355e-05, |
| "loss": 0.4457, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.207594936708861, |
| "grad_norm": 0.35551167904026326, |
| "learning_rate": 2.7291722326891456e-05, |
| "loss": 0.474, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.211645569620253, |
| "grad_norm": 0.3480653164278098, |
| "learning_rate": 2.723884107774236e-05, |
| "loss": 0.4474, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.2156962025316456, |
| "grad_norm": 0.41142444288199737, |
| "learning_rate": 2.718590152796239e-05, |
| "loss": 0.4572, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.219746835443038, |
| "grad_norm": 0.4220145291412988, |
| "learning_rate": 2.71329041039194e-05, |
| "loss": 0.4529, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.22379746835443, |
| "grad_norm": 0.35287104007341247, |
| "learning_rate": 2.7079849232447357e-05, |
| "loss": 0.4612, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.2278481012658227, |
| "grad_norm": 0.40502159933608306, |
| "learning_rate": 2.7026737340842895e-05, |
| "loss": 0.4528, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.231898734177215, |
| "grad_norm": 0.3661596183256528, |
| "learning_rate": 2.697356885686189e-05, |
| "loss": 0.4574, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.2359493670886077, |
| "grad_norm": 0.376681170063722, |
| "learning_rate": 2.6920344208716014e-05, |
| "loss": 0.4478, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.24, |
| "grad_norm": 0.3790722640678017, |
| "learning_rate": 2.6867063825069252e-05, |
| "loss": 0.4638, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.2440506329113923, |
| "grad_norm": 0.3184322111175435, |
| "learning_rate": 2.6813728135034494e-05, |
| "loss": 0.4602, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.248101265822785, |
| "grad_norm": 0.39022712665092607, |
| "learning_rate": 2.6760337568170056e-05, |
| "loss": 0.4573, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.2521518987341773, |
| "grad_norm": 0.38579559452439255, |
| "learning_rate": 2.6706892554476226e-05, |
| "loss": 0.4494, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.2562025316455694, |
| "grad_norm": 0.38137486308582363, |
| "learning_rate": 2.6653393524391795e-05, |
| "loss": 0.4398, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.260253164556962, |
| "grad_norm": 0.42148215754136986, |
| "learning_rate": 2.6599840908790592e-05, |
| "loss": 0.4488, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.2643037974683544, |
| "grad_norm": 0.38248730564972966, |
| "learning_rate": 2.6546235138978028e-05, |
| "loss": 0.4606, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.268354430379747, |
| "grad_norm": 0.31131234987726175, |
| "learning_rate": 2.6492576646687597e-05, |
| "loss": 0.4521, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.2724050632911394, |
| "grad_norm": 0.4685517810412742, |
| "learning_rate": 2.6438865864077425e-05, |
| "loss": 0.4382, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.2764556962025315, |
| "grad_norm": 0.3216397285713306, |
| "learning_rate": 2.6385103223726766e-05, |
| "loss": 0.441, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.280506329113924, |
| "grad_norm": 0.3458879252715164, |
| "learning_rate": 2.6331289158632537e-05, |
| "loss": 0.4648, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.2845569620253166, |
| "grad_norm": 0.316866584435557, |
| "learning_rate": 2.6277424102205817e-05, |
| "loss": 0.4801, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.2886075949367086, |
| "grad_norm": 0.40116359229636234, |
| "learning_rate": 2.6223508488268374e-05, |
| "loss": 0.464, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.292658227848101, |
| "grad_norm": 0.2944485955061041, |
| "learning_rate": 2.6169542751049148e-05, |
| "loss": 0.4648, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.2967088607594937, |
| "grad_norm": 0.3411962295006207, |
| "learning_rate": 2.6115527325180754e-05, |
| "loss": 0.4534, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.300759493670886, |
| "grad_norm": 0.33321820156550147, |
| "learning_rate": 2.606146264569603e-05, |
| "loss": 0.4343, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.3048101265822787, |
| "grad_norm": 0.3492148948851837, |
| "learning_rate": 2.6007349148024447e-05, |
| "loss": 0.4444, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.3088607594936708, |
| "grad_norm": 0.32592028167785275, |
| "learning_rate": 2.5953187267988694e-05, |
| "loss": 0.4438, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.3129113924050633, |
| "grad_norm": 0.39123171220250946, |
| "learning_rate": 2.5898977441801097e-05, |
| "loss": 0.4505, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.316962025316456, |
| "grad_norm": 0.3047370121237105, |
| "learning_rate": 2.584472010606015e-05, |
| "loss": 0.4343, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.321012658227848, |
| "grad_norm": 0.35752264143045953, |
| "learning_rate": 2.5790415697746976e-05, |
| "loss": 0.4634, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.3250632911392404, |
| "grad_norm": 0.29094694318271036, |
| "learning_rate": 2.5736064654221808e-05, |
| "loss": 0.457, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.329113924050633, |
| "grad_norm": 0.3610212039973494, |
| "learning_rate": 2.568166741322048e-05, |
| "loss": 0.4366, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.3331645569620254, |
| "grad_norm": 0.3464556286485968, |
| "learning_rate": 2.56272244128509e-05, |
| "loss": 0.4387, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.337215189873418, |
| "grad_norm": 0.3472594470883641, |
| "learning_rate": 2.55727360915895e-05, |
| "loss": 0.4399, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.34126582278481, |
| "grad_norm": 0.33336417746393987, |
| "learning_rate": 2.5518202888277734e-05, |
| "loss": 0.4364, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.3453164556962025, |
| "grad_norm": 0.3693174101283044, |
| "learning_rate": 2.5463625242118523e-05, |
| "loss": 0.4516, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.349367088607595, |
| "grad_norm": 0.33938587977565576, |
| "learning_rate": 2.5409003592672723e-05, |
| "loss": 0.4564, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.353417721518987, |
| "grad_norm": 0.33218180128597885, |
| "learning_rate": 2.535433837985559e-05, |
| "loss": 0.4718, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.3574683544303796, |
| "grad_norm": 0.3859942663118844, |
| "learning_rate": 2.529963004393324e-05, |
| "loss": 0.4496, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.361518987341772, |
| "grad_norm": 0.37355647677389464, |
| "learning_rate": 2.524487902551908e-05, |
| "loss": 0.4489, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.3655696202531646, |
| "grad_norm": 0.3615204328788961, |
| "learning_rate": 2.519008576557029e-05, |
| "loss": 0.4595, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.369620253164557, |
| "grad_norm": 0.35859542729580224, |
| "learning_rate": 2.5135250705384254e-05, |
| "loss": 0.4456, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.3736708860759492, |
| "grad_norm": 0.3928074279856453, |
| "learning_rate": 2.5080374286595007e-05, |
| "loss": 0.4452, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.3777215189873417, |
| "grad_norm": 0.3417960719316651, |
| "learning_rate": 2.5025456951169677e-05, |
| "loss": 0.451, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.3817721518987343, |
| "grad_norm": 0.4311842333890458, |
| "learning_rate": 2.4970499141404942e-05, |
| "loss": 0.4524, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.3858227848101268, |
| "grad_norm": 0.348116983610985, |
| "learning_rate": 2.491550129992345e-05, |
| "loss": 0.4449, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.389873417721519, |
| "grad_norm": 0.38798168396806276, |
| "learning_rate": 2.486046386967024e-05, |
| "loss": 0.4503, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.3939240506329114, |
| "grad_norm": 0.4205690048210354, |
| "learning_rate": 2.4805387293909214e-05, |
| "loss": 0.4559, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.397974683544304, |
| "grad_norm": 0.37443614140807585, |
| "learning_rate": 2.4750272016219552e-05, |
| "loss": 0.4523, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.4020253164556964, |
| "grad_norm": 0.3876330794174785, |
| "learning_rate": 2.4695118480492114e-05, |
| "loss": 0.4663, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.4060759493670885, |
| "grad_norm": 0.3694051750064184, |
| "learning_rate": 2.4639927130925898e-05, |
| "loss": 0.4488, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.410126582278481, |
| "grad_norm": 0.8835390781663585, |
| "learning_rate": 2.458469841202444e-05, |
| "loss": 0.4398, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.4141772151898735, |
| "grad_norm": 0.36517416717005663, |
| "learning_rate": 2.452943276859226e-05, |
| "loss": 0.471, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.418227848101266, |
| "grad_norm": 0.36038511760950237, |
| "learning_rate": 2.447413064573125e-05, |
| "loss": 0.4643, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.422278481012658, |
| "grad_norm": 0.3950617712896261, |
| "learning_rate": 2.4418792488837095e-05, |
| "loss": 0.4515, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.4263291139240506, |
| "grad_norm": 0.34525606896349836, |
| "learning_rate": 2.4363418743595713e-05, |
| "loss": 0.4308, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.430379746835443, |
| "grad_norm": 0.3433297318231352, |
| "learning_rate": 2.430800985597963e-05, |
| "loss": 0.4555, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.4344303797468356, |
| "grad_norm": 0.35331874814545017, |
| "learning_rate": 2.4252566272244415e-05, |
| "loss": 0.4543, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.4384810126582277, |
| "grad_norm": 0.3463051793246971, |
| "learning_rate": 2.4197088438925063e-05, |
| "loss": 0.4526, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.44253164556962, |
| "grad_norm": 0.34976981194784246, |
| "learning_rate": 2.4141576802832417e-05, |
| "loss": 0.4427, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.4465822784810127, |
| "grad_norm": 0.3128381720714162, |
| "learning_rate": 2.408603181104957e-05, |
| "loss": 0.4744, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.4506329113924052, |
| "grad_norm": 0.41518699454392927, |
| "learning_rate": 2.4030453910928245e-05, |
| "loss": 0.4457, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.4546835443037973, |
| "grad_norm": 0.33465255828487445, |
| "learning_rate": 2.397484355008521e-05, |
| "loss": 0.4535, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.45873417721519, |
| "grad_norm": 0.3270475479899036, |
| "learning_rate": 2.3919201176398662e-05, |
| "loss": 0.4797, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.4627848101265823, |
| "grad_norm": 0.33278107897155496, |
| "learning_rate": 2.3863527238004633e-05, |
| "loss": 0.4594, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.466835443037975, |
| "grad_norm": 0.3707997453408468, |
| "learning_rate": 2.380782218329337e-05, |
| "loss": 0.4629, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.470886075949367, |
| "grad_norm": 0.3170173239969989, |
| "learning_rate": 2.3752086460905725e-05, |
| "loss": 0.4586, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.4749367088607594, |
| "grad_norm": 0.8303789372661027, |
| "learning_rate": 2.3696320519729544e-05, |
| "loss": 0.4575, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.478987341772152, |
| "grad_norm": 0.333566007866273, |
| "learning_rate": 2.3640524808896045e-05, |
| "loss": 0.4581, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.4830379746835445, |
| "grad_norm": 0.3170403028405267, |
| "learning_rate": 2.3584699777776222e-05, |
| "loss": 0.4461, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.4870886075949366, |
| "grad_norm": 0.29329914254193734, |
| "learning_rate": 2.3528845875977195e-05, |
| "loss": 0.4591, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.491139240506329, |
| "grad_norm": 0.395702602095219, |
| "learning_rate": 2.3472963553338614e-05, |
| "loss": 0.45, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.4951898734177216, |
| "grad_norm": 0.33161331376129183, |
| "learning_rate": 2.341705325992901e-05, |
| "loss": 0.4614, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.499240506329114, |
| "grad_norm": 0.3243650355680588, |
| "learning_rate": 2.336111544604222e-05, |
| "loss": 0.451, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.503291139240506, |
| "grad_norm": 0.3411656846727874, |
| "learning_rate": 2.33051505621937e-05, |
| "loss": 0.45, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.5073417721518987, |
| "grad_norm": 0.3671497222499634, |
| "learning_rate": 2.324915905911693e-05, |
| "loss": 0.4628, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.511392405063291, |
| "grad_norm": 0.3436228011674535, |
| "learning_rate": 2.319314138775977e-05, |
| "loss": 0.4641, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.5154430379746833, |
| "grad_norm": 0.34490684071766986, |
| "learning_rate": 2.3137097999280856e-05, |
| "loss": 0.468, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.519493670886076, |
| "grad_norm": 0.3520503138071247, |
| "learning_rate": 2.308102934504593e-05, |
| "loss": 0.4449, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.5235443037974683, |
| "grad_norm": 0.33847193966141087, |
| "learning_rate": 2.3024935876624222e-05, |
| "loss": 0.4442, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.527594936708861, |
| "grad_norm": 0.2982118411778628, |
| "learning_rate": 2.2968818045784813e-05, |
| "loss": 0.4533, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.5316455696202533, |
| "grad_norm": 0.33676237933949954, |
| "learning_rate": 2.2912676304493006e-05, |
| "loss": 0.4304, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.5356962025316454, |
| "grad_norm": 0.3198542497822019, |
| "learning_rate": 2.2856511104906668e-05, |
| "loss": 0.4532, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.539746835443038, |
| "grad_norm": 0.33632109163013024, |
| "learning_rate": 2.2800322899372586e-05, |
| "loss": 0.4571, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.5437974683544304, |
| "grad_norm": 0.36449198699731405, |
| "learning_rate": 2.2744112140422844e-05, |
| "loss": 0.4651, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.547848101265823, |
| "grad_norm": 0.3038812095700175, |
| "learning_rate": 2.2687879280771177e-05, |
| "loss": 0.4453, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.5518987341772155, |
| "grad_norm": 0.328535886728562, |
| "learning_rate": 2.26316247733093e-05, |
| "loss": 0.4619, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.5559493670886075, |
| "grad_norm": 0.32332551389044606, |
| "learning_rate": 2.257534907110328e-05, |
| "loss": 0.4607, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.56, |
| "grad_norm": 0.2794184046561868, |
| "learning_rate": 2.2519052627389882e-05, |
| "loss": 0.4681, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.5640506329113926, |
| "grad_norm": 0.34722261871266724, |
| "learning_rate": 2.246273589557294e-05, |
| "loss": 0.4611, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.5681012658227846, |
| "grad_norm": 0.3138811789514351, |
| "learning_rate": 2.240639932921966e-05, |
| "loss": 0.4625, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.572151898734177, |
| "grad_norm": 0.3302297320983144, |
| "learning_rate": 2.2350043382056995e-05, |
| "loss": 0.4566, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.5762025316455697, |
| "grad_norm": 0.3339620035137812, |
| "learning_rate": 2.2293668507968015e-05, |
| "loss": 0.4556, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.580253164556962, |
| "grad_norm": 0.32954466061920773, |
| "learning_rate": 2.2237275160988186e-05, |
| "loss": 0.4572, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.5843037974683547, |
| "grad_norm": 0.3143415058569146, |
| "learning_rate": 2.2180863795301787e-05, |
| "loss": 0.4551, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.5883544303797468, |
| "grad_norm": 0.3492137022411731, |
| "learning_rate": 2.212443486523819e-05, |
| "loss": 0.4385, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.5924050632911393, |
| "grad_norm": 0.362351962555067, |
| "learning_rate": 2.2067988825268243e-05, |
| "loss": 0.4513, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.596455696202532, |
| "grad_norm": 0.33667685313315854, |
| "learning_rate": 2.2011526130000596e-05, |
| "loss": 0.4845, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.600506329113924, |
| "grad_norm": 0.3800102296884916, |
| "learning_rate": 2.1955047234178038e-05, |
| "loss": 0.4724, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.6045569620253164, |
| "grad_norm": 0.3698154242275961, |
| "learning_rate": 2.1898552592673825e-05, |
| "loss": 0.4539, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.608607594936709, |
| "grad_norm": 0.3276843028539347, |
| "learning_rate": 2.184204266048803e-05, |
| "loss": 0.4447, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.6126582278481014, |
| "grad_norm": 0.4123423070907256, |
| "learning_rate": 2.1785517892743887e-05, |
| "loss": 0.4551, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.616708860759494, |
| "grad_norm": 0.36179798208985037, |
| "learning_rate": 2.17289787446841e-05, |
| "loss": 0.4547, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.620759493670886, |
| "grad_norm": 0.36132288515445193, |
| "learning_rate": 2.1672425671667198e-05, |
| "loss": 0.4554, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.6248101265822785, |
| "grad_norm": 0.39488808541438214, |
| "learning_rate": 2.161585912916385e-05, |
| "loss": 0.451, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.628860759493671, |
| "grad_norm": 0.3420612634189229, |
| "learning_rate": 2.1559279572753214e-05, |
| "loss": 0.4547, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.632911392405063, |
| "grad_norm": 0.372990763246441, |
| "learning_rate": 2.1502687458119268e-05, |
| "loss": 0.4432, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.6369620253164556, |
| "grad_norm": 2.435051941355152, |
| "learning_rate": 2.1446083241047116e-05, |
| "loss": 0.4593, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.641012658227848, |
| "grad_norm": 0.4729438219266065, |
| "learning_rate": 2.1389467377419333e-05, |
| "loss": 0.4598, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.6450632911392407, |
| "grad_norm": 0.3113789258594704, |
| "learning_rate": 2.133284032321232e-05, |
| "loss": 0.4331, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.649113924050633, |
| "grad_norm": 0.40547957606324747, |
| "learning_rate": 2.1276202534492566e-05, |
| "loss": 0.432, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.6531645569620252, |
| "grad_norm": 0.3402951870544527, |
| "learning_rate": 2.121955446741306e-05, |
| "loss": 0.4486, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.6572151898734178, |
| "grad_norm": 0.332227794244599, |
| "learning_rate": 2.1162896578209517e-05, |
| "loss": 0.4259, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.6612658227848103, |
| "grad_norm": 0.33564472375463195, |
| "learning_rate": 2.1106229323196813e-05, |
| "loss": 0.4537, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.6653164556962023, |
| "grad_norm": 0.3055391326520935, |
| "learning_rate": 2.1049553158765214e-05, |
| "loss": 0.4562, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.669367088607595, |
| "grad_norm": 0.31903103983697795, |
| "learning_rate": 2.0992868541376764e-05, |
| "loss": 0.465, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.6734177215189874, |
| "grad_norm": 1.7769210611257602, |
| "learning_rate": 2.093617592756158e-05, |
| "loss": 0.4664, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.67746835443038, |
| "grad_norm": 0.3295225307718748, |
| "learning_rate": 2.0879475773914167e-05, |
| "loss": 0.4414, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.6815189873417724, |
| "grad_norm": 0.33581633031091923, |
| "learning_rate": 2.082276853708978e-05, |
| "loss": 0.4467, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.6855696202531645, |
| "grad_norm": 0.3212929445722813, |
| "learning_rate": 2.076605467380071e-05, |
| "loss": 0.4461, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.689620253164557, |
| "grad_norm": 0.33721643454362155, |
| "learning_rate": 2.0709334640812613e-05, |
| "loss": 0.4545, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.6936708860759495, |
| "grad_norm": 0.33336956595405615, |
| "learning_rate": 2.0652608894940824e-05, |
| "loss": 0.4467, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.6977215189873416, |
| "grad_norm": 0.35524015164560657, |
| "learning_rate": 2.0595877893046722e-05, |
| "loss": 0.4332, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.701772151898734, |
| "grad_norm": 0.37475437562984915, |
| "learning_rate": 2.0539142092033985e-05, |
| "loss": 0.4531, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.7058227848101266, |
| "grad_norm": 0.3473349855151068, |
| "learning_rate": 2.048240194884496e-05, |
| "loss": 0.4432, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.709873417721519, |
| "grad_norm": 0.3528813786574966, |
| "learning_rate": 2.042565792045695e-05, |
| "loss": 0.4486, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.7139240506329116, |
| "grad_norm": 0.35076547251043994, |
| "learning_rate": 2.036891046387857e-05, |
| "loss": 0.4449, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.7179746835443037, |
| "grad_norm": 0.3296304851189335, |
| "learning_rate": 2.0312160036146036e-05, |
| "loss": 0.4573, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.7220253164556962, |
| "grad_norm": 0.3727618062860742, |
| "learning_rate": 2.025540709431948e-05, |
| "loss": 0.46, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.7260759493670887, |
| "grad_norm": 0.33660982829137825, |
| "learning_rate": 2.0198652095479298e-05, |
| "loss": 0.4614, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.730126582278481, |
| "grad_norm": 0.3473154876747407, |
| "learning_rate": 2.014189549672245e-05, |
| "loss": 0.4417, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.7341772151898733, |
| "grad_norm": 0.3713371784338316, |
| "learning_rate": 2.0085137755158776e-05, |
| "loss": 0.4516, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.738227848101266, |
| "grad_norm": 0.38636838380115435, |
| "learning_rate": 2.0028379327907327e-05, |
| "loss": 0.4624, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.7422784810126584, |
| "grad_norm": 0.3777314019968518, |
| "learning_rate": 1.9971620672092676e-05, |
| "loss": 0.4391, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.746329113924051, |
| "grad_norm": 0.38393566886192965, |
| "learning_rate": 1.991486224484123e-05, |
| "loss": 0.4459, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.750379746835443, |
| "grad_norm": 0.3198016112922742, |
| "learning_rate": 1.985810450327756e-05, |
| "loss": 0.4709, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.7544303797468355, |
| "grad_norm": 0.38848090742175234, |
| "learning_rate": 1.9801347904520706e-05, |
| "loss": 0.4615, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.758481012658228, |
| "grad_norm": 0.3176385040527204, |
| "learning_rate": 1.974459290568053e-05, |
| "loss": 0.4507, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.76253164556962, |
| "grad_norm": 0.38346800171460454, |
| "learning_rate": 1.968783996385397e-05, |
| "loss": 0.4528, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.7665822784810126, |
| "grad_norm": 0.311348965955885, |
| "learning_rate": 1.963108953612143e-05, |
| "loss": 0.4468, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.770632911392405, |
| "grad_norm": 0.35988691746290574, |
| "learning_rate": 1.9574342079543056e-05, |
| "loss": 0.4548, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.7746835443037976, |
| "grad_norm": 0.3571679071861312, |
| "learning_rate": 1.9517598051155046e-05, |
| "loss": 0.4558, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.77873417721519, |
| "grad_norm": 0.32800363313989855, |
| "learning_rate": 1.9460857907966025e-05, |
| "loss": 0.4398, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.782784810126582, |
| "grad_norm": 0.3874449326177758, |
| "learning_rate": 1.9404122106953285e-05, |
| "loss": 0.4552, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.7868354430379747, |
| "grad_norm": 0.3227778348126492, |
| "learning_rate": 1.9347391105059176e-05, |
| "loss": 0.4423, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.790886075949367, |
| "grad_norm": 0.34756979362497864, |
| "learning_rate": 1.92906653591874e-05, |
| "loss": 0.4434, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.7949367088607593, |
| "grad_norm": 0.30555074082023487, |
| "learning_rate": 1.9233945326199295e-05, |
| "loss": 0.4346, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.798987341772152, |
| "grad_norm": 0.3054884170164001, |
| "learning_rate": 1.917723146291022e-05, |
| "loss": 0.4515, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.8030379746835443, |
| "grad_norm": 0.2953927285489211, |
| "learning_rate": 1.912052422608584e-05, |
| "loss": 0.4484, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.807088607594937, |
| "grad_norm": 0.300243149080344, |
| "learning_rate": 1.9063824072438428e-05, |
| "loss": 0.4511, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.8111392405063294, |
| "grad_norm": 0.32474179246822626, |
| "learning_rate": 1.9007131458623246e-05, |
| "loss": 0.4403, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.8151898734177214, |
| "grad_norm": 0.2820522036976017, |
| "learning_rate": 1.895044684123479e-05, |
| "loss": 0.4667, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.819240506329114, |
| "grad_norm": 0.3103658086191727, |
| "learning_rate": 1.8893770676803194e-05, |
| "loss": 0.4542, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.8232911392405065, |
| "grad_norm": 0.2887615980402329, |
| "learning_rate": 1.8837103421790486e-05, |
| "loss": 0.4387, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.8273417721518985, |
| "grad_norm": 0.3308221335110828, |
| "learning_rate": 1.8780445532586952e-05, |
| "loss": 0.4747, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.831392405063291, |
| "grad_norm": 0.2875914994850848, |
| "learning_rate": 1.872379746550743e-05, |
| "loss": 0.4622, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.8354430379746836, |
| "grad_norm": 0.32146887875920077, |
| "learning_rate": 1.866715967678769e-05, |
| "loss": 0.4357, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.839493670886076, |
| "grad_norm": 0.32162429802993964, |
| "learning_rate": 1.861053262258067e-05, |
| "loss": 0.4463, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.8435443037974686, |
| "grad_norm": 0.31099829641362153, |
| "learning_rate": 1.8553916758952897e-05, |
| "loss": 0.4632, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.8475949367088607, |
| "grad_norm": 0.36399628333046313, |
| "learning_rate": 1.8497312541880735e-05, |
| "loss": 0.4497, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.851645569620253, |
| "grad_norm": 0.3046288837496104, |
| "learning_rate": 1.8440720427246786e-05, |
| "loss": 0.4582, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.8556962025316457, |
| "grad_norm": 0.3591483275972833, |
| "learning_rate": 1.8384140870836157e-05, |
| "loss": 0.4572, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.8597468354430378, |
| "grad_norm": 0.3028719009690781, |
| "learning_rate": 1.8327574328332806e-05, |
| "loss": 0.4519, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.8637974683544303, |
| "grad_norm": 0.360437924066508, |
| "learning_rate": 1.8271021255315906e-05, |
| "loss": 0.4221, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.867848101265823, |
| "grad_norm": 0.34003431303658777, |
| "learning_rate": 1.8214482107256117e-05, |
| "loss": 0.453, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.8718987341772153, |
| "grad_norm": 0.30584309479980915, |
| "learning_rate": 1.8157957339511968e-05, |
| "loss": 0.4412, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.875949367088608, |
| "grad_norm": 0.33573499118510497, |
| "learning_rate": 1.8101447407326182e-05, |
| "loss": 0.4567, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.88, |
| "grad_norm": 0.3150159046412944, |
| "learning_rate": 1.8044952765821966e-05, |
| "loss": 0.4435, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.8840506329113924, |
| "grad_norm": 0.2952286902082427, |
| "learning_rate": 1.7988473869999407e-05, |
| "loss": 0.4449, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.888101265822785, |
| "grad_norm": 0.3154768066487828, |
| "learning_rate": 1.7932011174731764e-05, |
| "loss": 0.4421, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.892151898734177, |
| "grad_norm": 0.3703951505842393, |
| "learning_rate": 1.7875565134761817e-05, |
| "loss": 0.4569, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.8962025316455695, |
| "grad_norm": 0.28142831248736744, |
| "learning_rate": 1.7819136204698226e-05, |
| "loss": 0.4515, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.900253164556962, |
| "grad_norm": 0.37054435913235784, |
| "learning_rate": 1.776272483901182e-05, |
| "loss": 0.4463, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.9043037974683545, |
| "grad_norm": 0.33319818731544276, |
| "learning_rate": 1.7706331492031995e-05, |
| "loss": 0.4467, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.908354430379747, |
| "grad_norm": 0.3386949124104893, |
| "learning_rate": 1.764995661794301e-05, |
| "loss": 0.4343, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.912405063291139, |
| "grad_norm": 0.3105761971115333, |
| "learning_rate": 1.759360067078035e-05, |
| "loss": 0.4451, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.9164556962025316, |
| "grad_norm": 0.3492142329843757, |
| "learning_rate": 1.7537264104427064e-05, |
| "loss": 0.441, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.920506329113924, |
| "grad_norm": 0.31023788202200364, |
| "learning_rate": 1.748094737261012e-05, |
| "loss": 0.451, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.9245569620253162, |
| "grad_norm": 1.2412436535899174, |
| "learning_rate": 1.7424650928896726e-05, |
| "loss": 0.4969, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.9286075949367087, |
| "grad_norm": 0.3275454562941189, |
| "learning_rate": 1.7368375226690712e-05, |
| "loss": 0.4387, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.9326582278481013, |
| "grad_norm": 0.3160087408112924, |
| "learning_rate": 1.731212071922883e-05, |
| "loss": 0.4576, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.9367088607594938, |
| "grad_norm": 0.30409011385014795, |
| "learning_rate": 1.7255887859577156e-05, |
| "loss": 0.4532, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.9407594936708863, |
| "grad_norm": 0.34447273418601115, |
| "learning_rate": 1.7199677100627427e-05, |
| "loss": 0.4611, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.9448101265822784, |
| "grad_norm": 0.3361662671726351, |
| "learning_rate": 1.7143488895093343e-05, |
| "loss": 0.4514, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.948860759493671, |
| "grad_norm": 0.8704691236664848, |
| "learning_rate": 1.7087323695506994e-05, |
| "loss": 0.4688, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.9529113924050634, |
| "grad_norm": 0.31615405318253564, |
| "learning_rate": 1.7031181954215194e-05, |
| "loss": 0.4509, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.9569620253164555, |
| "grad_norm": 2.1044657031213383, |
| "learning_rate": 1.6975064123375788e-05, |
| "loss": 0.4446, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.961012658227848, |
| "grad_norm": 0.34836457585222247, |
| "learning_rate": 1.6918970654954084e-05, |
| "loss": 0.4635, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.9650632911392405, |
| "grad_norm": 0.3162275094224346, |
| "learning_rate": 1.686290200071915e-05, |
| "loss": 0.4697, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.969113924050633, |
| "grad_norm": 0.344011295354021, |
| "learning_rate": 1.6806858612240234e-05, |
| "loss": 0.4371, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.9731645569620255, |
| "grad_norm": 0.3034156702854327, |
| "learning_rate": 1.6750840940883078e-05, |
| "loss": 0.465, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.9772151898734176, |
| "grad_norm": 0.34097624568764656, |
| "learning_rate": 1.6694849437806305e-05, |
| "loss": 0.444, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.98126582278481, |
| "grad_norm": 0.33008976270643736, |
| "learning_rate": 1.663888455395778e-05, |
| "loss": 0.4344, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.9853164556962026, |
| "grad_norm": 0.31513838595801175, |
| "learning_rate": 1.6582946740070995e-05, |
| "loss": 0.4625, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.9893670886075947, |
| "grad_norm": 0.2859916724555832, |
| "learning_rate": 1.6527036446661396e-05, |
| "loss": 0.4541, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.993417721518987, |
| "grad_norm": 0.3410227531790053, |
| "learning_rate": 1.6471154124022818e-05, |
| "loss": 0.4628, |
| "step": 739 |
| }, |
| { |
| "epoch": 2.9974683544303797, |
| "grad_norm": 0.30528675713761777, |
| "learning_rate": 1.6415300222223788e-05, |
| "loss": 0.4435, |
| "step": 740 |
| }, |
| { |
| "epoch": 3.0015189873417722, |
| "grad_norm": 0.3416458090769293, |
| "learning_rate": 1.6359475191103958e-05, |
| "loss": 0.4173, |
| "step": 741 |
| }, |
| { |
| "epoch": 3.0055696202531648, |
| "grad_norm": 0.4226603916883641, |
| "learning_rate": 1.6303679480270466e-05, |
| "loss": 0.3983, |
| "step": 742 |
| }, |
| { |
| "epoch": 3.009620253164557, |
| "grad_norm": 0.44630093780236463, |
| "learning_rate": 1.624791353909428e-05, |
| "loss": 0.3946, |
| "step": 743 |
| }, |
| { |
| "epoch": 3.0136708860759494, |
| "grad_norm": 0.5287670352222881, |
| "learning_rate": 1.619217781670663e-05, |
| "loss": 0.3925, |
| "step": 744 |
| }, |
| { |
| "epoch": 3.017721518987342, |
| "grad_norm": 0.4510415846944142, |
| "learning_rate": 1.6136472761995373e-05, |
| "loss": 0.3934, |
| "step": 745 |
| }, |
| { |
| "epoch": 3.0217721518987344, |
| "grad_norm": 0.48154422172619704, |
| "learning_rate": 1.608079882360134e-05, |
| "loss": 0.3873, |
| "step": 746 |
| }, |
| { |
| "epoch": 3.0258227848101265, |
| "grad_norm": 0.3356328627397964, |
| "learning_rate": 1.60251564499148e-05, |
| "loss": 0.3983, |
| "step": 747 |
| }, |
| { |
| "epoch": 3.029873417721519, |
| "grad_norm": 0.44183836258881926, |
| "learning_rate": 1.596954608907176e-05, |
| "loss": 0.3796, |
| "step": 748 |
| }, |
| { |
| "epoch": 3.0339240506329115, |
| "grad_norm": 1.7532035402868917, |
| "learning_rate": 1.591396818895043e-05, |
| "loss": 0.4195, |
| "step": 749 |
| }, |
| { |
| "epoch": 3.037974683544304, |
| "grad_norm": 0.3869715612473372, |
| "learning_rate": 1.585842319716759e-05, |
| "loss": 0.404, |
| "step": 750 |
| }, |
| { |
| "epoch": 3.042025316455696, |
| "grad_norm": 0.33457484710667207, |
| "learning_rate": 1.5802911561074944e-05, |
| "loss": 0.3831, |
| "step": 751 |
| }, |
| { |
| "epoch": 3.0460759493670886, |
| "grad_norm": 0.39897744664405244, |
| "learning_rate": 1.5747433727755595e-05, |
| "loss": 0.3743, |
| "step": 752 |
| }, |
| { |
| "epoch": 3.050126582278481, |
| "grad_norm": 0.3606141731151871, |
| "learning_rate": 1.5691990144020376e-05, |
| "loss": 0.362, |
| "step": 753 |
| }, |
| { |
| "epoch": 3.0541772151898736, |
| "grad_norm": 0.3867637910326479, |
| "learning_rate": 1.5636581256404297e-05, |
| "loss": 0.3813, |
| "step": 754 |
| }, |
| { |
| "epoch": 3.0582278481012657, |
| "grad_norm": 2.3960580416158455, |
| "learning_rate": 1.558120751116291e-05, |
| "loss": 0.4102, |
| "step": 755 |
| }, |
| { |
| "epoch": 3.062278481012658, |
| "grad_norm": 0.41530675223614516, |
| "learning_rate": 1.552586935426876e-05, |
| "loss": 0.3577, |
| "step": 756 |
| }, |
| { |
| "epoch": 3.0663291139240507, |
| "grad_norm": 0.36049731590934386, |
| "learning_rate": 1.547056723140774e-05, |
| "loss": 0.3866, |
| "step": 757 |
| }, |
| { |
| "epoch": 3.0703797468354432, |
| "grad_norm": 0.3795703701047737, |
| "learning_rate": 1.5415301587975565e-05, |
| "loss": 0.387, |
| "step": 758 |
| }, |
| { |
| "epoch": 3.0744303797468353, |
| "grad_norm": 0.3690065583344431, |
| "learning_rate": 1.536007286907411e-05, |
| "loss": 0.3845, |
| "step": 759 |
| }, |
| { |
| "epoch": 3.078481012658228, |
| "grad_norm": 0.36302604234048413, |
| "learning_rate": 1.5304881519507896e-05, |
| "loss": 0.3728, |
| "step": 760 |
| }, |
| { |
| "epoch": 3.0825316455696203, |
| "grad_norm": 0.3949058647105395, |
| "learning_rate": 1.5249727983780453e-05, |
| "loss": 0.383, |
| "step": 761 |
| }, |
| { |
| "epoch": 3.086582278481013, |
| "grad_norm": 0.320156394955569, |
| "learning_rate": 1.5194612706090786e-05, |
| "loss": 0.3844, |
| "step": 762 |
| }, |
| { |
| "epoch": 3.090632911392405, |
| "grad_norm": 0.3557154656308041, |
| "learning_rate": 1.5139536130329771e-05, |
| "loss": 0.3934, |
| "step": 763 |
| }, |
| { |
| "epoch": 3.0946835443037974, |
| "grad_norm": 0.35252359265484134, |
| "learning_rate": 1.508449870007656e-05, |
| "loss": 0.4069, |
| "step": 764 |
| }, |
| { |
| "epoch": 3.09873417721519, |
| "grad_norm": 0.2950572649742489, |
| "learning_rate": 1.5029500858595056e-05, |
| "loss": 0.3805, |
| "step": 765 |
| }, |
| { |
| "epoch": 3.1027848101265825, |
| "grad_norm": 0.3276092129922619, |
| "learning_rate": 1.4974543048830328e-05, |
| "loss": 0.3844, |
| "step": 766 |
| }, |
| { |
| "epoch": 3.1068354430379745, |
| "grad_norm": 0.3164111811113752, |
| "learning_rate": 1.4919625713405e-05, |
| "loss": 0.399, |
| "step": 767 |
| }, |
| { |
| "epoch": 3.110886075949367, |
| "grad_norm": 0.28444869326853195, |
| "learning_rate": 1.4864749294615756e-05, |
| "loss": 0.3917, |
| "step": 768 |
| }, |
| { |
| "epoch": 3.1149367088607596, |
| "grad_norm": 0.333824201224533, |
| "learning_rate": 1.4809914234429716e-05, |
| "loss": 0.3826, |
| "step": 769 |
| }, |
| { |
| "epoch": 3.118987341772152, |
| "grad_norm": 0.31121342136371855, |
| "learning_rate": 1.4755120974480923e-05, |
| "loss": 0.3684, |
| "step": 770 |
| }, |
| { |
| "epoch": 3.123037974683544, |
| "grad_norm": 0.29798912576310393, |
| "learning_rate": 1.4700369956066771e-05, |
| "loss": 0.3983, |
| "step": 771 |
| }, |
| { |
| "epoch": 3.1270886075949367, |
| "grad_norm": 0.3954179182168481, |
| "learning_rate": 1.4645661620144413e-05, |
| "loss": 0.3989, |
| "step": 772 |
| }, |
| { |
| "epoch": 3.131139240506329, |
| "grad_norm": 0.3394405419282369, |
| "learning_rate": 1.4590996407327284e-05, |
| "loss": 0.3607, |
| "step": 773 |
| }, |
| { |
| "epoch": 3.1351898734177217, |
| "grad_norm": 0.30546405035244484, |
| "learning_rate": 1.4536374757881487e-05, |
| "loss": 0.3739, |
| "step": 774 |
| }, |
| { |
| "epoch": 3.1392405063291138, |
| "grad_norm": 0.2923000811404094, |
| "learning_rate": 1.4481797111722271e-05, |
| "loss": 0.3957, |
| "step": 775 |
| }, |
| { |
| "epoch": 3.1432911392405063, |
| "grad_norm": 0.34789870791827837, |
| "learning_rate": 1.4427263908410507e-05, |
| "loss": 0.3835, |
| "step": 776 |
| }, |
| { |
| "epoch": 3.147341772151899, |
| "grad_norm": 0.2700240842367118, |
| "learning_rate": 1.4372775587149108e-05, |
| "loss": 0.3921, |
| "step": 777 |
| }, |
| { |
| "epoch": 3.1513924050632913, |
| "grad_norm": 0.28895274687209016, |
| "learning_rate": 1.4318332586779522e-05, |
| "loss": 0.3994, |
| "step": 778 |
| }, |
| { |
| "epoch": 3.1554430379746834, |
| "grad_norm": 0.2853818388659593, |
| "learning_rate": 1.4263935345778202e-05, |
| "loss": 0.4036, |
| "step": 779 |
| }, |
| { |
| "epoch": 3.159493670886076, |
| "grad_norm": 0.3022362323815387, |
| "learning_rate": 1.420958430225303e-05, |
| "loss": 0.3735, |
| "step": 780 |
| }, |
| { |
| "epoch": 3.1635443037974684, |
| "grad_norm": 0.3065342167032389, |
| "learning_rate": 1.415527989393985e-05, |
| "loss": 0.3938, |
| "step": 781 |
| }, |
| { |
| "epoch": 3.167594936708861, |
| "grad_norm": 0.2963376988644117, |
| "learning_rate": 1.410102255819891e-05, |
| "loss": 0.3965, |
| "step": 782 |
| }, |
| { |
| "epoch": 3.171645569620253, |
| "grad_norm": 0.29122274556723554, |
| "learning_rate": 1.404681273201131e-05, |
| "loss": 0.3777, |
| "step": 783 |
| }, |
| { |
| "epoch": 3.1756962025316455, |
| "grad_norm": 0.3229622135909309, |
| "learning_rate": 1.399265085197556e-05, |
| "loss": 0.3949, |
| "step": 784 |
| }, |
| { |
| "epoch": 3.179746835443038, |
| "grad_norm": 0.3158051390005196, |
| "learning_rate": 1.393853735430398e-05, |
| "loss": 0.3893, |
| "step": 785 |
| }, |
| { |
| "epoch": 3.1837974683544306, |
| "grad_norm": 0.30681449168803004, |
| "learning_rate": 1.3884472674819246e-05, |
| "loss": 0.3801, |
| "step": 786 |
| }, |
| { |
| "epoch": 3.1878481012658226, |
| "grad_norm": 0.30065311355924124, |
| "learning_rate": 1.3830457248950864e-05, |
| "loss": 0.3823, |
| "step": 787 |
| }, |
| { |
| "epoch": 3.191898734177215, |
| "grad_norm": 0.3244749153702471, |
| "learning_rate": 1.377649151173163e-05, |
| "loss": 0.3788, |
| "step": 788 |
| }, |
| { |
| "epoch": 3.1959493670886077, |
| "grad_norm": 0.2858970678417641, |
| "learning_rate": 1.3722575897794181e-05, |
| "loss": 0.4022, |
| "step": 789 |
| }, |
| { |
| "epoch": 3.2, |
| "grad_norm": 0.3164389879024134, |
| "learning_rate": 1.3668710841367472e-05, |
| "loss": 0.388, |
| "step": 790 |
| }, |
| { |
| "epoch": 3.2040506329113922, |
| "grad_norm": 0.3020652412663424, |
| "learning_rate": 1.361489677627324e-05, |
| "loss": 0.3949, |
| "step": 791 |
| }, |
| { |
| "epoch": 3.2081012658227848, |
| "grad_norm": 0.2872697132106226, |
| "learning_rate": 1.3561134135922585e-05, |
| "loss": 0.3829, |
| "step": 792 |
| }, |
| { |
| "epoch": 3.2121518987341773, |
| "grad_norm": 0.34357663815939204, |
| "learning_rate": 1.350742335331241e-05, |
| "loss": 0.388, |
| "step": 793 |
| }, |
| { |
| "epoch": 3.21620253164557, |
| "grad_norm": 0.29277619299446617, |
| "learning_rate": 1.345376486102198e-05, |
| "loss": 0.3913, |
| "step": 794 |
| }, |
| { |
| "epoch": 3.220253164556962, |
| "grad_norm": 0.28484132440889226, |
| "learning_rate": 1.3400159091209414e-05, |
| "loss": 0.3921, |
| "step": 795 |
| }, |
| { |
| "epoch": 3.2243037974683544, |
| "grad_norm": 0.3136857446868059, |
| "learning_rate": 1.3346606475608216e-05, |
| "loss": 0.3872, |
| "step": 796 |
| }, |
| { |
| "epoch": 3.228354430379747, |
| "grad_norm": 0.3197017605085603, |
| "learning_rate": 1.3293107445523781e-05, |
| "loss": 0.3937, |
| "step": 797 |
| }, |
| { |
| "epoch": 3.2324050632911394, |
| "grad_norm": 0.2823666293279538, |
| "learning_rate": 1.3239662431829949e-05, |
| "loss": 0.3875, |
| "step": 798 |
| }, |
| { |
| "epoch": 3.2364556962025315, |
| "grad_norm": 0.28780626017993965, |
| "learning_rate": 1.3186271864965509e-05, |
| "loss": 0.3962, |
| "step": 799 |
| }, |
| { |
| "epoch": 3.240506329113924, |
| "grad_norm": 0.28651839773137594, |
| "learning_rate": 1.3132936174930756e-05, |
| "loss": 0.3944, |
| "step": 800 |
| }, |
| { |
| "epoch": 3.2445569620253165, |
| "grad_norm": 0.28813408800320955, |
| "learning_rate": 1.3079655791283995e-05, |
| "loss": 0.3933, |
| "step": 801 |
| }, |
| { |
| "epoch": 3.248607594936709, |
| "grad_norm": 0.3045733848303924, |
| "learning_rate": 1.3026431143138108e-05, |
| "loss": 0.3915, |
| "step": 802 |
| }, |
| { |
| "epoch": 3.252658227848101, |
| "grad_norm": 0.3050328917555097, |
| "learning_rate": 1.2973262659157114e-05, |
| "loss": 0.4063, |
| "step": 803 |
| }, |
| { |
| "epoch": 3.2567088607594936, |
| "grad_norm": 0.2970692544161071, |
| "learning_rate": 1.2920150767552651e-05, |
| "loss": 0.4027, |
| "step": 804 |
| }, |
| { |
| "epoch": 3.260759493670886, |
| "grad_norm": 0.31005267262387365, |
| "learning_rate": 1.2867095896080607e-05, |
| "loss": 0.3595, |
| "step": 805 |
| }, |
| { |
| "epoch": 3.2648101265822786, |
| "grad_norm": 0.2945738927640656, |
| "learning_rate": 1.2814098472037612e-05, |
| "loss": 0.3834, |
| "step": 806 |
| }, |
| { |
| "epoch": 3.2688607594936707, |
| "grad_norm": 0.2824722294736441, |
| "learning_rate": 1.276115892225764e-05, |
| "loss": 0.3686, |
| "step": 807 |
| }, |
| { |
| "epoch": 3.2729113924050632, |
| "grad_norm": 0.32102194162098713, |
| "learning_rate": 1.2708277673108555e-05, |
| "loss": 0.3759, |
| "step": 808 |
| }, |
| { |
| "epoch": 3.2769620253164558, |
| "grad_norm": 0.2720809313566527, |
| "learning_rate": 1.2655455150488649e-05, |
| "loss": 0.3782, |
| "step": 809 |
| }, |
| { |
| "epoch": 3.2810126582278483, |
| "grad_norm": 0.28571588158471306, |
| "learning_rate": 1.2602691779823272e-05, |
| "loss": 0.3766, |
| "step": 810 |
| }, |
| { |
| "epoch": 3.2850632911392403, |
| "grad_norm": 0.28877511356389723, |
| "learning_rate": 1.2549987986061355e-05, |
| "loss": 0.4065, |
| "step": 811 |
| }, |
| { |
| "epoch": 3.289113924050633, |
| "grad_norm": 0.30395655844156544, |
| "learning_rate": 1.2497344193672005e-05, |
| "loss": 0.3589, |
| "step": 812 |
| }, |
| { |
| "epoch": 3.2931645569620254, |
| "grad_norm": 0.27531245676796307, |
| "learning_rate": 1.2444760826641092e-05, |
| "loss": 0.3855, |
| "step": 813 |
| }, |
| { |
| "epoch": 3.297215189873418, |
| "grad_norm": 0.6775427073700421, |
| "learning_rate": 1.2392238308467817e-05, |
| "loss": 0.364, |
| "step": 814 |
| }, |
| { |
| "epoch": 3.30126582278481, |
| "grad_norm": 0.29799576909731745, |
| "learning_rate": 1.2339777062161326e-05, |
| "loss": 0.395, |
| "step": 815 |
| }, |
| { |
| "epoch": 3.3053164556962025, |
| "grad_norm": 0.29904217081309875, |
| "learning_rate": 1.2287377510237293e-05, |
| "loss": 0.3926, |
| "step": 816 |
| }, |
| { |
| "epoch": 3.309367088607595, |
| "grad_norm": 0.28338731914992105, |
| "learning_rate": 1.2235040074714488e-05, |
| "loss": 0.3996, |
| "step": 817 |
| }, |
| { |
| "epoch": 3.3134177215189875, |
| "grad_norm": 0.29925813704912885, |
| "learning_rate": 1.2182765177111434e-05, |
| "loss": 0.3877, |
| "step": 818 |
| }, |
| { |
| "epoch": 3.3174683544303796, |
| "grad_norm": 1.2751416805039333, |
| "learning_rate": 1.213055323844297e-05, |
| "loss": 0.4128, |
| "step": 819 |
| }, |
| { |
| "epoch": 3.321518987341772, |
| "grad_norm": 0.33027320368374563, |
| "learning_rate": 1.2078404679216864e-05, |
| "loss": 0.3597, |
| "step": 820 |
| }, |
| { |
| "epoch": 3.3255696202531646, |
| "grad_norm": 0.2741894125035202, |
| "learning_rate": 1.2026319919430458e-05, |
| "loss": 0.387, |
| "step": 821 |
| }, |
| { |
| "epoch": 3.329620253164557, |
| "grad_norm": 0.275825534977247, |
| "learning_rate": 1.1974299378567227e-05, |
| "loss": 0.3999, |
| "step": 822 |
| }, |
| { |
| "epoch": 3.333670886075949, |
| "grad_norm": 0.30056353670413083, |
| "learning_rate": 1.1922343475593462e-05, |
| "loss": 0.3883, |
| "step": 823 |
| }, |
| { |
| "epoch": 3.3377215189873417, |
| "grad_norm": 0.2781089003858883, |
| "learning_rate": 1.187045262895488e-05, |
| "loss": 0.3914, |
| "step": 824 |
| }, |
| { |
| "epoch": 3.3417721518987342, |
| "grad_norm": 1.528197501348485, |
| "learning_rate": 1.1818627256573203e-05, |
| "loss": 0.4118, |
| "step": 825 |
| }, |
| { |
| "epoch": 3.3458227848101267, |
| "grad_norm": 0.338172680201471, |
| "learning_rate": 1.1766867775842864e-05, |
| "loss": 0.3653, |
| "step": 826 |
| }, |
| { |
| "epoch": 3.349873417721519, |
| "grad_norm": 0.28370426615747907, |
| "learning_rate": 1.1715174603627615e-05, |
| "loss": 0.3699, |
| "step": 827 |
| }, |
| { |
| "epoch": 3.3539240506329113, |
| "grad_norm": 0.3155108012028665, |
| "learning_rate": 1.1663548156257147e-05, |
| "loss": 0.3737, |
| "step": 828 |
| }, |
| { |
| "epoch": 3.357974683544304, |
| "grad_norm": 0.3152944417268735, |
| "learning_rate": 1.161198884952377e-05, |
| "loss": 0.399, |
| "step": 829 |
| }, |
| { |
| "epoch": 3.3620253164556964, |
| "grad_norm": 0.30549108298122934, |
| "learning_rate": 1.1560497098679056e-05, |
| "loss": 0.3805, |
| "step": 830 |
| }, |
| { |
| "epoch": 3.3660759493670884, |
| "grad_norm": 0.30850845339268657, |
| "learning_rate": 1.1509073318430479e-05, |
| "loss": 0.3915, |
| "step": 831 |
| }, |
| { |
| "epoch": 3.370126582278481, |
| "grad_norm": 0.30143439230214036, |
| "learning_rate": 1.1457717922938116e-05, |
| "loss": 0.3914, |
| "step": 832 |
| }, |
| { |
| "epoch": 3.3741772151898735, |
| "grad_norm": 0.2790273276016487, |
| "learning_rate": 1.1406431325811233e-05, |
| "loss": 0.3986, |
| "step": 833 |
| }, |
| { |
| "epoch": 3.378227848101266, |
| "grad_norm": 0.34691365864548807, |
| "learning_rate": 1.135521394010506e-05, |
| "loss": 0.3935, |
| "step": 834 |
| }, |
| { |
| "epoch": 3.382278481012658, |
| "grad_norm": 0.3135894569629801, |
| "learning_rate": 1.1304066178317367e-05, |
| "loss": 0.3833, |
| "step": 835 |
| }, |
| { |
| "epoch": 3.3863291139240506, |
| "grad_norm": 0.3126540388943159, |
| "learning_rate": 1.1252988452385199e-05, |
| "loss": 0.4156, |
| "step": 836 |
| }, |
| { |
| "epoch": 3.390379746835443, |
| "grad_norm": 0.3189983332951186, |
| "learning_rate": 1.1201981173681536e-05, |
| "loss": 0.3533, |
| "step": 837 |
| }, |
| { |
| "epoch": 3.3944303797468356, |
| "grad_norm": 0.3009073843756289, |
| "learning_rate": 1.1151044753011991e-05, |
| "loss": 0.3814, |
| "step": 838 |
| }, |
| { |
| "epoch": 3.3984810126582277, |
| "grad_norm": 0.31662875085424874, |
| "learning_rate": 1.1100179600611491e-05, |
| "loss": 0.3797, |
| "step": 839 |
| }, |
| { |
| "epoch": 3.40253164556962, |
| "grad_norm": 0.28233413304072363, |
| "learning_rate": 1.1049386126140985e-05, |
| "loss": 0.3785, |
| "step": 840 |
| }, |
| { |
| "epoch": 3.4065822784810127, |
| "grad_norm": 0.33576068322611863, |
| "learning_rate": 1.0998664738684128e-05, |
| "loss": 0.3806, |
| "step": 841 |
| }, |
| { |
| "epoch": 3.410632911392405, |
| "grad_norm": 0.31166607352150777, |
| "learning_rate": 1.0948015846744e-05, |
| "loss": 0.3757, |
| "step": 842 |
| }, |
| { |
| "epoch": 3.4146835443037973, |
| "grad_norm": 0.26984085547187875, |
| "learning_rate": 1.0897439858239832e-05, |
| "loss": 0.3982, |
| "step": 843 |
| }, |
| { |
| "epoch": 3.41873417721519, |
| "grad_norm": 0.2877146844073926, |
| "learning_rate": 1.0846937180503652e-05, |
| "loss": 0.4044, |
| "step": 844 |
| }, |
| { |
| "epoch": 3.4227848101265823, |
| "grad_norm": 0.2915730763284753, |
| "learning_rate": 1.0796508220277117e-05, |
| "loss": 0.3926, |
| "step": 845 |
| }, |
| { |
| "epoch": 3.426835443037975, |
| "grad_norm": 0.3135679415878097, |
| "learning_rate": 1.0746153383708107e-05, |
| "loss": 0.3948, |
| "step": 846 |
| }, |
| { |
| "epoch": 3.430886075949367, |
| "grad_norm": 0.2894175553743601, |
| "learning_rate": 1.0695873076347579e-05, |
| "loss": 0.39, |
| "step": 847 |
| }, |
| { |
| "epoch": 3.4349367088607594, |
| "grad_norm": 0.2866671493141274, |
| "learning_rate": 1.0645667703146205e-05, |
| "loss": 0.3926, |
| "step": 848 |
| }, |
| { |
| "epoch": 3.438987341772152, |
| "grad_norm": 0.2882554332037146, |
| "learning_rate": 1.0595537668451161e-05, |
| "loss": 0.3776, |
| "step": 849 |
| }, |
| { |
| "epoch": 3.4430379746835444, |
| "grad_norm": 0.27954878922760024, |
| "learning_rate": 1.0545483376002854e-05, |
| "loss": 0.4015, |
| "step": 850 |
| }, |
| { |
| "epoch": 3.4470886075949365, |
| "grad_norm": 0.25702279464606603, |
| "learning_rate": 1.0495505228931676e-05, |
| "loss": 0.3963, |
| "step": 851 |
| }, |
| { |
| "epoch": 3.451139240506329, |
| "grad_norm": 0.2971433362988801, |
| "learning_rate": 1.044560362975474e-05, |
| "loss": 0.3924, |
| "step": 852 |
| }, |
| { |
| "epoch": 3.4551898734177215, |
| "grad_norm": 0.29116278036457366, |
| "learning_rate": 1.0395778980372695e-05, |
| "loss": 0.3731, |
| "step": 853 |
| }, |
| { |
| "epoch": 3.459240506329114, |
| "grad_norm": 0.27077837576961605, |
| "learning_rate": 1.0346031682066381e-05, |
| "loss": 0.383, |
| "step": 854 |
| }, |
| { |
| "epoch": 3.463291139240506, |
| "grad_norm": 1.0767882816804477, |
| "learning_rate": 1.0296362135493724e-05, |
| "loss": 0.4189, |
| "step": 855 |
| }, |
| { |
| "epoch": 3.4673417721518986, |
| "grad_norm": 0.2819241440416203, |
| "learning_rate": 1.0246770740686422e-05, |
| "loss": 0.382, |
| "step": 856 |
| }, |
| { |
| "epoch": 3.471392405063291, |
| "grad_norm": 0.2936862016703352, |
| "learning_rate": 1.0197257897046743e-05, |
| "loss": 0.3881, |
| "step": 857 |
| }, |
| { |
| "epoch": 3.4754430379746837, |
| "grad_norm": 0.2653667017684713, |
| "learning_rate": 1.014782400334433e-05, |
| "loss": 0.3763, |
| "step": 858 |
| }, |
| { |
| "epoch": 3.479493670886076, |
| "grad_norm": 0.26844145665520847, |
| "learning_rate": 1.009846945771296e-05, |
| "loss": 0.3998, |
| "step": 859 |
| }, |
| { |
| "epoch": 3.4835443037974683, |
| "grad_norm": 0.28797751730661875, |
| "learning_rate": 1.0049194657647363e-05, |
| "loss": 0.393, |
| "step": 860 |
| }, |
| { |
| "epoch": 3.487594936708861, |
| "grad_norm": 0.26737858542468007, |
| "learning_rate": 1.0000000000000006e-05, |
| "loss": 0.4031, |
| "step": 861 |
| }, |
| { |
| "epoch": 3.4916455696202533, |
| "grad_norm": 0.2599950542591144, |
| "learning_rate": 9.950885880977891e-06, |
| "loss": 0.4025, |
| "step": 862 |
| }, |
| { |
| "epoch": 3.4956962025316454, |
| "grad_norm": 0.2722824525047798, |
| "learning_rate": 9.901852696139382e-06, |
| "loss": 0.3821, |
| "step": 863 |
| }, |
| { |
| "epoch": 3.499746835443038, |
| "grad_norm": 0.6343043080171414, |
| "learning_rate": 9.852900840391027e-06, |
| "loss": 0.4039, |
| "step": 864 |
| }, |
| { |
| "epoch": 3.5037974683544304, |
| "grad_norm": 0.25463413566203597, |
| "learning_rate": 9.804030707984313e-06, |
| "loss": 0.3857, |
| "step": 865 |
| }, |
| { |
| "epoch": 3.507848101265823, |
| "grad_norm": 0.2758974121937548, |
| "learning_rate": 9.755242692512599e-06, |
| "loss": 0.365, |
| "step": 866 |
| }, |
| { |
| "epoch": 3.5118987341772154, |
| "grad_norm": 0.26267611616172754, |
| "learning_rate": 9.70653718690782e-06, |
| "loss": 0.3949, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.5159493670886075, |
| "grad_norm": 0.2498285817200836, |
| "learning_rate": 9.657914583437454e-06, |
| "loss": 0.3912, |
| "step": 868 |
| }, |
| { |
| "epoch": 3.52, |
| "grad_norm": 0.28142242003629936, |
| "learning_rate": 9.609375273701246e-06, |
| "loss": 0.3806, |
| "step": 869 |
| }, |
| { |
| "epoch": 3.5240506329113925, |
| "grad_norm": 0.2615735362768074, |
| "learning_rate": 9.560919648628133e-06, |
| "loss": 0.3907, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.5281012658227846, |
| "grad_norm": 0.2710071493406881, |
| "learning_rate": 9.512548098473047e-06, |
| "loss": 0.378, |
| "step": 871 |
| }, |
| { |
| "epoch": 3.532151898734177, |
| "grad_norm": 0.27789627040339215, |
| "learning_rate": 9.464261012813825e-06, |
| "loss": 0.3794, |
| "step": 872 |
| }, |
| { |
| "epoch": 3.5362025316455696, |
| "grad_norm": 0.26852658677807406, |
| "learning_rate": 9.416058780547987e-06, |
| "loss": 0.3889, |
| "step": 873 |
| }, |
| { |
| "epoch": 3.540253164556962, |
| "grad_norm": 0.27374021891275957, |
| "learning_rate": 9.367941789889714e-06, |
| "loss": 0.3697, |
| "step": 874 |
| }, |
| { |
| "epoch": 3.5443037974683547, |
| "grad_norm": 0.24444733163304413, |
| "learning_rate": 9.319910428366607e-06, |
| "loss": 0.4092, |
| "step": 875 |
| }, |
| { |
| "epoch": 3.5483544303797467, |
| "grad_norm": 0.2690222281985552, |
| "learning_rate": 9.271965082816667e-06, |
| "loss": 0.3955, |
| "step": 876 |
| }, |
| { |
| "epoch": 3.5524050632911393, |
| "grad_norm": 0.2637702854407074, |
| "learning_rate": 9.224106139385111e-06, |
| "loss": 0.3859, |
| "step": 877 |
| }, |
| { |
| "epoch": 3.5564556962025318, |
| "grad_norm": 0.24240038696527244, |
| "learning_rate": 9.176333983521291e-06, |
| "loss": 0.3927, |
| "step": 878 |
| }, |
| { |
| "epoch": 3.560506329113924, |
| "grad_norm": 0.2643910623402378, |
| "learning_rate": 9.12864899997558e-06, |
| "loss": 0.3853, |
| "step": 879 |
| }, |
| { |
| "epoch": 3.5645569620253164, |
| "grad_norm": 0.2531953584538693, |
| "learning_rate": 9.08105157279628e-06, |
| "loss": 0.4058, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.568607594936709, |
| "grad_norm": 0.2642244521632522, |
| "learning_rate": 9.03354208532653e-06, |
| "loss": 0.375, |
| "step": 881 |
| }, |
| { |
| "epoch": 3.5726582278481014, |
| "grad_norm": 0.2514842708594186, |
| "learning_rate": 8.986120920201205e-06, |
| "loss": 0.3805, |
| "step": 882 |
| }, |
| { |
| "epoch": 3.576708860759494, |
| "grad_norm": 0.2599605918066831, |
| "learning_rate": 8.938788459343852e-06, |
| "loss": 0.3804, |
| "step": 883 |
| }, |
| { |
| "epoch": 3.580759493670886, |
| "grad_norm": 0.25077137957280576, |
| "learning_rate": 8.8915450839636e-06, |
| "loss": 0.4037, |
| "step": 884 |
| }, |
| { |
| "epoch": 3.5848101265822785, |
| "grad_norm": 0.2603990060885474, |
| "learning_rate": 8.844391174552116e-06, |
| "loss": 0.365, |
| "step": 885 |
| }, |
| { |
| "epoch": 3.588860759493671, |
| "grad_norm": 0.26813673787166187, |
| "learning_rate": 8.797327110880479e-06, |
| "loss": 0.3644, |
| "step": 886 |
| }, |
| { |
| "epoch": 3.592911392405063, |
| "grad_norm": 0.27532702793790514, |
| "learning_rate": 8.750353271996206e-06, |
| "loss": 0.3732, |
| "step": 887 |
| }, |
| { |
| "epoch": 3.5969620253164556, |
| "grad_norm": 0.268976659034524, |
| "learning_rate": 8.703470036220132e-06, |
| "loss": 0.3916, |
| "step": 888 |
| }, |
| { |
| "epoch": 3.601012658227848, |
| "grad_norm": 0.2549672023027923, |
| "learning_rate": 8.656677781143394e-06, |
| "loss": 0.4006, |
| "step": 889 |
| }, |
| { |
| "epoch": 3.6050632911392406, |
| "grad_norm": 0.26211242943071167, |
| "learning_rate": 8.609976883624377e-06, |
| "loss": 0.3672, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.609113924050633, |
| "grad_norm": 0.26805663491727094, |
| "learning_rate": 8.563367719785698e-06, |
| "loss": 0.3887, |
| "step": 891 |
| }, |
| { |
| "epoch": 3.613164556962025, |
| "grad_norm": 0.256521538981174, |
| "learning_rate": 8.516850665011138e-06, |
| "loss": 0.382, |
| "step": 892 |
| }, |
| { |
| "epoch": 3.6172151898734177, |
| "grad_norm": 0.2609300417425864, |
| "learning_rate": 8.47042609394269e-06, |
| "loss": 0.3906, |
| "step": 893 |
| }, |
| { |
| "epoch": 3.6212658227848102, |
| "grad_norm": 0.27328726679227544, |
| "learning_rate": 8.424094380477432e-06, |
| "loss": 0.3839, |
| "step": 894 |
| }, |
| { |
| "epoch": 3.6253164556962023, |
| "grad_norm": 0.2723889483934061, |
| "learning_rate": 8.37785589776465e-06, |
| "loss": 0.3605, |
| "step": 895 |
| }, |
| { |
| "epoch": 3.629367088607595, |
| "grad_norm": 0.2553282096940184, |
| "learning_rate": 8.331711018202694e-06, |
| "loss": 0.3725, |
| "step": 896 |
| }, |
| { |
| "epoch": 3.6334177215189873, |
| "grad_norm": 0.2564041149831849, |
| "learning_rate": 8.285660113436104e-06, |
| "loss": 0.3913, |
| "step": 897 |
| }, |
| { |
| "epoch": 3.63746835443038, |
| "grad_norm": 0.25715614877187887, |
| "learning_rate": 8.239703554352527e-06, |
| "loss": 0.3939, |
| "step": 898 |
| }, |
| { |
| "epoch": 3.6415189873417724, |
| "grad_norm": 0.2589073856891789, |
| "learning_rate": 8.193841711079775e-06, |
| "loss": 0.3802, |
| "step": 899 |
| }, |
| { |
| "epoch": 3.6455696202531644, |
| "grad_norm": 0.2709578886603319, |
| "learning_rate": 8.148074952982828e-06, |
| "loss": 0.3932, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.649620253164557, |
| "grad_norm": 0.25213787618382183, |
| "learning_rate": 8.102403648660859e-06, |
| "loss": 0.3887, |
| "step": 901 |
| }, |
| { |
| "epoch": 3.6536708860759495, |
| "grad_norm": 0.26322762436352, |
| "learning_rate": 8.056828165944282e-06, |
| "loss": 0.3834, |
| "step": 902 |
| }, |
| { |
| "epoch": 3.6577215189873415, |
| "grad_norm": 1.9745933804504674, |
| "learning_rate": 8.011348871891762e-06, |
| "loss": 0.4335, |
| "step": 903 |
| }, |
| { |
| "epoch": 3.661772151898734, |
| "grad_norm": 0.28652448800112185, |
| "learning_rate": 7.965966132787287e-06, |
| "loss": 0.3665, |
| "step": 904 |
| }, |
| { |
| "epoch": 3.6658227848101266, |
| "grad_norm": 0.25607893918279, |
| "learning_rate": 7.920680314137189e-06, |
| "loss": 0.368, |
| "step": 905 |
| }, |
| { |
| "epoch": 3.669873417721519, |
| "grad_norm": 0.25466925894827214, |
| "learning_rate": 7.875491780667246e-06, |
| "loss": 0.3976, |
| "step": 906 |
| }, |
| { |
| "epoch": 3.6739240506329116, |
| "grad_norm": 0.2847763268657028, |
| "learning_rate": 7.830400896319667e-06, |
| "loss": 0.3805, |
| "step": 907 |
| }, |
| { |
| "epoch": 3.6779746835443037, |
| "grad_norm": 0.2588946010095188, |
| "learning_rate": 7.785408024250259e-06, |
| "loss": 0.3864, |
| "step": 908 |
| }, |
| { |
| "epoch": 3.682025316455696, |
| "grad_norm": 1.0411054943479572, |
| "learning_rate": 7.74051352682542e-06, |
| "loss": 0.3908, |
| "step": 909 |
| }, |
| { |
| "epoch": 3.6860759493670887, |
| "grad_norm": 0.29084408495838565, |
| "learning_rate": 7.695717765619257e-06, |
| "loss": 0.3983, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.690126582278481, |
| "grad_norm": 0.2535755504988569, |
| "learning_rate": 7.651021101410673e-06, |
| "loss": 0.3854, |
| "step": 911 |
| }, |
| { |
| "epoch": 3.6941772151898733, |
| "grad_norm": 0.2654538587598099, |
| "learning_rate": 7.606423894180464e-06, |
| "loss": 0.3794, |
| "step": 912 |
| }, |
| { |
| "epoch": 3.698227848101266, |
| "grad_norm": 0.2808967679557905, |
| "learning_rate": 7.56192650310839e-06, |
| "loss": 0.3946, |
| "step": 913 |
| }, |
| { |
| "epoch": 3.7022784810126583, |
| "grad_norm": 0.26108723383567417, |
| "learning_rate": 7.517529286570349e-06, |
| "loss": 0.3768, |
| "step": 914 |
| }, |
| { |
| "epoch": 3.706329113924051, |
| "grad_norm": 0.2462551047073291, |
| "learning_rate": 7.473232602135387e-06, |
| "loss": 0.406, |
| "step": 915 |
| }, |
| { |
| "epoch": 3.710379746835443, |
| "grad_norm": 0.28812470327046574, |
| "learning_rate": 7.429036806562935e-06, |
| "loss": 0.3697, |
| "step": 916 |
| }, |
| { |
| "epoch": 3.7144303797468354, |
| "grad_norm": 0.2560738647263331, |
| "learning_rate": 7.3849422557998455e-06, |
| "loss": 0.388, |
| "step": 917 |
| }, |
| { |
| "epoch": 3.718481012658228, |
| "grad_norm": 0.2529512531209103, |
| "learning_rate": 7.340949304977567e-06, |
| "loss": 0.3865, |
| "step": 918 |
| }, |
| { |
| "epoch": 3.72253164556962, |
| "grad_norm": 0.29228562511608625, |
| "learning_rate": 7.297058308409282e-06, |
| "loss": 0.3713, |
| "step": 919 |
| }, |
| { |
| "epoch": 3.7265822784810125, |
| "grad_norm": 0.26143831372168136, |
| "learning_rate": 7.25326961958704e-06, |
| "loss": 0.381, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.730632911392405, |
| "grad_norm": 0.24851118961863275, |
| "learning_rate": 7.209583591178921e-06, |
| "loss": 0.3783, |
| "step": 921 |
| }, |
| { |
| "epoch": 3.7346835443037976, |
| "grad_norm": 0.24650756878733374, |
| "learning_rate": 7.1660005750261925e-06, |
| "loss": 0.3876, |
| "step": 922 |
| }, |
| { |
| "epoch": 3.73873417721519, |
| "grad_norm": 0.4015257865985564, |
| "learning_rate": 7.1225209221404765e-06, |
| "loss": 0.3821, |
| "step": 923 |
| }, |
| { |
| "epoch": 3.742784810126582, |
| "grad_norm": 0.2496034772962912, |
| "learning_rate": 7.079144982700909e-06, |
| "loss": 0.4019, |
| "step": 924 |
| }, |
| { |
| "epoch": 3.7468354430379747, |
| "grad_norm": 0.26694631813462494, |
| "learning_rate": 7.0358731060513695e-06, |
| "loss": 0.3693, |
| "step": 925 |
| }, |
| { |
| "epoch": 3.750886075949367, |
| "grad_norm": 0.43360959359939316, |
| "learning_rate": 6.99270564069757e-06, |
| "loss": 0.3821, |
| "step": 926 |
| }, |
| { |
| "epoch": 3.7549367088607593, |
| "grad_norm": 0.2671581278854808, |
| "learning_rate": 6.949642934304375e-06, |
| "loss": 0.356, |
| "step": 927 |
| }, |
| { |
| "epoch": 3.7589873417721518, |
| "grad_norm": 0.27149751106533226, |
| "learning_rate": 6.906685333692871e-06, |
| "loss": 0.366, |
| "step": 928 |
| }, |
| { |
| "epoch": 3.7630379746835443, |
| "grad_norm": 0.28021630663875724, |
| "learning_rate": 6.86383318483769e-06, |
| "loss": 0.3995, |
| "step": 929 |
| }, |
| { |
| "epoch": 3.767088607594937, |
| "grad_norm": 0.25773568863452084, |
| "learning_rate": 6.821086832864139e-06, |
| "loss": 0.3794, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.7711392405063293, |
| "grad_norm": 0.2533143931488609, |
| "learning_rate": 6.77844662204546e-06, |
| "loss": 0.3952, |
| "step": 931 |
| }, |
| { |
| "epoch": 3.7751898734177214, |
| "grad_norm": 0.26368565309233316, |
| "learning_rate": 6.7359128958000455e-06, |
| "loss": 0.3894, |
| "step": 932 |
| }, |
| { |
| "epoch": 3.779240506329114, |
| "grad_norm": 0.2650008671834002, |
| "learning_rate": 6.693485996688695e-06, |
| "loss": 0.3981, |
| "step": 933 |
| }, |
| { |
| "epoch": 3.7832911392405064, |
| "grad_norm": 0.2394786724093155, |
| "learning_rate": 6.651166266411801e-06, |
| "loss": 0.4167, |
| "step": 934 |
| }, |
| { |
| "epoch": 3.7873417721518985, |
| "grad_norm": 0.2486387995104209, |
| "learning_rate": 6.6089540458066725e-06, |
| "loss": 0.3958, |
| "step": 935 |
| }, |
| { |
| "epoch": 3.791392405063291, |
| "grad_norm": 0.26184265423869746, |
| "learning_rate": 6.566849674844711e-06, |
| "loss": 0.3812, |
| "step": 936 |
| }, |
| { |
| "epoch": 3.7954430379746835, |
| "grad_norm": 0.25535750238162497, |
| "learning_rate": 6.524853492628747e-06, |
| "loss": 0.3908, |
| "step": 937 |
| }, |
| { |
| "epoch": 3.799493670886076, |
| "grad_norm": 0.2652877089997182, |
| "learning_rate": 6.4829658373902536e-06, |
| "loss": 0.3721, |
| "step": 938 |
| }, |
| { |
| "epoch": 3.8035443037974686, |
| "grad_norm": 0.25403443609492954, |
| "learning_rate": 6.441187046486648e-06, |
| "loss": 0.3964, |
| "step": 939 |
| }, |
| { |
| "epoch": 3.8075949367088606, |
| "grad_norm": 0.27506026016836005, |
| "learning_rate": 6.399517456398567e-06, |
| "loss": 0.3775, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.811645569620253, |
| "grad_norm": 0.3461892215119306, |
| "learning_rate": 6.357957402727164e-06, |
| "loss": 0.3887, |
| "step": 941 |
| }, |
| { |
| "epoch": 3.8156962025316457, |
| "grad_norm": 0.23793879630257386, |
| "learning_rate": 6.316507220191395e-06, |
| "loss": 0.3995, |
| "step": 942 |
| }, |
| { |
| "epoch": 3.8197468354430377, |
| "grad_norm": 0.24431724194447266, |
| "learning_rate": 6.275167242625331e-06, |
| "loss": 0.4079, |
| "step": 943 |
| }, |
| { |
| "epoch": 3.8237974683544302, |
| "grad_norm": 0.27813497725248376, |
| "learning_rate": 6.233937802975471e-06, |
| "loss": 0.3625, |
| "step": 944 |
| }, |
| { |
| "epoch": 3.8278481012658228, |
| "grad_norm": 0.24039914476916308, |
| "learning_rate": 6.192819233298046e-06, |
| "loss": 0.3907, |
| "step": 945 |
| }, |
| { |
| "epoch": 3.8318987341772153, |
| "grad_norm": 0.23581339111016625, |
| "learning_rate": 6.151811864756383e-06, |
| "loss": 0.4018, |
| "step": 946 |
| }, |
| { |
| "epoch": 3.835949367088608, |
| "grad_norm": 0.25685597844679453, |
| "learning_rate": 6.1109160276181655e-06, |
| "loss": 0.3609, |
| "step": 947 |
| }, |
| { |
| "epoch": 3.84, |
| "grad_norm": 0.24380145245197765, |
| "learning_rate": 6.070132051252868e-06, |
| "loss": 0.4006, |
| "step": 948 |
| }, |
| { |
| "epoch": 3.8440506329113924, |
| "grad_norm": 0.2504831853925064, |
| "learning_rate": 6.0294602641290034e-06, |
| "loss": 0.3769, |
| "step": 949 |
| }, |
| { |
| "epoch": 3.848101265822785, |
| "grad_norm": 0.2447032274685255, |
| "learning_rate": 5.988900993811575e-06, |
| "loss": 0.3868, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.852151898734177, |
| "grad_norm": 0.2621957493714064, |
| "learning_rate": 5.948454566959363e-06, |
| "loss": 0.3873, |
| "step": 951 |
| }, |
| { |
| "epoch": 3.8562025316455695, |
| "grad_norm": 4.60530102462379, |
| "learning_rate": 5.908121309322328e-06, |
| "loss": 0.4076, |
| "step": 952 |
| }, |
| { |
| "epoch": 3.860253164556962, |
| "grad_norm": 0.2590535024601451, |
| "learning_rate": 5.867901545738976e-06, |
| "loss": 0.3993, |
| "step": 953 |
| }, |
| { |
| "epoch": 3.8643037974683545, |
| "grad_norm": 0.2567267415920894, |
| "learning_rate": 5.827795600133774e-06, |
| "loss": 0.3898, |
| "step": 954 |
| }, |
| { |
| "epoch": 3.868354430379747, |
| "grad_norm": 0.24813209330683392, |
| "learning_rate": 5.787803795514466e-06, |
| "loss": 0.3721, |
| "step": 955 |
| }, |
| { |
| "epoch": 3.872405063291139, |
| "grad_norm": 0.2591529773171053, |
| "learning_rate": 5.747926453969576e-06, |
| "loss": 0.3857, |
| "step": 956 |
| }, |
| { |
| "epoch": 3.8764556962025316, |
| "grad_norm": 0.24506152749211516, |
| "learning_rate": 5.708163896665708e-06, |
| "loss": 0.3814, |
| "step": 957 |
| }, |
| { |
| "epoch": 3.880506329113924, |
| "grad_norm": 0.2406828546990463, |
| "learning_rate": 5.668516443845047e-06, |
| "loss": 0.4083, |
| "step": 958 |
| }, |
| { |
| "epoch": 3.884556962025316, |
| "grad_norm": 0.24006003777287732, |
| "learning_rate": 5.6289844148227225e-06, |
| "loss": 0.3997, |
| "step": 959 |
| }, |
| { |
| "epoch": 3.8886075949367087, |
| "grad_norm": 0.23773192923204278, |
| "learning_rate": 5.5895681279842615e-06, |
| "loss": 0.3973, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.8926582278481012, |
| "grad_norm": 0.24797360005575414, |
| "learning_rate": 5.550267900783019e-06, |
| "loss": 0.378, |
| "step": 961 |
| }, |
| { |
| "epoch": 3.8967088607594937, |
| "grad_norm": 0.2584292225128584, |
| "learning_rate": 5.511084049737623e-06, |
| "loss": 0.3733, |
| "step": 962 |
| }, |
| { |
| "epoch": 3.9007594936708863, |
| "grad_norm": 0.24516825018130953, |
| "learning_rate": 5.4720168904294215e-06, |
| "loss": 0.3862, |
| "step": 963 |
| }, |
| { |
| "epoch": 3.9048101265822783, |
| "grad_norm": 0.2465914730124335, |
| "learning_rate": 5.433066737499948e-06, |
| "loss": 0.3569, |
| "step": 964 |
| }, |
| { |
| "epoch": 3.908860759493671, |
| "grad_norm": 0.2412327389472091, |
| "learning_rate": 5.394233904648376e-06, |
| "loss": 0.3955, |
| "step": 965 |
| }, |
| { |
| "epoch": 3.9129113924050634, |
| "grad_norm": 0.29722387640117837, |
| "learning_rate": 5.355518704628997e-06, |
| "loss": 0.3957, |
| "step": 966 |
| }, |
| { |
| "epoch": 3.9169620253164554, |
| "grad_norm": 0.23391901975938287, |
| "learning_rate": 5.316921449248731e-06, |
| "loss": 0.3963, |
| "step": 967 |
| }, |
| { |
| "epoch": 3.921012658227848, |
| "grad_norm": 0.2516433916634741, |
| "learning_rate": 5.278442449364538e-06, |
| "loss": 0.3764, |
| "step": 968 |
| }, |
| { |
| "epoch": 3.9250632911392405, |
| "grad_norm": 0.238571437082297, |
| "learning_rate": 5.240082014881016e-06, |
| "loss": 0.395, |
| "step": 969 |
| }, |
| { |
| "epoch": 3.929113924050633, |
| "grad_norm": 0.25099063248721026, |
| "learning_rate": 5.201840454747822e-06, |
| "loss": 0.3684, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.9331645569620255, |
| "grad_norm": 0.2563887006623139, |
| "learning_rate": 5.163718076957223e-06, |
| "loss": 0.3747, |
| "step": 971 |
| }, |
| { |
| "epoch": 3.9372151898734176, |
| "grad_norm": 0.25570322937600515, |
| "learning_rate": 5.125715188541609e-06, |
| "loss": 0.3845, |
| "step": 972 |
| }, |
| { |
| "epoch": 3.94126582278481, |
| "grad_norm": 0.23814505154564167, |
| "learning_rate": 5.087832095571021e-06, |
| "loss": 0.4031, |
| "step": 973 |
| }, |
| { |
| "epoch": 3.9453164556962026, |
| "grad_norm": 0.24664249142765152, |
| "learning_rate": 5.0500691031506766e-06, |
| "loss": 0.3925, |
| "step": 974 |
| }, |
| { |
| "epoch": 3.9493670886075947, |
| "grad_norm": 0.24740806229844822, |
| "learning_rate": 5.01242651541854e-06, |
| "loss": 0.4011, |
| "step": 975 |
| }, |
| { |
| "epoch": 3.953417721518987, |
| "grad_norm": 0.25248377469728867, |
| "learning_rate": 4.974904635542815e-06, |
| "loss": 0.3849, |
| "step": 976 |
| }, |
| { |
| "epoch": 3.9574683544303797, |
| "grad_norm": 0.250987819066587, |
| "learning_rate": 4.937503765719582e-06, |
| "loss": 0.38, |
| "step": 977 |
| }, |
| { |
| "epoch": 3.961518987341772, |
| "grad_norm": 0.2592355466476159, |
| "learning_rate": 4.900224207170299e-06, |
| "loss": 0.3715, |
| "step": 978 |
| }, |
| { |
| "epoch": 3.9655696202531647, |
| "grad_norm": 0.2708628530266836, |
| "learning_rate": 4.8630662601394065e-06, |
| "loss": 0.3534, |
| "step": 979 |
| }, |
| { |
| "epoch": 3.969620253164557, |
| "grad_norm": 0.2443575444700035, |
| "learning_rate": 4.8260302238918995e-06, |
| "loss": 0.3972, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.9736708860759493, |
| "grad_norm": 0.2667200253990292, |
| "learning_rate": 4.789116396710924e-06, |
| "loss": 0.3902, |
| "step": 981 |
| }, |
| { |
| "epoch": 3.977721518987342, |
| "grad_norm": 0.248886408720081, |
| "learning_rate": 4.752325075895368e-06, |
| "loss": 0.3719, |
| "step": 982 |
| }, |
| { |
| "epoch": 3.981772151898734, |
| "grad_norm": 0.24252440176539827, |
| "learning_rate": 4.715656557757473e-06, |
| "loss": 0.3743, |
| "step": 983 |
| }, |
| { |
| "epoch": 3.9858227848101264, |
| "grad_norm": 0.24930239622940165, |
| "learning_rate": 4.679111137620442e-06, |
| "loss": 0.3925, |
| "step": 984 |
| }, |
| { |
| "epoch": 3.989873417721519, |
| "grad_norm": 0.26339681972185286, |
| "learning_rate": 4.6426891098160585e-06, |
| "loss": 0.3744, |
| "step": 985 |
| }, |
| { |
| "epoch": 3.9939240506329114, |
| "grad_norm": 0.25405467908262674, |
| "learning_rate": 4.6063907676823474e-06, |
| "loss": 0.3789, |
| "step": 986 |
| }, |
| { |
| "epoch": 3.997974683544304, |
| "grad_norm": 0.24050654280746672, |
| "learning_rate": 4.570216403561141e-06, |
| "loss": 0.3945, |
| "step": 987 |
| }, |
| { |
| "epoch": 4.01620253164557, |
| "grad_norm": 0.39754793003032945, |
| "learning_rate": 4.534166308795815e-06, |
| "loss": 0.3637, |
| "step": 988 |
| }, |
| { |
| "epoch": 4.020253164556962, |
| "grad_norm": 0.30791009216160803, |
| "learning_rate": 4.498240773728859e-06, |
| "loss": 0.3578, |
| "step": 989 |
| }, |
| { |
| "epoch": 4.024303797468354, |
| "grad_norm": 0.241600489956607, |
| "learning_rate": 4.462440087699609e-06, |
| "loss": 0.337, |
| "step": 990 |
| }, |
| { |
| "epoch": 4.028354430379747, |
| "grad_norm": 0.3302376176628308, |
| "learning_rate": 4.426764539041861e-06, |
| "loss": 0.3297, |
| "step": 991 |
| }, |
| { |
| "epoch": 4.032405063291139, |
| "grad_norm": 0.46570156217601283, |
| "learning_rate": 4.391214415081582e-06, |
| "loss": 0.358, |
| "step": 992 |
| }, |
| { |
| "epoch": 4.036455696202531, |
| "grad_norm": 0.30914464823809723, |
| "learning_rate": 4.355790002134579e-06, |
| "loss": 0.334, |
| "step": 993 |
| }, |
| { |
| "epoch": 4.040506329113924, |
| "grad_norm": 0.48232846467549595, |
| "learning_rate": 4.320491585504207e-06, |
| "loss": 0.3269, |
| "step": 994 |
| }, |
| { |
| "epoch": 4.044556962025316, |
| "grad_norm": 3.1974577897306933, |
| "learning_rate": 4.2853194494790615e-06, |
| "loss": 0.3934, |
| "step": 995 |
| }, |
| { |
| "epoch": 4.048607594936709, |
| "grad_norm": 0.4026370305859368, |
| "learning_rate": 4.250273877330691e-06, |
| "loss": 0.3424, |
| "step": 996 |
| }, |
| { |
| "epoch": 4.052658227848101, |
| "grad_norm": 0.33855731230932273, |
| "learning_rate": 4.215355151311313e-06, |
| "loss": 0.3325, |
| "step": 997 |
| }, |
| { |
| "epoch": 4.056708860759493, |
| "grad_norm": 0.24829114364790583, |
| "learning_rate": 4.180563552651542e-06, |
| "loss": 0.3426, |
| "step": 998 |
| }, |
| { |
| "epoch": 4.060759493670886, |
| "grad_norm": 0.27652038208373275, |
| "learning_rate": 4.145899361558147e-06, |
| "loss": 0.3372, |
| "step": 999 |
| }, |
| { |
| "epoch": 4.0648101265822785, |
| "grad_norm": 0.35269573146769706, |
| "learning_rate": 4.111362857211738e-06, |
| "loss": 0.3455, |
| "step": 1000 |
| }, |
| { |
| "epoch": 4.0688607594936705, |
| "grad_norm": 0.37669136430794065, |
| "learning_rate": 4.076954317764592e-06, |
| "loss": 0.3297, |
| "step": 1001 |
| }, |
| { |
| "epoch": 4.0729113924050635, |
| "grad_norm": 0.2875399690261895, |
| "learning_rate": 4.042674020338335e-06, |
| "loss": 0.3459, |
| "step": 1002 |
| }, |
| { |
| "epoch": 4.076962025316456, |
| "grad_norm": 0.2778702105108225, |
| "learning_rate": 4.0085222410217835e-06, |
| "loss": 0.3373, |
| "step": 1003 |
| }, |
| { |
| "epoch": 4.0810126582278485, |
| "grad_norm": 0.3139773499272283, |
| "learning_rate": 3.974499254868674e-06, |
| "loss": 0.3628, |
| "step": 1004 |
| }, |
| { |
| "epoch": 4.085063291139241, |
| "grad_norm": 0.31864363003955254, |
| "learning_rate": 3.940605335895451e-06, |
| "loss": 0.3145, |
| "step": 1005 |
| }, |
| { |
| "epoch": 4.089113924050633, |
| "grad_norm": 0.27894069785328107, |
| "learning_rate": 3.90684075707908e-06, |
| "loss": 0.3572, |
| "step": 1006 |
| }, |
| { |
| "epoch": 4.093164556962026, |
| "grad_norm": 0.2652941643884745, |
| "learning_rate": 3.8732057903548505e-06, |
| "loss": 0.3406, |
| "step": 1007 |
| }, |
| { |
| "epoch": 4.097215189873418, |
| "grad_norm": 0.2800048969802994, |
| "learning_rate": 3.8397007066141375e-06, |
| "loss": 0.3493, |
| "step": 1008 |
| }, |
| { |
| "epoch": 4.10126582278481, |
| "grad_norm": 0.3045148234498339, |
| "learning_rate": 3.806325775702304e-06, |
| "loss": 0.348, |
| "step": 1009 |
| }, |
| { |
| "epoch": 4.105316455696203, |
| "grad_norm": 0.28169648487500615, |
| "learning_rate": 3.773081266416434e-06, |
| "loss": 0.3345, |
| "step": 1010 |
| }, |
| { |
| "epoch": 4.109367088607595, |
| "grad_norm": 0.25478516118755906, |
| "learning_rate": 3.739967446503245e-06, |
| "loss": 0.3588, |
| "step": 1011 |
| }, |
| { |
| "epoch": 4.113417721518988, |
| "grad_norm": 0.46240889689382436, |
| "learning_rate": 3.706984582656894e-06, |
| "loss": 0.3558, |
| "step": 1012 |
| }, |
| { |
| "epoch": 4.11746835443038, |
| "grad_norm": 0.2580813754152087, |
| "learning_rate": 3.6741329405168237e-06, |
| "loss": 0.3268, |
| "step": 1013 |
| }, |
| { |
| "epoch": 4.121518987341772, |
| "grad_norm": 1.296650697827636, |
| "learning_rate": 3.641412784665648e-06, |
| "loss": 0.349, |
| "step": 1014 |
| }, |
| { |
| "epoch": 4.125569620253165, |
| "grad_norm": 0.26626350249869213, |
| "learning_rate": 3.608824378627005e-06, |
| "loss": 0.3287, |
| "step": 1015 |
| }, |
| { |
| "epoch": 4.129620253164557, |
| "grad_norm": 0.2729789994952113, |
| "learning_rate": 3.5763679848634337e-06, |
| "loss": 0.3472, |
| "step": 1016 |
| }, |
| { |
| "epoch": 4.133670886075949, |
| "grad_norm": 0.25576269733085183, |
| "learning_rate": 3.544043864774269e-06, |
| "loss": 0.3172, |
| "step": 1017 |
| }, |
| { |
| "epoch": 4.137721518987342, |
| "grad_norm": 0.2564021604915243, |
| "learning_rate": 3.5118522786935282e-06, |
| "loss": 0.3459, |
| "step": 1018 |
| }, |
| { |
| "epoch": 4.141772151898734, |
| "grad_norm": 0.2794063421539516, |
| "learning_rate": 3.479793485887819e-06, |
| "loss": 0.3302, |
| "step": 1019 |
| }, |
| { |
| "epoch": 4.145822784810127, |
| "grad_norm": 0.2827943576221101, |
| "learning_rate": 3.4478677445542653e-06, |
| "loss": 0.3423, |
| "step": 1020 |
| }, |
| { |
| "epoch": 4.149873417721519, |
| "grad_norm": 0.2438797016031761, |
| "learning_rate": 3.4160753118183767e-06, |
| "loss": 0.35, |
| "step": 1021 |
| }, |
| { |
| "epoch": 4.153924050632911, |
| "grad_norm": 0.24102842269824729, |
| "learning_rate": 3.3844164437320527e-06, |
| "loss": 0.34, |
| "step": 1022 |
| }, |
| { |
| "epoch": 4.157974683544304, |
| "grad_norm": 1.710252662826693, |
| "learning_rate": 3.3528913952714558e-06, |
| "loss": 0.3685, |
| "step": 1023 |
| }, |
| { |
| "epoch": 4.162025316455696, |
| "grad_norm": 0.24997725542126809, |
| "learning_rate": 3.321500420335e-06, |
| "loss": 0.3357, |
| "step": 1024 |
| }, |
| { |
| "epoch": 4.166075949367088, |
| "grad_norm": 0.23915441999858278, |
| "learning_rate": 3.290243771741275e-06, |
| "loss": 0.3429, |
| "step": 1025 |
| }, |
| { |
| "epoch": 4.170126582278481, |
| "grad_norm": 0.2552663780875061, |
| "learning_rate": 3.2591217012270325e-06, |
| "loss": 0.3208, |
| "step": 1026 |
| }, |
| { |
| "epoch": 4.174177215189873, |
| "grad_norm": 0.24296816557302064, |
| "learning_rate": 3.228134459445149e-06, |
| "loss": 0.3526, |
| "step": 1027 |
| }, |
| { |
| "epoch": 4.178227848101266, |
| "grad_norm": 0.24382884730570148, |
| "learning_rate": 3.1972822959626205e-06, |
| "loss": 0.3609, |
| "step": 1028 |
| }, |
| { |
| "epoch": 4.182278481012658, |
| "grad_norm": 0.24550261881512794, |
| "learning_rate": 3.166565459258513e-06, |
| "loss": 0.3329, |
| "step": 1029 |
| }, |
| { |
| "epoch": 4.18632911392405, |
| "grad_norm": 0.255817324549862, |
| "learning_rate": 3.1359841967220193e-06, |
| "loss": 0.3318, |
| "step": 1030 |
| }, |
| { |
| "epoch": 4.190379746835443, |
| "grad_norm": 0.2553174247624844, |
| "learning_rate": 3.105538754650419e-06, |
| "loss": 0.3269, |
| "step": 1031 |
| }, |
| { |
| "epoch": 4.194430379746835, |
| "grad_norm": 0.24031894514754334, |
| "learning_rate": 3.07522937824712e-06, |
| "loss": 0.3611, |
| "step": 1032 |
| }, |
| { |
| "epoch": 4.1984810126582275, |
| "grad_norm": 0.2331885180937163, |
| "learning_rate": 3.0450563116196697e-06, |
| "loss": 0.3626, |
| "step": 1033 |
| }, |
| { |
| "epoch": 4.2025316455696204, |
| "grad_norm": 0.23163910580405703, |
| "learning_rate": 3.0150197977778008e-06, |
| "loss": 0.3363, |
| "step": 1034 |
| }, |
| { |
| "epoch": 4.2065822784810125, |
| "grad_norm": 0.24409782372443178, |
| "learning_rate": 2.985120078631465e-06, |
| "loss": 0.3258, |
| "step": 1035 |
| }, |
| { |
| "epoch": 4.2106329113924055, |
| "grad_norm": 0.24001473019573935, |
| "learning_rate": 2.9553573949888893e-06, |
| "loss": 0.3544, |
| "step": 1036 |
| }, |
| { |
| "epoch": 4.2146835443037975, |
| "grad_norm": 0.23601430634440101, |
| "learning_rate": 2.9257319865546384e-06, |
| "loss": 0.3537, |
| "step": 1037 |
| }, |
| { |
| "epoch": 4.21873417721519, |
| "grad_norm": 0.23464541155481092, |
| "learning_rate": 2.896244091927678e-06, |
| "loss": 0.3369, |
| "step": 1038 |
| }, |
| { |
| "epoch": 4.222784810126583, |
| "grad_norm": 0.22863639113009174, |
| "learning_rate": 2.8668939485994584e-06, |
| "loss": 0.3624, |
| "step": 1039 |
| }, |
| { |
| "epoch": 4.226835443037975, |
| "grad_norm": 0.2411131901213987, |
| "learning_rate": 2.837681792951994e-06, |
| "loss": 0.3456, |
| "step": 1040 |
| }, |
| { |
| "epoch": 4.230886075949367, |
| "grad_norm": 0.23706052344746498, |
| "learning_rate": 2.808607860255981e-06, |
| "loss": 0.327, |
| "step": 1041 |
| }, |
| { |
| "epoch": 4.23493670886076, |
| "grad_norm": 0.22149988860328992, |
| "learning_rate": 2.7796723846688634e-06, |
| "loss": 0.3689, |
| "step": 1042 |
| }, |
| { |
| "epoch": 4.238987341772152, |
| "grad_norm": 0.2216626239437601, |
| "learning_rate": 2.7508755992329937e-06, |
| "loss": 0.3488, |
| "step": 1043 |
| }, |
| { |
| "epoch": 4.243037974683545, |
| "grad_norm": 0.22415903546430668, |
| "learning_rate": 2.722217735873718e-06, |
| "loss": 0.3585, |
| "step": 1044 |
| }, |
| { |
| "epoch": 4.247088607594937, |
| "grad_norm": 0.23511691769504728, |
| "learning_rate": 2.6936990253975315e-06, |
| "loss": 0.3469, |
| "step": 1045 |
| }, |
| { |
| "epoch": 4.251139240506329, |
| "grad_norm": 0.23489337789004225, |
| "learning_rate": 2.665319697490205e-06, |
| "loss": 0.3494, |
| "step": 1046 |
| }, |
| { |
| "epoch": 4.255189873417722, |
| "grad_norm": 0.21960823370825283, |
| "learning_rate": 2.637079980714945e-06, |
| "loss": 0.3626, |
| "step": 1047 |
| }, |
| { |
| "epoch": 4.259240506329114, |
| "grad_norm": 0.22599911013858912, |
| "learning_rate": 2.6089801025105453e-06, |
| "loss": 0.3245, |
| "step": 1048 |
| }, |
| { |
| "epoch": 4.263291139240506, |
| "grad_norm": 0.23750570209978766, |
| "learning_rate": 2.581020289189571e-06, |
| "loss": 0.338, |
| "step": 1049 |
| }, |
| { |
| "epoch": 4.267341772151899, |
| "grad_norm": 0.22654514710735038, |
| "learning_rate": 2.553200765936501e-06, |
| "loss": 0.34, |
| "step": 1050 |
| }, |
| { |
| "epoch": 4.271392405063291, |
| "grad_norm": 0.23221906436485107, |
| "learning_rate": 2.525521756805962e-06, |
| "loss": 0.3574, |
| "step": 1051 |
| }, |
| { |
| "epoch": 4.275443037974684, |
| "grad_norm": 0.23161940268512196, |
| "learning_rate": 2.497983484720885e-06, |
| "loss": 0.3501, |
| "step": 1052 |
| }, |
| { |
| "epoch": 4.279493670886076, |
| "grad_norm": 0.22812388931679883, |
| "learning_rate": 2.470586171470728e-06, |
| "loss": 0.3473, |
| "step": 1053 |
| }, |
| { |
| "epoch": 4.283544303797468, |
| "grad_norm": 0.22373730422661364, |
| "learning_rate": 2.4433300377096836e-06, |
| "loss": 0.352, |
| "step": 1054 |
| }, |
| { |
| "epoch": 4.287594936708861, |
| "grad_norm": 0.23466894165855348, |
| "learning_rate": 2.4162153029549073e-06, |
| "loss": 0.3336, |
| "step": 1055 |
| }, |
| { |
| "epoch": 4.291645569620253, |
| "grad_norm": 0.23660663366959608, |
| "learning_rate": 2.3892421855847458e-06, |
| "loss": 0.3486, |
| "step": 1056 |
| }, |
| { |
| "epoch": 4.295696202531645, |
| "grad_norm": 0.22449168299642905, |
| "learning_rate": 2.362410902836978e-06, |
| "loss": 0.3326, |
| "step": 1057 |
| }, |
| { |
| "epoch": 4.299746835443038, |
| "grad_norm": 0.22659117064922524, |
| "learning_rate": 2.3357216708070653e-06, |
| "loss": 0.343, |
| "step": 1058 |
| }, |
| { |
| "epoch": 4.30379746835443, |
| "grad_norm": 0.24703242786229143, |
| "learning_rate": 2.309174704446411e-06, |
| "loss": 0.3354, |
| "step": 1059 |
| }, |
| { |
| "epoch": 4.307848101265823, |
| "grad_norm": 0.24145733348444287, |
| "learning_rate": 2.2827702175606437e-06, |
| "loss": 0.348, |
| "step": 1060 |
| }, |
| { |
| "epoch": 4.311898734177215, |
| "grad_norm": 0.2550223313348423, |
| "learning_rate": 2.256508422807855e-06, |
| "loss": 0.3463, |
| "step": 1061 |
| }, |
| { |
| "epoch": 4.315949367088607, |
| "grad_norm": 0.23580628469344608, |
| "learning_rate": 2.230389531696946e-06, |
| "loss": 0.3251, |
| "step": 1062 |
| }, |
| { |
| "epoch": 4.32, |
| "grad_norm": 0.2332168731333629, |
| "learning_rate": 2.204413754585857e-06, |
| "loss": 0.3607, |
| "step": 1063 |
| }, |
| { |
| "epoch": 4.324050632911392, |
| "grad_norm": 0.21646996843302882, |
| "learning_rate": 2.1785813006799406e-06, |
| "loss": 0.3348, |
| "step": 1064 |
| }, |
| { |
| "epoch": 4.328101265822784, |
| "grad_norm": 0.23240161724975694, |
| "learning_rate": 2.1528923780302224e-06, |
| "loss": 0.3536, |
| "step": 1065 |
| }, |
| { |
| "epoch": 4.332151898734177, |
| "grad_norm": 0.23201400502182304, |
| "learning_rate": 2.127347193531757e-06, |
| "loss": 0.3169, |
| "step": 1066 |
| }, |
| { |
| "epoch": 4.3362025316455695, |
| "grad_norm": 0.23331896161484383, |
| "learning_rate": 2.101945952921942e-06, |
| "loss": 0.3448, |
| "step": 1067 |
| }, |
| { |
| "epoch": 4.340253164556962, |
| "grad_norm": 0.2389300858813356, |
| "learning_rate": 2.0766888607788906e-06, |
| "loss": 0.3526, |
| "step": 1068 |
| }, |
| { |
| "epoch": 4.3443037974683545, |
| "grad_norm": 0.23221826307325194, |
| "learning_rate": 2.0515761205197337e-06, |
| "loss": 0.3335, |
| "step": 1069 |
| }, |
| { |
| "epoch": 4.348354430379747, |
| "grad_norm": 0.2320757669652291, |
| "learning_rate": 2.0266079343990453e-06, |
| "loss": 0.3266, |
| "step": 1070 |
| }, |
| { |
| "epoch": 4.3524050632911395, |
| "grad_norm": 0.23502623251481042, |
| "learning_rate": 2.0017845035071494e-06, |
| "loss": 0.3424, |
| "step": 1071 |
| }, |
| { |
| "epoch": 4.356455696202532, |
| "grad_norm": 0.2316784994993235, |
| "learning_rate": 1.9771060277685537e-06, |
| "loss": 0.3583, |
| "step": 1072 |
| }, |
| { |
| "epoch": 4.360506329113924, |
| "grad_norm": 0.22116137243525105, |
| "learning_rate": 1.95257270594031e-06, |
| "loss": 0.3479, |
| "step": 1073 |
| }, |
| { |
| "epoch": 4.364556962025317, |
| "grad_norm": 0.22175738646137969, |
| "learning_rate": 1.9281847356104188e-06, |
| "loss": 0.3506, |
| "step": 1074 |
| }, |
| { |
| "epoch": 4.368607594936709, |
| "grad_norm": 0.2249046541056626, |
| "learning_rate": 1.9039423131962365e-06, |
| "loss": 0.3447, |
| "step": 1075 |
| }, |
| { |
| "epoch": 4.372658227848102, |
| "grad_norm": 0.23105213507226072, |
| "learning_rate": 1.8798456339429027e-06, |
| "loss": 0.3459, |
| "step": 1076 |
| }, |
| { |
| "epoch": 4.376708860759494, |
| "grad_norm": 0.22888803244024647, |
| "learning_rate": 1.8558948919217612e-06, |
| "loss": 0.3433, |
| "step": 1077 |
| }, |
| { |
| "epoch": 4.380759493670886, |
| "grad_norm": 0.2326582523027793, |
| "learning_rate": 1.8320902800287954e-06, |
| "loss": 0.3335, |
| "step": 1078 |
| }, |
| { |
| "epoch": 4.384810126582279, |
| "grad_norm": 0.24984941561849872, |
| "learning_rate": 1.8084319899830726e-06, |
| "loss": 0.3233, |
| "step": 1079 |
| }, |
| { |
| "epoch": 4.388860759493671, |
| "grad_norm": 0.24525501231424193, |
| "learning_rate": 1.7849202123252097e-06, |
| "loss": 0.3342, |
| "step": 1080 |
| }, |
| { |
| "epoch": 4.392911392405063, |
| "grad_norm": 0.22630544860419957, |
| "learning_rate": 1.7615551364158401e-06, |
| "loss": 0.3509, |
| "step": 1081 |
| }, |
| { |
| "epoch": 4.396962025316456, |
| "grad_norm": 0.2390237437484769, |
| "learning_rate": 1.738336950434061e-06, |
| "loss": 0.34, |
| "step": 1082 |
| }, |
| { |
| "epoch": 4.401012658227848, |
| "grad_norm": 0.21890399574265337, |
| "learning_rate": 1.715265841375957e-06, |
| "loss": 0.3631, |
| "step": 1083 |
| }, |
| { |
| "epoch": 4.405063291139241, |
| "grad_norm": 0.24750399873294068, |
| "learning_rate": 1.6923419950530684e-06, |
| "loss": 0.3264, |
| "step": 1084 |
| }, |
| { |
| "epoch": 4.409113924050633, |
| "grad_norm": 0.23808925708017223, |
| "learning_rate": 1.6695655960909008e-06, |
| "loss": 0.3205, |
| "step": 1085 |
| }, |
| { |
| "epoch": 4.413164556962025, |
| "grad_norm": 0.22169290716974274, |
| "learning_rate": 1.646936827927441e-06, |
| "loss": 0.3336, |
| "step": 1086 |
| }, |
| { |
| "epoch": 4.417215189873418, |
| "grad_norm": 0.23108414492478047, |
| "learning_rate": 1.6244558728116766e-06, |
| "loss": 0.3258, |
| "step": 1087 |
| }, |
| { |
| "epoch": 4.42126582278481, |
| "grad_norm": 0.2256100131974818, |
| "learning_rate": 1.6021229118021265e-06, |
| "loss": 0.3364, |
| "step": 1088 |
| }, |
| { |
| "epoch": 4.425316455696202, |
| "grad_norm": 0.23241087385879544, |
| "learning_rate": 1.5799381247653967e-06, |
| "loss": 0.312, |
| "step": 1089 |
| }, |
| { |
| "epoch": 4.429367088607595, |
| "grad_norm": 0.23559164812632197, |
| "learning_rate": 1.5579016903747013e-06, |
| "loss": 0.3337, |
| "step": 1090 |
| }, |
| { |
| "epoch": 4.433417721518987, |
| "grad_norm": 0.23530820181520465, |
| "learning_rate": 1.5360137861084656e-06, |
| "loss": 0.3535, |
| "step": 1091 |
| }, |
| { |
| "epoch": 4.43746835443038, |
| "grad_norm": 0.2526271936386745, |
| "learning_rate": 1.5142745882488475e-06, |
| "loss": 0.3325, |
| "step": 1092 |
| }, |
| { |
| "epoch": 4.441518987341772, |
| "grad_norm": 0.2307349945969056, |
| "learning_rate": 1.4926842718803691e-06, |
| "loss": 0.3353, |
| "step": 1093 |
| }, |
| { |
| "epoch": 4.445569620253164, |
| "grad_norm": 0.22810942865684578, |
| "learning_rate": 1.4712430108884657e-06, |
| "loss": 0.3439, |
| "step": 1094 |
| }, |
| { |
| "epoch": 4.449620253164557, |
| "grad_norm": 0.2510107040095722, |
| "learning_rate": 1.4499509779581078e-06, |
| "loss": 0.3268, |
| "step": 1095 |
| }, |
| { |
| "epoch": 4.453670886075949, |
| "grad_norm": 0.21996616668324864, |
| "learning_rate": 1.4288083445723988e-06, |
| "loss": 0.3588, |
| "step": 1096 |
| }, |
| { |
| "epoch": 4.457721518987341, |
| "grad_norm": 0.22556329560073013, |
| "learning_rate": 1.4078152810112045e-06, |
| "loss": 0.3378, |
| "step": 1097 |
| }, |
| { |
| "epoch": 4.461772151898734, |
| "grad_norm": 0.2455048332568078, |
| "learning_rate": 1.3869719563497697e-06, |
| "loss": 0.3366, |
| "step": 1098 |
| }, |
| { |
| "epoch": 4.465822784810126, |
| "grad_norm": 0.22592128190820662, |
| "learning_rate": 1.3662785384573663e-06, |
| "loss": 0.3467, |
| "step": 1099 |
| }, |
| { |
| "epoch": 4.469873417721519, |
| "grad_norm": 0.2202728336494389, |
| "learning_rate": 1.3457351939959383e-06, |
| "loss": 0.3571, |
| "step": 1100 |
| }, |
| { |
| "epoch": 4.473924050632911, |
| "grad_norm": 0.2238737479006771, |
| "learning_rate": 1.3253420884187551e-06, |
| "loss": 0.3475, |
| "step": 1101 |
| }, |
| { |
| "epoch": 4.4779746835443035, |
| "grad_norm": 0.24471888485634277, |
| "learning_rate": 1.3050993859690953e-06, |
| "loss": 0.3319, |
| "step": 1102 |
| }, |
| { |
| "epoch": 4.4820253164556965, |
| "grad_norm": 0.23505704064039315, |
| "learning_rate": 1.2850072496788869e-06, |
| "loss": 0.3258, |
| "step": 1103 |
| }, |
| { |
| "epoch": 4.4860759493670885, |
| "grad_norm": 0.23235206889423574, |
| "learning_rate": 1.2650658413674434e-06, |
| "loss": 0.3225, |
| "step": 1104 |
| }, |
| { |
| "epoch": 4.490126582278481, |
| "grad_norm": 0.2296456195129021, |
| "learning_rate": 1.2452753216401226e-06, |
| "loss": 0.3537, |
| "step": 1105 |
| }, |
| { |
| "epoch": 4.494177215189874, |
| "grad_norm": 0.21826572081415896, |
| "learning_rate": 1.2256358498870503e-06, |
| "loss": 0.3703, |
| "step": 1106 |
| }, |
| { |
| "epoch": 4.498227848101266, |
| "grad_norm": 0.23329055713341754, |
| "learning_rate": 1.2061475842818337e-06, |
| "loss": 0.3543, |
| "step": 1107 |
| }, |
| { |
| "epoch": 4.502278481012659, |
| "grad_norm": 0.22996447173493714, |
| "learning_rate": 1.1868106817802816e-06, |
| "loss": 0.3542, |
| "step": 1108 |
| }, |
| { |
| "epoch": 4.506329113924051, |
| "grad_norm": 0.2282616197114923, |
| "learning_rate": 1.1676252981191482e-06, |
| "loss": 0.3416, |
| "step": 1109 |
| }, |
| { |
| "epoch": 4.510379746835443, |
| "grad_norm": 0.22467778401244823, |
| "learning_rate": 1.1485915878148823e-06, |
| "loss": 0.3502, |
| "step": 1110 |
| }, |
| { |
| "epoch": 4.514430379746836, |
| "grad_norm": 0.2285990698037089, |
| "learning_rate": 1.1297097041623584e-06, |
| "loss": 0.3622, |
| "step": 1111 |
| }, |
| { |
| "epoch": 4.518481012658228, |
| "grad_norm": 0.217567851646941, |
| "learning_rate": 1.1109797992336847e-06, |
| "loss": 0.3489, |
| "step": 1112 |
| }, |
| { |
| "epoch": 4.52253164556962, |
| "grad_norm": 0.226977583443272, |
| "learning_rate": 1.092402023876933e-06, |
| "loss": 0.3425, |
| "step": 1113 |
| }, |
| { |
| "epoch": 4.526582278481013, |
| "grad_norm": 0.2128419558146082, |
| "learning_rate": 1.0739765277149527e-06, |
| "loss": 0.3643, |
| "step": 1114 |
| }, |
| { |
| "epoch": 4.530632911392405, |
| "grad_norm": 0.21396849582044447, |
| "learning_rate": 1.0557034591441596e-06, |
| "loss": 0.3613, |
| "step": 1115 |
| }, |
| { |
| "epoch": 4.534683544303798, |
| "grad_norm": 0.2227696651900208, |
| "learning_rate": 1.0375829653333324e-06, |
| "loss": 0.348, |
| "step": 1116 |
| }, |
| { |
| "epoch": 4.53873417721519, |
| "grad_norm": 0.24458506981164718, |
| "learning_rate": 1.0196151922224385e-06, |
| "loss": 0.3299, |
| "step": 1117 |
| }, |
| { |
| "epoch": 4.542784810126582, |
| "grad_norm": 0.21654491625589806, |
| "learning_rate": 1.0018002845214526e-06, |
| "loss": 0.3379, |
| "step": 1118 |
| }, |
| { |
| "epoch": 4.546835443037975, |
| "grad_norm": 0.21619432668241706, |
| "learning_rate": 9.841383857091947e-07, |
| "loss": 0.3757, |
| "step": 1119 |
| }, |
| { |
| "epoch": 4.550886075949367, |
| "grad_norm": 0.2135527968497539, |
| "learning_rate": 9.666296380321616e-07, |
| "loss": 0.3506, |
| "step": 1120 |
| }, |
| { |
| "epoch": 4.55493670886076, |
| "grad_norm": 0.22015730182747786, |
| "learning_rate": 9.492741825034124e-07, |
| "loss": 0.3542, |
| "step": 1121 |
| }, |
| { |
| "epoch": 4.558987341772152, |
| "grad_norm": 0.227885101210962, |
| "learning_rate": 9.320721589013892e-07, |
| "loss": 0.3303, |
| "step": 1122 |
| }, |
| { |
| "epoch": 4.563037974683544, |
| "grad_norm": 0.21564588371082707, |
| "learning_rate": 9.150237057688339e-07, |
| "loss": 0.3433, |
| "step": 1123 |
| }, |
| { |
| "epoch": 4.567088607594937, |
| "grad_norm": 0.2192072899838672, |
| "learning_rate": 8.981289604116328e-07, |
| "loss": 0.3517, |
| "step": 1124 |
| }, |
| { |
| "epoch": 4.571139240506329, |
| "grad_norm": 0.2230460288522539, |
| "learning_rate": 8.813880588977542e-07, |
| "loss": 0.3327, |
| "step": 1125 |
| }, |
| { |
| "epoch": 4.575189873417721, |
| "grad_norm": 0.21630898809670182, |
| "learning_rate": 8.648011360561126e-07, |
| "loss": 0.3493, |
| "step": 1126 |
| }, |
| { |
| "epoch": 4.579240506329114, |
| "grad_norm": 0.2219110579507975, |
| "learning_rate": 8.483683254755037e-07, |
| "loss": 0.345, |
| "step": 1127 |
| }, |
| { |
| "epoch": 4.583291139240506, |
| "grad_norm": 0.22829036725542842, |
| "learning_rate": 8.320897595035227e-07, |
| "loss": 0.3497, |
| "step": 1128 |
| }, |
| { |
| "epoch": 4.587341772151898, |
| "grad_norm": 0.23772071968463482, |
| "learning_rate": 8.159655692455093e-07, |
| "loss": 0.3102, |
| "step": 1129 |
| }, |
| { |
| "epoch": 4.591392405063291, |
| "grad_norm": 0.22127801439917633, |
| "learning_rate": 7.999958845634648e-07, |
| "loss": 0.3359, |
| "step": 1130 |
| }, |
| { |
| "epoch": 4.595443037974683, |
| "grad_norm": 0.2265667455218001, |
| "learning_rate": 7.841808340750478e-07, |
| "loss": 0.3491, |
| "step": 1131 |
| }, |
| { |
| "epoch": 4.599493670886076, |
| "grad_norm": 0.2306606884201352, |
| "learning_rate": 7.685205451524869e-07, |
| "loss": 0.3334, |
| "step": 1132 |
| }, |
| { |
| "epoch": 4.603544303797468, |
| "grad_norm": 0.22750629566324304, |
| "learning_rate": 7.530151439216027e-07, |
| "loss": 0.3112, |
| "step": 1133 |
| }, |
| { |
| "epoch": 4.6075949367088604, |
| "grad_norm": 0.2214549945115179, |
| "learning_rate": 7.376647552607675e-07, |
| "loss": 0.3399, |
| "step": 1134 |
| }, |
| { |
| "epoch": 4.611645569620253, |
| "grad_norm": 0.21925704364004547, |
| "learning_rate": 7.224695027998963e-07, |
| "loss": 0.3669, |
| "step": 1135 |
| }, |
| { |
| "epoch": 4.6156962025316455, |
| "grad_norm": 0.2166420859468684, |
| "learning_rate": 7.07429508919466e-07, |
| "loss": 0.3196, |
| "step": 1136 |
| }, |
| { |
| "epoch": 4.619746835443038, |
| "grad_norm": 0.23245586218930092, |
| "learning_rate": 6.925448947495206e-07, |
| "loss": 0.3283, |
| "step": 1137 |
| }, |
| { |
| "epoch": 4.6237974683544305, |
| "grad_norm": 0.23451696755636153, |
| "learning_rate": 6.778157801686936e-07, |
| "loss": 0.3253, |
| "step": 1138 |
| }, |
| { |
| "epoch": 4.627848101265823, |
| "grad_norm": 0.23566115388224512, |
| "learning_rate": 6.632422838032515e-07, |
| "loss": 0.3425, |
| "step": 1139 |
| }, |
| { |
| "epoch": 4.6318987341772155, |
| "grad_norm": 0.22161728403867287, |
| "learning_rate": 6.488245230261281e-07, |
| "loss": 0.342, |
| "step": 1140 |
| }, |
| { |
| "epoch": 4.635949367088608, |
| "grad_norm": 0.2165163919864329, |
| "learning_rate": 6.345626139559868e-07, |
| "loss": 0.3446, |
| "step": 1141 |
| }, |
| { |
| "epoch": 4.64, |
| "grad_norm": 0.22391429586985143, |
| "learning_rate": 6.204566714562866e-07, |
| "loss": 0.3382, |
| "step": 1142 |
| }, |
| { |
| "epoch": 4.644050632911393, |
| "grad_norm": 0.219589121963335, |
| "learning_rate": 6.06506809134344e-07, |
| "loss": 0.3396, |
| "step": 1143 |
| }, |
| { |
| "epoch": 4.648101265822785, |
| "grad_norm": 0.2195028219301542, |
| "learning_rate": 5.927131393404373e-07, |
| "loss": 0.3337, |
| "step": 1144 |
| }, |
| { |
| "epoch": 4.652151898734177, |
| "grad_norm": 0.22092242896764594, |
| "learning_rate": 5.790757731668817e-07, |
| "loss": 0.341, |
| "step": 1145 |
| }, |
| { |
| "epoch": 4.65620253164557, |
| "grad_norm": 0.21992573532297494, |
| "learning_rate": 5.655948204471507e-07, |
| "loss": 0.3423, |
| "step": 1146 |
| }, |
| { |
| "epoch": 4.660253164556962, |
| "grad_norm": 0.22778975370582277, |
| "learning_rate": 5.522703897549875e-07, |
| "loss": 0.3364, |
| "step": 1147 |
| }, |
| { |
| "epoch": 4.664303797468355, |
| "grad_norm": 0.22860409235647375, |
| "learning_rate": 5.391025884035239e-07, |
| "loss": 0.3444, |
| "step": 1148 |
| }, |
| { |
| "epoch": 4.668354430379747, |
| "grad_norm": 0.2156598829092732, |
| "learning_rate": 5.260915224444207e-07, |
| "loss": 0.3499, |
| "step": 1149 |
| }, |
| { |
| "epoch": 4.672405063291139, |
| "grad_norm": 0.22185043424999085, |
| "learning_rate": 5.132372966670129e-07, |
| "loss": 0.3302, |
| "step": 1150 |
| }, |
| { |
| "epoch": 4.676455696202532, |
| "grad_norm": 0.22324288492150526, |
| "learning_rate": 5.005400145974704e-07, |
| "loss": 0.3415, |
| "step": 1151 |
| }, |
| { |
| "epoch": 4.680506329113924, |
| "grad_norm": 0.22693325132925538, |
| "learning_rate": 4.879997784979562e-07, |
| "loss": 0.3272, |
| "step": 1152 |
| }, |
| { |
| "epoch": 4.684556962025317, |
| "grad_norm": 0.23649599941732452, |
| "learning_rate": 4.7561668936580984e-07, |
| "loss": 0.3394, |
| "step": 1153 |
| }, |
| { |
| "epoch": 4.688607594936709, |
| "grad_norm": 0.22203717988469662, |
| "learning_rate": 4.6339084693272306e-07, |
| "loss": 0.3473, |
| "step": 1154 |
| }, |
| { |
| "epoch": 4.692658227848101, |
| "grad_norm": 0.2076132691587771, |
| "learning_rate": 4.5132234966395847e-07, |
| "loss": 0.3708, |
| "step": 1155 |
| }, |
| { |
| "epoch": 4.696708860759494, |
| "grad_norm": 0.22247585137444284, |
| "learning_rate": 4.3941129475752795e-07, |
| "loss": 0.3378, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.700759493670886, |
| "grad_norm": 0.22603187382880338, |
| "learning_rate": 4.27657778143431e-07, |
| "loss": 0.3435, |
| "step": 1157 |
| }, |
| { |
| "epoch": 4.704810126582278, |
| "grad_norm": 0.22154674459770948, |
| "learning_rate": 4.1606189448287757e-07, |
| "loss": 0.3516, |
| "step": 1158 |
| }, |
| { |
| "epoch": 4.708860759493671, |
| "grad_norm": 0.22402884283506913, |
| "learning_rate": 4.046237371675177e-07, |
| "loss": 0.3365, |
| "step": 1159 |
| }, |
| { |
| "epoch": 4.712911392405063, |
| "grad_norm": 0.21492541325290254, |
| "learning_rate": 3.9334339831869963e-07, |
| "loss": 0.3568, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.716962025316455, |
| "grad_norm": 0.22883082000254862, |
| "learning_rate": 3.8222096878671955e-07, |
| "loss": 0.348, |
| "step": 1161 |
| }, |
| { |
| "epoch": 4.721012658227848, |
| "grad_norm": 0.21866230980481213, |
| "learning_rate": 3.7125653815009545e-07, |
| "loss": 0.3427, |
| "step": 1162 |
| }, |
| { |
| "epoch": 4.72506329113924, |
| "grad_norm": 0.2260558562109304, |
| "learning_rate": 3.6045019471484974e-07, |
| "loss": 0.3478, |
| "step": 1163 |
| }, |
| { |
| "epoch": 4.729113924050633, |
| "grad_norm": 0.22749430593483452, |
| "learning_rate": 3.498020255137813e-07, |
| "loss": 0.3119, |
| "step": 1164 |
| }, |
| { |
| "epoch": 4.733164556962025, |
| "grad_norm": 0.22206792926863306, |
| "learning_rate": 3.393121163057811e-07, |
| "loss": 0.3575, |
| "step": 1165 |
| }, |
| { |
| "epoch": 4.737215189873417, |
| "grad_norm": 0.23028395940593227, |
| "learning_rate": 3.289805515751399e-07, |
| "loss": 0.3331, |
| "step": 1166 |
| }, |
| { |
| "epoch": 4.74126582278481, |
| "grad_norm": 0.21943558656543075, |
| "learning_rate": 3.188074145308573e-07, |
| "loss": 0.3304, |
| "step": 1167 |
| }, |
| { |
| "epoch": 4.745316455696202, |
| "grad_norm": 0.2185411318845525, |
| "learning_rate": 3.087927871059804e-07, |
| "loss": 0.3421, |
| "step": 1168 |
| }, |
| { |
| "epoch": 4.749367088607595, |
| "grad_norm": 0.22859750395302583, |
| "learning_rate": 2.989367499569418e-07, |
| "loss": 0.3322, |
| "step": 1169 |
| }, |
| { |
| "epoch": 4.7534177215189874, |
| "grad_norm": 0.2800363271588219, |
| "learning_rate": 2.8923938246290917e-07, |
| "loss": 0.3673, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.7574683544303795, |
| "grad_norm": 0.21768296172355678, |
| "learning_rate": 2.7970076272514804e-07, |
| "loss": 0.3421, |
| "step": 1171 |
| }, |
| { |
| "epoch": 4.7615189873417725, |
| "grad_norm": 0.21466494019883914, |
| "learning_rate": 2.703209675663887e-07, |
| "loss": 0.3382, |
| "step": 1172 |
| }, |
| { |
| "epoch": 4.7655696202531646, |
| "grad_norm": 0.5287516162065553, |
| "learning_rate": 2.6110007253021374e-07, |
| "loss": 0.3563, |
| "step": 1173 |
| }, |
| { |
| "epoch": 4.769620253164557, |
| "grad_norm": 0.2253252841069492, |
| "learning_rate": 2.520381518804471e-07, |
| "loss": 0.3252, |
| "step": 1174 |
| }, |
| { |
| "epoch": 4.77367088607595, |
| "grad_norm": 0.22603215216217395, |
| "learning_rate": 2.4313527860054585e-07, |
| "loss": 0.3545, |
| "step": 1175 |
| }, |
| { |
| "epoch": 4.777721518987342, |
| "grad_norm": 0.21494642839240427, |
| "learning_rate": 2.343915243930317e-07, |
| "loss": 0.3687, |
| "step": 1176 |
| }, |
| { |
| "epoch": 4.781772151898734, |
| "grad_norm": 0.2197996273110546, |
| "learning_rate": 2.2580695967889367e-07, |
| "loss": 0.3413, |
| "step": 1177 |
| }, |
| { |
| "epoch": 4.785822784810127, |
| "grad_norm": 0.21748344274444936, |
| "learning_rate": 2.1738165359704189e-07, |
| "loss": 0.323, |
| "step": 1178 |
| }, |
| { |
| "epoch": 4.789873417721519, |
| "grad_norm": 0.27503646907847923, |
| "learning_rate": 2.0911567400373257e-07, |
| "loss": 0.3506, |
| "step": 1179 |
| }, |
| { |
| "epoch": 4.793924050632912, |
| "grad_norm": 0.22450138901393915, |
| "learning_rate": 2.0100908747202607e-07, |
| "loss": 0.3192, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.797974683544304, |
| "grad_norm": 0.22006645563236157, |
| "learning_rate": 1.9306195929125638e-07, |
| "loss": 0.3377, |
| "step": 1181 |
| }, |
| { |
| "epoch": 4.802025316455696, |
| "grad_norm": 0.22410265646821437, |
| "learning_rate": 1.8527435346650247e-07, |
| "loss": 0.3294, |
| "step": 1182 |
| }, |
| { |
| "epoch": 4.806075949367089, |
| "grad_norm": 0.28120372912324837, |
| "learning_rate": 1.7764633271807108e-07, |
| "loss": 0.3617, |
| "step": 1183 |
| }, |
| { |
| "epoch": 4.810126582278481, |
| "grad_norm": 0.23472804322871157, |
| "learning_rate": 1.7017795848099262e-07, |
| "loss": 0.3173, |
| "step": 1184 |
| }, |
| { |
| "epoch": 4.814177215189874, |
| "grad_norm": 0.21592033250967224, |
| "learning_rate": 1.6286929090452596e-07, |
| "loss": 0.3605, |
| "step": 1185 |
| }, |
| { |
| "epoch": 4.818227848101266, |
| "grad_norm": 0.21671223634571288, |
| "learning_rate": 1.557203888516745e-07, |
| "loss": 0.3521, |
| "step": 1186 |
| }, |
| { |
| "epoch": 4.822278481012658, |
| "grad_norm": 0.21846928410257557, |
| "learning_rate": 1.487313098987131e-07, |
| "loss": 0.327, |
| "step": 1187 |
| }, |
| { |
| "epoch": 4.826329113924051, |
| "grad_norm": 0.22683156521378925, |
| "learning_rate": 1.4190211033472402e-07, |
| "loss": 0.3374, |
| "step": 1188 |
| }, |
| { |
| "epoch": 4.830379746835443, |
| "grad_norm": 0.2259471659691339, |
| "learning_rate": 1.3523284516113955e-07, |
| "loss": 0.3438, |
| "step": 1189 |
| }, |
| { |
| "epoch": 4.834430379746835, |
| "grad_norm": 0.20872782887825414, |
| "learning_rate": 1.2872356809130682e-07, |
| "loss": 0.3487, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.838481012658228, |
| "grad_norm": 0.23195076947259177, |
| "learning_rate": 1.2237433155004807e-07, |
| "loss": 0.3132, |
| "step": 1191 |
| }, |
| { |
| "epoch": 4.84253164556962, |
| "grad_norm": 0.21240908688952878, |
| "learning_rate": 1.1618518667323886e-07, |
| "loss": 0.3556, |
| "step": 1192 |
| }, |
| { |
| "epoch": 4.846582278481012, |
| "grad_norm": 0.210591124683683, |
| "learning_rate": 1.1015618330740385e-07, |
| "loss": 0.367, |
| "step": 1193 |
| }, |
| { |
| "epoch": 4.850632911392405, |
| "grad_norm": 0.21524345788459137, |
| "learning_rate": 1.042873700093061e-07, |
| "loss": 0.3421, |
| "step": 1194 |
| }, |
| { |
| "epoch": 4.854683544303797, |
| "grad_norm": 0.22795877037205065, |
| "learning_rate": 9.857879404556291e-08, |
| "loss": 0.3347, |
| "step": 1195 |
| }, |
| { |
| "epoch": 4.85873417721519, |
| "grad_norm": 0.23608514846358708, |
| "learning_rate": 9.303050139225722e-08, |
| "loss": 0.3237, |
| "step": 1196 |
| }, |
| { |
| "epoch": 4.862784810126582, |
| "grad_norm": 0.23761809459276664, |
| "learning_rate": 8.76425367345779e-08, |
| "loss": 0.3346, |
| "step": 1197 |
| }, |
| { |
| "epoch": 4.866835443037974, |
| "grad_norm": 0.21782005205768631, |
| "learning_rate": 8.241494346644897e-08, |
| "loss": 0.3422, |
| "step": 1198 |
| }, |
| { |
| "epoch": 4.870886075949367, |
| "grad_norm": 0.23660565436367104, |
| "learning_rate": 7.734776369019204e-08, |
| "loss": 0.3232, |
| "step": 1199 |
| }, |
| { |
| "epoch": 4.874936708860759, |
| "grad_norm": 0.24909080528855831, |
| "learning_rate": 7.244103821617332e-08, |
| "loss": 0.3335, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.878987341772152, |
| "grad_norm": 0.2305707284654281, |
| "learning_rate": 6.769480656248606e-08, |
| "loss": 0.3304, |
| "step": 1201 |
| }, |
| { |
| "epoch": 4.883037974683544, |
| "grad_norm": 0.23240002699255347, |
| "learning_rate": 6.310910695462635e-08, |
| "loss": 0.3556, |
| "step": 1202 |
| }, |
| { |
| "epoch": 4.8870886075949365, |
| "grad_norm": 0.22269063144876258, |
| "learning_rate": 5.8683976325191185e-08, |
| "loss": 0.3204, |
| "step": 1203 |
| }, |
| { |
| "epoch": 4.891139240506329, |
| "grad_norm": 0.23106475181964103, |
| "learning_rate": 5.4419450313571984e-08, |
| "loss": 0.3138, |
| "step": 1204 |
| }, |
| { |
| "epoch": 4.8951898734177215, |
| "grad_norm": 0.22953782433699457, |
| "learning_rate": 5.031556326567488e-08, |
| "loss": 0.3362, |
| "step": 1205 |
| }, |
| { |
| "epoch": 4.899240506329114, |
| "grad_norm": 0.20062084668136676, |
| "learning_rate": 4.637234823364312e-08, |
| "loss": 0.3646, |
| "step": 1206 |
| }, |
| { |
| "epoch": 4.9032911392405065, |
| "grad_norm": 0.22419247310858656, |
| "learning_rate": 4.258983697558838e-08, |
| "loss": 0.3502, |
| "step": 1207 |
| }, |
| { |
| "epoch": 4.907341772151899, |
| "grad_norm": 0.22144127163243738, |
| "learning_rate": 3.896805995533548e-08, |
| "loss": 0.3382, |
| "step": 1208 |
| }, |
| { |
| "epoch": 4.911392405063291, |
| "grad_norm": 0.22320950471639764, |
| "learning_rate": 3.550704634218028e-08, |
| "loss": 0.3312, |
| "step": 1209 |
| }, |
| { |
| "epoch": 4.915443037974684, |
| "grad_norm": 0.23750791528744938, |
| "learning_rate": 3.2206824010647676e-08, |
| "loss": 0.3236, |
| "step": 1210 |
| }, |
| { |
| "epoch": 4.919493670886076, |
| "grad_norm": 0.2257469901941087, |
| "learning_rate": 2.9067419540278476e-08, |
| "loss": 0.3332, |
| "step": 1211 |
| }, |
| { |
| "epoch": 4.923544303797469, |
| "grad_norm": 0.22078250467287752, |
| "learning_rate": 2.6088858215400638e-08, |
| "loss": 0.3302, |
| "step": 1212 |
| }, |
| { |
| "epoch": 4.927594936708861, |
| "grad_norm": 0.23308112037608622, |
| "learning_rate": 2.3271164024940564e-08, |
| "loss": 0.3514, |
| "step": 1213 |
| }, |
| { |
| "epoch": 4.931645569620253, |
| "grad_norm": 0.22151560304937246, |
| "learning_rate": 2.061435966221881e-08, |
| "loss": 0.3336, |
| "step": 1214 |
| }, |
| { |
| "epoch": 4.935696202531646, |
| "grad_norm": 0.21698014774846952, |
| "learning_rate": 1.811846652477245e-08, |
| "loss": 0.3278, |
| "step": 1215 |
| }, |
| { |
| "epoch": 4.939746835443038, |
| "grad_norm": 0.2243939352032785, |
| "learning_rate": 1.5783504714184106e-08, |
| "loss": 0.3432, |
| "step": 1216 |
| }, |
| { |
| "epoch": 4.943797468354431, |
| "grad_norm": 0.22736524443989478, |
| "learning_rate": 1.360949303591097e-08, |
| "loss": 0.3392, |
| "step": 1217 |
| }, |
| { |
| "epoch": 4.947848101265823, |
| "grad_norm": 0.22430049392042806, |
| "learning_rate": 1.1596448999144916e-08, |
| "loss": 0.3374, |
| "step": 1218 |
| }, |
| { |
| "epoch": 4.951898734177215, |
| "grad_norm": 0.22869423703984793, |
| "learning_rate": 9.744388816668172e-09, |
| "loss": 0.3304, |
| "step": 1219 |
| }, |
| { |
| "epoch": 4.955949367088608, |
| "grad_norm": 0.22672386048570264, |
| "learning_rate": 8.05332740472009e-09, |
| "loss": 0.3491, |
| "step": 1220 |
| }, |
| { |
| "epoch": 4.96, |
| "grad_norm": 0.2145857620709742, |
| "learning_rate": 6.523278382872811e-09, |
| "loss": 0.3575, |
| "step": 1221 |
| }, |
| { |
| "epoch": 4.964050632911392, |
| "grad_norm": 0.23208322109322455, |
| "learning_rate": 5.15425407393133e-09, |
| "loss": 0.3472, |
| "step": 1222 |
| }, |
| { |
| "epoch": 4.968101265822785, |
| "grad_norm": 0.22575309951801417, |
| "learning_rate": 3.94626550383137e-09, |
| "loss": 0.3339, |
| "step": 1223 |
| }, |
| { |
| "epoch": 4.972151898734177, |
| "grad_norm": 0.21619559930612434, |
| "learning_rate": 2.899322401546112e-09, |
| "loss": 0.3468, |
| "step": 1224 |
| }, |
| { |
| "epoch": 4.976202531645569, |
| "grad_norm": 0.22197887040289424, |
| "learning_rate": 2.013433199010706e-09, |
| "loss": 0.3372, |
| "step": 1225 |
| }, |
| { |
| "epoch": 4.980253164556962, |
| "grad_norm": 0.2281099393347131, |
| "learning_rate": 1.2886050310556563e-09, |
| "loss": 0.3493, |
| "step": 1226 |
| }, |
| { |
| "epoch": 4.984303797468354, |
| "grad_norm": 0.22884895564545982, |
| "learning_rate": 7.248437353468695e-10, |
| "loss": 0.3392, |
| "step": 1227 |
| }, |
| { |
| "epoch": 4.988354430379747, |
| "grad_norm": 0.22703987340643778, |
| "learning_rate": 3.221538523412449e-10, |
| "loss": 0.3257, |
| "step": 1228 |
| }, |
| { |
| "epoch": 4.992405063291139, |
| "grad_norm": 0.2235451665486011, |
| "learning_rate": 8.053862524670663e-11, |
| "loss": 0.3345, |
| "step": 1229 |
| }, |
| { |
| "epoch": 4.996455696202531, |
| "grad_norm": 0.21665439406294906, |
| "learning_rate": 0.0, |
| "loss": 0.3405, |
| "step": 1230 |
| }, |
| { |
| "epoch": 4.996455696202531, |
| "step": 1230, |
| "total_flos": 4.615292924698526e+18, |
| "train_loss": 0.06754724102291634, |
| "train_runtime": 10670.9883, |
| "train_samples_per_second": 14.807, |
| "train_steps_per_second": 0.115 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1230, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4.615292924698526e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |