| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9964614295824488, |
| "eval_steps": 500, |
| "global_step": 2118, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0014154281670205238, |
| "grad_norm": 56.700288366666925, |
| "learning_rate": 0.0, |
| "loss": 11.0711, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0028308563340410475, |
| "grad_norm": 57.44157352268369, |
| "learning_rate": 2.3584905660377358e-07, |
| "loss": 10.9796, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.004246284501061571, |
| "grad_norm": 53.372480303586045, |
| "learning_rate": 4.7169811320754717e-07, |
| "loss": 11.055, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.005661712668082095, |
| "grad_norm": 54.58404851338792, |
| "learning_rate": 7.075471698113208e-07, |
| "loss": 11.1072, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.007077140835102618, |
| "grad_norm": 56.233021204938744, |
| "learning_rate": 9.433962264150943e-07, |
| "loss": 11.1468, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.008492569002123142, |
| "grad_norm": 58.256970362810634, |
| "learning_rate": 1.179245283018868e-06, |
| "loss": 11.0462, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.009907997169143666, |
| "grad_norm": 56.64095298259024, |
| "learning_rate": 1.4150943396226415e-06, |
| "loss": 10.9947, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.01132342533616419, |
| "grad_norm": 58.72592183949963, |
| "learning_rate": 1.650943396226415e-06, |
| "loss": 10.7203, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.012738853503184714, |
| "grad_norm": 63.37966188745395, |
| "learning_rate": 1.8867924528301887e-06, |
| "loss": 10.6896, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.014154281670205236, |
| "grad_norm": 86.37474442272796, |
| "learning_rate": 2.1226415094339624e-06, |
| "loss": 9.6251, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.01556970983722576, |
| "grad_norm": 90.0472325162758, |
| "learning_rate": 2.358490566037736e-06, |
| "loss": 9.4276, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.016985138004246284, |
| "grad_norm": 102.22797914866898, |
| "learning_rate": 2.5943396226415095e-06, |
| "loss": 8.8853, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.01840056617126681, |
| "grad_norm": 86.241399089093, |
| "learning_rate": 2.830188679245283e-06, |
| "loss": 4.4159, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.019815994338287332, |
| "grad_norm": 59.25457272201123, |
| "learning_rate": 3.0660377358490567e-06, |
| "loss": 3.3741, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.021231422505307854, |
| "grad_norm": 55.03231585614142, |
| "learning_rate": 3.30188679245283e-06, |
| "loss": 3.1289, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.02264685067232838, |
| "grad_norm": 35.542100873737745, |
| "learning_rate": 3.5377358490566038e-06, |
| "loss": 2.4912, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.024062278839348902, |
| "grad_norm": 29.48191176082474, |
| "learning_rate": 3.7735849056603773e-06, |
| "loss": 2.2826, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.025477707006369428, |
| "grad_norm": 7.025724837128468, |
| "learning_rate": 4.009433962264151e-06, |
| "loss": 1.4331, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.02689313517338995, |
| "grad_norm": 5.084153252624878, |
| "learning_rate": 4.245283018867925e-06, |
| "loss": 1.3344, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.028308563340410473, |
| "grad_norm": 4.10463053850957, |
| "learning_rate": 4.481132075471698e-06, |
| "loss": 1.2341, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.029723991507430998, |
| "grad_norm": 3.239333220848038, |
| "learning_rate": 4.716981132075472e-06, |
| "loss": 1.2445, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.03113941967445152, |
| "grad_norm": 2.4598734753960074, |
| "learning_rate": 4.952830188679246e-06, |
| "loss": 1.1433, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.03255484784147204, |
| "grad_norm": 2.0603206837135257, |
| "learning_rate": 5.188679245283019e-06, |
| "loss": 1.106, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.03397027600849257, |
| "grad_norm": 1.5119226227330653, |
| "learning_rate": 5.424528301886793e-06, |
| "loss": 1.0189, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.035385704175513094, |
| "grad_norm": 38.97325572744693, |
| "learning_rate": 5.660377358490566e-06, |
| "loss": 0.952, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.03680113234253362, |
| "grad_norm": 35.594655464652305, |
| "learning_rate": 5.89622641509434e-06, |
| "loss": 0.9317, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.03821656050955414, |
| "grad_norm": 2.243266260915374, |
| "learning_rate": 6.132075471698113e-06, |
| "loss": 0.8992, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.039631988676574664, |
| "grad_norm": 1.2834980121102546, |
| "learning_rate": 6.367924528301887e-06, |
| "loss": 0.8868, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.04104741684359519, |
| "grad_norm": 1.0074041465604129, |
| "learning_rate": 6.60377358490566e-06, |
| "loss": 0.8448, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.04246284501061571, |
| "grad_norm": 0.9346105193952094, |
| "learning_rate": 6.839622641509434e-06, |
| "loss": 0.8312, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.043878273177636234, |
| "grad_norm": 0.8661479332986395, |
| "learning_rate": 7.0754716981132075e-06, |
| "loss": 0.794, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.04529370134465676, |
| "grad_norm": 0.8137863776538741, |
| "learning_rate": 7.3113207547169815e-06, |
| "loss": 0.7864, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.04670912951167728, |
| "grad_norm": 0.7938781033721152, |
| "learning_rate": 7.547169811320755e-06, |
| "loss": 0.7867, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.048124557678697805, |
| "grad_norm": 1.0106701346359332, |
| "learning_rate": 7.783018867924528e-06, |
| "loss": 0.7753, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.04953998584571833, |
| "grad_norm": 0.6556714750236174, |
| "learning_rate": 8.018867924528302e-06, |
| "loss": 0.7516, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.050955414012738856, |
| "grad_norm": 0.6214486522550411, |
| "learning_rate": 8.254716981132076e-06, |
| "loss": 0.7006, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.052370842179759375, |
| "grad_norm": 0.7730669399452611, |
| "learning_rate": 8.49056603773585e-06, |
| "loss": 0.7446, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.0537862703467799, |
| "grad_norm": 0.7018102584659955, |
| "learning_rate": 8.726415094339622e-06, |
| "loss": 0.711, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.055201698513800426, |
| "grad_norm": 0.5894767958912929, |
| "learning_rate": 8.962264150943396e-06, |
| "loss": 0.7113, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.056617126680820945, |
| "grad_norm": 0.5133118675149501, |
| "learning_rate": 9.19811320754717e-06, |
| "loss": 0.6953, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.05803255484784147, |
| "grad_norm": 0.6025515680004425, |
| "learning_rate": 9.433962264150944e-06, |
| "loss": 0.6876, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.059447983014861996, |
| "grad_norm": 0.5831736200181878, |
| "learning_rate": 9.669811320754718e-06, |
| "loss": 0.7135, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.06086341118188252, |
| "grad_norm": 0.5035341748650088, |
| "learning_rate": 9.905660377358492e-06, |
| "loss": 0.6603, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.06227883934890304, |
| "grad_norm": 0.5102193306030185, |
| "learning_rate": 1.0141509433962266e-05, |
| "loss": 0.6497, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.06369426751592357, |
| "grad_norm": 0.4668554672731941, |
| "learning_rate": 1.0377358490566038e-05, |
| "loss": 0.6563, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.06510969568294409, |
| "grad_norm": 0.39544745906790735, |
| "learning_rate": 1.0613207547169812e-05, |
| "loss": 0.6421, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.06652512384996462, |
| "grad_norm": 0.4641271827730375, |
| "learning_rate": 1.0849056603773586e-05, |
| "loss": 0.6502, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.06794055201698514, |
| "grad_norm": 0.5259678866370957, |
| "learning_rate": 1.108490566037736e-05, |
| "loss": 0.6534, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.06935598018400566, |
| "grad_norm": 0.4245716432566208, |
| "learning_rate": 1.1320754716981132e-05, |
| "loss": 0.6342, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.07077140835102619, |
| "grad_norm": 0.3716803827062742, |
| "learning_rate": 1.1556603773584906e-05, |
| "loss": 0.581, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.07218683651804671, |
| "grad_norm": 0.401007074045064, |
| "learning_rate": 1.179245283018868e-05, |
| "loss": 0.592, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.07360226468506724, |
| "grad_norm": 0.36955059055091255, |
| "learning_rate": 1.2028301886792454e-05, |
| "loss": 0.5982, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.07501769285208776, |
| "grad_norm": 0.37553426047306404, |
| "learning_rate": 1.2264150943396227e-05, |
| "loss": 0.6227, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.07643312101910828, |
| "grad_norm": 0.38357989584669916, |
| "learning_rate": 1.25e-05, |
| "loss": 0.6383, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.07784854918612881, |
| "grad_norm": 0.30675169193620705, |
| "learning_rate": 1.2735849056603775e-05, |
| "loss": 0.627, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.07926397735314933, |
| "grad_norm": 0.29348505645605366, |
| "learning_rate": 1.2971698113207547e-05, |
| "loss": 0.6151, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.08067940552016985, |
| "grad_norm": 0.35004341869750544, |
| "learning_rate": 1.320754716981132e-05, |
| "loss": 0.5992, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.08209483368719038, |
| "grad_norm": 0.3279980602714091, |
| "learning_rate": 1.3443396226415095e-05, |
| "loss": 0.6399, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.0835102618542109, |
| "grad_norm": 0.2777121636040088, |
| "learning_rate": 1.3679245283018869e-05, |
| "loss": 0.5908, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.08492569002123142, |
| "grad_norm": 0.2646786874035676, |
| "learning_rate": 1.3915094339622641e-05, |
| "loss": 0.5783, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.08634111818825195, |
| "grad_norm": 0.3153521431686913, |
| "learning_rate": 1.4150943396226415e-05, |
| "loss": 0.599, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.08775654635527247, |
| "grad_norm": 0.28886428433507566, |
| "learning_rate": 1.4386792452830189e-05, |
| "loss": 0.6061, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.08917197452229299, |
| "grad_norm": 0.29782611977418977, |
| "learning_rate": 1.4622641509433963e-05, |
| "loss": 0.6184, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.09058740268931352, |
| "grad_norm": 0.25742903864507843, |
| "learning_rate": 1.4858490566037735e-05, |
| "loss": 0.5752, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.09200283085633404, |
| "grad_norm": 0.25729978441166224, |
| "learning_rate": 1.509433962264151e-05, |
| "loss": 0.5831, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.09341825902335456, |
| "grad_norm": 0.2730860454565041, |
| "learning_rate": 1.5330188679245283e-05, |
| "loss": 0.5699, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.09483368719037509, |
| "grad_norm": 0.2486573103575608, |
| "learning_rate": 1.5566037735849056e-05, |
| "loss": 0.6091, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.09624911535739561, |
| "grad_norm": 0.2652550247675538, |
| "learning_rate": 1.580188679245283e-05, |
| "loss": 0.5901, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.09766454352441614, |
| "grad_norm": 0.276533754994318, |
| "learning_rate": 1.6037735849056604e-05, |
| "loss": 0.5834, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.09907997169143666, |
| "grad_norm": 0.23811332725591694, |
| "learning_rate": 1.6273584905660376e-05, |
| "loss": 0.5691, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.10049539985845718, |
| "grad_norm": 0.2535636624906435, |
| "learning_rate": 1.650943396226415e-05, |
| "loss": 0.5669, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.10191082802547771, |
| "grad_norm": 0.2844750862752134, |
| "learning_rate": 1.6745283018867924e-05, |
| "loss": 0.5832, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.10332625619249823, |
| "grad_norm": 0.27334058079679957, |
| "learning_rate": 1.69811320754717e-05, |
| "loss": 0.5792, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.10474168435951875, |
| "grad_norm": 0.2760364074849346, |
| "learning_rate": 1.7216981132075472e-05, |
| "loss": 0.5859, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.10615711252653928, |
| "grad_norm": 0.24662318119066878, |
| "learning_rate": 1.7452830188679244e-05, |
| "loss": 0.5665, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.1075725406935598, |
| "grad_norm": 0.25506885251253175, |
| "learning_rate": 1.768867924528302e-05, |
| "loss": 0.5584, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.10898796886058032, |
| "grad_norm": 0.26773057280476475, |
| "learning_rate": 1.7924528301886792e-05, |
| "loss": 0.5547, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.11040339702760085, |
| "grad_norm": 0.26103279361024406, |
| "learning_rate": 1.8160377358490564e-05, |
| "loss": 0.5539, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.11181882519462137, |
| "grad_norm": 0.2535566104327732, |
| "learning_rate": 1.839622641509434e-05, |
| "loss": 0.5393, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.11323425336164189, |
| "grad_norm": 0.25737562711758843, |
| "learning_rate": 1.8632075471698112e-05, |
| "loss": 0.5519, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.11464968152866242, |
| "grad_norm": 0.2516666029791499, |
| "learning_rate": 1.8867924528301888e-05, |
| "loss": 0.5586, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.11606510969568294, |
| "grad_norm": 0.27538960051050937, |
| "learning_rate": 1.9103773584905664e-05, |
| "loss": 0.537, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.11748053786270347, |
| "grad_norm": 0.25155459291746524, |
| "learning_rate": 1.9339622641509436e-05, |
| "loss": 0.5238, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.11889596602972399, |
| "grad_norm": 0.2610631357388765, |
| "learning_rate": 1.9575471698113208e-05, |
| "loss": 0.5469, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.12031139419674451, |
| "grad_norm": 0.2710744295346308, |
| "learning_rate": 1.9811320754716984e-05, |
| "loss": 0.5342, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.12172682236376504, |
| "grad_norm": 0.2746000305881197, |
| "learning_rate": 2.0047169811320756e-05, |
| "loss": 0.5533, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.12314225053078556, |
| "grad_norm": 0.2713072569437375, |
| "learning_rate": 2.0283018867924532e-05, |
| "loss": 0.564, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.12455767869780608, |
| "grad_norm": 0.25200431770680803, |
| "learning_rate": 2.0518867924528304e-05, |
| "loss": 0.5308, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.1259731068648266, |
| "grad_norm": 0.2904072154659877, |
| "learning_rate": 2.0754716981132076e-05, |
| "loss": 0.5561, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.12738853503184713, |
| "grad_norm": 0.29066427628129154, |
| "learning_rate": 2.0990566037735852e-05, |
| "loss": 0.5419, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.12880396319886767, |
| "grad_norm": 0.26449247738413356, |
| "learning_rate": 2.1226415094339624e-05, |
| "loss": 0.53, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.13021939136588817, |
| "grad_norm": 0.28680674957429403, |
| "learning_rate": 2.1462264150943397e-05, |
| "loss": 0.5631, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.1316348195329087, |
| "grad_norm": 0.29348512626053397, |
| "learning_rate": 2.1698113207547172e-05, |
| "loss": 0.5488, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.13305024769992924, |
| "grad_norm": 0.2670829979081624, |
| "learning_rate": 2.1933962264150945e-05, |
| "loss": 0.5607, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.13446567586694974, |
| "grad_norm": 0.2951742951553837, |
| "learning_rate": 2.216981132075472e-05, |
| "loss": 0.5264, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.13588110403397027, |
| "grad_norm": 0.2738940637804454, |
| "learning_rate": 2.2405660377358493e-05, |
| "loss": 0.5497, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.1372965322009908, |
| "grad_norm": 0.260689152524016, |
| "learning_rate": 2.2641509433962265e-05, |
| "loss": 0.529, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.1387119603680113, |
| "grad_norm": 0.3460865251704031, |
| "learning_rate": 2.287735849056604e-05, |
| "loss": 0.5374, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.14012738853503184, |
| "grad_norm": 0.26025504251336634, |
| "learning_rate": 2.3113207547169813e-05, |
| "loss": 0.5197, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.14154281670205238, |
| "grad_norm": 0.32553833522366754, |
| "learning_rate": 2.3349056603773585e-05, |
| "loss": 0.5483, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.14295824486907288, |
| "grad_norm": 0.28825995008378963, |
| "learning_rate": 2.358490566037736e-05, |
| "loss": 0.5206, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.14437367303609341, |
| "grad_norm": 0.2873909845421847, |
| "learning_rate": 2.3820754716981133e-05, |
| "loss": 0.4962, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.14578910120311395, |
| "grad_norm": 0.28026036525465287, |
| "learning_rate": 2.405660377358491e-05, |
| "loss": 0.5446, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.14720452937013448, |
| "grad_norm": 0.3272353770003188, |
| "learning_rate": 2.429245283018868e-05, |
| "loss": 0.5166, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.14861995753715498, |
| "grad_norm": 0.31747918749974297, |
| "learning_rate": 2.4528301886792453e-05, |
| "loss": 0.5468, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.15003538570417552, |
| "grad_norm": 0.3248933376699994, |
| "learning_rate": 2.476415094339623e-05, |
| "loss": 0.5378, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.15145081387119605, |
| "grad_norm": 0.34796809815050533, |
| "learning_rate": 2.5e-05, |
| "loss": 0.5336, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.15286624203821655, |
| "grad_norm": 0.32031365578591203, |
| "learning_rate": 2.5235849056603777e-05, |
| "loss": 0.5265, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.1542816702052371, |
| "grad_norm": 0.3290449991605199, |
| "learning_rate": 2.547169811320755e-05, |
| "loss": 0.5419, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.15569709837225762, |
| "grad_norm": 0.31459646236712213, |
| "learning_rate": 2.5707547169811325e-05, |
| "loss": 0.5267, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.15711252653927812, |
| "grad_norm": 0.31446522115607733, |
| "learning_rate": 2.5943396226415094e-05, |
| "loss": 0.5425, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.15852795470629866, |
| "grad_norm": 0.314348472522342, |
| "learning_rate": 2.6179245283018873e-05, |
| "loss": 0.5194, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.1599433828733192, |
| "grad_norm": 0.33590260833875185, |
| "learning_rate": 2.641509433962264e-05, |
| "loss": 0.5286, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.1613588110403397, |
| "grad_norm": 0.293348617675266, |
| "learning_rate": 2.6650943396226417e-05, |
| "loss": 0.5227, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.16277423920736023, |
| "grad_norm": 0.2908436470141757, |
| "learning_rate": 2.688679245283019e-05, |
| "loss": 0.5342, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.16418966737438076, |
| "grad_norm": 0.2909829047502465, |
| "learning_rate": 2.7122641509433965e-05, |
| "loss": 0.5235, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.16560509554140126, |
| "grad_norm": 0.30833330637724304, |
| "learning_rate": 2.7358490566037738e-05, |
| "loss": 0.519, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.1670205237084218, |
| "grad_norm": 0.30762798006157255, |
| "learning_rate": 2.7594339622641513e-05, |
| "loss": 0.534, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.16843595187544233, |
| "grad_norm": 0.2865739474038587, |
| "learning_rate": 2.7830188679245282e-05, |
| "loss": 0.517, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.16985138004246284, |
| "grad_norm": 0.33305918186709865, |
| "learning_rate": 2.806603773584906e-05, |
| "loss": 0.5249, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.17126680820948337, |
| "grad_norm": 0.28237709762325275, |
| "learning_rate": 2.830188679245283e-05, |
| "loss": 0.5034, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.1726822363765039, |
| "grad_norm": 0.27928952164149773, |
| "learning_rate": 2.8537735849056606e-05, |
| "loss": 0.5375, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.1740976645435244, |
| "grad_norm": 0.324115687867415, |
| "learning_rate": 2.8773584905660378e-05, |
| "loss": 0.5246, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.17551309271054494, |
| "grad_norm": 0.2762247549280857, |
| "learning_rate": 2.9009433962264154e-05, |
| "loss": 0.5055, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.17692852087756547, |
| "grad_norm": 0.32165873663895433, |
| "learning_rate": 2.9245283018867926e-05, |
| "loss": 0.5065, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.17834394904458598, |
| "grad_norm": 0.3089669988568903, |
| "learning_rate": 2.9481132075471702e-05, |
| "loss": 0.5123, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.1797593772116065, |
| "grad_norm": 0.2915336309538987, |
| "learning_rate": 2.971698113207547e-05, |
| "loss": 0.5103, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.18117480537862704, |
| "grad_norm": 0.33637365443529005, |
| "learning_rate": 2.995283018867925e-05, |
| "loss": 0.5363, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.18259023354564755, |
| "grad_norm": 0.3122972896517381, |
| "learning_rate": 3.018867924528302e-05, |
| "loss": 0.5194, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.18400566171266808, |
| "grad_norm": 0.32543770734441557, |
| "learning_rate": 3.0424528301886794e-05, |
| "loss": 0.5135, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.1854210898796886, |
| "grad_norm": 0.27252001697072026, |
| "learning_rate": 3.0660377358490567e-05, |
| "loss": 0.528, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.18683651804670912, |
| "grad_norm": 0.34402518951819805, |
| "learning_rate": 3.0896226415094346e-05, |
| "loss": 0.5195, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.18825194621372965, |
| "grad_norm": 0.3126019430630103, |
| "learning_rate": 3.113207547169811e-05, |
| "loss": 0.5491, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.18966737438075018, |
| "grad_norm": 0.2688192270465016, |
| "learning_rate": 3.136792452830189e-05, |
| "loss": 0.5275, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.1910828025477707, |
| "grad_norm": 0.3119675680452963, |
| "learning_rate": 3.160377358490566e-05, |
| "loss": 0.5277, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.19249823071479122, |
| "grad_norm": 0.25218648912353075, |
| "learning_rate": 3.1839622641509435e-05, |
| "loss": 0.5024, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.19391365888181175, |
| "grad_norm": 0.3018799406875476, |
| "learning_rate": 3.207547169811321e-05, |
| "loss": 0.5113, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.19532908704883228, |
| "grad_norm": 0.2851896196357652, |
| "learning_rate": 3.2311320754716986e-05, |
| "loss": 0.4968, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.1967445152158528, |
| "grad_norm": 0.31923660565775647, |
| "learning_rate": 3.254716981132075e-05, |
| "loss": 0.5179, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.19815994338287332, |
| "grad_norm": 0.3376523343537797, |
| "learning_rate": 3.278301886792453e-05, |
| "loss": 0.4941, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.19957537154989385, |
| "grad_norm": 0.3325849764665281, |
| "learning_rate": 3.30188679245283e-05, |
| "loss": 0.5348, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.20099079971691436, |
| "grad_norm": 0.2951463049393858, |
| "learning_rate": 3.3254716981132075e-05, |
| "loss": 0.5089, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.2024062278839349, |
| "grad_norm": 0.3368157389197186, |
| "learning_rate": 3.349056603773585e-05, |
| "loss": 0.5116, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.20382165605095542, |
| "grad_norm": 0.3195128601211723, |
| "learning_rate": 3.3726415094339627e-05, |
| "loss": 0.5229, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.20523708421797593, |
| "grad_norm": 0.36219481973365675, |
| "learning_rate": 3.39622641509434e-05, |
| "loss": 0.5122, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.20665251238499646, |
| "grad_norm": 0.3172880541291816, |
| "learning_rate": 3.419811320754717e-05, |
| "loss": 0.5006, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.208067940552017, |
| "grad_norm": 0.3789720604684654, |
| "learning_rate": 3.4433962264150943e-05, |
| "loss": 0.4973, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.2094833687190375, |
| "grad_norm": 0.3455429168182563, |
| "learning_rate": 3.466981132075472e-05, |
| "loss": 0.5127, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.21089879688605803, |
| "grad_norm": 0.30672119758259186, |
| "learning_rate": 3.490566037735849e-05, |
| "loss": 0.4918, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.21231422505307856, |
| "grad_norm": 0.332798117726157, |
| "learning_rate": 3.514150943396227e-05, |
| "loss": 0.5, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.21372965322009907, |
| "grad_norm": 0.33726327033143527, |
| "learning_rate": 3.537735849056604e-05, |
| "loss": 0.4936, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.2151450813871196, |
| "grad_norm": 0.3032929045898552, |
| "learning_rate": 3.561320754716981e-05, |
| "loss": 0.4937, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.21656050955414013, |
| "grad_norm": 0.3589340045521236, |
| "learning_rate": 3.5849056603773584e-05, |
| "loss": 0.4882, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.21797593772116064, |
| "grad_norm": 0.31519815057694117, |
| "learning_rate": 3.608490566037736e-05, |
| "loss": 0.4884, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.21939136588818117, |
| "grad_norm": 0.35459205720985826, |
| "learning_rate": 3.632075471698113e-05, |
| "loss": 0.5036, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.2208067940552017, |
| "grad_norm": 0.43207177900202387, |
| "learning_rate": 3.655660377358491e-05, |
| "loss": 0.4988, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.2222222222222222, |
| "grad_norm": 0.36421480300311665, |
| "learning_rate": 3.679245283018868e-05, |
| "loss": 0.5089, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.22363765038924274, |
| "grad_norm": 0.3834592202853376, |
| "learning_rate": 3.702830188679245e-05, |
| "loss": 0.5143, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.22505307855626328, |
| "grad_norm": 0.4048640876243969, |
| "learning_rate": 3.7264150943396224e-05, |
| "loss": 0.5204, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.22646850672328378, |
| "grad_norm": 0.4624530002488823, |
| "learning_rate": 3.7500000000000003e-05, |
| "loss": 0.5226, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.2278839348903043, |
| "grad_norm": 0.3480343117860362, |
| "learning_rate": 3.7735849056603776e-05, |
| "loss": 0.5081, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.22929936305732485, |
| "grad_norm": 0.42689915332306533, |
| "learning_rate": 3.797169811320755e-05, |
| "loss": 0.4847, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.23071479122434538, |
| "grad_norm": 0.4105608858696282, |
| "learning_rate": 3.820754716981133e-05, |
| "loss": 0.497, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.23213021939136588, |
| "grad_norm": 0.43465063717377017, |
| "learning_rate": 3.844339622641509e-05, |
| "loss": 0.4867, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.23354564755838642, |
| "grad_norm": 0.4724016009185264, |
| "learning_rate": 3.867924528301887e-05, |
| "loss": 0.4869, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.23496107572540695, |
| "grad_norm": 0.5181772638936092, |
| "learning_rate": 3.8915094339622644e-05, |
| "loss": 0.4916, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.23637650389242745, |
| "grad_norm": 0.3856581389322893, |
| "learning_rate": 3.9150943396226416e-05, |
| "loss": 0.4942, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.23779193205944799, |
| "grad_norm": 0.6128154044577643, |
| "learning_rate": 3.938679245283019e-05, |
| "loss": 0.5066, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.23920736022646852, |
| "grad_norm": 0.5053186581991689, |
| "learning_rate": 3.962264150943397e-05, |
| "loss": 0.4964, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.24062278839348902, |
| "grad_norm": 0.49986453289811006, |
| "learning_rate": 3.985849056603774e-05, |
| "loss": 0.5, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.24203821656050956, |
| "grad_norm": 0.6792847076210307, |
| "learning_rate": 4.009433962264151e-05, |
| "loss": 0.5246, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.2434536447275301, |
| "grad_norm": 0.6145656510061281, |
| "learning_rate": 4.0330188679245284e-05, |
| "loss": 0.4873, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.2448690728945506, |
| "grad_norm": 0.4118351232509526, |
| "learning_rate": 4.0566037735849064e-05, |
| "loss": 0.4843, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.24628450106157113, |
| "grad_norm": 0.6392886230615822, |
| "learning_rate": 4.080188679245283e-05, |
| "loss": 0.4967, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.24769992922859166, |
| "grad_norm": 0.34893502077766275, |
| "learning_rate": 4.103773584905661e-05, |
| "loss": 0.4825, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.24911535739561216, |
| "grad_norm": 0.5176109420998536, |
| "learning_rate": 4.127358490566038e-05, |
| "loss": 0.4744, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.2505307855626327, |
| "grad_norm": 0.4123363340874883, |
| "learning_rate": 4.150943396226415e-05, |
| "loss": 0.4959, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.2519462137296532, |
| "grad_norm": 0.450719206533509, |
| "learning_rate": 4.1745283018867925e-05, |
| "loss": 0.4881, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.25336164189667376, |
| "grad_norm": 0.43351355624228327, |
| "learning_rate": 4.1981132075471704e-05, |
| "loss": 0.4999, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.25477707006369427, |
| "grad_norm": 0.33664144300948656, |
| "learning_rate": 4.221698113207547e-05, |
| "loss": 0.5142, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.25619249823071477, |
| "grad_norm": 0.3837742618615644, |
| "learning_rate": 4.245283018867925e-05, |
| "loss": 0.4733, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.25760792639773533, |
| "grad_norm": 0.3151243355874945, |
| "learning_rate": 4.268867924528302e-05, |
| "loss": 0.4657, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.25902335456475584, |
| "grad_norm": 0.3335622550176586, |
| "learning_rate": 4.292452830188679e-05, |
| "loss": 0.4772, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.26043878273177634, |
| "grad_norm": 0.37628468547226257, |
| "learning_rate": 4.3160377358490565e-05, |
| "loss": 0.4848, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.2618542108987969, |
| "grad_norm": 0.34250164420941787, |
| "learning_rate": 4.3396226415094345e-05, |
| "loss": 0.4963, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.2632696390658174, |
| "grad_norm": 0.390192986807738, |
| "learning_rate": 4.363207547169812e-05, |
| "loss": 0.513, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.2646850672328379, |
| "grad_norm": 0.3657597965323173, |
| "learning_rate": 4.386792452830189e-05, |
| "loss": 0.5028, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.26610049539985847, |
| "grad_norm": 0.3552471961943396, |
| "learning_rate": 4.410377358490566e-05, |
| "loss": 0.4958, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.267515923566879, |
| "grad_norm": 0.3751793282608708, |
| "learning_rate": 4.433962264150944e-05, |
| "loss": 0.5002, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.2689313517338995, |
| "grad_norm": 0.31709289415813285, |
| "learning_rate": 4.4575471698113206e-05, |
| "loss": 0.485, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.27034677990092004, |
| "grad_norm": 0.36930880358545004, |
| "learning_rate": 4.4811320754716985e-05, |
| "loss": 0.4763, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.27176220806794055, |
| "grad_norm": 0.38164778972975844, |
| "learning_rate": 4.504716981132076e-05, |
| "loss": 0.4796, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.27317763623496105, |
| "grad_norm": 0.3827965077528942, |
| "learning_rate": 4.528301886792453e-05, |
| "loss": 0.4978, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.2745930644019816, |
| "grad_norm": 0.3892538141587405, |
| "learning_rate": 4.55188679245283e-05, |
| "loss": 0.4712, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.2760084925690021, |
| "grad_norm": 0.3653595080312334, |
| "learning_rate": 4.575471698113208e-05, |
| "loss": 0.485, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.2774239207360226, |
| "grad_norm": 0.3593560802498003, |
| "learning_rate": 4.5990566037735846e-05, |
| "loss": 0.4581, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.2788393489030432, |
| "grad_norm": 0.3833174718177277, |
| "learning_rate": 4.6226415094339625e-05, |
| "loss": 0.4868, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.2802547770700637, |
| "grad_norm": 0.3104445627049494, |
| "learning_rate": 4.64622641509434e-05, |
| "loss": 0.4939, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.2816702052370842, |
| "grad_norm": 0.2925127148194562, |
| "learning_rate": 4.669811320754717e-05, |
| "loss": 0.4988, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.28308563340410475, |
| "grad_norm": 0.34460020120708856, |
| "learning_rate": 4.693396226415094e-05, |
| "loss": 0.4918, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.28450106157112526, |
| "grad_norm": 0.31227346468617606, |
| "learning_rate": 4.716981132075472e-05, |
| "loss": 0.4995, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.28591648973814576, |
| "grad_norm": 0.3640317902107833, |
| "learning_rate": 4.7405660377358494e-05, |
| "loss": 0.4707, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.2873319179051663, |
| "grad_norm": 0.3145870893766912, |
| "learning_rate": 4.7641509433962266e-05, |
| "loss": 0.4995, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.28874734607218683, |
| "grad_norm": 0.32355667169539304, |
| "learning_rate": 4.787735849056604e-05, |
| "loss": 0.4844, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.29016277423920733, |
| "grad_norm": 0.36886406958455736, |
| "learning_rate": 4.811320754716982e-05, |
| "loss": 0.5, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.2915782024062279, |
| "grad_norm": 0.28157776049221367, |
| "learning_rate": 4.834905660377358e-05, |
| "loss": 0.4897, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.2929936305732484, |
| "grad_norm": 0.7299868380151449, |
| "learning_rate": 4.858490566037736e-05, |
| "loss": 0.496, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.29440905874026896, |
| "grad_norm": 0.3571637911618915, |
| "learning_rate": 4.8820754716981134e-05, |
| "loss": 0.4859, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.29582448690728946, |
| "grad_norm": 0.31598318128466457, |
| "learning_rate": 4.9056603773584906e-05, |
| "loss": 0.4684, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.29723991507430997, |
| "grad_norm": 0.35704049963583556, |
| "learning_rate": 4.929245283018868e-05, |
| "loss": 0.4653, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.29865534324133053, |
| "grad_norm": 0.3380809563649105, |
| "learning_rate": 4.952830188679246e-05, |
| "loss": 0.4972, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.30007077140835103, |
| "grad_norm": 0.2893108910416616, |
| "learning_rate": 4.976415094339622e-05, |
| "loss": 0.4672, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.30148619957537154, |
| "grad_norm": 0.34105686642163857, |
| "learning_rate": 5e-05, |
| "loss": 0.4833, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.3029016277423921, |
| "grad_norm": 0.3654037324084374, |
| "learning_rate": 4.997376705141658e-05, |
| "loss": 0.4787, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.3043170559094126, |
| "grad_norm": 0.33962044855755386, |
| "learning_rate": 4.994753410283316e-05, |
| "loss": 0.4917, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.3057324840764331, |
| "grad_norm": 0.3651658064563787, |
| "learning_rate": 4.9921301154249736e-05, |
| "loss": 0.4731, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.30714791224345367, |
| "grad_norm": 0.39085977965098573, |
| "learning_rate": 4.989506820566632e-05, |
| "loss": 0.4966, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.3085633404104742, |
| "grad_norm": 0.38749937841905047, |
| "learning_rate": 4.98688352570829e-05, |
| "loss": 0.4751, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.3099787685774947, |
| "grad_norm": 0.31799760427494933, |
| "learning_rate": 4.984260230849948e-05, |
| "loss": 0.4733, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.31139419674451524, |
| "grad_norm": 0.3359905056993565, |
| "learning_rate": 4.981636935991606e-05, |
| "loss": 0.467, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.31280962491153574, |
| "grad_norm": 0.40511778428597134, |
| "learning_rate": 4.979013641133264e-05, |
| "loss": 0.5045, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.31422505307855625, |
| "grad_norm": 0.379192341890831, |
| "learning_rate": 4.976390346274922e-05, |
| "loss": 0.4713, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.3156404812455768, |
| "grad_norm": 0.38069431190869296, |
| "learning_rate": 4.97376705141658e-05, |
| "loss": 0.4691, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.3170559094125973, |
| "grad_norm": 0.40512176731248734, |
| "learning_rate": 4.971143756558237e-05, |
| "loss": 0.4952, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.3184713375796178, |
| "grad_norm": 0.42869246316273457, |
| "learning_rate": 4.968520461699895e-05, |
| "loss": 0.479, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.3198867657466384, |
| "grad_norm": 0.34166560604627294, |
| "learning_rate": 4.965897166841553e-05, |
| "loss": 0.4616, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.3213021939136589, |
| "grad_norm": 0.41668612296835184, |
| "learning_rate": 4.963273871983211e-05, |
| "loss": 0.476, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.3227176220806794, |
| "grad_norm": 0.3444355148419626, |
| "learning_rate": 4.960650577124869e-05, |
| "loss": 0.4845, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.32413305024769995, |
| "grad_norm": 0.4082393224656534, |
| "learning_rate": 4.958027282266527e-05, |
| "loss": 0.5083, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.32554847841472045, |
| "grad_norm": 0.3581858800328615, |
| "learning_rate": 4.955403987408185e-05, |
| "loss": 0.4887, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.32696390658174096, |
| "grad_norm": 0.3604962793037811, |
| "learning_rate": 4.952780692549843e-05, |
| "loss": 0.51, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.3283793347487615, |
| "grad_norm": 0.3764941078516145, |
| "learning_rate": 4.950157397691501e-05, |
| "loss": 0.456, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.329794762915782, |
| "grad_norm": 0.3111710179843355, |
| "learning_rate": 4.947534102833158e-05, |
| "loss": 0.4691, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.33121019108280253, |
| "grad_norm": 0.35045316900302903, |
| "learning_rate": 4.944910807974817e-05, |
| "loss": 0.4875, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.3326256192498231, |
| "grad_norm": 0.3162652367435176, |
| "learning_rate": 4.942287513116475e-05, |
| "loss": 0.4553, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.3340410474168436, |
| "grad_norm": 0.3360681973991888, |
| "learning_rate": 4.939664218258133e-05, |
| "loss": 0.4796, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.3354564755838641, |
| "grad_norm": 0.3741851712229335, |
| "learning_rate": 4.93704092339979e-05, |
| "loss": 0.4571, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.33687190375088466, |
| "grad_norm": 0.3718199670391826, |
| "learning_rate": 4.934417628541448e-05, |
| "loss": 0.4841, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.33828733191790517, |
| "grad_norm": 0.35326975836585, |
| "learning_rate": 4.931794333683106e-05, |
| "loss": 0.4838, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.33970276008492567, |
| "grad_norm": 0.36254161920550493, |
| "learning_rate": 4.929171038824764e-05, |
| "loss": 0.4607, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.34111818825194623, |
| "grad_norm": 0.4305497527280153, |
| "learning_rate": 4.926547743966422e-05, |
| "loss": 0.4808, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.34253361641896674, |
| "grad_norm": 0.3854579649210384, |
| "learning_rate": 4.92392444910808e-05, |
| "loss": 0.4612, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.34394904458598724, |
| "grad_norm": 0.37381038850584963, |
| "learning_rate": 4.921301154249738e-05, |
| "loss": 0.4681, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.3453644727530078, |
| "grad_norm": 0.34094705578286977, |
| "learning_rate": 4.918677859391396e-05, |
| "loss": 0.4748, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.3467799009200283, |
| "grad_norm": 0.43256806769218703, |
| "learning_rate": 4.916054564533054e-05, |
| "loss": 0.4869, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.3481953290870488, |
| "grad_norm": 0.3512540885487744, |
| "learning_rate": 4.913431269674712e-05, |
| "loss": 0.4749, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.34961075725406937, |
| "grad_norm": 0.4375420690405016, |
| "learning_rate": 4.910807974816369e-05, |
| "loss": 0.4567, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.3510261854210899, |
| "grad_norm": 0.31331139412521325, |
| "learning_rate": 4.908184679958028e-05, |
| "loss": 0.4956, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.3524416135881104, |
| "grad_norm": 0.40569597255126255, |
| "learning_rate": 4.905561385099686e-05, |
| "loss": 0.479, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.35385704175513094, |
| "grad_norm": 0.3015217033905556, |
| "learning_rate": 4.902938090241343e-05, |
| "loss": 0.4631, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.35527246992215145, |
| "grad_norm": 0.36234313075601515, |
| "learning_rate": 4.900314795383001e-05, |
| "loss": 0.4788, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.35668789808917195, |
| "grad_norm": 0.3350987908357145, |
| "learning_rate": 4.897691500524659e-05, |
| "loss": 0.4666, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.3581033262561925, |
| "grad_norm": 0.30055620666911115, |
| "learning_rate": 4.895068205666317e-05, |
| "loss": 0.4896, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.359518754423213, |
| "grad_norm": 0.3224634511396762, |
| "learning_rate": 4.8924449108079753e-05, |
| "loss": 0.4689, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.3609341825902335, |
| "grad_norm": 0.3523537727385361, |
| "learning_rate": 4.889821615949633e-05, |
| "loss": 0.474, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.3623496107572541, |
| "grad_norm": 0.32606824732367184, |
| "learning_rate": 4.887198321091291e-05, |
| "loss": 0.483, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.3637650389242746, |
| "grad_norm": 0.35312194584977225, |
| "learning_rate": 4.884575026232949e-05, |
| "loss": 0.4616, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.3651804670912951, |
| "grad_norm": 0.2977039251083076, |
| "learning_rate": 4.881951731374607e-05, |
| "loss": 0.4893, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.36659589525831565, |
| "grad_norm": 0.36965910622460707, |
| "learning_rate": 4.879328436516265e-05, |
| "loss": 0.484, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.36801132342533616, |
| "grad_norm": 0.32694450731488317, |
| "learning_rate": 4.876705141657922e-05, |
| "loss": 0.4734, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.36942675159235666, |
| "grad_norm": 0.33797975840114225, |
| "learning_rate": 4.87408184679958e-05, |
| "loss": 0.4609, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.3708421797593772, |
| "grad_norm": 0.5188557226310996, |
| "learning_rate": 4.871458551941239e-05, |
| "loss": 0.4695, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.3722576079263977, |
| "grad_norm": 0.28372641106405255, |
| "learning_rate": 4.868835257082896e-05, |
| "loss": 0.4505, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.37367303609341823, |
| "grad_norm": 0.37728420455185746, |
| "learning_rate": 4.866211962224554e-05, |
| "loss": 0.4699, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.3750884642604388, |
| "grad_norm": 0.27926539871155076, |
| "learning_rate": 4.863588667366212e-05, |
| "loss": 0.4497, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.3765038924274593, |
| "grad_norm": 0.31523862716113915, |
| "learning_rate": 4.86096537250787e-05, |
| "loss": 0.4794, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.37791932059447986, |
| "grad_norm": 0.3559960078668447, |
| "learning_rate": 4.8583420776495284e-05, |
| "loss": 0.4838, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.37933474876150036, |
| "grad_norm": 0.26459931841726647, |
| "learning_rate": 4.855718782791186e-05, |
| "loss": 0.4701, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.38075017692852087, |
| "grad_norm": 0.42754490475895474, |
| "learning_rate": 4.853095487932844e-05, |
| "loss": 0.4833, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.3821656050955414, |
| "grad_norm": 0.31990202221223335, |
| "learning_rate": 4.850472193074502e-05, |
| "loss": 0.4664, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.38358103326256193, |
| "grad_norm": 0.32284874424979937, |
| "learning_rate": 4.84784889821616e-05, |
| "loss": 0.4646, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.38499646142958244, |
| "grad_norm": 0.3567088507467582, |
| "learning_rate": 4.845225603357818e-05, |
| "loss": 0.4899, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.386411889596603, |
| "grad_norm": 0.28350442390303104, |
| "learning_rate": 4.842602308499475e-05, |
| "loss": 0.4628, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.3878273177636235, |
| "grad_norm": 0.3380796093379581, |
| "learning_rate": 4.839979013641133e-05, |
| "loss": 0.4798, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.389242745930644, |
| "grad_norm": 0.30402306763549686, |
| "learning_rate": 4.837355718782791e-05, |
| "loss": 0.478, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.39065817409766457, |
| "grad_norm": 0.33347801143181516, |
| "learning_rate": 4.834732423924449e-05, |
| "loss": 0.4725, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.3920736022646851, |
| "grad_norm": 0.3229830448029133, |
| "learning_rate": 4.832109129066107e-05, |
| "loss": 0.4661, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.3934890304317056, |
| "grad_norm": 0.3214414708827648, |
| "learning_rate": 4.829485834207765e-05, |
| "loss": 0.4389, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.39490445859872614, |
| "grad_norm": 0.2977414670901615, |
| "learning_rate": 4.8268625393494233e-05, |
| "loss": 0.4534, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.39631988676574664, |
| "grad_norm": 0.3494964999721987, |
| "learning_rate": 4.8242392444910814e-05, |
| "loss": 0.4506, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.39773531493276715, |
| "grad_norm": 0.33432250104369315, |
| "learning_rate": 4.821615949632739e-05, |
| "loss": 0.4794, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.3991507430997877, |
| "grad_norm": 0.2989920920241222, |
| "learning_rate": 4.818992654774397e-05, |
| "loss": 0.4679, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.4005661712668082, |
| "grad_norm": 0.33484591865165336, |
| "learning_rate": 4.816369359916055e-05, |
| "loss": 0.48, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.4019815994338287, |
| "grad_norm": 0.308359897039709, |
| "learning_rate": 4.813746065057713e-05, |
| "loss": 0.4601, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.4033970276008493, |
| "grad_norm": 0.34107458942761815, |
| "learning_rate": 4.811122770199371e-05, |
| "loss": 0.4775, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.4048124557678698, |
| "grad_norm": 0.2790810489904784, |
| "learning_rate": 4.808499475341028e-05, |
| "loss": 0.4684, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.4062278839348903, |
| "grad_norm": 0.31368051064270663, |
| "learning_rate": 4.805876180482686e-05, |
| "loss": 0.4524, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.40764331210191085, |
| "grad_norm": 0.31954064273172456, |
| "learning_rate": 4.803252885624344e-05, |
| "loss": 0.4866, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.40905874026893135, |
| "grad_norm": 0.3409185291839269, |
| "learning_rate": 4.800629590766002e-05, |
| "loss": 0.4696, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.41047416843595186, |
| "grad_norm": 0.29737417322671006, |
| "learning_rate": 4.79800629590766e-05, |
| "loss": 0.4707, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.4118895966029724, |
| "grad_norm": 0.3418785035707623, |
| "learning_rate": 4.795383001049318e-05, |
| "loss": 0.4494, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.4133050247699929, |
| "grad_norm": 0.29733881088472347, |
| "learning_rate": 4.7927597061909764e-05, |
| "loss": 0.4741, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.41472045293701343, |
| "grad_norm": 0.31999219722171973, |
| "learning_rate": 4.7901364113326344e-05, |
| "loss": 0.4565, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.416135881104034, |
| "grad_norm": 0.2702853626810532, |
| "learning_rate": 4.787513116474292e-05, |
| "loss": 0.4789, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.4175513092710545, |
| "grad_norm": 0.35698586881409045, |
| "learning_rate": 4.78488982161595e-05, |
| "loss": 0.4512, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.418966737438075, |
| "grad_norm": 0.30683113400792583, |
| "learning_rate": 4.782266526757608e-05, |
| "loss": 0.4681, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.42038216560509556, |
| "grad_norm": 0.3166932092062115, |
| "learning_rate": 4.779643231899266e-05, |
| "loss": 0.4611, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.42179759377211606, |
| "grad_norm": 0.28440777282553725, |
| "learning_rate": 4.777019937040924e-05, |
| "loss": 0.4549, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.42321302193913657, |
| "grad_norm": 0.30989709115616915, |
| "learning_rate": 4.774396642182581e-05, |
| "loss": 0.4538, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.42462845010615713, |
| "grad_norm": 0.24260466757465912, |
| "learning_rate": 4.771773347324239e-05, |
| "loss": 0.4681, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.42604387827317763, |
| "grad_norm": 0.3391264946077127, |
| "learning_rate": 4.769150052465897e-05, |
| "loss": 0.4617, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.42745930644019814, |
| "grad_norm": 0.3008596481448339, |
| "learning_rate": 4.766526757607555e-05, |
| "loss": 0.4609, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.4288747346072187, |
| "grad_norm": 0.33198306390049503, |
| "learning_rate": 4.763903462749213e-05, |
| "loss": 0.4776, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.4302901627742392, |
| "grad_norm": 0.2962026562742108, |
| "learning_rate": 4.7612801678908713e-05, |
| "loss": 0.4617, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.4317055909412597, |
| "grad_norm": 0.9650530020569035, |
| "learning_rate": 4.7586568730325294e-05, |
| "loss": 0.4646, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.43312101910828027, |
| "grad_norm": 0.2876675793351413, |
| "learning_rate": 4.7560335781741874e-05, |
| "loss": 0.4564, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.4345364472753008, |
| "grad_norm": 0.3037747439699752, |
| "learning_rate": 4.753410283315845e-05, |
| "loss": 0.487, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.4359518754423213, |
| "grad_norm": 0.299098005284229, |
| "learning_rate": 4.750786988457503e-05, |
| "loss": 0.4568, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.43736730360934184, |
| "grad_norm": 0.26342453389940673, |
| "learning_rate": 4.748163693599161e-05, |
| "loss": 0.4602, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.43878273177636234, |
| "grad_norm": 0.26189351741312544, |
| "learning_rate": 4.745540398740819e-05, |
| "loss": 0.4575, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.44019815994338285, |
| "grad_norm": 0.269146148673188, |
| "learning_rate": 4.742917103882477e-05, |
| "loss": 0.4472, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.4416135881104034, |
| "grad_norm": 0.27774884805503863, |
| "learning_rate": 4.740293809024134e-05, |
| "loss": 0.4801, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.4430290162774239, |
| "grad_norm": 0.2828678588034561, |
| "learning_rate": 4.737670514165792e-05, |
| "loss": 0.4476, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.4444444444444444, |
| "grad_norm": 0.26381381025775535, |
| "learning_rate": 4.73504721930745e-05, |
| "loss": 0.4526, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.445859872611465, |
| "grad_norm": 0.29545201089339096, |
| "learning_rate": 4.732423924449108e-05, |
| "loss": 0.4456, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.4472753007784855, |
| "grad_norm": 0.31571268465596236, |
| "learning_rate": 4.7298006295907657e-05, |
| "loss": 0.4763, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.448690728945506, |
| "grad_norm": 0.3139992834947803, |
| "learning_rate": 4.7271773347324244e-05, |
| "loss": 0.453, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.45010615711252655, |
| "grad_norm": 0.2954480433149446, |
| "learning_rate": 4.7245540398740824e-05, |
| "loss": 0.4516, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.45152158527954706, |
| "grad_norm": 0.3629808241350359, |
| "learning_rate": 4.7219307450157404e-05, |
| "loss": 0.4674, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.45293701344656756, |
| "grad_norm": 0.3526526315963183, |
| "learning_rate": 4.719307450157398e-05, |
| "loss": 0.46, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.4543524416135881, |
| "grad_norm": 0.42467086373484514, |
| "learning_rate": 4.716684155299056e-05, |
| "loss": 0.4722, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.4557678697806086, |
| "grad_norm": 0.2848455631907692, |
| "learning_rate": 4.714060860440714e-05, |
| "loss": 0.4677, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.45718329794762913, |
| "grad_norm": 0.3680582877057587, |
| "learning_rate": 4.711437565582372e-05, |
| "loss": 0.4626, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.4585987261146497, |
| "grad_norm": 0.30480571190360467, |
| "learning_rate": 4.70881427072403e-05, |
| "loss": 0.4734, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.4600141542816702, |
| "grad_norm": 0.3738153957980129, |
| "learning_rate": 4.706190975865687e-05, |
| "loss": 0.4672, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.46142958244869076, |
| "grad_norm": 0.3202269092407385, |
| "learning_rate": 4.703567681007345e-05, |
| "loss": 0.4529, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.46284501061571126, |
| "grad_norm": 0.3705235889244079, |
| "learning_rate": 4.700944386149003e-05, |
| "loss": 0.457, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.46426043878273177, |
| "grad_norm": 0.3300580785020962, |
| "learning_rate": 4.698321091290661e-05, |
| "loss": 0.4634, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.4656758669497523, |
| "grad_norm": 0.35469328535410066, |
| "learning_rate": 4.695697796432319e-05, |
| "loss": 0.4733, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.46709129511677283, |
| "grad_norm": 0.3194681818790911, |
| "learning_rate": 4.693074501573977e-05, |
| "loss": 0.4427, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.46850672328379334, |
| "grad_norm": 0.3613032196238565, |
| "learning_rate": 4.6904512067156354e-05, |
| "loss": 0.4556, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.4699221514508139, |
| "grad_norm": 0.27287526828539865, |
| "learning_rate": 4.6878279118572934e-05, |
| "loss": 0.4531, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.4713375796178344, |
| "grad_norm": 0.426853472134396, |
| "learning_rate": 4.685204616998951e-05, |
| "loss": 0.4856, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.4727530077848549, |
| "grad_norm": 0.2764447274960176, |
| "learning_rate": 4.682581322140609e-05, |
| "loss": 0.4614, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.47416843595187547, |
| "grad_norm": 0.3747012954551615, |
| "learning_rate": 4.679958027282267e-05, |
| "loss": 0.4484, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.47558386411889597, |
| "grad_norm": 0.26796928979959844, |
| "learning_rate": 4.677334732423925e-05, |
| "loss": 0.4357, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.4769992922859165, |
| "grad_norm": 0.3566137792471709, |
| "learning_rate": 4.674711437565583e-05, |
| "loss": 0.4731, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.47841472045293704, |
| "grad_norm": 0.3475361430572997, |
| "learning_rate": 4.67208814270724e-05, |
| "loss": 0.4699, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.47983014861995754, |
| "grad_norm": 0.4581592514232306, |
| "learning_rate": 4.669464847848898e-05, |
| "loss": 0.451, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.48124557678697805, |
| "grad_norm": 0.3627293439819416, |
| "learning_rate": 4.666841552990556e-05, |
| "loss": 0.4625, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.4826610049539986, |
| "grad_norm": 0.3511019572881394, |
| "learning_rate": 4.664218258132214e-05, |
| "loss": 0.4551, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.4840764331210191, |
| "grad_norm": 0.3234996409960452, |
| "learning_rate": 4.661594963273872e-05, |
| "loss": 0.4761, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.4854918612880396, |
| "grad_norm": 0.3915392794609834, |
| "learning_rate": 4.65897166841553e-05, |
| "loss": 0.4819, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.4869072894550602, |
| "grad_norm": 0.30334442650643895, |
| "learning_rate": 4.656348373557188e-05, |
| "loss": 0.4617, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.4883227176220807, |
| "grad_norm": 0.3434333598334738, |
| "learning_rate": 4.6537250786988465e-05, |
| "loss": 0.4591, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.4897381457891012, |
| "grad_norm": 0.3011418095074162, |
| "learning_rate": 4.651101783840504e-05, |
| "loss": 0.4593, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.49115357395612175, |
| "grad_norm": 0.2931682762521998, |
| "learning_rate": 4.648478488982162e-05, |
| "loss": 0.4534, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.49256900212314225, |
| "grad_norm": 0.27983031997568253, |
| "learning_rate": 4.64585519412382e-05, |
| "loss": 0.456, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.49398443029016276, |
| "grad_norm": 0.3330628925072195, |
| "learning_rate": 4.643231899265478e-05, |
| "loss": 0.4583, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.4953998584571833, |
| "grad_norm": 0.24344080894387007, |
| "learning_rate": 4.640608604407136e-05, |
| "loss": 0.4444, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.4968152866242038, |
| "grad_norm": 0.3149584883086908, |
| "learning_rate": 4.637985309548793e-05, |
| "loss": 0.4629, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.4982307147912243, |
| "grad_norm": 0.25711306244469556, |
| "learning_rate": 4.635362014690451e-05, |
| "loss": 0.4353, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.4996461429582449, |
| "grad_norm": 0.2828675944672857, |
| "learning_rate": 4.632738719832109e-05, |
| "loss": 0.4431, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.5010615711252654, |
| "grad_norm": 0.31965497332448656, |
| "learning_rate": 4.6301154249737674e-05, |
| "loss": 0.4683, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.502476999292286, |
| "grad_norm": 0.29420711453793835, |
| "learning_rate": 4.627492130115425e-05, |
| "loss": 0.4779, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.5038924274593064, |
| "grad_norm": 0.3095981627932921, |
| "learning_rate": 4.624868835257083e-05, |
| "loss": 0.4809, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.505307855626327, |
| "grad_norm": 0.2784944679427735, |
| "learning_rate": 4.622245540398741e-05, |
| "loss": 0.4608, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.5067232837933475, |
| "grad_norm": 0.29624109800363596, |
| "learning_rate": 4.619622245540399e-05, |
| "loss": 0.4472, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.508138711960368, |
| "grad_norm": 0.28349708245921784, |
| "learning_rate": 4.616998950682057e-05, |
| "loss": 0.4632, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.5095541401273885, |
| "grad_norm": 0.296812331973689, |
| "learning_rate": 4.614375655823715e-05, |
| "loss": 0.4419, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.5109695682944091, |
| "grad_norm": 0.29521985654259064, |
| "learning_rate": 4.611752360965373e-05, |
| "loss": 0.4404, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.5123849964614295, |
| "grad_norm": 0.30198848925830873, |
| "learning_rate": 4.609129066107031e-05, |
| "loss": 0.4764, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.5138004246284501, |
| "grad_norm": 0.2913858999538317, |
| "learning_rate": 4.606505771248689e-05, |
| "loss": 0.4656, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.5152158527954707, |
| "grad_norm": 0.29621855684908976, |
| "learning_rate": 4.603882476390346e-05, |
| "loss": 0.449, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.5166312809624911, |
| "grad_norm": 0.3060245599564631, |
| "learning_rate": 4.601259181532004e-05, |
| "loss": 0.4642, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.5180467091295117, |
| "grad_norm": 0.2926543846950487, |
| "learning_rate": 4.5986358866736623e-05, |
| "loss": 0.4503, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.5194621372965322, |
| "grad_norm": 0.26926515134699414, |
| "learning_rate": 4.5960125918153204e-05, |
| "loss": 0.4617, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.5208775654635527, |
| "grad_norm": 0.27299414729049654, |
| "learning_rate": 4.5933892969569784e-05, |
| "loss": 0.45, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.5222929936305732, |
| "grad_norm": 0.25833671342973175, |
| "learning_rate": 4.590766002098636e-05, |
| "loss": 0.4646, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.5237084217975938, |
| "grad_norm": 0.2839236221161132, |
| "learning_rate": 4.588142707240294e-05, |
| "loss": 0.4302, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.5251238499646143, |
| "grad_norm": 0.2786810220220998, |
| "learning_rate": 4.585519412381952e-05, |
| "loss": 0.4643, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.5265392781316348, |
| "grad_norm": 0.2669933920291989, |
| "learning_rate": 4.58289611752361e-05, |
| "loss": 0.4624, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.5279547062986554, |
| "grad_norm": 0.2858980696521966, |
| "learning_rate": 4.580272822665268e-05, |
| "loss": 0.4649, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.5293701344656758, |
| "grad_norm": 0.2622927831062312, |
| "learning_rate": 4.577649527806926e-05, |
| "loss": 0.4557, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.5307855626326964, |
| "grad_norm": 0.27472852707977724, |
| "learning_rate": 4.575026232948584e-05, |
| "loss": 0.4457, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.5322009907997169, |
| "grad_norm": 0.2727301904161722, |
| "learning_rate": 4.572402938090242e-05, |
| "loss": 0.4634, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.5336164189667374, |
| "grad_norm": 0.27932575044288194, |
| "learning_rate": 4.569779643231899e-05, |
| "loss": 0.4537, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.535031847133758, |
| "grad_norm": 0.30699616443917316, |
| "learning_rate": 4.567156348373557e-05, |
| "loss": 0.452, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.5364472753007785, |
| "grad_norm": 0.30299341502733096, |
| "learning_rate": 4.5645330535152154e-05, |
| "loss": 0.474, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.537862703467799, |
| "grad_norm": 0.2919960224567498, |
| "learning_rate": 4.5619097586568734e-05, |
| "loss": 0.4625, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.5392781316348195, |
| "grad_norm": 0.2990740185851751, |
| "learning_rate": 4.5592864637985314e-05, |
| "loss": 0.4617, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.5406935598018401, |
| "grad_norm": 0.2659823960473287, |
| "learning_rate": 4.556663168940189e-05, |
| "loss": 0.4477, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.5421089879688605, |
| "grad_norm": 0.26728861667810855, |
| "learning_rate": 4.554039874081847e-05, |
| "loss": 0.4592, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.5435244161358811, |
| "grad_norm": 0.2853257514446039, |
| "learning_rate": 4.551416579223505e-05, |
| "loss": 0.4565, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.5449398443029017, |
| "grad_norm": 0.2625814267311583, |
| "learning_rate": 4.548793284365163e-05, |
| "loss": 0.4389, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.5463552724699221, |
| "grad_norm": 0.26885049478533035, |
| "learning_rate": 4.546169989506821e-05, |
| "loss": 0.4516, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.5477707006369427, |
| "grad_norm": 0.26586269774742693, |
| "learning_rate": 4.543546694648479e-05, |
| "loss": 0.4537, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.5491861288039632, |
| "grad_norm": 0.2888622602970239, |
| "learning_rate": 4.540923399790137e-05, |
| "loss": 0.4458, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.5506015569709837, |
| "grad_norm": 0.2811624027110681, |
| "learning_rate": 4.538300104931795e-05, |
| "loss": 0.4546, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.5520169851380042, |
| "grad_norm": 0.2741830826094752, |
| "learning_rate": 4.535676810073452e-05, |
| "loss": 0.4571, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.5534324133050248, |
| "grad_norm": 0.2642055656361625, |
| "learning_rate": 4.5330535152151103e-05, |
| "loss": 0.4581, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.5548478414720452, |
| "grad_norm": 0.31492914277218653, |
| "learning_rate": 4.5304302203567684e-05, |
| "loss": 0.4375, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.5562632696390658, |
| "grad_norm": 0.25431995851103983, |
| "learning_rate": 4.5278069254984264e-05, |
| "loss": 0.4434, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.5576786978060864, |
| "grad_norm": 0.26192007224916264, |
| "learning_rate": 4.5251836306400844e-05, |
| "loss": 0.4464, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.5590941259731068, |
| "grad_norm": 0.24224205861266807, |
| "learning_rate": 4.522560335781742e-05, |
| "loss": 0.4426, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.5605095541401274, |
| "grad_norm": 0.2672754644431964, |
| "learning_rate": 4.5199370409234e-05, |
| "loss": 0.4624, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.5619249823071479, |
| "grad_norm": 0.244036715749111, |
| "learning_rate": 4.517313746065058e-05, |
| "loss": 0.4584, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.5633404104741684, |
| "grad_norm": 0.27071584258359827, |
| "learning_rate": 4.514690451206716e-05, |
| "loss": 0.4485, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.564755838641189, |
| "grad_norm": 0.2219741153795276, |
| "learning_rate": 4.512067156348373e-05, |
| "loss": 0.4479, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.5661712668082095, |
| "grad_norm": 0.25037227044897187, |
| "learning_rate": 4.509443861490032e-05, |
| "loss": 0.4285, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.56758669497523, |
| "grad_norm": 0.2605986634772363, |
| "learning_rate": 4.50682056663169e-05, |
| "loss": 0.4554, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.5690021231422505, |
| "grad_norm": 0.22255212223784326, |
| "learning_rate": 4.504197271773348e-05, |
| "loss": 0.4428, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.5704175513092711, |
| "grad_norm": 0.24900365207529354, |
| "learning_rate": 4.501573976915005e-05, |
| "loss": 0.4534, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.5718329794762915, |
| "grad_norm": 0.24923821687732778, |
| "learning_rate": 4.4989506820566634e-05, |
| "loss": 0.4465, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.5732484076433121, |
| "grad_norm": 0.24844544013157835, |
| "learning_rate": 4.4963273871983214e-05, |
| "loss": 0.4717, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.5746638358103326, |
| "grad_norm": 0.2535482892944214, |
| "learning_rate": 4.4937040923399794e-05, |
| "loss": 0.4661, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.5760792639773531, |
| "grad_norm": 0.2876988774674945, |
| "learning_rate": 4.4910807974816374e-05, |
| "loss": 0.4442, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.5774946921443737, |
| "grad_norm": 0.26993016677284876, |
| "learning_rate": 4.488457502623295e-05, |
| "loss": 0.4552, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.5789101203113942, |
| "grad_norm": 0.3358646447309973, |
| "learning_rate": 4.485834207764953e-05, |
| "loss": 0.4624, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.5803255484784147, |
| "grad_norm": 0.309893397096339, |
| "learning_rate": 4.483210912906611e-05, |
| "loss": 0.4559, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.5817409766454352, |
| "grad_norm": 0.3386303499703173, |
| "learning_rate": 4.480587618048269e-05, |
| "loss": 0.4514, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.5831564048124558, |
| "grad_norm": 0.31112587874364034, |
| "learning_rate": 4.477964323189926e-05, |
| "loss": 0.4561, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.5845718329794763, |
| "grad_norm": 0.30748628264223893, |
| "learning_rate": 4.475341028331584e-05, |
| "loss": 0.4196, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.5859872611464968, |
| "grad_norm": 0.3668354913960175, |
| "learning_rate": 4.472717733473243e-05, |
| "loss": 0.4593, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.5874026893135174, |
| "grad_norm": 0.3176035120916479, |
| "learning_rate": 4.470094438614901e-05, |
| "loss": 0.463, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.5888181174805379, |
| "grad_norm": 0.3330738765417286, |
| "learning_rate": 4.4674711437565583e-05, |
| "loss": 0.4385, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.5902335456475584, |
| "grad_norm": 0.2893855800235713, |
| "learning_rate": 4.4648478488982164e-05, |
| "loss": 0.4641, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.5916489738145789, |
| "grad_norm": 0.31639363837672396, |
| "learning_rate": 4.4622245540398744e-05, |
| "loss": 0.4403, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.5930644019815995, |
| "grad_norm": 0.22355317350787565, |
| "learning_rate": 4.4596012591815324e-05, |
| "loss": 0.4404, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.5944798301486199, |
| "grad_norm": 0.3576283924175163, |
| "learning_rate": 4.4569779643231905e-05, |
| "loss": 0.4644, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.5958952583156405, |
| "grad_norm": 0.25816574164368156, |
| "learning_rate": 4.454354669464848e-05, |
| "loss": 0.4558, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.5973106864826611, |
| "grad_norm": 0.33002317077220583, |
| "learning_rate": 4.451731374606506e-05, |
| "loss": 0.4704, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.5987261146496815, |
| "grad_norm": 0.2493116914048726, |
| "learning_rate": 4.449108079748164e-05, |
| "loss": 0.4247, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.6001415428167021, |
| "grad_norm": 0.26699909660092275, |
| "learning_rate": 4.446484784889822e-05, |
| "loss": 0.4252, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.6015569709837226, |
| "grad_norm": 0.32848504997824807, |
| "learning_rate": 4.443861490031479e-05, |
| "loss": 0.4343, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.6029723991507431, |
| "grad_norm": 0.25097142910919984, |
| "learning_rate": 4.441238195173137e-05, |
| "loss": 0.4683, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.6043878273177636, |
| "grad_norm": 0.2959450784149391, |
| "learning_rate": 4.438614900314795e-05, |
| "loss": 0.4701, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.6058032554847842, |
| "grad_norm": 0.3311493732639423, |
| "learning_rate": 4.435991605456454e-05, |
| "loss": 0.4587, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.6072186836518046, |
| "grad_norm": 0.3043125758832461, |
| "learning_rate": 4.4333683105981114e-05, |
| "loss": 0.4576, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.6086341118188252, |
| "grad_norm": 0.3040835362722505, |
| "learning_rate": 4.4307450157397694e-05, |
| "loss": 0.4594, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.6100495399858458, |
| "grad_norm": 0.30376844362999295, |
| "learning_rate": 4.4281217208814274e-05, |
| "loss": 0.4303, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.6114649681528662, |
| "grad_norm": 0.36277328985368, |
| "learning_rate": 4.4254984260230854e-05, |
| "loss": 0.453, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.6128803963198868, |
| "grad_norm": 0.3396860758261206, |
| "learning_rate": 4.4228751311647435e-05, |
| "loss": 0.4613, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.6142958244869073, |
| "grad_norm": 0.32397361588158885, |
| "learning_rate": 4.420251836306401e-05, |
| "loss": 0.4392, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.6157112526539278, |
| "grad_norm": 0.29303044508092985, |
| "learning_rate": 4.417628541448059e-05, |
| "loss": 0.4479, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.6171266808209483, |
| "grad_norm": 0.2945470593351959, |
| "learning_rate": 4.415005246589717e-05, |
| "loss": 0.4577, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.6185421089879689, |
| "grad_norm": 0.3092964622633318, |
| "learning_rate": 4.412381951731375e-05, |
| "loss": 0.4518, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.6199575371549894, |
| "grad_norm": 0.28334203463423846, |
| "learning_rate": 4.409758656873032e-05, |
| "loss": 0.4657, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.6213729653220099, |
| "grad_norm": 0.2877604209121691, |
| "learning_rate": 4.40713536201469e-05, |
| "loss": 0.4362, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.6227883934890305, |
| "grad_norm": 0.2740454084168051, |
| "learning_rate": 4.404512067156348e-05, |
| "loss": 0.4485, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.6242038216560509, |
| "grad_norm": 0.2877926081207299, |
| "learning_rate": 4.401888772298007e-05, |
| "loss": 0.4458, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.6256192498230715, |
| "grad_norm": 0.31627814411582056, |
| "learning_rate": 4.3992654774396644e-05, |
| "loss": 0.4283, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.627034677990092, |
| "grad_norm": 0.25505333075242553, |
| "learning_rate": 4.3966421825813224e-05, |
| "loss": 0.4389, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.6284501061571125, |
| "grad_norm": 0.2735518374145452, |
| "learning_rate": 4.3940188877229804e-05, |
| "loss": 0.4331, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.6298655343241331, |
| "grad_norm": 0.29238160109535305, |
| "learning_rate": 4.3913955928646385e-05, |
| "loss": 0.4472, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.6312809624911536, |
| "grad_norm": 0.24821045828243918, |
| "learning_rate": 4.3887722980062965e-05, |
| "loss": 0.4608, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.6326963906581741, |
| "grad_norm": 0.2516783612250606, |
| "learning_rate": 4.386149003147954e-05, |
| "loss": 0.4443, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.6341118188251946, |
| "grad_norm": 0.2729001615032857, |
| "learning_rate": 4.383525708289612e-05, |
| "loss": 0.4439, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.6355272469922152, |
| "grad_norm": 0.26562397446134683, |
| "learning_rate": 4.38090241343127e-05, |
| "loss": 0.4398, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.6369426751592356, |
| "grad_norm": 0.27848528470733824, |
| "learning_rate": 4.378279118572928e-05, |
| "loss": 0.4725, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.6383581033262562, |
| "grad_norm": 0.25076284762855006, |
| "learning_rate": 4.375655823714585e-05, |
| "loss": 0.4413, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.6397735314932768, |
| "grad_norm": 0.2787634519406765, |
| "learning_rate": 4.373032528856243e-05, |
| "loss": 0.4446, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.6411889596602972, |
| "grad_norm": 0.25957429540280813, |
| "learning_rate": 4.370409233997901e-05, |
| "loss": 0.4247, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.6426043878273178, |
| "grad_norm": 0.29047238021603844, |
| "learning_rate": 4.3677859391395594e-05, |
| "loss": 0.4516, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.6440198159943383, |
| "grad_norm": 0.27998705499552107, |
| "learning_rate": 4.3651626442812174e-05, |
| "loss": 0.4467, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.6454352441613588, |
| "grad_norm": 0.29272174897988407, |
| "learning_rate": 4.3625393494228754e-05, |
| "loss": 0.4516, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.6468506723283793, |
| "grad_norm": 0.27658532554543785, |
| "learning_rate": 4.3599160545645335e-05, |
| "loss": 0.4542, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.6482661004953999, |
| "grad_norm": 0.2710425573124413, |
| "learning_rate": 4.3572927597061915e-05, |
| "loss": 0.4357, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.6496815286624203, |
| "grad_norm": 0.2725022900196419, |
| "learning_rate": 4.3546694648478495e-05, |
| "loss": 0.4422, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.6510969568294409, |
| "grad_norm": 0.2742452505894262, |
| "learning_rate": 4.352046169989507e-05, |
| "loss": 0.4539, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.6525123849964615, |
| "grad_norm": 0.2587144103469113, |
| "learning_rate": 4.349422875131165e-05, |
| "loss": 0.4484, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.6539278131634819, |
| "grad_norm": 0.22826321118851936, |
| "learning_rate": 4.346799580272823e-05, |
| "loss": 0.4575, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.6553432413305025, |
| "grad_norm": 0.32262911700907015, |
| "learning_rate": 4.344176285414481e-05, |
| "loss": 0.462, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.656758669497523, |
| "grad_norm": 0.26791948806923965, |
| "learning_rate": 4.341552990556138e-05, |
| "loss": 0.4463, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.6581740976645435, |
| "grad_norm": 0.27271173640188784, |
| "learning_rate": 4.338929695697796e-05, |
| "loss": 0.4297, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.659589525831564, |
| "grad_norm": 0.285366983143651, |
| "learning_rate": 4.3363064008394544e-05, |
| "loss": 0.4367, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.6610049539985846, |
| "grad_norm": 0.2533623048370864, |
| "learning_rate": 4.3336831059811124e-05, |
| "loss": 0.4454, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.6624203821656051, |
| "grad_norm": 0.26610790144962626, |
| "learning_rate": 4.3310598111227704e-05, |
| "loss": 0.4406, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.6638358103326256, |
| "grad_norm": 0.27282829565311356, |
| "learning_rate": 4.3284365162644284e-05, |
| "loss": 0.425, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.6652512384996462, |
| "grad_norm": 0.33805278799552196, |
| "learning_rate": 4.3258132214060865e-05, |
| "loss": 0.4392, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 0.2924744394522499, |
| "learning_rate": 4.3231899265477445e-05, |
| "loss": 0.4459, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.6680820948336872, |
| "grad_norm": 0.27584231458857017, |
| "learning_rate": 4.3205666316894025e-05, |
| "loss": 0.4394, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.6694975230007078, |
| "grad_norm": 0.32991663219597556, |
| "learning_rate": 4.31794333683106e-05, |
| "loss": 0.4399, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.6709129511677282, |
| "grad_norm": 0.3003211945829358, |
| "learning_rate": 4.315320041972718e-05, |
| "loss": 0.4436, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.6723283793347488, |
| "grad_norm": 0.31229550572985726, |
| "learning_rate": 4.312696747114376e-05, |
| "loss": 0.438, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.6737438075017693, |
| "grad_norm": 0.347527441930185, |
| "learning_rate": 4.310073452256034e-05, |
| "loss": 0.4784, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.6751592356687898, |
| "grad_norm": 0.3532649194342591, |
| "learning_rate": 4.307450157397691e-05, |
| "loss": 0.462, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.6765746638358103, |
| "grad_norm": 0.3440426012872478, |
| "learning_rate": 4.304826862539349e-05, |
| "loss": 0.4693, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.6779900920028309, |
| "grad_norm": 0.2771881635662196, |
| "learning_rate": 4.3022035676810074e-05, |
| "loss": 0.4535, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.6794055201698513, |
| "grad_norm": 0.33367051540770565, |
| "learning_rate": 4.2995802728226654e-05, |
| "loss": 0.4268, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.6808209483368719, |
| "grad_norm": 0.3579079746745951, |
| "learning_rate": 4.2969569779643234e-05, |
| "loss": 0.4502, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.6822363765038925, |
| "grad_norm": 0.26239819868050757, |
| "learning_rate": 4.294333683105981e-05, |
| "loss": 0.4232, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.6836518046709129, |
| "grad_norm": 0.4129537053142466, |
| "learning_rate": 4.2917103882476395e-05, |
| "loss": 0.452, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.6850672328379335, |
| "grad_norm": 0.26267539356220626, |
| "learning_rate": 4.2890870933892975e-05, |
| "loss": 0.4483, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.686482661004954, |
| "grad_norm": 0.3353381200094175, |
| "learning_rate": 4.2864637985309555e-05, |
| "loss": 0.4377, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.6878980891719745, |
| "grad_norm": 0.34902186848107175, |
| "learning_rate": 4.283840503672613e-05, |
| "loss": 0.4651, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.689313517338995, |
| "grad_norm": 0.31170345792330595, |
| "learning_rate": 4.281217208814271e-05, |
| "loss": 0.4458, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.6907289455060156, |
| "grad_norm": 0.3312254997595215, |
| "learning_rate": 4.278593913955929e-05, |
| "loss": 0.4551, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.692144373673036, |
| "grad_norm": 0.25957333633861795, |
| "learning_rate": 4.275970619097587e-05, |
| "loss": 0.4422, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.6935598018400566, |
| "grad_norm": 0.3555889031083105, |
| "learning_rate": 4.273347324239245e-05, |
| "loss": 0.4441, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.6949752300070772, |
| "grad_norm": 0.2807205711336829, |
| "learning_rate": 4.2707240293809024e-05, |
| "loss": 0.4608, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.6963906581740976, |
| "grad_norm": 0.26805299842014746, |
| "learning_rate": 4.2681007345225604e-05, |
| "loss": 0.459, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.6978060863411182, |
| "grad_norm": 0.29511422434574813, |
| "learning_rate": 4.2654774396642184e-05, |
| "loss": 0.4341, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.6992215145081387, |
| "grad_norm": 0.2908216275567921, |
| "learning_rate": 4.2628541448058764e-05, |
| "loss": 0.4426, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.7006369426751592, |
| "grad_norm": 0.2815692706701559, |
| "learning_rate": 4.260230849947534e-05, |
| "loss": 0.4804, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.7020523708421798, |
| "grad_norm": 0.2734488463505051, |
| "learning_rate": 4.257607555089192e-05, |
| "loss": 0.4264, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.7034677990092003, |
| "grad_norm": 0.26114056612679676, |
| "learning_rate": 4.2549842602308505e-05, |
| "loss": 0.4479, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.7048832271762208, |
| "grad_norm": 0.2985639437723813, |
| "learning_rate": 4.2523609653725086e-05, |
| "loss": 0.4454, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.7062986553432413, |
| "grad_norm": 1.5976620033335418, |
| "learning_rate": 4.249737670514166e-05, |
| "loss": 0.4326, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.7077140835102619, |
| "grad_norm": 0.32874439874375233, |
| "learning_rate": 4.247114375655824e-05, |
| "loss": 0.4279, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.7091295116772823, |
| "grad_norm": 0.26165102344785035, |
| "learning_rate": 4.244491080797482e-05, |
| "loss": 0.43, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.7105449398443029, |
| "grad_norm": 0.3553343056755559, |
| "learning_rate": 4.24186778593914e-05, |
| "loss": 0.4367, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.7119603680113235, |
| "grad_norm": 0.2483556034336584, |
| "learning_rate": 4.239244491080798e-05, |
| "loss": 0.4404, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.7133757961783439, |
| "grad_norm": 0.3278205650791823, |
| "learning_rate": 4.2366211962224554e-05, |
| "loss": 0.4611, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.7147912243453645, |
| "grad_norm": 0.2690766337181193, |
| "learning_rate": 4.2339979013641134e-05, |
| "loss": 0.4498, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.716206652512385, |
| "grad_norm": 0.28512054363268413, |
| "learning_rate": 4.2313746065057714e-05, |
| "loss": 0.4379, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.7176220806794055, |
| "grad_norm": 0.3063504883919276, |
| "learning_rate": 4.2287513116474295e-05, |
| "loss": 0.4403, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.719037508846426, |
| "grad_norm": 0.23758970561440687, |
| "learning_rate": 4.226128016789087e-05, |
| "loss": 0.4283, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.7204529370134466, |
| "grad_norm": 0.27749982064530393, |
| "learning_rate": 4.223504721930745e-05, |
| "loss": 0.4632, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.721868365180467, |
| "grad_norm": 0.2662941743049646, |
| "learning_rate": 4.2208814270724035e-05, |
| "loss": 0.4357, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.7232837933474876, |
| "grad_norm": 0.2771224811633922, |
| "learning_rate": 4.2182581322140616e-05, |
| "loss": 0.4626, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.7246992215145082, |
| "grad_norm": 0.25271868649341617, |
| "learning_rate": 4.215634837355719e-05, |
| "loss": 0.4311, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.7261146496815286, |
| "grad_norm": 0.2889571898292604, |
| "learning_rate": 4.213011542497377e-05, |
| "loss": 0.4617, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.7275300778485492, |
| "grad_norm": 0.26364411492956125, |
| "learning_rate": 4.210388247639035e-05, |
| "loss": 0.4415, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.7289455060155697, |
| "grad_norm": 0.2931666870844134, |
| "learning_rate": 4.207764952780693e-05, |
| "loss": 0.4502, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.7303609341825902, |
| "grad_norm": 0.258645009906452, |
| "learning_rate": 4.205141657922351e-05, |
| "loss": 0.4497, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.7317763623496107, |
| "grad_norm": 0.24592005572202322, |
| "learning_rate": 4.2025183630640084e-05, |
| "loss": 0.4173, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.7331917905166313, |
| "grad_norm": 0.2917455139254434, |
| "learning_rate": 4.1998950682056664e-05, |
| "loss": 0.4273, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.7346072186836518, |
| "grad_norm": 0.26150067898128604, |
| "learning_rate": 4.1972717733473244e-05, |
| "loss": 0.4572, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.7360226468506723, |
| "grad_norm": 0.3061571725238614, |
| "learning_rate": 4.1946484784889825e-05, |
| "loss": 0.4417, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.7374380750176929, |
| "grad_norm": 0.2889083585606234, |
| "learning_rate": 4.19202518363064e-05, |
| "loss": 0.456, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.7388535031847133, |
| "grad_norm": 0.24657878589038265, |
| "learning_rate": 4.189401888772298e-05, |
| "loss": 0.4351, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.7402689313517339, |
| "grad_norm": 0.2961009417266799, |
| "learning_rate": 4.186778593913956e-05, |
| "loss": 0.4245, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.7416843595187544, |
| "grad_norm": 0.29000447963640213, |
| "learning_rate": 4.1841552990556146e-05, |
| "loss": 0.4235, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.7430997876857749, |
| "grad_norm": 0.24072534315865696, |
| "learning_rate": 4.181532004197272e-05, |
| "loss": 0.4555, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.7445152158527955, |
| "grad_norm": 0.311490815899024, |
| "learning_rate": 4.17890870933893e-05, |
| "loss": 0.4392, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.745930644019816, |
| "grad_norm": 0.25115626974902694, |
| "learning_rate": 4.176285414480588e-05, |
| "loss": 0.4452, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.7473460721868365, |
| "grad_norm": 0.25670617969187803, |
| "learning_rate": 4.173662119622246e-05, |
| "loss": 0.4559, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.748761500353857, |
| "grad_norm": 0.2562352740508127, |
| "learning_rate": 4.171038824763904e-05, |
| "loss": 0.4509, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.7501769285208776, |
| "grad_norm": 0.26465908253857906, |
| "learning_rate": 4.1684155299055614e-05, |
| "loss": 0.4407, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.7515923566878981, |
| "grad_norm": 0.2558921043589055, |
| "learning_rate": 4.1657922350472194e-05, |
| "loss": 0.4562, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.7530077848549186, |
| "grad_norm": 0.2556217189091007, |
| "learning_rate": 4.1631689401888775e-05, |
| "loss": 0.4524, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.7544232130219392, |
| "grad_norm": 0.2768146188546652, |
| "learning_rate": 4.1605456453305355e-05, |
| "loss": 0.433, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.7558386411889597, |
| "grad_norm": 0.25924426442650855, |
| "learning_rate": 4.157922350472193e-05, |
| "loss": 0.4288, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.7572540693559802, |
| "grad_norm": 0.3028465063884449, |
| "learning_rate": 4.155299055613851e-05, |
| "loss": 0.4655, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.7586694975230007, |
| "grad_norm": 0.28990196149589315, |
| "learning_rate": 4.152675760755509e-05, |
| "loss": 0.4438, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.7600849256900213, |
| "grad_norm": 0.29965783174238914, |
| "learning_rate": 4.150052465897167e-05, |
| "loss": 0.4321, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.7615003538570417, |
| "grad_norm": 0.2704665797850079, |
| "learning_rate": 4.147429171038825e-05, |
| "loss": 0.442, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.7629157820240623, |
| "grad_norm": 0.2846292282729222, |
| "learning_rate": 4.144805876180483e-05, |
| "loss": 0.4458, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.7643312101910829, |
| "grad_norm": 0.3351484846406856, |
| "learning_rate": 4.142182581322141e-05, |
| "loss": 0.4483, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.7657466383581033, |
| "grad_norm": 0.23511671534196746, |
| "learning_rate": 4.139559286463799e-05, |
| "loss": 0.4172, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.7671620665251239, |
| "grad_norm": 0.29165081709888513, |
| "learning_rate": 4.136935991605457e-05, |
| "loss": 0.4479, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.7685774946921444, |
| "grad_norm": 0.26675297246825325, |
| "learning_rate": 4.1343126967471144e-05, |
| "loss": 0.4436, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.7699929228591649, |
| "grad_norm": 0.2543439669312433, |
| "learning_rate": 4.1316894018887724e-05, |
| "loss": 0.4325, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.7714083510261854, |
| "grad_norm": 0.2647881854880466, |
| "learning_rate": 4.1290661070304305e-05, |
| "loss": 0.4334, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.772823779193206, |
| "grad_norm": 0.2659462907807334, |
| "learning_rate": 4.1264428121720885e-05, |
| "loss": 0.4373, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.7742392073602264, |
| "grad_norm": 0.31454427433265036, |
| "learning_rate": 4.123819517313746e-05, |
| "loss": 0.4356, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.775654635527247, |
| "grad_norm": 0.23037668235704842, |
| "learning_rate": 4.121196222455404e-05, |
| "loss": 0.4223, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.7770700636942676, |
| "grad_norm": 0.3038793619159252, |
| "learning_rate": 4.118572927597062e-05, |
| "loss": 0.4354, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.778485491861288, |
| "grad_norm": 0.2503175095869354, |
| "learning_rate": 4.11594963273872e-05, |
| "loss": 0.4248, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.7799009200283086, |
| "grad_norm": 0.23469319529918808, |
| "learning_rate": 4.113326337880378e-05, |
| "loss": 0.4359, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.7813163481953291, |
| "grad_norm": 0.3377368475859871, |
| "learning_rate": 4.110703043022036e-05, |
| "loss": 0.4602, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.7827317763623496, |
| "grad_norm": 0.26622970488718106, |
| "learning_rate": 4.108079748163694e-05, |
| "loss": 0.445, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.7841472045293701, |
| "grad_norm": 0.284056764258355, |
| "learning_rate": 4.105456453305352e-05, |
| "loss": 0.4293, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.7855626326963907, |
| "grad_norm": 0.2768862589880146, |
| "learning_rate": 4.10283315844701e-05, |
| "loss": 0.4501, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.7869780608634112, |
| "grad_norm": 0.24905867903454612, |
| "learning_rate": 4.1002098635886674e-05, |
| "loss": 0.4329, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.7883934890304317, |
| "grad_norm": 0.24635328000816292, |
| "learning_rate": 4.0975865687303255e-05, |
| "loss": 0.4364, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.7898089171974523, |
| "grad_norm": 0.2746854679948567, |
| "learning_rate": 4.0949632738719835e-05, |
| "loss": 0.4619, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.7912243453644727, |
| "grad_norm": 0.23544898533832812, |
| "learning_rate": 4.0923399790136415e-05, |
| "loss": 0.4235, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.7926397735314933, |
| "grad_norm": 0.2893309340044802, |
| "learning_rate": 4.089716684155299e-05, |
| "loss": 0.4686, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.7940552016985138, |
| "grad_norm": 0.25713408416266553, |
| "learning_rate": 4.087093389296957e-05, |
| "loss": 0.4613, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.7954706298655343, |
| "grad_norm": 0.283343162310229, |
| "learning_rate": 4.084470094438615e-05, |
| "loss": 0.4652, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.7968860580325549, |
| "grad_norm": 0.24124503356161994, |
| "learning_rate": 4.081846799580273e-05, |
| "loss": 0.4403, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.7983014861995754, |
| "grad_norm": 0.2684086698707159, |
| "learning_rate": 4.079223504721931e-05, |
| "loss": 0.4398, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.7997169143665959, |
| "grad_norm": 0.2467316807541914, |
| "learning_rate": 4.076600209863588e-05, |
| "loss": 0.4345, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.8011323425336164, |
| "grad_norm": 0.24452592123968678, |
| "learning_rate": 4.073976915005247e-05, |
| "loss": 0.4254, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.802547770700637, |
| "grad_norm": 0.2487243164721359, |
| "learning_rate": 4.071353620146905e-05, |
| "loss": 0.4287, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.8039631988676574, |
| "grad_norm": 0.24641304492332408, |
| "learning_rate": 4.068730325288563e-05, |
| "loss": 0.4407, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.805378627034678, |
| "grad_norm": 0.25730898378645617, |
| "learning_rate": 4.0661070304302204e-05, |
| "loss": 0.4455, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.8067940552016986, |
| "grad_norm": 0.26003093220287893, |
| "learning_rate": 4.0634837355718785e-05, |
| "loss": 0.4304, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.808209483368719, |
| "grad_norm": 0.2589801413670949, |
| "learning_rate": 4.0608604407135365e-05, |
| "loss": 0.4289, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.8096249115357396, |
| "grad_norm": 0.2314003574658966, |
| "learning_rate": 4.0582371458551945e-05, |
| "loss": 0.4323, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.8110403397027601, |
| "grad_norm": 0.25102135230481415, |
| "learning_rate": 4.055613850996852e-05, |
| "loss": 0.4302, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.8124557678697806, |
| "grad_norm": 0.23891828131337306, |
| "learning_rate": 4.05299055613851e-05, |
| "loss": 0.4266, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.8138711960368011, |
| "grad_norm": 0.2515143782722983, |
| "learning_rate": 4.050367261280168e-05, |
| "loss": 0.4401, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.8152866242038217, |
| "grad_norm": 0.30978202190326437, |
| "learning_rate": 4.047743966421826e-05, |
| "loss": 0.4196, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.8167020523708421, |
| "grad_norm": 0.2596078416967971, |
| "learning_rate": 4.045120671563484e-05, |
| "loss": 0.4193, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.8181174805378627, |
| "grad_norm": 0.2648377726503376, |
| "learning_rate": 4.0424973767051413e-05, |
| "loss": 0.4334, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.8195329087048833, |
| "grad_norm": 0.3033956340660034, |
| "learning_rate": 4.0398740818467994e-05, |
| "loss": 0.4491, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.8209483368719037, |
| "grad_norm": 0.23790213249393122, |
| "learning_rate": 4.037250786988458e-05, |
| "loss": 0.4156, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.8223637650389243, |
| "grad_norm": 0.2704426474419847, |
| "learning_rate": 4.034627492130116e-05, |
| "loss": 0.4409, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.8237791932059448, |
| "grad_norm": 0.2525590290369702, |
| "learning_rate": 4.0320041972717735e-05, |
| "loss": 0.4552, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.8251946213729653, |
| "grad_norm": 0.2820030156719851, |
| "learning_rate": 4.0293809024134315e-05, |
| "loss": 0.4334, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.8266100495399858, |
| "grad_norm": 0.27633787683248595, |
| "learning_rate": 4.0267576075550895e-05, |
| "loss": 0.4287, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.8280254777070064, |
| "grad_norm": 0.2591953413517348, |
| "learning_rate": 4.0241343126967475e-05, |
| "loss": 0.4416, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.8294409058740269, |
| "grad_norm": 0.2575631752650505, |
| "learning_rate": 4.021511017838405e-05, |
| "loss": 0.4427, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.8308563340410474, |
| "grad_norm": 0.233283484588631, |
| "learning_rate": 4.018887722980063e-05, |
| "loss": 0.4354, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.832271762208068, |
| "grad_norm": 0.25412762816285034, |
| "learning_rate": 4.016264428121721e-05, |
| "loss": 0.4349, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.8336871903750884, |
| "grad_norm": 0.25097603215877484, |
| "learning_rate": 4.013641133263379e-05, |
| "loss": 0.4576, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.835102618542109, |
| "grad_norm": 0.2668490361059998, |
| "learning_rate": 4.011017838405037e-05, |
| "loss": 0.4166, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.8365180467091295, |
| "grad_norm": 0.2615497111859223, |
| "learning_rate": 4.0083945435466944e-05, |
| "loss": 0.4461, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.83793347487615, |
| "grad_norm": 0.2626945943986861, |
| "learning_rate": 4.0057712486883524e-05, |
| "loss": 0.4296, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.8393489030431706, |
| "grad_norm": 0.24275266812210086, |
| "learning_rate": 4.003147953830011e-05, |
| "loss": 0.4191, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.8407643312101911, |
| "grad_norm": 0.2580872822583186, |
| "learning_rate": 4.000524658971669e-05, |
| "loss": 0.4176, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.8421797593772116, |
| "grad_norm": 0.21605702070130375, |
| "learning_rate": 3.9979013641133265e-05, |
| "loss": 0.4379, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.8435951875442321, |
| "grad_norm": 0.2451930026518869, |
| "learning_rate": 3.9952780692549845e-05, |
| "loss": 0.4306, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.8450106157112527, |
| "grad_norm": 0.22624512326095705, |
| "learning_rate": 3.9926547743966425e-05, |
| "loss": 0.4167, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.8464260438782731, |
| "grad_norm": 0.26298017826623193, |
| "learning_rate": 3.9900314795383006e-05, |
| "loss": 0.4421, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.8478414720452937, |
| "grad_norm": 0.2565705411358314, |
| "learning_rate": 3.987408184679958e-05, |
| "loss": 0.4543, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.8492569002123143, |
| "grad_norm": 0.2836966769986463, |
| "learning_rate": 3.984784889821616e-05, |
| "loss": 0.449, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.8506723283793347, |
| "grad_norm": 0.33121523517986445, |
| "learning_rate": 3.982161594963274e-05, |
| "loss": 0.4524, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.8520877565463553, |
| "grad_norm": 0.2728582746620306, |
| "learning_rate": 3.979538300104932e-05, |
| "loss": 0.4296, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.8535031847133758, |
| "grad_norm": 0.24504881169831788, |
| "learning_rate": 3.97691500524659e-05, |
| "loss": 0.4308, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.8549186128803963, |
| "grad_norm": 0.2556664330044345, |
| "learning_rate": 3.9742917103882474e-05, |
| "loss": 0.4419, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.8563340410474168, |
| "grad_norm": 0.249942745144725, |
| "learning_rate": 3.9716684155299054e-05, |
| "loss": 0.4316, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.8577494692144374, |
| "grad_norm": 0.25277013855046737, |
| "learning_rate": 3.9690451206715634e-05, |
| "loss": 0.4177, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.8591648973814578, |
| "grad_norm": 0.27490926708863284, |
| "learning_rate": 3.966421825813222e-05, |
| "loss": 0.4415, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.8605803255484784, |
| "grad_norm": 0.25363680318434917, |
| "learning_rate": 3.9637985309548795e-05, |
| "loss": 0.4435, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.861995753715499, |
| "grad_norm": 0.24172325545678805, |
| "learning_rate": 3.9611752360965375e-05, |
| "loss": 0.4441, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.8634111818825194, |
| "grad_norm": 0.24821946310064713, |
| "learning_rate": 3.9585519412381956e-05, |
| "loss": 0.4262, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.86482661004954, |
| "grad_norm": 0.2537246262286817, |
| "learning_rate": 3.9559286463798536e-05, |
| "loss": 0.4523, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.8662420382165605, |
| "grad_norm": 0.25925970718854424, |
| "learning_rate": 3.953305351521511e-05, |
| "loss": 0.4515, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.867657466383581, |
| "grad_norm": 0.23455080583476018, |
| "learning_rate": 3.950682056663169e-05, |
| "loss": 0.4141, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.8690728945506015, |
| "grad_norm": 0.265557541743459, |
| "learning_rate": 3.948058761804827e-05, |
| "loss": 0.4524, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.8704883227176221, |
| "grad_norm": 0.23864799283771135, |
| "learning_rate": 3.945435466946485e-05, |
| "loss": 0.4288, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.8719037508846426, |
| "grad_norm": 0.2637664502630157, |
| "learning_rate": 3.942812172088143e-05, |
| "loss": 0.4677, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.8733191790516631, |
| "grad_norm": 0.22574671608165228, |
| "learning_rate": 3.9401888772298004e-05, |
| "loss": 0.4587, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.8747346072186837, |
| "grad_norm": 0.26419008526198395, |
| "learning_rate": 3.9375655823714584e-05, |
| "loss": 0.4483, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.8761500353857041, |
| "grad_norm": 0.24585359511846885, |
| "learning_rate": 3.9349422875131165e-05, |
| "loss": 0.4284, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.8775654635527247, |
| "grad_norm": 0.23905515655418283, |
| "learning_rate": 3.9323189926547745e-05, |
| "loss": 0.4312, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.8789808917197452, |
| "grad_norm": 0.2728477728005644, |
| "learning_rate": 3.9296956977964325e-05, |
| "loss": 0.4227, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.8803963198867657, |
| "grad_norm": 0.23520624771873033, |
| "learning_rate": 3.9270724029380905e-05, |
| "loss": 0.4326, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.8818117480537863, |
| "grad_norm": 0.26074161845119215, |
| "learning_rate": 3.9244491080797486e-05, |
| "loss": 0.4234, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.8832271762208068, |
| "grad_norm": 0.28690755639338716, |
| "learning_rate": 3.9218258132214066e-05, |
| "loss": 0.4135, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.8846426043878273, |
| "grad_norm": 0.2787858417232892, |
| "learning_rate": 3.9192025183630646e-05, |
| "loss": 0.4399, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.8860580325548478, |
| "grad_norm": 0.2650050206775565, |
| "learning_rate": 3.916579223504722e-05, |
| "loss": 0.4313, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.8874734607218684, |
| "grad_norm": 0.28073341233108806, |
| "learning_rate": 3.91395592864638e-05, |
| "loss": 0.4358, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.8888888888888888, |
| "grad_norm": 0.2903436499787625, |
| "learning_rate": 3.911332633788038e-05, |
| "loss": 0.4647, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.8903043170559094, |
| "grad_norm": 0.25408490569862163, |
| "learning_rate": 3.908709338929696e-05, |
| "loss": 0.4158, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.89171974522293, |
| "grad_norm": 0.2954359201711501, |
| "learning_rate": 3.9060860440713534e-05, |
| "loss": 0.4211, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.8931351733899504, |
| "grad_norm": 0.2560466892940268, |
| "learning_rate": 3.9034627492130114e-05, |
| "loss": 0.4352, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.894550601556971, |
| "grad_norm": 0.2695807117497625, |
| "learning_rate": 3.9008394543546695e-05, |
| "loss": 0.4399, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.8959660297239915, |
| "grad_norm": 0.25329687351083074, |
| "learning_rate": 3.8982161594963275e-05, |
| "loss": 0.4069, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.897381457891012, |
| "grad_norm": 0.2636093344080135, |
| "learning_rate": 3.8955928646379855e-05, |
| "loss": 0.4561, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.8987968860580325, |
| "grad_norm": 0.2827350978156938, |
| "learning_rate": 3.8929695697796436e-05, |
| "loss": 0.4432, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.9002123142250531, |
| "grad_norm": 0.22168217202680376, |
| "learning_rate": 3.8903462749213016e-05, |
| "loss": 0.4257, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.9016277423920736, |
| "grad_norm": 0.2738450503933235, |
| "learning_rate": 3.8877229800629596e-05, |
| "loss": 0.4265, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.9030431705590941, |
| "grad_norm": 0.24444045291322025, |
| "learning_rate": 3.8850996852046176e-05, |
| "loss": 0.4088, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.9044585987261147, |
| "grad_norm": 0.26343060033893795, |
| "learning_rate": 3.882476390346275e-05, |
| "loss": 0.4331, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.9058740268931351, |
| "grad_norm": 0.23493361383383915, |
| "learning_rate": 3.879853095487933e-05, |
| "loss": 0.4291, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.9072894550601557, |
| "grad_norm": 0.22921948270380707, |
| "learning_rate": 3.877229800629591e-05, |
| "loss": 0.424, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.9087048832271762, |
| "grad_norm": 0.23124241606530554, |
| "learning_rate": 3.874606505771249e-05, |
| "loss": 0.4268, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.9101203113941967, |
| "grad_norm": 0.22397166748586314, |
| "learning_rate": 3.8719832109129064e-05, |
| "loss": 0.4356, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.9115357395612173, |
| "grad_norm": 0.23526999597742354, |
| "learning_rate": 3.8693599160545645e-05, |
| "loss": 0.4405, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.9129511677282378, |
| "grad_norm": 0.23143080534593588, |
| "learning_rate": 3.8667366211962225e-05, |
| "loss": 0.428, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.9143665958952583, |
| "grad_norm": 0.2398969695399371, |
| "learning_rate": 3.8641133263378805e-05, |
| "loss": 0.413, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.9157820240622788, |
| "grad_norm": 0.265000208973342, |
| "learning_rate": 3.8614900314795385e-05, |
| "loss": 0.4453, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.9171974522292994, |
| "grad_norm": 0.2314365688408304, |
| "learning_rate": 3.858866736621196e-05, |
| "loss": 0.4324, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.9186128803963199, |
| "grad_norm": 0.25430068981454346, |
| "learning_rate": 3.8562434417628546e-05, |
| "loss": 0.4375, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.9200283085633404, |
| "grad_norm": 0.26196086355330445, |
| "learning_rate": 3.8536201469045126e-05, |
| "loss": 0.4449, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.921443736730361, |
| "grad_norm": 0.24954221725629927, |
| "learning_rate": 3.8509968520461707e-05, |
| "loss": 0.4245, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.9228591648973815, |
| "grad_norm": 0.27419111732405277, |
| "learning_rate": 3.848373557187828e-05, |
| "loss": 0.4369, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.924274593064402, |
| "grad_norm": 0.22989973319759824, |
| "learning_rate": 3.845750262329486e-05, |
| "loss": 0.4089, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.9256900212314225, |
| "grad_norm": 0.27756466437594846, |
| "learning_rate": 3.843126967471144e-05, |
| "loss": 0.4175, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.9271054493984431, |
| "grad_norm": 0.23523394186249474, |
| "learning_rate": 3.840503672612802e-05, |
| "loss": 0.4359, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.9285208775654635, |
| "grad_norm": 0.23806322529924434, |
| "learning_rate": 3.8378803777544594e-05, |
| "loss": 0.4304, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.9299363057324841, |
| "grad_norm": 0.2251930698576581, |
| "learning_rate": 3.8352570828961175e-05, |
| "loss": 0.4351, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.9313517338995047, |
| "grad_norm": 0.2336333323067211, |
| "learning_rate": 3.8326337880377755e-05, |
| "loss": 0.4159, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.9327671620665251, |
| "grad_norm": 0.24200797436131163, |
| "learning_rate": 3.8300104931794335e-05, |
| "loss": 0.4309, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.9341825902335457, |
| "grad_norm": 0.23043702492712964, |
| "learning_rate": 3.8273871983210916e-05, |
| "loss": 0.4388, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.9355980184005662, |
| "grad_norm": 0.24789726741995202, |
| "learning_rate": 3.824763903462749e-05, |
| "loss": 0.4313, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.9370134465675867, |
| "grad_norm": 0.22798645877919682, |
| "learning_rate": 3.8221406086044076e-05, |
| "loss": 0.4258, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.9384288747346072, |
| "grad_norm": 0.27442869920495, |
| "learning_rate": 3.8195173137460656e-05, |
| "loss": 0.4223, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.9398443029016278, |
| "grad_norm": 0.22800353019756675, |
| "learning_rate": 3.816894018887724e-05, |
| "loss": 0.444, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.9412597310686482, |
| "grad_norm": 0.2962156144416487, |
| "learning_rate": 3.814270724029381e-05, |
| "loss": 0.4267, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.9426751592356688, |
| "grad_norm": 0.23959166750933444, |
| "learning_rate": 3.811647429171039e-05, |
| "loss": 0.4372, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.9440905874026894, |
| "grad_norm": 0.2455901443670002, |
| "learning_rate": 3.809024134312697e-05, |
| "loss": 0.4252, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.9455060155697098, |
| "grad_norm": 0.3204269037975826, |
| "learning_rate": 3.806400839454355e-05, |
| "loss": 0.4338, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.9469214437367304, |
| "grad_norm": 0.23500461276640994, |
| "learning_rate": 3.8037775445960125e-05, |
| "loss": 0.429, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.9483368719037509, |
| "grad_norm": 0.27072050273435716, |
| "learning_rate": 3.8011542497376705e-05, |
| "loss": 0.4602, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.9497523000707714, |
| "grad_norm": 0.30323135734042067, |
| "learning_rate": 3.7985309548793285e-05, |
| "loss": 0.4425, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.9511677282377919, |
| "grad_norm": 0.2913058978530256, |
| "learning_rate": 3.7959076600209865e-05, |
| "loss": 0.4261, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.9525831564048125, |
| "grad_norm": 0.30067495620914264, |
| "learning_rate": 3.7932843651626446e-05, |
| "loss": 0.4365, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.953998584571833, |
| "grad_norm": 0.3342344094135603, |
| "learning_rate": 3.790661070304302e-05, |
| "loss": 0.409, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.9554140127388535, |
| "grad_norm": 0.2245576022075877, |
| "learning_rate": 3.78803777544596e-05, |
| "loss": 0.4269, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.9568294409058741, |
| "grad_norm": 0.30343794677876584, |
| "learning_rate": 3.7854144805876187e-05, |
| "loss": 0.4483, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.9582448690728945, |
| "grad_norm": 0.3063562804828594, |
| "learning_rate": 3.782791185729277e-05, |
| "loss": 0.4299, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.9596602972399151, |
| "grad_norm": 0.26645005492549423, |
| "learning_rate": 3.780167890870934e-05, |
| "loss": 0.4405, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.9610757254069356, |
| "grad_norm": 0.3061269234294929, |
| "learning_rate": 3.777544596012592e-05, |
| "loss": 0.431, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.9624911535739561, |
| "grad_norm": 0.28465396623067296, |
| "learning_rate": 3.77492130115425e-05, |
| "loss": 0.4476, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.9639065817409767, |
| "grad_norm": 0.27440327110271684, |
| "learning_rate": 3.772298006295908e-05, |
| "loss": 0.4332, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.9653220099079972, |
| "grad_norm": 0.3203946284360972, |
| "learning_rate": 3.7696747114375655e-05, |
| "loss": 0.4054, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.9667374380750177, |
| "grad_norm": 0.2757673656158921, |
| "learning_rate": 3.7670514165792235e-05, |
| "loss": 0.4128, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.9681528662420382, |
| "grad_norm": 0.23866159456452424, |
| "learning_rate": 3.7644281217208815e-05, |
| "loss": 0.4383, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.9695682944090588, |
| "grad_norm": 0.3292539337191727, |
| "learning_rate": 3.7618048268625396e-05, |
| "loss": 0.4325, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.9709837225760792, |
| "grad_norm": 0.26397915273129924, |
| "learning_rate": 3.7591815320041976e-05, |
| "loss": 0.4352, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.9723991507430998, |
| "grad_norm": 0.30004166885793, |
| "learning_rate": 3.756558237145855e-05, |
| "loss": 0.4432, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.9738145789101204, |
| "grad_norm": 0.33248547118937477, |
| "learning_rate": 3.753934942287513e-05, |
| "loss": 0.4097, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.9752300070771408, |
| "grad_norm": 0.27787804232537733, |
| "learning_rate": 3.751311647429171e-05, |
| "loss": 0.4366, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.9766454352441614, |
| "grad_norm": 0.2789774810347891, |
| "learning_rate": 3.74868835257083e-05, |
| "loss": 0.4287, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.9780608634111819, |
| "grad_norm": 0.32553641228835045, |
| "learning_rate": 3.746065057712487e-05, |
| "loss": 0.4479, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.9794762915782024, |
| "grad_norm": 0.2984969942135477, |
| "learning_rate": 3.743441762854145e-05, |
| "loss": 0.4449, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.9808917197452229, |
| "grad_norm": 0.26360136789202876, |
| "learning_rate": 3.740818467995803e-05, |
| "loss": 0.4169, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.9823071479122435, |
| "grad_norm": 0.3002600679766135, |
| "learning_rate": 3.738195173137461e-05, |
| "loss": 0.4327, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.9837225760792639, |
| "grad_norm": 0.26761460969924294, |
| "learning_rate": 3.7355718782791185e-05, |
| "loss": 0.4295, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.9851380042462845, |
| "grad_norm": 0.2774009798673986, |
| "learning_rate": 3.7329485834207765e-05, |
| "loss": 0.4321, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.9865534324133051, |
| "grad_norm": 0.2963916085601546, |
| "learning_rate": 3.7303252885624345e-05, |
| "loss": 0.418, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.9879688605803255, |
| "grad_norm": 0.22876594613173415, |
| "learning_rate": 3.7277019937040926e-05, |
| "loss": 0.4214, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.9893842887473461, |
| "grad_norm": 0.2556840202116241, |
| "learning_rate": 3.7250786988457506e-05, |
| "loss": 0.4431, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.9907997169143666, |
| "grad_norm": 0.2347036488614427, |
| "learning_rate": 3.722455403987408e-05, |
| "loss": 0.4317, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.9922151450813871, |
| "grad_norm": 0.26576919366732943, |
| "learning_rate": 3.719832109129066e-05, |
| "loss": 0.4173, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.9936305732484076, |
| "grad_norm": 0.24149398831154964, |
| "learning_rate": 3.717208814270724e-05, |
| "loss": 0.4498, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.9950460014154282, |
| "grad_norm": 0.24034887998143845, |
| "learning_rate": 3.714585519412382e-05, |
| "loss": 0.4434, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.9964614295824487, |
| "grad_norm": 0.25419984417706315, |
| "learning_rate": 3.71196222455404e-05, |
| "loss": 0.441, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.9978768577494692, |
| "grad_norm": 0.22469194229972944, |
| "learning_rate": 3.709338929695698e-05, |
| "loss": 0.444, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.9992922859164898, |
| "grad_norm": 0.25259247575525834, |
| "learning_rate": 3.706715634837356e-05, |
| "loss": 0.4148, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.25259247575525834, |
| "learning_rate": 3.704092339979014e-05, |
| "loss": 0.4196, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.0014154281670204, |
| "grad_norm": 0.40330885122332444, |
| "learning_rate": 3.7014690451206715e-05, |
| "loss": 0.3866, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.0028308563340411, |
| "grad_norm": 0.25073682692017346, |
| "learning_rate": 3.6988457502623295e-05, |
| "loss": 0.3448, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.0042462845010616, |
| "grad_norm": 0.2853163729929654, |
| "learning_rate": 3.6962224554039876e-05, |
| "loss": 0.3736, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.005661712668082, |
| "grad_norm": 0.27424387603392747, |
| "learning_rate": 3.6935991605456456e-05, |
| "loss": 0.3744, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.0070771408351027, |
| "grad_norm": 0.37706972872591604, |
| "learning_rate": 3.6909758656873036e-05, |
| "loss": 0.392, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.0084925690021231, |
| "grad_norm": 0.26747508609564463, |
| "learning_rate": 3.688352570828961e-05, |
| "loss": 0.375, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.0099079971691436, |
| "grad_norm": 0.2511475510195298, |
| "learning_rate": 3.685729275970619e-05, |
| "loss": 0.3614, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.0113234253361643, |
| "grad_norm": 0.2874709688360536, |
| "learning_rate": 3.683105981112277e-05, |
| "loss": 0.3633, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.0127388535031847, |
| "grad_norm": 0.2664187385028287, |
| "learning_rate": 3.680482686253935e-05, |
| "loss": 0.3637, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.0141542816702052, |
| "grad_norm": 0.2849073293934639, |
| "learning_rate": 3.677859391395593e-05, |
| "loss": 0.3692, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.0155697098372258, |
| "grad_norm": 0.24917420653285635, |
| "learning_rate": 3.675236096537251e-05, |
| "loss": 0.3767, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.0169851380042463, |
| "grad_norm": 0.25923165709703305, |
| "learning_rate": 3.672612801678909e-05, |
| "loss": 0.3672, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.0184005661712667, |
| "grad_norm": 0.24804588645407025, |
| "learning_rate": 3.669989506820567e-05, |
| "loss": 0.385, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.0198159943382874, |
| "grad_norm": 0.27820354930045915, |
| "learning_rate": 3.6673662119622245e-05, |
| "loss": 0.3797, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.0212314225053079, |
| "grad_norm": 0.2571375959428547, |
| "learning_rate": 3.6647429171038825e-05, |
| "loss": 0.371, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.0226468506723283, |
| "grad_norm": 0.22299950541449304, |
| "learning_rate": 3.6621196222455406e-05, |
| "loss": 0.3729, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.024062278839349, |
| "grad_norm": 0.2672635859495513, |
| "learning_rate": 3.6594963273871986e-05, |
| "loss": 0.3673, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.0254777070063694, |
| "grad_norm": 0.2238148343196082, |
| "learning_rate": 3.6568730325288566e-05, |
| "loss": 0.3525, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.0268931351733899, |
| "grad_norm": 0.21943937606585898, |
| "learning_rate": 3.654249737670514e-05, |
| "loss": 0.3448, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.0283085633404105, |
| "grad_norm": 0.2286191309250446, |
| "learning_rate": 3.651626442812172e-05, |
| "loss": 0.3634, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.029723991507431, |
| "grad_norm": 0.21770251056841974, |
| "learning_rate": 3.64900314795383e-05, |
| "loss": 0.3529, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.0311394196744514, |
| "grad_norm": 0.23287135537136047, |
| "learning_rate": 3.646379853095488e-05, |
| "loss": 0.3526, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.0325548478414721, |
| "grad_norm": 0.2770755741062495, |
| "learning_rate": 3.643756558237146e-05, |
| "loss": 0.3834, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.0339702760084926, |
| "grad_norm": 0.24566882013501856, |
| "learning_rate": 3.641133263378804e-05, |
| "loss": 0.3758, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.035385704175513, |
| "grad_norm": 0.24987011426830819, |
| "learning_rate": 3.638509968520462e-05, |
| "loss": 0.3651, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.0368011323425337, |
| "grad_norm": 0.31041065460026135, |
| "learning_rate": 3.63588667366212e-05, |
| "loss": 0.3715, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.0382165605095541, |
| "grad_norm": 0.24177259651765037, |
| "learning_rate": 3.6332633788037775e-05, |
| "loss": 0.3708, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.0396319886765746, |
| "grad_norm": 0.21996306286337883, |
| "learning_rate": 3.6306400839454356e-05, |
| "loss": 0.3765, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.0410474168435953, |
| "grad_norm": 0.25494992550486884, |
| "learning_rate": 3.6280167890870936e-05, |
| "loss": 0.3627, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.0424628450106157, |
| "grad_norm": 0.21505088357504742, |
| "learning_rate": 3.6253934942287516e-05, |
| "loss": 0.3598, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.0438782731776362, |
| "grad_norm": 0.2262185276339128, |
| "learning_rate": 3.6227701993704097e-05, |
| "loss": 0.3526, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.0452937013446568, |
| "grad_norm": 0.21645046957698266, |
| "learning_rate": 3.620146904512067e-05, |
| "loss": 0.3669, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.0467091295116773, |
| "grad_norm": 0.24673938200497808, |
| "learning_rate": 3.617523609653725e-05, |
| "loss": 0.3857, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.0481245576786977, |
| "grad_norm": 0.2553921948350425, |
| "learning_rate": 3.614900314795383e-05, |
| "loss": 0.3694, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.0495399858457184, |
| "grad_norm": 0.24339109836533926, |
| "learning_rate": 3.612277019937041e-05, |
| "loss": 0.3623, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.0509554140127388, |
| "grad_norm": 0.24481636850870547, |
| "learning_rate": 3.609653725078699e-05, |
| "loss": 0.3691, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.0523708421797593, |
| "grad_norm": 0.24077314804690694, |
| "learning_rate": 3.6070304302203565e-05, |
| "loss": 0.3884, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.05378627034678, |
| "grad_norm": 0.26055509725576326, |
| "learning_rate": 3.604407135362015e-05, |
| "loss": 0.3675, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.0552016985138004, |
| "grad_norm": 0.22320689999537213, |
| "learning_rate": 3.601783840503673e-05, |
| "loss": 0.3769, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.0566171266808209, |
| "grad_norm": 0.20575120215372295, |
| "learning_rate": 3.599160545645331e-05, |
| "loss": 0.3589, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.0580325548478415, |
| "grad_norm": 0.2258193270812288, |
| "learning_rate": 3.5965372507869886e-05, |
| "loss": 0.3655, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.059447983014862, |
| "grad_norm": 0.22200653826263894, |
| "learning_rate": 3.5939139559286466e-05, |
| "loss": 0.3663, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.0608634111818824, |
| "grad_norm": 0.23199985002095336, |
| "learning_rate": 3.5912906610703046e-05, |
| "loss": 0.3856, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.062278839348903, |
| "grad_norm": 0.23475624219008442, |
| "learning_rate": 3.588667366211963e-05, |
| "loss": 0.3646, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.0636942675159236, |
| "grad_norm": 0.2363507036132983, |
| "learning_rate": 3.58604407135362e-05, |
| "loss": 0.3475, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.065109695682944, |
| "grad_norm": 0.23554394717989005, |
| "learning_rate": 3.583420776495278e-05, |
| "loss": 0.3621, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.0665251238499647, |
| "grad_norm": 0.25712807151393957, |
| "learning_rate": 3.580797481636936e-05, |
| "loss": 0.3758, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.0679405520169851, |
| "grad_norm": 0.21980062026864217, |
| "learning_rate": 3.578174186778594e-05, |
| "loss": 0.3637, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.0693559801840056, |
| "grad_norm": 0.24587003401940713, |
| "learning_rate": 3.575550891920252e-05, |
| "loss": 0.3495, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.0707714083510262, |
| "grad_norm": 0.24943392215339394, |
| "learning_rate": 3.5729275970619095e-05, |
| "loss": 0.3668, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.0721868365180467, |
| "grad_norm": 0.2817878261350363, |
| "learning_rate": 3.5703043022035675e-05, |
| "loss": 0.3667, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.0736022646850671, |
| "grad_norm": 0.21980883296838102, |
| "learning_rate": 3.567681007345226e-05, |
| "loss": 0.3757, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.0750176928520878, |
| "grad_norm": 0.25569911421845076, |
| "learning_rate": 3.565057712486884e-05, |
| "loss": 0.3617, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.0764331210191083, |
| "grad_norm": 0.2510990438768182, |
| "learning_rate": 3.5624344176285416e-05, |
| "loss": 0.3755, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.0778485491861287, |
| "grad_norm": 0.24448758916499447, |
| "learning_rate": 3.5598111227701996e-05, |
| "loss": 0.3701, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.0792639773531494, |
| "grad_norm": 0.21955348175980843, |
| "learning_rate": 3.5571878279118577e-05, |
| "loss": 0.3614, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.0806794055201698, |
| "grad_norm": 0.2763996762537386, |
| "learning_rate": 3.554564533053516e-05, |
| "loss": 0.3725, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.0820948336871903, |
| "grad_norm": 0.22060232690914258, |
| "learning_rate": 3.551941238195173e-05, |
| "loss": 0.3546, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.083510261854211, |
| "grad_norm": 0.2690127459305308, |
| "learning_rate": 3.549317943336831e-05, |
| "loss": 0.3701, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.0849256900212314, |
| "grad_norm": 0.23378521213147532, |
| "learning_rate": 3.546694648478489e-05, |
| "loss": 0.3553, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.0863411181882519, |
| "grad_norm": 0.293776933751109, |
| "learning_rate": 3.544071353620147e-05, |
| "loss": 0.3867, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.0877565463552725, |
| "grad_norm": 0.2769516330391029, |
| "learning_rate": 3.541448058761805e-05, |
| "loss": 0.3777, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.089171974522293, |
| "grad_norm": 0.2766476933433498, |
| "learning_rate": 3.5388247639034625e-05, |
| "loss": 0.3654, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.0905874026893134, |
| "grad_norm": 0.24123409080089156, |
| "learning_rate": 3.5362014690451205e-05, |
| "loss": 0.3706, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.092002830856334, |
| "grad_norm": 0.24952465460683615, |
| "learning_rate": 3.5335781741867786e-05, |
| "loss": 0.3579, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.0934182590233545, |
| "grad_norm": 0.25575201537604786, |
| "learning_rate": 3.530954879328437e-05, |
| "loss": 0.3673, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.094833687190375, |
| "grad_norm": 0.23603855217803518, |
| "learning_rate": 3.5283315844700946e-05, |
| "loss": 0.3794, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.0962491153573957, |
| "grad_norm": 0.23779241121404207, |
| "learning_rate": 3.5257082896117526e-05, |
| "loss": 0.3864, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.0976645435244161, |
| "grad_norm": 0.25184642467770774, |
| "learning_rate": 3.523084994753411e-05, |
| "loss": 0.3533, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.0990799716914366, |
| "grad_norm": 0.22707567886508429, |
| "learning_rate": 3.520461699895069e-05, |
| "loss": 0.381, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.1004953998584572, |
| "grad_norm": 0.22382456529288564, |
| "learning_rate": 3.517838405036726e-05, |
| "loss": 0.3599, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.1019108280254777, |
| "grad_norm": 0.22643392806140442, |
| "learning_rate": 3.515215110178384e-05, |
| "loss": 0.3549, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.1033262561924981, |
| "grad_norm": 0.249318689583364, |
| "learning_rate": 3.512591815320042e-05, |
| "loss": 0.3767, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.1047416843595188, |
| "grad_norm": 0.22818899513469243, |
| "learning_rate": 3.5099685204617e-05, |
| "loss": 0.3526, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.1061571125265393, |
| "grad_norm": 0.22205582440125177, |
| "learning_rate": 3.507345225603358e-05, |
| "loss": 0.377, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.1075725406935597, |
| "grad_norm": 0.23715401218273183, |
| "learning_rate": 3.5047219307450155e-05, |
| "loss": 0.3631, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.1089879688605804, |
| "grad_norm": 0.24731495141449844, |
| "learning_rate": 3.5020986358866735e-05, |
| "loss": 0.357, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.1104033970276008, |
| "grad_norm": 0.21043244736337724, |
| "learning_rate": 3.4994753410283316e-05, |
| "loss": 0.3758, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.1118188251946213, |
| "grad_norm": 0.22034294434212712, |
| "learning_rate": 3.4968520461699896e-05, |
| "loss": 0.3656, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.113234253361642, |
| "grad_norm": 0.22364850456219934, |
| "learning_rate": 3.4942287513116476e-05, |
| "loss": 0.3573, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.1146496815286624, |
| "grad_norm": 0.2271183339148488, |
| "learning_rate": 3.4916054564533057e-05, |
| "loss": 0.3667, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.1160651096956828, |
| "grad_norm": 0.2395804298830507, |
| "learning_rate": 3.488982161594964e-05, |
| "loss": 0.3815, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.1174805378627035, |
| "grad_norm": 0.2280965467181193, |
| "learning_rate": 3.486358866736622e-05, |
| "loss": 0.3696, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.118895966029724, |
| "grad_norm": 0.27142514033989723, |
| "learning_rate": 3.483735571878279e-05, |
| "loss": 0.3635, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.1203113941967444, |
| "grad_norm": 0.25250123373484423, |
| "learning_rate": 3.481112277019937e-05, |
| "loss": 0.3637, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.121726822363765, |
| "grad_norm": 0.2409488628984419, |
| "learning_rate": 3.478488982161595e-05, |
| "loss": 0.3767, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.1231422505307855, |
| "grad_norm": 0.26703802478917377, |
| "learning_rate": 3.475865687303253e-05, |
| "loss": 0.3581, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.124557678697806, |
| "grad_norm": 0.23190208676783128, |
| "learning_rate": 3.473242392444911e-05, |
| "loss": 0.3379, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.1259731068648267, |
| "grad_norm": 0.26784040366950523, |
| "learning_rate": 3.4706190975865685e-05, |
| "loss": 0.367, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.127388535031847, |
| "grad_norm": 0.2191740845730247, |
| "learning_rate": 3.4679958027282266e-05, |
| "loss": 0.3731, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.1288039631988678, |
| "grad_norm": 0.26068999486444, |
| "learning_rate": 3.4653725078698846e-05, |
| "loss": 0.3631, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.1302193913658882, |
| "grad_norm": 0.2311127862371228, |
| "learning_rate": 3.4627492130115426e-05, |
| "loss": 0.3435, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.1316348195329087, |
| "grad_norm": 0.22156342608749344, |
| "learning_rate": 3.4601259181532006e-05, |
| "loss": 0.3791, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.1330502476999293, |
| "grad_norm": 0.23528767608314413, |
| "learning_rate": 3.457502623294859e-05, |
| "loss": 0.379, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.1344656758669498, |
| "grad_norm": 0.2588075699698271, |
| "learning_rate": 3.454879328436517e-05, |
| "loss": 0.387, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.1358811040339702, |
| "grad_norm": 0.22063634101813656, |
| "learning_rate": 3.452256033578175e-05, |
| "loss": 0.3783, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.137296532200991, |
| "grad_norm": 0.2759546955219502, |
| "learning_rate": 3.449632738719832e-05, |
| "loss": 0.353, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.1387119603680114, |
| "grad_norm": 0.2590375007075574, |
| "learning_rate": 3.44700944386149e-05, |
| "loss": 0.372, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.1401273885350318, |
| "grad_norm": 0.26235020909100687, |
| "learning_rate": 3.444386149003148e-05, |
| "loss": 0.3681, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.1415428167020525, |
| "grad_norm": 0.25990600045468054, |
| "learning_rate": 3.441762854144806e-05, |
| "loss": 0.3734, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.142958244869073, |
| "grad_norm": 0.25421808078588554, |
| "learning_rate": 3.439139559286464e-05, |
| "loss": 0.3643, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.1443736730360934, |
| "grad_norm": 0.25954139055763814, |
| "learning_rate": 3.4365162644281215e-05, |
| "loss": 0.3828, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.145789101203114, |
| "grad_norm": 0.2485386767677047, |
| "learning_rate": 3.4338929695697796e-05, |
| "loss": 0.3768, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.1472045293701345, |
| "grad_norm": 0.22979787035800808, |
| "learning_rate": 3.4312696747114376e-05, |
| "loss": 0.3636, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.148619957537155, |
| "grad_norm": 0.237713998539904, |
| "learning_rate": 3.4286463798530956e-05, |
| "loss": 0.3603, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.1500353857041756, |
| "grad_norm": 0.21183509960801158, |
| "learning_rate": 3.4260230849947537e-05, |
| "loss": 0.366, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.151450813871196, |
| "grad_norm": 0.2232984894405917, |
| "learning_rate": 3.423399790136412e-05, |
| "loss": 0.3552, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.1528662420382165, |
| "grad_norm": 0.23826655047625447, |
| "learning_rate": 3.42077649527807e-05, |
| "loss": 0.3733, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.1542816702052372, |
| "grad_norm": 0.23222605074737757, |
| "learning_rate": 3.418153200419728e-05, |
| "loss": 0.3855, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.1556970983722576, |
| "grad_norm": 0.23808854243134703, |
| "learning_rate": 3.415529905561385e-05, |
| "loss": 0.3759, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.157112526539278, |
| "grad_norm": 0.2682038429151991, |
| "learning_rate": 3.412906610703043e-05, |
| "loss": 0.3891, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.1585279547062988, |
| "grad_norm": 0.25684259873831616, |
| "learning_rate": 3.410283315844701e-05, |
| "loss": 0.3671, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.1599433828733192, |
| "grad_norm": 0.22366707096330654, |
| "learning_rate": 3.407660020986359e-05, |
| "loss": 0.3679, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.1613588110403397, |
| "grad_norm": 0.23320918985975697, |
| "learning_rate": 3.405036726128017e-05, |
| "loss": 0.365, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.1627742392073603, |
| "grad_norm": 0.2560345240813397, |
| "learning_rate": 3.4024134312696746e-05, |
| "loss": 0.3761, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.1641896673743808, |
| "grad_norm": 0.22099107331766332, |
| "learning_rate": 3.3997901364113326e-05, |
| "loss": 0.3511, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.1656050955414012, |
| "grad_norm": 0.23197104371717864, |
| "learning_rate": 3.3971668415529906e-05, |
| "loss": 0.3677, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.167020523708422, |
| "grad_norm": 0.25715111525636897, |
| "learning_rate": 3.3945435466946486e-05, |
| "loss": 0.3597, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.1684359518754424, |
| "grad_norm": 0.22692191767057515, |
| "learning_rate": 3.391920251836307e-05, |
| "loss": 0.3698, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.1698513800424628, |
| "grad_norm": 0.2496074837513205, |
| "learning_rate": 3.389296956977964e-05, |
| "loss": 0.3659, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.1712668082094835, |
| "grad_norm": 0.2502694016706319, |
| "learning_rate": 3.386673662119623e-05, |
| "loss": 0.3759, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.172682236376504, |
| "grad_norm": 0.2532655245787918, |
| "learning_rate": 3.384050367261281e-05, |
| "loss": 0.3592, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.1740976645435244, |
| "grad_norm": 0.2593297195760385, |
| "learning_rate": 3.381427072402938e-05, |
| "loss": 0.375, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.175513092710545, |
| "grad_norm": 0.2359968199749705, |
| "learning_rate": 3.378803777544596e-05, |
| "loss": 0.3825, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.1769285208775655, |
| "grad_norm": 0.25889001677763246, |
| "learning_rate": 3.376180482686254e-05, |
| "loss": 0.3717, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.178343949044586, |
| "grad_norm": 0.23160734530040403, |
| "learning_rate": 3.373557187827912e-05, |
| "loss": 0.3428, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.1797593772116066, |
| "grad_norm": 0.2219389279783254, |
| "learning_rate": 3.37093389296957e-05, |
| "loss": 0.4042, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.181174805378627, |
| "grad_norm": 0.26257436972385445, |
| "learning_rate": 3.3683105981112276e-05, |
| "loss": 0.3754, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.1825902335456475, |
| "grad_norm": 0.2747050720214299, |
| "learning_rate": 3.3656873032528856e-05, |
| "loss": 0.3639, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.1840056617126682, |
| "grad_norm": 0.21820718002888326, |
| "learning_rate": 3.3630640083945436e-05, |
| "loss": 0.3603, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.1854210898796886, |
| "grad_norm": 0.24426222094514102, |
| "learning_rate": 3.3604407135362017e-05, |
| "loss": 0.3909, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.186836518046709, |
| "grad_norm": 0.2697440918806726, |
| "learning_rate": 3.35781741867786e-05, |
| "loss": 0.3722, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.1882519462137298, |
| "grad_norm": 0.24904913274161672, |
| "learning_rate": 3.355194123819517e-05, |
| "loss": 0.3633, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.1896673743807502, |
| "grad_norm": 0.22450159530467048, |
| "learning_rate": 3.352570828961175e-05, |
| "loss": 0.3727, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.1910828025477707, |
| "grad_norm": 0.24682980603479437, |
| "learning_rate": 3.349947534102834e-05, |
| "loss": 0.3739, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.1924982307147913, |
| "grad_norm": 0.22876377293747732, |
| "learning_rate": 3.347324239244491e-05, |
| "loss": 0.3589, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.1939136588818118, |
| "grad_norm": 0.2485727894540362, |
| "learning_rate": 3.344700944386149e-05, |
| "loss": 0.3704, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.1953290870488322, |
| "grad_norm": 0.24521133225067981, |
| "learning_rate": 3.342077649527807e-05, |
| "loss": 0.3826, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.196744515215853, |
| "grad_norm": 0.24329138505723324, |
| "learning_rate": 3.339454354669465e-05, |
| "loss": 0.3636, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.1981599433828733, |
| "grad_norm": 0.23853104947938453, |
| "learning_rate": 3.336831059811123e-05, |
| "loss": 0.3587, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.1995753715498938, |
| "grad_norm": 0.24251162814105084, |
| "learning_rate": 3.3342077649527806e-05, |
| "loss": 0.3616, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.2009907997169145, |
| "grad_norm": 0.26516103818641135, |
| "learning_rate": 3.3315844700944386e-05, |
| "loss": 0.3748, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.202406227883935, |
| "grad_norm": 0.20688415308994873, |
| "learning_rate": 3.3289611752360966e-05, |
| "loss": 0.3566, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.2038216560509554, |
| "grad_norm": 0.23466141155912668, |
| "learning_rate": 3.326337880377755e-05, |
| "loss": 0.3806, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.205237084217976, |
| "grad_norm": 0.23207353563777747, |
| "learning_rate": 3.323714585519413e-05, |
| "loss": 0.3568, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.2066525123849965, |
| "grad_norm": 0.22285080510272276, |
| "learning_rate": 3.32109129066107e-05, |
| "loss": 0.3782, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.208067940552017, |
| "grad_norm": 0.21098164834539135, |
| "learning_rate": 3.318467995802728e-05, |
| "loss": 0.3632, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.2094833687190376, |
| "grad_norm": 0.2158541149523356, |
| "learning_rate": 3.315844700944386e-05, |
| "loss": 0.3615, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.210898796886058, |
| "grad_norm": 0.22165253427051132, |
| "learning_rate": 3.313221406086044e-05, |
| "loss": 0.3655, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.2123142250530785, |
| "grad_norm": 0.21366192902117054, |
| "learning_rate": 3.310598111227702e-05, |
| "loss": 0.3647, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.2137296532200992, |
| "grad_norm": 0.23320640229059145, |
| "learning_rate": 3.30797481636936e-05, |
| "loss": 0.3843, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.2151450813871196, |
| "grad_norm": 0.22666909848091002, |
| "learning_rate": 3.305351521511018e-05, |
| "loss": 0.3575, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.21656050955414, |
| "grad_norm": 0.21896446877425702, |
| "learning_rate": 3.302728226652676e-05, |
| "loss": 0.3669, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.2179759377211608, |
| "grad_norm": 0.22732149506115526, |
| "learning_rate": 3.3001049317943336e-05, |
| "loss": 0.372, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.2193913658881812, |
| "grad_norm": 0.2331464445769384, |
| "learning_rate": 3.2974816369359916e-05, |
| "loss": 0.3571, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.2208067940552016, |
| "grad_norm": 0.2341286538913884, |
| "learning_rate": 3.29485834207765e-05, |
| "loss": 0.3619, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.2222222222222223, |
| "grad_norm": 0.242917135991931, |
| "learning_rate": 3.292235047219308e-05, |
| "loss": 0.3836, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.2236376503892428, |
| "grad_norm": 0.2377434057216746, |
| "learning_rate": 3.289611752360966e-05, |
| "loss": 0.3707, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.2250530785562632, |
| "grad_norm": 0.23061035158384427, |
| "learning_rate": 3.286988457502623e-05, |
| "loss": 0.3649, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.226468506723284, |
| "grad_norm": 0.26493593711379954, |
| "learning_rate": 3.284365162644281e-05, |
| "loss": 0.3547, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.2278839348903043, |
| "grad_norm": 0.2817735921773945, |
| "learning_rate": 3.281741867785939e-05, |
| "loss": 0.3581, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.2292993630573248, |
| "grad_norm": 0.24744764647826778, |
| "learning_rate": 3.279118572927597e-05, |
| "loss": 0.3572, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.2307147912243455, |
| "grad_norm": 0.257164415071009, |
| "learning_rate": 3.276495278069255e-05, |
| "loss": 0.3772, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.232130219391366, |
| "grad_norm": 0.2539971232037796, |
| "learning_rate": 3.273871983210913e-05, |
| "loss": 0.386, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.2335456475583864, |
| "grad_norm": 0.25451197681884186, |
| "learning_rate": 3.271248688352571e-05, |
| "loss": 0.3741, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.234961075725407, |
| "grad_norm": 0.23119645117655382, |
| "learning_rate": 3.268625393494229e-05, |
| "loss": 0.356, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.2363765038924275, |
| "grad_norm": 0.22536736183119602, |
| "learning_rate": 3.2660020986358866e-05, |
| "loss": 0.353, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.237791932059448, |
| "grad_norm": 0.228868007542863, |
| "learning_rate": 3.2633788037775447e-05, |
| "loss": 0.3678, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.2392073602264686, |
| "grad_norm": 0.22422487236974076, |
| "learning_rate": 3.260755508919203e-05, |
| "loss": 0.3516, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.240622788393489, |
| "grad_norm": 0.2273170241203407, |
| "learning_rate": 3.258132214060861e-05, |
| "loss": 0.3637, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.2420382165605095, |
| "grad_norm": 0.2339973397823559, |
| "learning_rate": 3.255508919202519e-05, |
| "loss": 0.3419, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.2434536447275302, |
| "grad_norm": 0.22059105204937277, |
| "learning_rate": 3.252885624344176e-05, |
| "loss": 0.3566, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.2448690728945506, |
| "grad_norm": 0.21716790979082953, |
| "learning_rate": 3.250262329485834e-05, |
| "loss": 0.3496, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.246284501061571, |
| "grad_norm": 0.22483525203992455, |
| "learning_rate": 3.247639034627492e-05, |
| "loss": 0.3602, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.2476999292285917, |
| "grad_norm": 0.2205110253793736, |
| "learning_rate": 3.24501573976915e-05, |
| "loss": 0.3503, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.2491153573956122, |
| "grad_norm": 0.2451649817997365, |
| "learning_rate": 3.242392444910808e-05, |
| "loss": 0.3869, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.2505307855626326, |
| "grad_norm": 0.22327314044589933, |
| "learning_rate": 3.239769150052466e-05, |
| "loss": 0.3786, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.2519462137296533, |
| "grad_norm": 0.2293582655618987, |
| "learning_rate": 3.237145855194124e-05, |
| "loss": 0.3582, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.2533616418966738, |
| "grad_norm": 0.22115564542833394, |
| "learning_rate": 3.234522560335782e-05, |
| "loss": 0.3922, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.2547770700636942, |
| "grad_norm": 0.2243335412240295, |
| "learning_rate": 3.2318992654774396e-05, |
| "loss": 0.3537, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.2561924982307149, |
| "grad_norm": 0.2503356687519151, |
| "learning_rate": 3.229275970619098e-05, |
| "loss": 0.3772, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.2576079263977353, |
| "grad_norm": 0.23483831990082277, |
| "learning_rate": 3.226652675760756e-05, |
| "loss": 0.3745, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.2590233545647558, |
| "grad_norm": 0.2335033231898062, |
| "learning_rate": 3.224029380902414e-05, |
| "loss": 0.3437, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.2604387827317765, |
| "grad_norm": 0.23691944679552432, |
| "learning_rate": 3.221406086044072e-05, |
| "loss": 0.3677, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.261854210898797, |
| "grad_norm": 0.2181595939852755, |
| "learning_rate": 3.218782791185729e-05, |
| "loss": 0.3738, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.2632696390658174, |
| "grad_norm": 0.22391237606573866, |
| "learning_rate": 3.216159496327387e-05, |
| "loss": 0.3631, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.264685067232838, |
| "grad_norm": 0.233880286054612, |
| "learning_rate": 3.213536201469045e-05, |
| "loss": 0.3727, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.2661004953998585, |
| "grad_norm": 0.23652089597038298, |
| "learning_rate": 3.210912906610703e-05, |
| "loss": 0.3751, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.267515923566879, |
| "grad_norm": 0.21802470429593748, |
| "learning_rate": 3.2082896117523605e-05, |
| "loss": 0.3812, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.2689313517338996, |
| "grad_norm": 0.5826761382172103, |
| "learning_rate": 3.205666316894019e-05, |
| "loss": 0.3636, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.27034677990092, |
| "grad_norm": 0.2363193520278659, |
| "learning_rate": 3.203043022035677e-05, |
| "loss": 0.3604, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.2717622080679405, |
| "grad_norm": 0.2363204162806771, |
| "learning_rate": 3.200419727177335e-05, |
| "loss": 0.3647, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.2731776362349612, |
| "grad_norm": 0.25601617210524324, |
| "learning_rate": 3.1977964323189927e-05, |
| "loss": 0.3785, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.2745930644019816, |
| "grad_norm": 0.21012097538566218, |
| "learning_rate": 3.195173137460651e-05, |
| "loss": 0.3672, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.276008492569002, |
| "grad_norm": 0.2559276276421834, |
| "learning_rate": 3.192549842602309e-05, |
| "loss": 0.3651, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.2774239207360227, |
| "grad_norm": 0.27317903610927285, |
| "learning_rate": 3.189926547743967e-05, |
| "loss": 0.3499, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.2788393489030432, |
| "grad_norm": 0.23156491652563996, |
| "learning_rate": 3.187303252885625e-05, |
| "loss": 0.3631, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.2802547770700636, |
| "grad_norm": 0.24253951927325784, |
| "learning_rate": 3.184679958027282e-05, |
| "loss": 0.354, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.2816702052370843, |
| "grad_norm": 0.2278015940984733, |
| "learning_rate": 3.18205666316894e-05, |
| "loss": 0.3899, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.2830856334041048, |
| "grad_norm": 0.26821722319228736, |
| "learning_rate": 3.179433368310598e-05, |
| "loss": 0.3591, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.2845010615711252, |
| "grad_norm": 0.2201091318809155, |
| "learning_rate": 3.176810073452256e-05, |
| "loss": 0.3809, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.2859164897381459, |
| "grad_norm": 0.22005725140665, |
| "learning_rate": 3.1741867785939136e-05, |
| "loss": 0.3537, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.2873319179051663, |
| "grad_norm": 0.25439967947504993, |
| "learning_rate": 3.1715634837355716e-05, |
| "loss": 0.3802, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.2887473460721868, |
| "grad_norm": 0.2077398037825165, |
| "learning_rate": 3.16894018887723e-05, |
| "loss": 0.3529, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.2901627742392074, |
| "grad_norm": 0.2138094473716172, |
| "learning_rate": 3.166316894018888e-05, |
| "loss": 0.3541, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.291578202406228, |
| "grad_norm": 0.274627449971989, |
| "learning_rate": 3.163693599160546e-05, |
| "loss": 0.3826, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.2929936305732483, |
| "grad_norm": 0.23347713681140717, |
| "learning_rate": 3.161070304302204e-05, |
| "loss": 0.3716, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.294409058740269, |
| "grad_norm": 0.22243696869454346, |
| "learning_rate": 3.158447009443862e-05, |
| "loss": 0.3542, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.2958244869072895, |
| "grad_norm": 0.2482782039679565, |
| "learning_rate": 3.15582371458552e-05, |
| "loss": 0.3613, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.29723991507431, |
| "grad_norm": 0.22891859018706157, |
| "learning_rate": 3.153200419727178e-05, |
| "loss": 0.3742, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.2986553432413306, |
| "grad_norm": 0.2261013605218962, |
| "learning_rate": 3.150577124868835e-05, |
| "loss": 0.3581, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.300070771408351, |
| "grad_norm": 0.23196626960986785, |
| "learning_rate": 3.147953830010493e-05, |
| "loss": 0.3679, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.3014861995753715, |
| "grad_norm": 0.21890014293606355, |
| "learning_rate": 3.145330535152151e-05, |
| "loss": 0.3615, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.3029016277423922, |
| "grad_norm": 0.2529204490877138, |
| "learning_rate": 3.142707240293809e-05, |
| "loss": 0.3786, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.3043170559094126, |
| "grad_norm": 0.22439298889284814, |
| "learning_rate": 3.140083945435467e-05, |
| "loss": 0.3426, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.305732484076433, |
| "grad_norm": 0.2204337693599633, |
| "learning_rate": 3.1374606505771246e-05, |
| "loss": 0.3955, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.3071479122434537, |
| "grad_norm": 0.2598213431903822, |
| "learning_rate": 3.1348373557187826e-05, |
| "loss": 0.3778, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.3085633404104742, |
| "grad_norm": 0.2042771559540871, |
| "learning_rate": 3.132214060860441e-05, |
| "loss": 0.3554, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.3099787685774946, |
| "grad_norm": 0.2288031574818764, |
| "learning_rate": 3.129590766002099e-05, |
| "loss": 0.3687, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.3113941967445153, |
| "grad_norm": 0.21371355059026345, |
| "learning_rate": 3.126967471143757e-05, |
| "loss": 0.3657, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.3128096249115357, |
| "grad_norm": 0.21709384669920462, |
| "learning_rate": 3.124344176285415e-05, |
| "loss": 0.3764, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.3142250530785562, |
| "grad_norm": 0.21520446958395642, |
| "learning_rate": 3.121720881427073e-05, |
| "loss": 0.3637, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.3156404812455769, |
| "grad_norm": 0.20771227297372433, |
| "learning_rate": 3.119097586568731e-05, |
| "loss": 0.3625, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.3170559094125973, |
| "grad_norm": 0.21476805571570684, |
| "learning_rate": 3.116474291710388e-05, |
| "loss": 0.3681, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.3184713375796178, |
| "grad_norm": 0.22151467216274381, |
| "learning_rate": 3.113850996852046e-05, |
| "loss": 0.3694, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.3198867657466384, |
| "grad_norm": 0.22748528401606127, |
| "learning_rate": 3.111227701993704e-05, |
| "loss": 0.3688, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.3213021939136589, |
| "grad_norm": 0.19688475770704744, |
| "learning_rate": 3.108604407135362e-05, |
| "loss": 0.3722, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.3227176220806793, |
| "grad_norm": 0.23937633238769224, |
| "learning_rate": 3.10598111227702e-05, |
| "loss": 0.3713, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.3241330502477, |
| "grad_norm": 0.25045636062936005, |
| "learning_rate": 3.1033578174186776e-05, |
| "loss": 0.3608, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.3255484784147205, |
| "grad_norm": 0.2226942847550117, |
| "learning_rate": 3.1007345225603356e-05, |
| "loss": 0.3812, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.326963906581741, |
| "grad_norm": 0.2310460390764128, |
| "learning_rate": 3.098111227701994e-05, |
| "loss": 0.3709, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.3283793347487616, |
| "grad_norm": 0.24080935704570205, |
| "learning_rate": 3.095487932843652e-05, |
| "loss": 0.3673, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.329794762915782, |
| "grad_norm": 0.2342103443904471, |
| "learning_rate": 3.09286463798531e-05, |
| "loss": 0.3644, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.3312101910828025, |
| "grad_norm": 0.2032076310557637, |
| "learning_rate": 3.090241343126968e-05, |
| "loss": 0.3722, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.3326256192498231, |
| "grad_norm": 0.26965196767838706, |
| "learning_rate": 3.087618048268626e-05, |
| "loss": 0.3774, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.3340410474168436, |
| "grad_norm": 0.2304105585688344, |
| "learning_rate": 3.084994753410284e-05, |
| "loss": 0.3716, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.335456475583864, |
| "grad_norm": 0.23259721220450802, |
| "learning_rate": 3.082371458551941e-05, |
| "loss": 0.357, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.3368719037508847, |
| "grad_norm": 0.2525325695276776, |
| "learning_rate": 3.079748163693599e-05, |
| "loss": 0.3669, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.3382873319179052, |
| "grad_norm": 0.24111992235444446, |
| "learning_rate": 3.077124868835257e-05, |
| "loss": 0.3725, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.3397027600849256, |
| "grad_norm": 0.2408990661356157, |
| "learning_rate": 3.074501573976915e-05, |
| "loss": 0.3603, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.3411181882519463, |
| "grad_norm": 0.23285325781846208, |
| "learning_rate": 3.071878279118573e-05, |
| "loss": 0.3707, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.3425336164189667, |
| "grad_norm": 0.21092131922844104, |
| "learning_rate": 3.0692549842602306e-05, |
| "loss": 0.3594, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.3439490445859872, |
| "grad_norm": 0.23308712460130282, |
| "learning_rate": 3.0666316894018887e-05, |
| "loss": 0.3525, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.3453644727530079, |
| "grad_norm": 0.6803790920448556, |
| "learning_rate": 3.064008394543547e-05, |
| "loss": 0.3603, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.3467799009200283, |
| "grad_norm": 0.2018088093223601, |
| "learning_rate": 3.061385099685205e-05, |
| "loss": 0.3732, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.3481953290870488, |
| "grad_norm": 0.2199294319998087, |
| "learning_rate": 3.058761804826863e-05, |
| "loss": 0.3892, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.3496107572540694, |
| "grad_norm": 0.2327514036438414, |
| "learning_rate": 3.056138509968521e-05, |
| "loss": 0.3491, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.3510261854210899, |
| "grad_norm": 0.25130993082161057, |
| "learning_rate": 3.053515215110179e-05, |
| "loss": 0.3851, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.3524416135881103, |
| "grad_norm": 0.21527950192576611, |
| "learning_rate": 3.0508919202518365e-05, |
| "loss": 0.3627, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.353857041755131, |
| "grad_norm": 0.25980533573369863, |
| "learning_rate": 3.0482686253934945e-05, |
| "loss": 0.3585, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.3552724699221514, |
| "grad_norm": 0.2257822702451958, |
| "learning_rate": 3.0456453305351522e-05, |
| "loss": 0.3649, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.356687898089172, |
| "grad_norm": 0.2386397562035341, |
| "learning_rate": 3.0430220356768102e-05, |
| "loss": 0.3745, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.3581033262561926, |
| "grad_norm": 0.25076804426446075, |
| "learning_rate": 3.040398740818468e-05, |
| "loss": 0.3736, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.359518754423213, |
| "grad_norm": 0.22408468395551037, |
| "learning_rate": 3.037775445960126e-05, |
| "loss": 0.3732, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.3609341825902335, |
| "grad_norm": 0.24001877886842707, |
| "learning_rate": 3.035152151101784e-05, |
| "loss": 0.3596, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.3623496107572541, |
| "grad_norm": 0.2343436942475651, |
| "learning_rate": 3.0325288562434417e-05, |
| "loss": 0.3728, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.3637650389242746, |
| "grad_norm": 0.2360728276864227, |
| "learning_rate": 3.0299055613850997e-05, |
| "loss": 0.3791, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.365180467091295, |
| "grad_norm": 0.20414828750484898, |
| "learning_rate": 3.0272822665267574e-05, |
| "loss": 0.3626, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.3665958952583157, |
| "grad_norm": 0.26034336343202064, |
| "learning_rate": 3.024658971668416e-05, |
| "loss": 0.3653, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.3680113234253362, |
| "grad_norm": 0.23404953257833314, |
| "learning_rate": 3.0220356768100738e-05, |
| "loss": 0.3721, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.3694267515923566, |
| "grad_norm": 0.20222020504755647, |
| "learning_rate": 3.0194123819517318e-05, |
| "loss": 0.3823, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.3708421797593773, |
| "grad_norm": 0.2711360684778627, |
| "learning_rate": 3.0167890870933895e-05, |
| "loss": 0.3923, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.3722576079263977, |
| "grad_norm": 0.23346782220582887, |
| "learning_rate": 3.0141657922350475e-05, |
| "loss": 0.3675, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.3736730360934182, |
| "grad_norm": 0.2178239812830853, |
| "learning_rate": 3.0115424973767052e-05, |
| "loss": 0.3632, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.3750884642604388, |
| "grad_norm": 0.22909818108672744, |
| "learning_rate": 3.0089192025183633e-05, |
| "loss": 0.3699, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.3765038924274593, |
| "grad_norm": 0.2187912992394647, |
| "learning_rate": 3.0062959076600213e-05, |
| "loss": 0.369, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.3779193205944797, |
| "grad_norm": 0.23081880330934232, |
| "learning_rate": 3.003672612801679e-05, |
| "loss": 0.3512, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.3793347487615004, |
| "grad_norm": 0.20996682928697005, |
| "learning_rate": 3.001049317943337e-05, |
| "loss": 0.3747, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.3807501769285209, |
| "grad_norm": 0.22204530254709298, |
| "learning_rate": 2.9984260230849947e-05, |
| "loss": 0.3465, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.3821656050955413, |
| "grad_norm": 0.23901856106889752, |
| "learning_rate": 2.9958027282266527e-05, |
| "loss": 0.3574, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.383581033262562, |
| "grad_norm": 0.22443739525827147, |
| "learning_rate": 2.9931794333683104e-05, |
| "loss": 0.3612, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.3849964614295824, |
| "grad_norm": 0.2112580984860598, |
| "learning_rate": 2.9905561385099684e-05, |
| "loss": 0.3716, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.3864118895966029, |
| "grad_norm": 0.25386132304792175, |
| "learning_rate": 2.9879328436516268e-05, |
| "loss": 0.3771, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.3878273177636236, |
| "grad_norm": 0.22408230073908217, |
| "learning_rate": 2.985309548793285e-05, |
| "loss": 0.3647, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.389242745930644, |
| "grad_norm": 0.2400530598350793, |
| "learning_rate": 2.9826862539349425e-05, |
| "loss": 0.3738, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.3906581740976645, |
| "grad_norm": 0.2490902161302705, |
| "learning_rate": 2.9800629590766006e-05, |
| "loss": 0.3831, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.3920736022646851, |
| "grad_norm": 0.22605134604891303, |
| "learning_rate": 2.9774396642182582e-05, |
| "loss": 0.3637, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.3934890304317056, |
| "grad_norm": 0.25582374149556475, |
| "learning_rate": 2.9748163693599163e-05, |
| "loss": 0.3604, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.394904458598726, |
| "grad_norm": 0.2456668245143142, |
| "learning_rate": 2.9721930745015743e-05, |
| "loss": 0.3683, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.3963198867657467, |
| "grad_norm": 0.22842410348728334, |
| "learning_rate": 2.969569779643232e-05, |
| "loss": 0.3738, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.3977353149327671, |
| "grad_norm": 0.260567163270158, |
| "learning_rate": 2.96694648478489e-05, |
| "loss": 0.3571, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.3991507430997876, |
| "grad_norm": 0.22473798072782286, |
| "learning_rate": 2.9643231899265477e-05, |
| "loss": 0.3647, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.4005661712668083, |
| "grad_norm": 0.22814357369487426, |
| "learning_rate": 2.9616998950682057e-05, |
| "loss": 0.3507, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.4019815994338287, |
| "grad_norm": 0.2442911897850581, |
| "learning_rate": 2.9590766002098634e-05, |
| "loss": 0.362, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.4033970276008492, |
| "grad_norm": 0.24090280154357516, |
| "learning_rate": 2.9564533053515215e-05, |
| "loss": 0.3708, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.4048124557678698, |
| "grad_norm": 0.20438235888587558, |
| "learning_rate": 2.9538300104931795e-05, |
| "loss": 0.3672, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.4062278839348903, |
| "grad_norm": 0.2434081473944459, |
| "learning_rate": 2.951206715634838e-05, |
| "loss": 0.3742, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.4076433121019107, |
| "grad_norm": 0.20574095461819145, |
| "learning_rate": 2.9485834207764955e-05, |
| "loss": 0.3537, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.4090587402689314, |
| "grad_norm": 0.21182339581130577, |
| "learning_rate": 2.9459601259181536e-05, |
| "loss": 0.3565, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.4104741684359519, |
| "grad_norm": 0.24332941408128245, |
| "learning_rate": 2.9433368310598113e-05, |
| "loss": 0.3788, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.4118895966029723, |
| "grad_norm": 0.23924818096665304, |
| "learning_rate": 2.9407135362014693e-05, |
| "loss": 0.3768, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.413305024769993, |
| "grad_norm": 0.2571126671988022, |
| "learning_rate": 2.9380902413431273e-05, |
| "loss": 0.3558, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.4147204529370134, |
| "grad_norm": 0.23637822861459193, |
| "learning_rate": 2.935466946484785e-05, |
| "loss": 0.3672, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.4161358811040339, |
| "grad_norm": 0.2447175247640114, |
| "learning_rate": 2.932843651626443e-05, |
| "loss": 0.3732, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.4175513092710545, |
| "grad_norm": 0.24802730208698692, |
| "learning_rate": 2.9302203567681007e-05, |
| "loss": 0.3414, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.418966737438075, |
| "grad_norm": 0.20554775024342742, |
| "learning_rate": 2.9275970619097587e-05, |
| "loss": 0.3497, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.4203821656050954, |
| "grad_norm": 0.23003987528232994, |
| "learning_rate": 2.9249737670514164e-05, |
| "loss": 0.3691, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.4217975937721161, |
| "grad_norm": 0.23311796810883165, |
| "learning_rate": 2.9223504721930745e-05, |
| "loss": 0.3815, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.4232130219391366, |
| "grad_norm": 0.2562837129069495, |
| "learning_rate": 2.9197271773347325e-05, |
| "loss": 0.3864, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.424628450106157, |
| "grad_norm": 0.22296198226904215, |
| "learning_rate": 2.9171038824763902e-05, |
| "loss": 0.3701, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.4260438782731777, |
| "grad_norm": 0.23142877036513443, |
| "learning_rate": 2.9144805876180486e-05, |
| "loss": 0.3876, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.4274593064401981, |
| "grad_norm": 0.21364474385565857, |
| "learning_rate": 2.9118572927597066e-05, |
| "loss": 0.3583, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.4288747346072186, |
| "grad_norm": 0.23181334641384221, |
| "learning_rate": 2.9092339979013643e-05, |
| "loss": 0.3704, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.4302901627742393, |
| "grad_norm": 0.2193168659273958, |
| "learning_rate": 2.9066107030430223e-05, |
| "loss": 0.3671, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.4317055909412597, |
| "grad_norm": 0.20782991741805573, |
| "learning_rate": 2.9039874081846803e-05, |
| "loss": 0.3642, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.4331210191082802, |
| "grad_norm": 0.23108283829844503, |
| "learning_rate": 2.901364113326338e-05, |
| "loss": 0.3781, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.4345364472753008, |
| "grad_norm": 0.22638433711160158, |
| "learning_rate": 2.898740818467996e-05, |
| "loss": 0.3788, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.4359518754423213, |
| "grad_norm": 0.23004131381196977, |
| "learning_rate": 2.8961175236096537e-05, |
| "loss": 0.375, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.4373673036093417, |
| "grad_norm": 0.2405447495265062, |
| "learning_rate": 2.8934942287513118e-05, |
| "loss": 0.3737, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.4387827317763624, |
| "grad_norm": 0.24058873760567664, |
| "learning_rate": 2.8908709338929695e-05, |
| "loss": 0.3683, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.4401981599433828, |
| "grad_norm": 0.20937837279181093, |
| "learning_rate": 2.8882476390346275e-05, |
| "loss": 0.3727, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.4416135881104033, |
| "grad_norm": 0.23496697162074306, |
| "learning_rate": 2.8856243441762855e-05, |
| "loss": 0.3765, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.443029016277424, |
| "grad_norm": 0.22718655907664106, |
| "learning_rate": 2.8830010493179432e-05, |
| "loss": 0.3544, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.4444444444444444, |
| "grad_norm": 0.20884300023114202, |
| "learning_rate": 2.8803777544596016e-05, |
| "loss": 0.3586, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.4458598726114649, |
| "grad_norm": 0.25482456658386277, |
| "learning_rate": 2.8777544596012596e-05, |
| "loss": 0.3822, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.4472753007784855, |
| "grad_norm": 0.2180908582290951, |
| "learning_rate": 2.8751311647429173e-05, |
| "loss": 0.3618, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.448690728945506, |
| "grad_norm": 0.20662718782443495, |
| "learning_rate": 2.8725078698845753e-05, |
| "loss": 0.3782, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.4501061571125264, |
| "grad_norm": 0.2348809728633647, |
| "learning_rate": 2.8698845750262333e-05, |
| "loss": 0.363, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.451521585279547, |
| "grad_norm": 0.25302998104070096, |
| "learning_rate": 2.867261280167891e-05, |
| "loss": 0.367, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.4529370134465676, |
| "grad_norm": 0.2187538653616147, |
| "learning_rate": 2.864637985309549e-05, |
| "loss": 0.3584, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.454352441613588, |
| "grad_norm": 0.22053612475806544, |
| "learning_rate": 2.8620146904512068e-05, |
| "loss": 0.3784, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.4557678697806087, |
| "grad_norm": 0.2519320002641784, |
| "learning_rate": 2.8593913955928648e-05, |
| "loss": 0.3743, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.4571832979476291, |
| "grad_norm": 0.2201988589155671, |
| "learning_rate": 2.8567681007345225e-05, |
| "loss": 0.3676, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.4585987261146496, |
| "grad_norm": 0.22204855415179556, |
| "learning_rate": 2.8541448058761805e-05, |
| "loss": 0.3599, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.4600141542816703, |
| "grad_norm": 0.21904247423641438, |
| "learning_rate": 2.8515215110178385e-05, |
| "loss": 0.3658, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.4614295824486907, |
| "grad_norm": 0.25494283985786187, |
| "learning_rate": 2.8488982161594962e-05, |
| "loss": 0.3512, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.4628450106157111, |
| "grad_norm": 0.19494031012127083, |
| "learning_rate": 2.8462749213011542e-05, |
| "loss": 0.3663, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.4642604387827318, |
| "grad_norm": 0.22600307482991233, |
| "learning_rate": 2.8436516264428126e-05, |
| "loss": 0.3659, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.4656758669497523, |
| "grad_norm": 0.2368082752205335, |
| "learning_rate": 2.8410283315844703e-05, |
| "loss": 0.3531, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.4670912951167727, |
| "grad_norm": 0.19113265825566395, |
| "learning_rate": 2.8384050367261283e-05, |
| "loss": 0.3521, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.4685067232837934, |
| "grad_norm": 0.24235960233572124, |
| "learning_rate": 2.8357817418677864e-05, |
| "loss": 0.3846, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.4699221514508138, |
| "grad_norm": 0.21475512342055572, |
| "learning_rate": 2.833158447009444e-05, |
| "loss": 0.3738, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.4713375796178343, |
| "grad_norm": 0.24023920601917217, |
| "learning_rate": 2.830535152151102e-05, |
| "loss": 0.3663, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.472753007784855, |
| "grad_norm": 0.2529808662363619, |
| "learning_rate": 2.8279118572927598e-05, |
| "loss": 0.3756, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.4741684359518754, |
| "grad_norm": 0.1903618997021652, |
| "learning_rate": 2.8252885624344178e-05, |
| "loss": 0.3564, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.4755838641188959, |
| "grad_norm": 0.2358280615340478, |
| "learning_rate": 2.8226652675760755e-05, |
| "loss": 0.3654, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.4769992922859165, |
| "grad_norm": 0.26434316703105387, |
| "learning_rate": 2.8200419727177335e-05, |
| "loss": 0.3513, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.478414720452937, |
| "grad_norm": 0.2533092842511611, |
| "learning_rate": 2.8174186778593915e-05, |
| "loss": 0.3805, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.4798301486199574, |
| "grad_norm": 0.25468139675596396, |
| "learning_rate": 2.8147953830010492e-05, |
| "loss": 0.3549, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.481245576786978, |
| "grad_norm": 0.2754205033982748, |
| "learning_rate": 2.8121720881427073e-05, |
| "loss": 0.3652, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.4826610049539986, |
| "grad_norm": 0.2885005954030202, |
| "learning_rate": 2.809548793284365e-05, |
| "loss": 0.376, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.484076433121019, |
| "grad_norm": 0.19856502383557567, |
| "learning_rate": 2.8069254984260233e-05, |
| "loss": 0.3505, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.4854918612880397, |
| "grad_norm": 0.24301586162419772, |
| "learning_rate": 2.8043022035676813e-05, |
| "loss": 0.3528, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.4869072894550601, |
| "grad_norm": 0.3249546696287922, |
| "learning_rate": 2.8016789087093394e-05, |
| "loss": 0.3674, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.4883227176220806, |
| "grad_norm": 0.19954257697387418, |
| "learning_rate": 2.799055613850997e-05, |
| "loss": 0.3682, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.4897381457891012, |
| "grad_norm": 0.27863340166427214, |
| "learning_rate": 2.796432318992655e-05, |
| "loss": 0.3711, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.4911535739561217, |
| "grad_norm": 0.24351922549594743, |
| "learning_rate": 2.7938090241343128e-05, |
| "loss": 0.3658, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.4925690021231421, |
| "grad_norm": 0.2288387976793292, |
| "learning_rate": 2.7911857292759708e-05, |
| "loss": 0.3537, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.4939844302901628, |
| "grad_norm": 0.22026517679773322, |
| "learning_rate": 2.7885624344176285e-05, |
| "loss": 0.3604, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.4953998584571833, |
| "grad_norm": 0.23735410105725857, |
| "learning_rate": 2.7859391395592865e-05, |
| "loss": 0.365, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.4968152866242037, |
| "grad_norm": 0.21146990067723273, |
| "learning_rate": 2.7833158447009446e-05, |
| "loss": 0.3669, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.4982307147912244, |
| "grad_norm": 0.2144299492716616, |
| "learning_rate": 2.7806925498426022e-05, |
| "loss": 0.3437, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.4996461429582448, |
| "grad_norm": 0.21300314069787124, |
| "learning_rate": 2.7780692549842603e-05, |
| "loss": 0.3376, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.5010615711252653, |
| "grad_norm": 0.20151751265838325, |
| "learning_rate": 2.775445960125918e-05, |
| "loss": 0.3741, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.502476999292286, |
| "grad_norm": 0.21844206486312426, |
| "learning_rate": 2.772822665267576e-05, |
| "loss": 0.3639, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.5038924274593064, |
| "grad_norm": 0.20312416602967337, |
| "learning_rate": 2.7701993704092344e-05, |
| "loss": 0.3432, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.5053078556263269, |
| "grad_norm": 0.18205707926616682, |
| "learning_rate": 2.7675760755508924e-05, |
| "loss": 0.345, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.5067232837933475, |
| "grad_norm": 0.21890121527783746, |
| "learning_rate": 2.76495278069255e-05, |
| "loss": 0.3578, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.508138711960368, |
| "grad_norm": 0.20010961881540978, |
| "learning_rate": 2.762329485834208e-05, |
| "loss": 0.3771, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.5095541401273884, |
| "grad_norm": 0.23165164400389923, |
| "learning_rate": 2.7597061909758658e-05, |
| "loss": 0.3765, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.510969568294409, |
| "grad_norm": 0.19652743167014758, |
| "learning_rate": 2.7570828961175238e-05, |
| "loss": 0.3564, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.5123849964614295, |
| "grad_norm": 0.2209830707675103, |
| "learning_rate": 2.7544596012591815e-05, |
| "loss": 0.3612, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.51380042462845, |
| "grad_norm": 0.2137239523923151, |
| "learning_rate": 2.7518363064008395e-05, |
| "loss": 0.3525, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.5152158527954707, |
| "grad_norm": 0.19768010727047206, |
| "learning_rate": 2.7492130115424976e-05, |
| "loss": 0.37, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.5166312809624911, |
| "grad_norm": 0.24691943698866267, |
| "learning_rate": 2.7465897166841553e-05, |
| "loss": 0.3595, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.5180467091295116, |
| "grad_norm": 0.2225667560651534, |
| "learning_rate": 2.7439664218258133e-05, |
| "loss": 0.3555, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.5194621372965322, |
| "grad_norm": 0.20554886183569684, |
| "learning_rate": 2.741343126967471e-05, |
| "loss": 0.3588, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.5208775654635527, |
| "grad_norm": 0.2505762201576205, |
| "learning_rate": 2.738719832109129e-05, |
| "loss": 0.377, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.5222929936305731, |
| "grad_norm": 0.19715242913361825, |
| "learning_rate": 2.7360965372507867e-05, |
| "loss": 0.3516, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.5237084217975938, |
| "grad_norm": 0.22718238951725306, |
| "learning_rate": 2.7334732423924454e-05, |
| "loss": 0.3594, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.5251238499646143, |
| "grad_norm": 0.2279872658425017, |
| "learning_rate": 2.730849947534103e-05, |
| "loss": 0.3738, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.5265392781316347, |
| "grad_norm": 0.20948469884476686, |
| "learning_rate": 2.728226652675761e-05, |
| "loss": 0.3682, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.5279547062986554, |
| "grad_norm": 0.2463602564574548, |
| "learning_rate": 2.7256033578174188e-05, |
| "loss": 0.3686, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.5293701344656758, |
| "grad_norm": 0.21928438259582247, |
| "learning_rate": 2.722980062959077e-05, |
| "loss": 0.3836, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.5307855626326963, |
| "grad_norm": 0.20912170994070226, |
| "learning_rate": 2.7203567681007345e-05, |
| "loss": 0.3717, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.532200990799717, |
| "grad_norm": 0.20838477992310525, |
| "learning_rate": 2.7177334732423926e-05, |
| "loss": 0.3681, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.5336164189667374, |
| "grad_norm": 0.21226189954948013, |
| "learning_rate": 2.7151101783840506e-05, |
| "loss": 0.3806, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.5350318471337578, |
| "grad_norm": 0.2090527854066501, |
| "learning_rate": 2.7124868835257083e-05, |
| "loss": 0.3813, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.5364472753007785, |
| "grad_norm": 0.20644505102138733, |
| "learning_rate": 2.7098635886673663e-05, |
| "loss": 0.3703, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.537862703467799, |
| "grad_norm": 0.22661853567657378, |
| "learning_rate": 2.707240293809024e-05, |
| "loss": 0.363, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.5392781316348194, |
| "grad_norm": 0.20511838703888072, |
| "learning_rate": 2.704616998950682e-05, |
| "loss": 0.3785, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.54069355980184, |
| "grad_norm": 0.21196080769048828, |
| "learning_rate": 2.7019937040923397e-05, |
| "loss": 0.3579, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.5421089879688605, |
| "grad_norm": 0.20735173163276482, |
| "learning_rate": 2.6993704092339977e-05, |
| "loss": 0.358, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.543524416135881, |
| "grad_norm": 0.224486876680772, |
| "learning_rate": 2.696747114375656e-05, |
| "loss": 0.3589, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.5449398443029017, |
| "grad_norm": 0.2176990110171203, |
| "learning_rate": 2.694123819517314e-05, |
| "loss": 0.3684, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.546355272469922, |
| "grad_norm": 0.24338052069509808, |
| "learning_rate": 2.6915005246589718e-05, |
| "loss": 0.3568, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.5477707006369426, |
| "grad_norm": 0.2064407637815234, |
| "learning_rate": 2.68887722980063e-05, |
| "loss": 0.36, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.5491861288039632, |
| "grad_norm": 0.22650926555396364, |
| "learning_rate": 2.686253934942288e-05, |
| "loss": 0.3454, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.5506015569709837, |
| "grad_norm": 0.22872938113961477, |
| "learning_rate": 2.6836306400839456e-05, |
| "loss": 0.3609, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.5520169851380041, |
| "grad_norm": 0.20175298543747824, |
| "learning_rate": 2.6810073452256036e-05, |
| "loss": 0.3902, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.5534324133050248, |
| "grad_norm": 0.20200444825151267, |
| "learning_rate": 2.6783840503672613e-05, |
| "loss": 0.3524, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.5548478414720452, |
| "grad_norm": 0.2043927249238711, |
| "learning_rate": 2.6757607555089193e-05, |
| "loss": 0.375, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.5562632696390657, |
| "grad_norm": 0.20839038411254784, |
| "learning_rate": 2.673137460650577e-05, |
| "loss": 0.3572, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.5576786978060864, |
| "grad_norm": 0.20193426343055904, |
| "learning_rate": 2.670514165792235e-05, |
| "loss": 0.3592, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.5590941259731068, |
| "grad_norm": 0.2181862764076479, |
| "learning_rate": 2.6678908709338927e-05, |
| "loss": 0.3781, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.5605095541401273, |
| "grad_norm": 0.23147357207714458, |
| "learning_rate": 2.6652675760755508e-05, |
| "loss": 0.3754, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.561924982307148, |
| "grad_norm": 0.21284670672504122, |
| "learning_rate": 2.662644281217209e-05, |
| "loss": 0.3632, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.5633404104741684, |
| "grad_norm": 0.22805341924589922, |
| "learning_rate": 2.660020986358867e-05, |
| "loss": 0.3509, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.5647558386411888, |
| "grad_norm": 0.2345407386045021, |
| "learning_rate": 2.657397691500525e-05, |
| "loss": 0.372, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.5661712668082095, |
| "grad_norm": 0.20720783219676922, |
| "learning_rate": 2.654774396642183e-05, |
| "loss": 0.353, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.56758669497523, |
| "grad_norm": 0.21894913401677554, |
| "learning_rate": 2.652151101783841e-05, |
| "loss": 0.3694, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.5690021231422504, |
| "grad_norm": 0.22380173004214715, |
| "learning_rate": 2.6495278069254986e-05, |
| "loss": 0.3606, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.570417551309271, |
| "grad_norm": 0.20898100258282237, |
| "learning_rate": 2.6469045120671566e-05, |
| "loss": 0.3841, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.5718329794762915, |
| "grad_norm": 0.23414724230585565, |
| "learning_rate": 2.6442812172088143e-05, |
| "loss": 0.3566, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.573248407643312, |
| "grad_norm": 0.2341494998744966, |
| "learning_rate": 2.6416579223504723e-05, |
| "loss": 0.3853, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.5746638358103326, |
| "grad_norm": 0.20981474741627684, |
| "learning_rate": 2.63903462749213e-05, |
| "loss": 0.3728, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.576079263977353, |
| "grad_norm": 0.20158378943617117, |
| "learning_rate": 2.636411332633788e-05, |
| "loss": 0.3503, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.5774946921443735, |
| "grad_norm": 0.21060938604199136, |
| "learning_rate": 2.6337880377754457e-05, |
| "loss": 0.3367, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.5789101203113942, |
| "grad_norm": 0.20811196733887938, |
| "learning_rate": 2.6311647429171038e-05, |
| "loss": 0.3451, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.5803255484784147, |
| "grad_norm": 0.2140577294474795, |
| "learning_rate": 2.6285414480587618e-05, |
| "loss": 0.3583, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.5817409766454351, |
| "grad_norm": 0.21441477409429785, |
| "learning_rate": 2.6259181532004202e-05, |
| "loss": 0.3668, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.5831564048124558, |
| "grad_norm": 0.23804938645493584, |
| "learning_rate": 2.623294858342078e-05, |
| "loss": 0.3669, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.5845718329794765, |
| "grad_norm": 0.23920713672808078, |
| "learning_rate": 2.620671563483736e-05, |
| "loss": 0.3487, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.5859872611464967, |
| "grad_norm": 0.24404548961406083, |
| "learning_rate": 2.618048268625394e-05, |
| "loss": 0.3703, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.5874026893135174, |
| "grad_norm": 0.2002433289115946, |
| "learning_rate": 2.6154249737670516e-05, |
| "loss": 0.3687, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.588818117480538, |
| "grad_norm": 0.24372277480931426, |
| "learning_rate": 2.6128016789087096e-05, |
| "loss": 0.3611, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.5902335456475583, |
| "grad_norm": 0.26436244382151886, |
| "learning_rate": 2.6101783840503673e-05, |
| "loss": 0.3559, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.591648973814579, |
| "grad_norm": 0.21150768737427175, |
| "learning_rate": 2.6075550891920254e-05, |
| "loss": 0.3595, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.5930644019815996, |
| "grad_norm": 0.2259403164389297, |
| "learning_rate": 2.604931794333683e-05, |
| "loss": 0.3848, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.5944798301486198, |
| "grad_norm": 0.28490702017390696, |
| "learning_rate": 2.602308499475341e-05, |
| "loss": 0.358, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.5958952583156405, |
| "grad_norm": 0.22472009084309402, |
| "learning_rate": 2.599685204616999e-05, |
| "loss": 0.3471, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.5973106864826612, |
| "grad_norm": 0.2130869303407535, |
| "learning_rate": 2.5970619097586568e-05, |
| "loss": 0.3762, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.5987261146496814, |
| "grad_norm": 0.24686052819553847, |
| "learning_rate": 2.5944386149003148e-05, |
| "loss": 0.361, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.600141542816702, |
| "grad_norm": 0.6512322260544745, |
| "learning_rate": 2.5918153200419725e-05, |
| "loss": 0.3913, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.6015569709837227, |
| "grad_norm": 0.22223853192188883, |
| "learning_rate": 2.589192025183631e-05, |
| "loss": 0.3709, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.602972399150743, |
| "grad_norm": 0.20356356200066156, |
| "learning_rate": 2.586568730325289e-05, |
| "loss": 0.3568, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.6043878273177636, |
| "grad_norm": 0.21906757130975804, |
| "learning_rate": 2.583945435466947e-05, |
| "loss": 0.3673, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.6058032554847843, |
| "grad_norm": 0.21619433178114403, |
| "learning_rate": 2.5813221406086046e-05, |
| "loss": 0.365, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.6072186836518045, |
| "grad_norm": 0.20454654351321744, |
| "learning_rate": 2.5786988457502627e-05, |
| "loss": 0.368, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.6086341118188252, |
| "grad_norm": 0.2315571747498643, |
| "learning_rate": 2.5760755508919203e-05, |
| "loss": 0.3648, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.6100495399858459, |
| "grad_norm": 0.21673882039259132, |
| "learning_rate": 2.5734522560335784e-05, |
| "loss": 0.3628, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.611464968152866, |
| "grad_norm": 0.2068740654670679, |
| "learning_rate": 2.570828961175236e-05, |
| "loss": 0.3618, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.6128803963198868, |
| "grad_norm": 0.20087506797620958, |
| "learning_rate": 2.568205666316894e-05, |
| "loss": 0.3762, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.6142958244869074, |
| "grad_norm": 0.2123128231858163, |
| "learning_rate": 2.565582371458552e-05, |
| "loss": 0.3773, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.6157112526539277, |
| "grad_norm": 0.21442235677515326, |
| "learning_rate": 2.5629590766002098e-05, |
| "loss": 0.3777, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.6171266808209483, |
| "grad_norm": 0.20873851860300455, |
| "learning_rate": 2.560335781741868e-05, |
| "loss": 0.3619, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.618542108987969, |
| "grad_norm": 0.189086589071609, |
| "learning_rate": 2.5577124868835255e-05, |
| "loss": 0.3566, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.6199575371549892, |
| "grad_norm": 0.2166760306608455, |
| "learning_rate": 2.5550891920251836e-05, |
| "loss": 0.3627, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.62137296532201, |
| "grad_norm": 0.20996874666882176, |
| "learning_rate": 2.552465897166842e-05, |
| "loss": 0.3711, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.6227883934890306, |
| "grad_norm": 0.22105885419371793, |
| "learning_rate": 2.5498426023085e-05, |
| "loss": 0.3667, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.6242038216560508, |
| "grad_norm": 0.22800684499540488, |
| "learning_rate": 2.5472193074501576e-05, |
| "loss": 0.3739, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.6256192498230715, |
| "grad_norm": 0.22698536578933734, |
| "learning_rate": 2.5445960125918157e-05, |
| "loss": 0.36, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.6270346779900922, |
| "grad_norm": 0.18825409854030373, |
| "learning_rate": 2.5419727177334734e-05, |
| "loss": 0.3693, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.6284501061571124, |
| "grad_norm": 0.2281011630827293, |
| "learning_rate": 2.5393494228751314e-05, |
| "loss": 0.3567, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.629865534324133, |
| "grad_norm": 0.19742550616229176, |
| "learning_rate": 2.536726128016789e-05, |
| "loss": 0.3693, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.6312809624911537, |
| "grad_norm": 0.20797475786931, |
| "learning_rate": 2.534102833158447e-05, |
| "loss": 0.3692, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.632696390658174, |
| "grad_norm": 0.2138619317417531, |
| "learning_rate": 2.531479538300105e-05, |
| "loss": 0.3701, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.6341118188251946, |
| "grad_norm": 0.2069543005190604, |
| "learning_rate": 2.5288562434417628e-05, |
| "loss": 0.3705, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.6355272469922153, |
| "grad_norm": 0.21017404070821266, |
| "learning_rate": 2.526232948583421e-05, |
| "loss": 0.3569, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.6369426751592355, |
| "grad_norm": 0.1991742248894112, |
| "learning_rate": 2.5236096537250785e-05, |
| "loss": 0.3553, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.6383581033262562, |
| "grad_norm": 0.21323664699460512, |
| "learning_rate": 2.5209863588667366e-05, |
| "loss": 0.3605, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.6397735314932769, |
| "grad_norm": 0.21728754436493367, |
| "learning_rate": 2.5183630640083943e-05, |
| "loss": 0.3561, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.641188959660297, |
| "grad_norm": 0.23906888466302859, |
| "learning_rate": 2.515739769150053e-05, |
| "loss": 0.3526, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.6426043878273178, |
| "grad_norm": 0.2252437181573515, |
| "learning_rate": 2.5131164742917107e-05, |
| "loss": 0.3748, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.6440198159943384, |
| "grad_norm": 0.2701946642824661, |
| "learning_rate": 2.5104931794333687e-05, |
| "loss": 0.3807, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.6454352441613587, |
| "grad_norm": 0.24750083676506437, |
| "learning_rate": 2.5078698845750264e-05, |
| "loss": 0.3857, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.6468506723283793, |
| "grad_norm": 0.21529557169837596, |
| "learning_rate": 2.5052465897166844e-05, |
| "loss": 0.3665, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.6482661004954, |
| "grad_norm": 0.2258206847573629, |
| "learning_rate": 2.502623294858342e-05, |
| "loss": 0.3574, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.6496815286624202, |
| "grad_norm": 0.2069826183446378, |
| "learning_rate": 2.5e-05, |
| "loss": 0.3665, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.651096956829441, |
| "grad_norm": 0.2169513049293553, |
| "learning_rate": 2.497376705141658e-05, |
| "loss": 0.3732, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.6525123849964616, |
| "grad_norm": 0.212906101059109, |
| "learning_rate": 2.494753410283316e-05, |
| "loss": 0.3534, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.6539278131634818, |
| "grad_norm": 0.2105570290098452, |
| "learning_rate": 2.492130115424974e-05, |
| "loss": 0.3622, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.6553432413305025, |
| "grad_norm": 0.21546636807833525, |
| "learning_rate": 2.489506820566632e-05, |
| "loss": 0.3673, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.6567586694975232, |
| "grad_norm": 0.22406045718755155, |
| "learning_rate": 2.48688352570829e-05, |
| "loss": 0.3465, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.6581740976645434, |
| "grad_norm": 0.21206566096914026, |
| "learning_rate": 2.4842602308499476e-05, |
| "loss": 0.3701, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.659589525831564, |
| "grad_norm": 0.2178940497544651, |
| "learning_rate": 2.4816369359916056e-05, |
| "loss": 0.381, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.6610049539985847, |
| "grad_norm": 0.2261416831205509, |
| "learning_rate": 2.4790136411332633e-05, |
| "loss": 0.372, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.662420382165605, |
| "grad_norm": 0.214874681558974, |
| "learning_rate": 2.4763903462749214e-05, |
| "loss": 0.3717, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.6638358103326256, |
| "grad_norm": 0.2313551183517155, |
| "learning_rate": 2.473767051416579e-05, |
| "loss": 0.3752, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.6652512384996463, |
| "grad_norm": 0.22257778337500586, |
| "learning_rate": 2.4711437565582374e-05, |
| "loss": 0.3648, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.6666666666666665, |
| "grad_norm": 0.2106210720419038, |
| "learning_rate": 2.468520461699895e-05, |
| "loss": 0.3683, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.6680820948336872, |
| "grad_norm": 0.20845895875371392, |
| "learning_rate": 2.465897166841553e-05, |
| "loss": 0.3777, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.6694975230007079, |
| "grad_norm": 0.21186402961211692, |
| "learning_rate": 2.463273871983211e-05, |
| "loss": 0.3698, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.670912951167728, |
| "grad_norm": 0.1959558234904786, |
| "learning_rate": 2.460650577124869e-05, |
| "loss": 0.3511, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.6723283793347488, |
| "grad_norm": 0.20690244788270776, |
| "learning_rate": 2.458027282266527e-05, |
| "loss": 0.3678, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.6737438075017694, |
| "grad_norm": 0.23862560377417902, |
| "learning_rate": 2.4554039874081846e-05, |
| "loss": 0.3713, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.6751592356687897, |
| "grad_norm": 0.19173248146815988, |
| "learning_rate": 2.452780692549843e-05, |
| "loss": 0.3437, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.6765746638358103, |
| "grad_norm": 0.21354581946825002, |
| "learning_rate": 2.4501573976915006e-05, |
| "loss": 0.3783, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.677990092002831, |
| "grad_norm": 0.21694878267745185, |
| "learning_rate": 2.4475341028331587e-05, |
| "loss": 0.3669, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.6794055201698512, |
| "grad_norm": 0.20270673295676334, |
| "learning_rate": 2.4449108079748163e-05, |
| "loss": 0.3713, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.680820948336872, |
| "grad_norm": 0.2297035807958727, |
| "learning_rate": 2.4422875131164744e-05, |
| "loss": 0.358, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.6822363765038926, |
| "grad_norm": 0.19338680798787286, |
| "learning_rate": 2.4396642182581324e-05, |
| "loss": 0.3638, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.6836518046709128, |
| "grad_norm": 0.21068205711888074, |
| "learning_rate": 2.43704092339979e-05, |
| "loss": 0.3821, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.6850672328379335, |
| "grad_norm": 0.20778466400564496, |
| "learning_rate": 2.434417628541448e-05, |
| "loss": 0.3555, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.6864826610049541, |
| "grad_norm": 0.1966167293258372, |
| "learning_rate": 2.431794333683106e-05, |
| "loss": 0.3566, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.6878980891719744, |
| "grad_norm": 0.1961489333151967, |
| "learning_rate": 2.4291710388247642e-05, |
| "loss": 0.3614, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.689313517338995, |
| "grad_norm": 0.19471307863244142, |
| "learning_rate": 2.426547743966422e-05, |
| "loss": 0.358, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.6907289455060157, |
| "grad_norm": 0.20180964645438249, |
| "learning_rate": 2.42392444910808e-05, |
| "loss": 0.3479, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.692144373673036, |
| "grad_norm": 0.19026046834471852, |
| "learning_rate": 2.4213011542497376e-05, |
| "loss": 0.3582, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.6935598018400566, |
| "grad_norm": 0.1903543816431227, |
| "learning_rate": 2.4186778593913956e-05, |
| "loss": 0.3709, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.6949752300070773, |
| "grad_norm": 0.2092728212533432, |
| "learning_rate": 2.4160545645330536e-05, |
| "loss": 0.3725, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.6963906581740975, |
| "grad_norm": 0.21226469419764257, |
| "learning_rate": 2.4134312696747117e-05, |
| "loss": 0.3587, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.6978060863411182, |
| "grad_norm": 0.2044049166977702, |
| "learning_rate": 2.4108079748163694e-05, |
| "loss": 0.3591, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.6992215145081389, |
| "grad_norm": 0.20048213843328103, |
| "learning_rate": 2.4081846799580274e-05, |
| "loss": 0.3627, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.700636942675159, |
| "grad_norm": 0.19580747071870352, |
| "learning_rate": 2.4055613850996854e-05, |
| "loss": 0.3502, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.7020523708421798, |
| "grad_norm": 0.20105011429903724, |
| "learning_rate": 2.402938090241343e-05, |
| "loss": 0.3441, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.7034677990092004, |
| "grad_norm": 0.2018537985655408, |
| "learning_rate": 2.400314795383001e-05, |
| "loss": 0.3556, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.7048832271762207, |
| "grad_norm": 0.2044453087083259, |
| "learning_rate": 2.397691500524659e-05, |
| "loss": 0.3558, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.7062986553432413, |
| "grad_norm": 0.22086503599296572, |
| "learning_rate": 2.3950682056663172e-05, |
| "loss": 0.3602, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.707714083510262, |
| "grad_norm": 0.19641136659257846, |
| "learning_rate": 2.392444910807975e-05, |
| "loss": 0.3743, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.7091295116772822, |
| "grad_norm": 0.20260136418832903, |
| "learning_rate": 2.389821615949633e-05, |
| "loss": 0.378, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.710544939844303, |
| "grad_norm": 0.9136989290123337, |
| "learning_rate": 2.3871983210912906e-05, |
| "loss": 0.3782, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.7119603680113236, |
| "grad_norm": 0.22703872536118613, |
| "learning_rate": 2.3845750262329486e-05, |
| "loss": 0.352, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.7133757961783438, |
| "grad_norm": 0.28524183327470354, |
| "learning_rate": 2.3819517313746067e-05, |
| "loss": 0.3421, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.7147912243453645, |
| "grad_norm": 0.20985868088696918, |
| "learning_rate": 2.3793284365162647e-05, |
| "loss": 0.3641, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.7162066525123851, |
| "grad_norm": 0.2096411624630168, |
| "learning_rate": 2.3767051416579224e-05, |
| "loss": 0.3627, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.7176220806794054, |
| "grad_norm": 0.2289168020447138, |
| "learning_rate": 2.3740818467995804e-05, |
| "loss": 0.3701, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.719037508846426, |
| "grad_norm": 0.23972621162129334, |
| "learning_rate": 2.3714585519412384e-05, |
| "loss": 0.3528, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.7204529370134467, |
| "grad_norm": 0.2047453070334584, |
| "learning_rate": 2.368835257082896e-05, |
| "loss": 0.3594, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.721868365180467, |
| "grad_norm": 0.2080153214325207, |
| "learning_rate": 2.366211962224554e-05, |
| "loss": 0.3766, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.7232837933474876, |
| "grad_norm": 0.25294177188787764, |
| "learning_rate": 2.3635886673662122e-05, |
| "loss": 0.3601, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.7246992215145083, |
| "grad_norm": 0.20454836431609313, |
| "learning_rate": 2.3609653725078702e-05, |
| "loss": 0.3588, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.7261146496815285, |
| "grad_norm": 0.19248036034295865, |
| "learning_rate": 2.358342077649528e-05, |
| "loss": 0.3663, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.7275300778485492, |
| "grad_norm": 0.24928635062323073, |
| "learning_rate": 2.355718782791186e-05, |
| "loss": 0.3543, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.7289455060155698, |
| "grad_norm": 0.22813261657970457, |
| "learning_rate": 2.3530954879328436e-05, |
| "loss": 0.3837, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.73036093418259, |
| "grad_norm": 0.21568958693996085, |
| "learning_rate": 2.3504721930745016e-05, |
| "loss": 0.3725, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.7317763623496107, |
| "grad_norm": 0.23325960207870589, |
| "learning_rate": 2.3478488982161593e-05, |
| "loss": 0.3717, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.7331917905166314, |
| "grad_norm": 0.23918959953390725, |
| "learning_rate": 2.3452256033578177e-05, |
| "loss": 0.3698, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.7346072186836516, |
| "grad_norm": 0.20037216180436881, |
| "learning_rate": 2.3426023084994754e-05, |
| "loss": 0.3628, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.7360226468506723, |
| "grad_norm": 0.22088144016706737, |
| "learning_rate": 2.3399790136411334e-05, |
| "loss": 0.3686, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.737438075017693, |
| "grad_norm": 0.22951569956332904, |
| "learning_rate": 2.3373557187827915e-05, |
| "loss": 0.3777, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.7388535031847132, |
| "grad_norm": 0.20888138755141997, |
| "learning_rate": 2.334732423924449e-05, |
| "loss": 0.3697, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.7402689313517339, |
| "grad_norm": 0.20044970774934878, |
| "learning_rate": 2.332109129066107e-05, |
| "loss": 0.3624, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.7416843595187546, |
| "grad_norm": 0.2119562447197313, |
| "learning_rate": 2.329485834207765e-05, |
| "loss": 0.3649, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.7430997876857748, |
| "grad_norm": 0.2122586202791008, |
| "learning_rate": 2.3268625393494232e-05, |
| "loss": 0.3707, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.7445152158527955, |
| "grad_norm": 0.21687015008306162, |
| "learning_rate": 2.324239244491081e-05, |
| "loss": 0.3556, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.7459306440198161, |
| "grad_norm": 0.2108980964868219, |
| "learning_rate": 2.321615949632739e-05, |
| "loss": 0.3748, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.7473460721868364, |
| "grad_norm": 1.0913112858035567, |
| "learning_rate": 2.3189926547743966e-05, |
| "loss": 0.3518, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.748761500353857, |
| "grad_norm": 0.23288980005086307, |
| "learning_rate": 2.3163693599160547e-05, |
| "loss": 0.3638, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.7501769285208777, |
| "grad_norm": 0.21201148675782364, |
| "learning_rate": 2.3137460650577124e-05, |
| "loss": 0.3502, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.7515923566878981, |
| "grad_norm": 0.20520118643995194, |
| "learning_rate": 2.3111227701993704e-05, |
| "loss": 0.3695, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.7530077848549186, |
| "grad_norm": 0.23805035582970135, |
| "learning_rate": 2.3084994753410284e-05, |
| "loss": 0.3885, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.7544232130219393, |
| "grad_norm": 0.23750328263631534, |
| "learning_rate": 2.3058761804826864e-05, |
| "loss": 0.3617, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.7558386411889597, |
| "grad_norm": 0.2146087119167009, |
| "learning_rate": 2.3032528856243445e-05, |
| "loss": 0.364, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.7572540693559802, |
| "grad_norm": 0.20739386731977583, |
| "learning_rate": 2.300629590766002e-05, |
| "loss": 0.3537, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.7586694975230008, |
| "grad_norm": 0.2633167341683389, |
| "learning_rate": 2.2980062959076602e-05, |
| "loss": 0.3695, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.7600849256900213, |
| "grad_norm": 0.2121191502498069, |
| "learning_rate": 2.295383001049318e-05, |
| "loss": 0.3589, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.7615003538570417, |
| "grad_norm": 0.19799416955148808, |
| "learning_rate": 2.292759706190976e-05, |
| "loss": 0.3731, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.7629157820240624, |
| "grad_norm": 0.23500520602126604, |
| "learning_rate": 2.290136411332634e-05, |
| "loss": 0.3698, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.7643312101910829, |
| "grad_norm": 0.22391141058695835, |
| "learning_rate": 2.287513116474292e-05, |
| "loss": 0.3513, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.7657466383581033, |
| "grad_norm": 0.21100652384119065, |
| "learning_rate": 2.2848898216159496e-05, |
| "loss": 0.3626, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.767162066525124, |
| "grad_norm": 0.21100168937294683, |
| "learning_rate": 2.2822665267576077e-05, |
| "loss": 0.3664, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.7685774946921444, |
| "grad_norm": 0.22302360086013803, |
| "learning_rate": 2.2796432318992657e-05, |
| "loss": 0.3742, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.7699929228591649, |
| "grad_norm": 0.22908994489609763, |
| "learning_rate": 2.2770199370409234e-05, |
| "loss": 0.3694, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.7714083510261855, |
| "grad_norm": 0.2316123139862592, |
| "learning_rate": 2.2743966421825814e-05, |
| "loss": 0.3533, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.772823779193206, |
| "grad_norm": 0.21261889885287916, |
| "learning_rate": 2.2717733473242395e-05, |
| "loss": 0.334, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.7742392073602264, |
| "grad_norm": 0.2423707099137475, |
| "learning_rate": 2.2691500524658975e-05, |
| "loss": 0.3604, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.7756546355272471, |
| "grad_norm": 0.20433184791489278, |
| "learning_rate": 2.2665267576075552e-05, |
| "loss": 0.3651, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.7770700636942676, |
| "grad_norm": 0.22088648020444507, |
| "learning_rate": 2.2639034627492132e-05, |
| "loss": 0.3911, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.778485491861288, |
| "grad_norm": 0.255384104535801, |
| "learning_rate": 2.261280167890871e-05, |
| "loss": 0.3359, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.7799009200283087, |
| "grad_norm": 0.21730056521809238, |
| "learning_rate": 2.258656873032529e-05, |
| "loss": 0.3452, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.7813163481953291, |
| "grad_norm": 0.23095327884837383, |
| "learning_rate": 2.2560335781741866e-05, |
| "loss": 0.379, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.7827317763623496, |
| "grad_norm": 0.23134452811986084, |
| "learning_rate": 2.253410283315845e-05, |
| "loss": 0.3871, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.7841472045293703, |
| "grad_norm": 0.21404094506623178, |
| "learning_rate": 2.2507869884575027e-05, |
| "loss": 0.3555, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.7855626326963907, |
| "grad_norm": 0.3466551124045913, |
| "learning_rate": 2.2481636935991607e-05, |
| "loss": 0.3697, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.7869780608634112, |
| "grad_norm": 0.20695439752442338, |
| "learning_rate": 2.2455403987408187e-05, |
| "loss": 0.3656, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.7883934890304318, |
| "grad_norm": 0.20396098860354842, |
| "learning_rate": 2.2429171038824764e-05, |
| "loss": 0.3884, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.7898089171974523, |
| "grad_norm": 0.25635116226554294, |
| "learning_rate": 2.2402938090241344e-05, |
| "loss": 0.3817, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.7912243453644727, |
| "grad_norm": 0.2115951041264189, |
| "learning_rate": 2.237670514165792e-05, |
| "loss": 0.3685, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.7926397735314934, |
| "grad_norm": 0.20319279122789133, |
| "learning_rate": 2.2350472193074505e-05, |
| "loss": 0.369, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.7940552016985138, |
| "grad_norm": 0.2274827066830239, |
| "learning_rate": 2.2324239244491082e-05, |
| "loss": 0.3468, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.7954706298655343, |
| "grad_norm": 0.2046829944609587, |
| "learning_rate": 2.2298006295907662e-05, |
| "loss": 0.3668, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.796886058032555, |
| "grad_norm": 0.19991518384288134, |
| "learning_rate": 2.227177334732424e-05, |
| "loss": 0.3504, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.7983014861995754, |
| "grad_norm": 0.22501211515746133, |
| "learning_rate": 2.224554039874082e-05, |
| "loss": 0.358, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.7997169143665959, |
| "grad_norm": 0.21863795747539036, |
| "learning_rate": 2.2219307450157396e-05, |
| "loss": 0.3423, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.8011323425336165, |
| "grad_norm": 0.19463870238231556, |
| "learning_rate": 2.2193074501573977e-05, |
| "loss": 0.38, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.802547770700637, |
| "grad_norm": 0.20625397008836294, |
| "learning_rate": 2.2166841552990557e-05, |
| "loss": 0.363, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.8039631988676574, |
| "grad_norm": 0.22540358011888795, |
| "learning_rate": 2.2140608604407137e-05, |
| "loss": 0.3766, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.805378627034678, |
| "grad_norm": 0.2056579233410256, |
| "learning_rate": 2.2114375655823717e-05, |
| "loss": 0.3727, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.8067940552016986, |
| "grad_norm": 0.23614851830523279, |
| "learning_rate": 2.2088142707240294e-05, |
| "loss": 0.3598, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.808209483368719, |
| "grad_norm": 0.20523123531066292, |
| "learning_rate": 2.2061909758656875e-05, |
| "loss": 0.3718, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.8096249115357397, |
| "grad_norm": 0.2369288444316511, |
| "learning_rate": 2.203567681007345e-05, |
| "loss": 0.3814, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.8110403397027601, |
| "grad_norm": 0.21759488565678942, |
| "learning_rate": 2.2009443861490035e-05, |
| "loss": 0.3825, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.8124557678697806, |
| "grad_norm": 0.20459769459772856, |
| "learning_rate": 2.1983210912906612e-05, |
| "loss": 0.3861, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.8138711960368012, |
| "grad_norm": 0.23408130549169665, |
| "learning_rate": 2.1956977964323192e-05, |
| "loss": 0.365, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.8152866242038217, |
| "grad_norm": 0.19418467624251456, |
| "learning_rate": 2.193074501573977e-05, |
| "loss": 0.3412, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.8167020523708421, |
| "grad_norm": 0.19656100787624234, |
| "learning_rate": 2.190451206715635e-05, |
| "loss": 0.3695, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.8181174805378628, |
| "grad_norm": 0.20293810051390856, |
| "learning_rate": 2.1878279118572926e-05, |
| "loss": 0.3637, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.8195329087048833, |
| "grad_norm": 0.23094731602569574, |
| "learning_rate": 2.1852046169989507e-05, |
| "loss": 0.3631, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.8209483368719037, |
| "grad_norm": 0.23025613270928239, |
| "learning_rate": 2.1825813221406087e-05, |
| "loss": 0.3674, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.8223637650389244, |
| "grad_norm": 0.2226632950521876, |
| "learning_rate": 2.1799580272822667e-05, |
| "loss": 0.3743, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.8237791932059448, |
| "grad_norm": 0.21775390538899728, |
| "learning_rate": 2.1773347324239248e-05, |
| "loss": 0.3566, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.8251946213729653, |
| "grad_norm": 0.26983059901854184, |
| "learning_rate": 2.1747114375655824e-05, |
| "loss": 0.347, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.826610049539986, |
| "grad_norm": 0.1987013924047395, |
| "learning_rate": 2.1720881427072405e-05, |
| "loss": 0.3558, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.8280254777070064, |
| "grad_norm": 0.2051541900220511, |
| "learning_rate": 2.169464847848898e-05, |
| "loss": 0.3711, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.8294409058740269, |
| "grad_norm": 0.25572075355083324, |
| "learning_rate": 2.1668415529905562e-05, |
| "loss": 0.3536, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.8308563340410475, |
| "grad_norm": 0.19893210566212471, |
| "learning_rate": 2.1642182581322142e-05, |
| "loss": 0.3611, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.832271762208068, |
| "grad_norm": 0.1994034930117717, |
| "learning_rate": 2.1615949632738722e-05, |
| "loss": 0.3825, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.8336871903750884, |
| "grad_norm": 0.20061911877028749, |
| "learning_rate": 2.15897166841553e-05, |
| "loss": 0.3612, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.835102618542109, |
| "grad_norm": 0.20471315318320596, |
| "learning_rate": 2.156348373557188e-05, |
| "loss": 0.3477, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.8365180467091295, |
| "grad_norm": 0.19983725111724615, |
| "learning_rate": 2.1537250786988457e-05, |
| "loss": 0.3546, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.83793347487615, |
| "grad_norm": 0.19499829531879675, |
| "learning_rate": 2.1511017838405037e-05, |
| "loss": 0.3473, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.8393489030431707, |
| "grad_norm": 0.2055781282636102, |
| "learning_rate": 2.1484784889821617e-05, |
| "loss": 0.3484, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.8407643312101911, |
| "grad_norm": 0.22577798017905634, |
| "learning_rate": 2.1458551941238197e-05, |
| "loss": 0.3647, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.8421797593772116, |
| "grad_norm": 0.19401973361309152, |
| "learning_rate": 2.1432318992654778e-05, |
| "loss": 0.369, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.8435951875442322, |
| "grad_norm": 0.23586191495152506, |
| "learning_rate": 2.1406086044071355e-05, |
| "loss": 0.3633, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.8450106157112527, |
| "grad_norm": 0.2137537597890475, |
| "learning_rate": 2.1379853095487935e-05, |
| "loss": 0.3564, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.8464260438782731, |
| "grad_norm": 0.19896395757117963, |
| "learning_rate": 2.1353620146904512e-05, |
| "loss": 0.3557, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.8478414720452938, |
| "grad_norm": 0.20906941348976985, |
| "learning_rate": 2.1327387198321092e-05, |
| "loss": 0.3596, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.8492569002123143, |
| "grad_norm": 0.22159183190357726, |
| "learning_rate": 2.130115424973767e-05, |
| "loss": 0.3562, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.8506723283793347, |
| "grad_norm": 0.18295249145000686, |
| "learning_rate": 2.1274921301154253e-05, |
| "loss": 0.3551, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.8520877565463554, |
| "grad_norm": 0.21844066556104139, |
| "learning_rate": 2.124868835257083e-05, |
| "loss": 0.3675, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.8535031847133758, |
| "grad_norm": 0.1957473561795513, |
| "learning_rate": 2.122245540398741e-05, |
| "loss": 0.3383, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.8549186128803963, |
| "grad_norm": 0.4087020703926344, |
| "learning_rate": 2.119622245540399e-05, |
| "loss": 0.3873, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.856334041047417, |
| "grad_norm": 0.20848052268632755, |
| "learning_rate": 2.1169989506820567e-05, |
| "loss": 0.3553, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.8577494692144374, |
| "grad_norm": 0.20476002741793628, |
| "learning_rate": 2.1143756558237147e-05, |
| "loss": 0.3654, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.8591648973814578, |
| "grad_norm": 0.22650052167287157, |
| "learning_rate": 2.1117523609653724e-05, |
| "loss": 0.3688, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.8605803255484785, |
| "grad_norm": 0.21967639631932895, |
| "learning_rate": 2.1091290661070308e-05, |
| "loss": 0.3659, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.861995753715499, |
| "grad_norm": 0.19573350674272505, |
| "learning_rate": 2.1065057712486885e-05, |
| "loss": 0.3518, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.8634111818825194, |
| "grad_norm": 0.21566711659758614, |
| "learning_rate": 2.1038824763903465e-05, |
| "loss": 0.3737, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.86482661004954, |
| "grad_norm": 0.2106286767662346, |
| "learning_rate": 2.1012591815320042e-05, |
| "loss": 0.3535, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.8662420382165605, |
| "grad_norm": 0.20610275935482994, |
| "learning_rate": 2.0986358866736622e-05, |
| "loss": 0.3706, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.867657466383581, |
| "grad_norm": 0.2094944978172916, |
| "learning_rate": 2.09601259181532e-05, |
| "loss": 0.3773, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.8690728945506017, |
| "grad_norm": 0.2009512235832306, |
| "learning_rate": 2.093389296956978e-05, |
| "loss": 0.3651, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.870488322717622, |
| "grad_norm": 0.19933772882372747, |
| "learning_rate": 2.090766002098636e-05, |
| "loss": 0.3707, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.8719037508846426, |
| "grad_norm": 0.20506289727859822, |
| "learning_rate": 2.088142707240294e-05, |
| "loss": 0.349, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.8733191790516632, |
| "grad_norm": 0.21095123180435418, |
| "learning_rate": 2.085519412381952e-05, |
| "loss": 0.3573, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.8747346072186837, |
| "grad_norm": 0.19076272836405586, |
| "learning_rate": 2.0828961175236097e-05, |
| "loss": 0.3607, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.8761500353857041, |
| "grad_norm": 0.19288645362192516, |
| "learning_rate": 2.0802728226652677e-05, |
| "loss": 0.3697, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.8775654635527248, |
| "grad_norm": 0.2003841123589038, |
| "learning_rate": 2.0776495278069254e-05, |
| "loss": 0.3556, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.8789808917197452, |
| "grad_norm": 0.20691879619101566, |
| "learning_rate": 2.0750262329485835e-05, |
| "loss": 0.3662, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.8803963198867657, |
| "grad_norm": 0.18551678784944658, |
| "learning_rate": 2.0724029380902415e-05, |
| "loss": 0.3589, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.8818117480537864, |
| "grad_norm": 0.21042470211569628, |
| "learning_rate": 2.0697796432318995e-05, |
| "loss": 0.3593, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.8832271762208068, |
| "grad_norm": 0.2243848627008227, |
| "learning_rate": 2.0671563483735572e-05, |
| "loss": 0.3631, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.8846426043878273, |
| "grad_norm": 0.20252637607681076, |
| "learning_rate": 2.0645330535152152e-05, |
| "loss": 0.3596, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.886058032554848, |
| "grad_norm": 0.20201222497778787, |
| "learning_rate": 2.061909758656873e-05, |
| "loss": 0.3734, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.8874734607218684, |
| "grad_norm": 0.21797866269539595, |
| "learning_rate": 2.059286463798531e-05, |
| "loss": 0.368, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.8888888888888888, |
| "grad_norm": 0.19661726770953397, |
| "learning_rate": 2.056663168940189e-05, |
| "loss": 0.3446, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.8903043170559095, |
| "grad_norm": 0.20102166812135902, |
| "learning_rate": 2.054039874081847e-05, |
| "loss": 0.354, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.89171974522293, |
| "grad_norm": 0.21327874311920472, |
| "learning_rate": 2.051416579223505e-05, |
| "loss": 0.3687, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.8931351733899504, |
| "grad_norm": 0.19691581170479022, |
| "learning_rate": 2.0487932843651627e-05, |
| "loss": 0.345, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.894550601556971, |
| "grad_norm": 0.20012006527780385, |
| "learning_rate": 2.0461699895068208e-05, |
| "loss": 0.3512, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.8959660297239915, |
| "grad_norm": 0.224910501612043, |
| "learning_rate": 2.0435466946484784e-05, |
| "loss": 0.3613, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.897381457891012, |
| "grad_norm": 0.19538273423275948, |
| "learning_rate": 2.0409233997901365e-05, |
| "loss": 0.3499, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.8987968860580327, |
| "grad_norm": 0.19171272752787255, |
| "learning_rate": 2.038300104931794e-05, |
| "loss": 0.368, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.900212314225053, |
| "grad_norm": 0.1983476334140531, |
| "learning_rate": 2.0356768100734525e-05, |
| "loss": 0.3609, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.9016277423920736, |
| "grad_norm": 0.20005901565438508, |
| "learning_rate": 2.0330535152151102e-05, |
| "loss": 0.3458, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.9030431705590942, |
| "grad_norm": 0.2291395605242673, |
| "learning_rate": 2.0304302203567683e-05, |
| "loss": 0.3463, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.9044585987261147, |
| "grad_norm": 0.1924624447422567, |
| "learning_rate": 2.027806925498426e-05, |
| "loss": 0.3668, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.9058740268931351, |
| "grad_norm": 0.22436482558939622, |
| "learning_rate": 2.025183630640084e-05, |
| "loss": 0.3556, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.9072894550601558, |
| "grad_norm": 0.20674060334416514, |
| "learning_rate": 2.022560335781742e-05, |
| "loss": 0.3775, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.9087048832271762, |
| "grad_norm": 0.21241679987762357, |
| "learning_rate": 2.0199370409233997e-05, |
| "loss": 0.3706, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.9101203113941967, |
| "grad_norm": 0.2035827700235495, |
| "learning_rate": 2.017313746065058e-05, |
| "loss": 0.3682, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.9115357395612174, |
| "grad_norm": 0.2102112730791476, |
| "learning_rate": 2.0146904512067157e-05, |
| "loss": 0.3533, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.9129511677282378, |
| "grad_norm": 0.1977936871380788, |
| "learning_rate": 2.0120671563483738e-05, |
| "loss": 0.3659, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.9143665958952583, |
| "grad_norm": 0.2073046648660033, |
| "learning_rate": 2.0094438614900315e-05, |
| "loss": 0.3494, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.915782024062279, |
| "grad_norm": 0.21001193349632688, |
| "learning_rate": 2.0068205666316895e-05, |
| "loss": 0.3546, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.9171974522292994, |
| "grad_norm": 0.21798544832491265, |
| "learning_rate": 2.0041972717733472e-05, |
| "loss": 0.3566, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.9186128803963198, |
| "grad_norm": 0.21290936487815346, |
| "learning_rate": 2.0015739769150055e-05, |
| "loss": 0.3632, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.9200283085633405, |
| "grad_norm": 0.20721108578002567, |
| "learning_rate": 1.9989506820566632e-05, |
| "loss": 0.3605, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.921443736730361, |
| "grad_norm": 0.21341532178889183, |
| "learning_rate": 1.9963273871983213e-05, |
| "loss": 0.3714, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.9228591648973814, |
| "grad_norm": 0.20509852001562356, |
| "learning_rate": 1.993704092339979e-05, |
| "loss": 0.3639, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.924274593064402, |
| "grad_norm": 0.18996587530747674, |
| "learning_rate": 1.991080797481637e-05, |
| "loss": 0.3491, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.9256900212314225, |
| "grad_norm": 0.22186916332584036, |
| "learning_rate": 1.988457502623295e-05, |
| "loss": 0.3584, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.927105449398443, |
| "grad_norm": 0.19859813027365922, |
| "learning_rate": 1.9858342077649527e-05, |
| "loss": 0.3507, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.9285208775654636, |
| "grad_norm": 0.19952702214893042, |
| "learning_rate": 1.983210912906611e-05, |
| "loss": 0.3639, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.929936305732484, |
| "grad_norm": 0.20121877277304315, |
| "learning_rate": 1.9805876180482688e-05, |
| "loss": 0.374, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.9313517338995045, |
| "grad_norm": 0.19745024884216902, |
| "learning_rate": 1.9779643231899268e-05, |
| "loss": 0.3627, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.9327671620665252, |
| "grad_norm": 0.20892556854783773, |
| "learning_rate": 1.9753410283315845e-05, |
| "loss": 0.356, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.9341825902335457, |
| "grad_norm": 0.20820929191051837, |
| "learning_rate": 1.9727177334732425e-05, |
| "loss": 0.363, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.9355980184005661, |
| "grad_norm": 0.19872227127810696, |
| "learning_rate": 1.9700944386149002e-05, |
| "loss": 0.3788, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.9370134465675868, |
| "grad_norm": 0.20891761384090757, |
| "learning_rate": 1.9674711437565582e-05, |
| "loss": 0.3648, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.9384288747346072, |
| "grad_norm": 0.19845543506864766, |
| "learning_rate": 1.9648478488982163e-05, |
| "loss": 0.369, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.9398443029016277, |
| "grad_norm": 0.19113509199428175, |
| "learning_rate": 1.9622245540398743e-05, |
| "loss": 0.364, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.9412597310686484, |
| "grad_norm": 0.21549384559079116, |
| "learning_rate": 1.9596012591815323e-05, |
| "loss": 0.3625, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.9426751592356688, |
| "grad_norm": 0.22030815818854466, |
| "learning_rate": 1.95697796432319e-05, |
| "loss": 0.3745, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.9440905874026893, |
| "grad_norm": 0.19402336745232818, |
| "learning_rate": 1.954354669464848e-05, |
| "loss": 0.3534, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.94550601556971, |
| "grad_norm": 0.21121560897394442, |
| "learning_rate": 1.9517313746065057e-05, |
| "loss": 0.3591, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.9469214437367304, |
| "grad_norm": 0.20599072914025068, |
| "learning_rate": 1.9491080797481637e-05, |
| "loss": 0.3482, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.9483368719037508, |
| "grad_norm": 0.2101078777457639, |
| "learning_rate": 1.9464847848898218e-05, |
| "loss": 0.3666, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.9497523000707715, |
| "grad_norm": 0.18892328435730557, |
| "learning_rate": 1.9438614900314798e-05, |
| "loss": 0.3628, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.951167728237792, |
| "grad_norm": 0.2131996417180784, |
| "learning_rate": 1.9412381951731375e-05, |
| "loss": 0.3635, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.9525831564048124, |
| "grad_norm": 0.20128606932865298, |
| "learning_rate": 1.9386149003147955e-05, |
| "loss": 0.3638, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.953998584571833, |
| "grad_norm": 0.21900352568972972, |
| "learning_rate": 1.9359916054564532e-05, |
| "loss": 0.3638, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.9554140127388535, |
| "grad_norm": 0.1956686768669383, |
| "learning_rate": 1.9333683105981112e-05, |
| "loss": 0.3625, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.956829440905874, |
| "grad_norm": 0.19093257924184567, |
| "learning_rate": 1.9307450157397693e-05, |
| "loss": 0.3684, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.9582448690728946, |
| "grad_norm": 0.20479976158398785, |
| "learning_rate": 1.9281217208814273e-05, |
| "loss": 0.3625, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.959660297239915, |
| "grad_norm": 0.20885102906024197, |
| "learning_rate": 1.9254984260230853e-05, |
| "loss": 0.3579, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.9610757254069355, |
| "grad_norm": 0.17258172072412006, |
| "learning_rate": 1.922875131164743e-05, |
| "loss": 0.3565, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.9624911535739562, |
| "grad_norm": 0.2178182423526484, |
| "learning_rate": 1.920251836306401e-05, |
| "loss": 0.3698, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.9639065817409767, |
| "grad_norm": 0.19896161831763218, |
| "learning_rate": 1.9176285414480587e-05, |
| "loss": 0.3502, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.965322009907997, |
| "grad_norm": 0.18768357869637123, |
| "learning_rate": 1.9150052465897168e-05, |
| "loss": 0.3668, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.9667374380750178, |
| "grad_norm": 0.19297739606631034, |
| "learning_rate": 1.9123819517313745e-05, |
| "loss": 0.365, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.9681528662420382, |
| "grad_norm": 0.22915739722100426, |
| "learning_rate": 1.9097586568730328e-05, |
| "loss": 0.374, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.9695682944090587, |
| "grad_norm": 0.21354617076364507, |
| "learning_rate": 1.9071353620146905e-05, |
| "loss": 0.3504, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.9709837225760793, |
| "grad_norm": 0.1978571548565372, |
| "learning_rate": 1.9045120671563485e-05, |
| "loss": 0.333, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.9723991507430998, |
| "grad_norm": 0.19721042495263594, |
| "learning_rate": 1.9018887722980062e-05, |
| "loss": 0.3448, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.9738145789101202, |
| "grad_norm": 0.19005416630899646, |
| "learning_rate": 1.8992654774396643e-05, |
| "loss": 0.3771, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.975230007077141, |
| "grad_norm": 0.2133322733488417, |
| "learning_rate": 1.8966421825813223e-05, |
| "loss": 0.357, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.9766454352441614, |
| "grad_norm": 0.1894050791328522, |
| "learning_rate": 1.89401888772298e-05, |
| "loss": 0.3533, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.9780608634111818, |
| "grad_norm": 0.20012948540827005, |
| "learning_rate": 1.8913955928646383e-05, |
| "loss": 0.3681, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.9794762915782025, |
| "grad_norm": 0.20502607195124412, |
| "learning_rate": 1.888772298006296e-05, |
| "loss": 0.3688, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.980891719745223, |
| "grad_norm": 0.19446905713223603, |
| "learning_rate": 1.886149003147954e-05, |
| "loss": 0.3488, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.9823071479122434, |
| "grad_norm": 0.19833011267832945, |
| "learning_rate": 1.8835257082896118e-05, |
| "loss": 0.3752, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.983722576079264, |
| "grad_norm": 0.19273254120634167, |
| "learning_rate": 1.8809024134312698e-05, |
| "loss": 0.3621, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.9851380042462845, |
| "grad_norm": 0.20749102898898195, |
| "learning_rate": 1.8782791185729275e-05, |
| "loss": 0.3509, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.986553432413305, |
| "grad_norm": 0.20304238601187583, |
| "learning_rate": 1.8756558237145855e-05, |
| "loss": 0.3514, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.9879688605803256, |
| "grad_norm": 0.1855035727758363, |
| "learning_rate": 1.8730325288562435e-05, |
| "loss": 0.3559, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.989384288747346, |
| "grad_norm": 0.1900038858199582, |
| "learning_rate": 1.8704092339979016e-05, |
| "loss": 0.3593, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.9907997169143665, |
| "grad_norm": 0.26751614648177435, |
| "learning_rate": 1.8677859391395592e-05, |
| "loss": 0.3584, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.9922151450813872, |
| "grad_norm": 0.1895064557610461, |
| "learning_rate": 1.8651626442812173e-05, |
| "loss": 0.3389, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.9936305732484076, |
| "grad_norm": 0.20279845259843857, |
| "learning_rate": 1.8625393494228753e-05, |
| "loss": 0.363, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.995046001415428, |
| "grad_norm": 0.2039992306549258, |
| "learning_rate": 1.859916054564533e-05, |
| "loss": 0.3554, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.9964614295824488, |
| "grad_norm": 0.18649910427325964, |
| "learning_rate": 1.857292759706191e-05, |
| "loss": 0.3424, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.9978768577494692, |
| "grad_norm": 0.20570040271922255, |
| "learning_rate": 1.854669464847849e-05, |
| "loss": 0.3775, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.9992922859164897, |
| "grad_norm": 0.19929248975834807, |
| "learning_rate": 1.852046169989507e-05, |
| "loss": 0.3617, |
| "step": 1413 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.31812015380648995, |
| "learning_rate": 1.8494228751311648e-05, |
| "loss": 0.3737, |
| "step": 1414 |
| }, |
| { |
| "epoch": 2.0014154281670207, |
| "grad_norm": 0.2618432835627456, |
| "learning_rate": 1.8467995802728228e-05, |
| "loss": 0.3034, |
| "step": 1415 |
| }, |
| { |
| "epoch": 2.002830856334041, |
| "grad_norm": 0.22735472922428987, |
| "learning_rate": 1.8441762854144805e-05, |
| "loss": 0.3015, |
| "step": 1416 |
| }, |
| { |
| "epoch": 2.0042462845010616, |
| "grad_norm": 0.2304654888028929, |
| "learning_rate": 1.8415529905561385e-05, |
| "loss": 0.2929, |
| "step": 1417 |
| }, |
| { |
| "epoch": 2.0056617126680822, |
| "grad_norm": 0.3077158798541765, |
| "learning_rate": 1.8389296956977965e-05, |
| "loss": 0.2998, |
| "step": 1418 |
| }, |
| { |
| "epoch": 2.0070771408351025, |
| "grad_norm": 0.22354884644249307, |
| "learning_rate": 1.8363064008394546e-05, |
| "loss": 0.2892, |
| "step": 1419 |
| }, |
| { |
| "epoch": 2.008492569002123, |
| "grad_norm": 0.2307122902012782, |
| "learning_rate": 1.8336831059811123e-05, |
| "loss": 0.288, |
| "step": 1420 |
| }, |
| { |
| "epoch": 2.009907997169144, |
| "grad_norm": 0.3004574962063541, |
| "learning_rate": 1.8310598111227703e-05, |
| "loss": 0.2954, |
| "step": 1421 |
| }, |
| { |
| "epoch": 2.011323425336164, |
| "grad_norm": 0.25742558193381443, |
| "learning_rate": 1.8284365162644283e-05, |
| "loss": 0.2904, |
| "step": 1422 |
| }, |
| { |
| "epoch": 2.0127388535031847, |
| "grad_norm": 0.22091196543564764, |
| "learning_rate": 1.825813221406086e-05, |
| "loss": 0.2984, |
| "step": 1423 |
| }, |
| { |
| "epoch": 2.0141542816702054, |
| "grad_norm": 0.2693133643238991, |
| "learning_rate": 1.823189926547744e-05, |
| "loss": 0.2809, |
| "step": 1424 |
| }, |
| { |
| "epoch": 2.0155697098372256, |
| "grad_norm": 0.2946008164449425, |
| "learning_rate": 1.820566631689402e-05, |
| "loss": 0.3134, |
| "step": 1425 |
| }, |
| { |
| "epoch": 2.0169851380042463, |
| "grad_norm": 0.22329898493932543, |
| "learning_rate": 1.81794333683106e-05, |
| "loss": 0.2792, |
| "step": 1426 |
| }, |
| { |
| "epoch": 2.018400566171267, |
| "grad_norm": 0.23521702050759785, |
| "learning_rate": 1.8153200419727178e-05, |
| "loss": 0.2834, |
| "step": 1427 |
| }, |
| { |
| "epoch": 2.019815994338287, |
| "grad_norm": 0.2301865651909983, |
| "learning_rate": 1.8126967471143758e-05, |
| "loss": 0.2833, |
| "step": 1428 |
| }, |
| { |
| "epoch": 2.021231422505308, |
| "grad_norm": 0.23805812832238346, |
| "learning_rate": 1.8100734522560335e-05, |
| "loss": 0.2948, |
| "step": 1429 |
| }, |
| { |
| "epoch": 2.0226468506723285, |
| "grad_norm": 0.2204999436021115, |
| "learning_rate": 1.8074501573976915e-05, |
| "loss": 0.2925, |
| "step": 1430 |
| }, |
| { |
| "epoch": 2.0240622788393487, |
| "grad_norm": 0.2099342934273649, |
| "learning_rate": 1.8048268625393496e-05, |
| "loss": 0.2917, |
| "step": 1431 |
| }, |
| { |
| "epoch": 2.0254777070063694, |
| "grad_norm": 0.21316955775068464, |
| "learning_rate": 1.8022035676810076e-05, |
| "loss": 0.2911, |
| "step": 1432 |
| }, |
| { |
| "epoch": 2.02689313517339, |
| "grad_norm": 0.203591778774364, |
| "learning_rate": 1.7995802728226656e-05, |
| "loss": 0.284, |
| "step": 1433 |
| }, |
| { |
| "epoch": 2.0283085633404103, |
| "grad_norm": 0.19857682958767808, |
| "learning_rate": 1.7969569779643233e-05, |
| "loss": 0.2682, |
| "step": 1434 |
| }, |
| { |
| "epoch": 2.029723991507431, |
| "grad_norm": 0.20738110099610785, |
| "learning_rate": 1.7943336831059813e-05, |
| "loss": 0.2856, |
| "step": 1435 |
| }, |
| { |
| "epoch": 2.0311394196744517, |
| "grad_norm": 0.20879171795426643, |
| "learning_rate": 1.791710388247639e-05, |
| "loss": 0.2898, |
| "step": 1436 |
| }, |
| { |
| "epoch": 2.032554847841472, |
| "grad_norm": 0.2100994408662993, |
| "learning_rate": 1.789087093389297e-05, |
| "loss": 0.2853, |
| "step": 1437 |
| }, |
| { |
| "epoch": 2.0339702760084926, |
| "grad_norm": 0.21861018036891264, |
| "learning_rate": 1.7864637985309547e-05, |
| "loss": 0.2842, |
| "step": 1438 |
| }, |
| { |
| "epoch": 2.0353857041755132, |
| "grad_norm": 0.1998874003435123, |
| "learning_rate": 1.783840503672613e-05, |
| "loss": 0.2877, |
| "step": 1439 |
| }, |
| { |
| "epoch": 2.0368011323425335, |
| "grad_norm": 0.19733644838624692, |
| "learning_rate": 1.7812172088142708e-05, |
| "loss": 0.2805, |
| "step": 1440 |
| }, |
| { |
| "epoch": 2.038216560509554, |
| "grad_norm": 0.2102345477010653, |
| "learning_rate": 1.7785939139559288e-05, |
| "loss": 0.2893, |
| "step": 1441 |
| }, |
| { |
| "epoch": 2.039631988676575, |
| "grad_norm": 0.22967771644312862, |
| "learning_rate": 1.7759706190975865e-05, |
| "loss": 0.2897, |
| "step": 1442 |
| }, |
| { |
| "epoch": 2.041047416843595, |
| "grad_norm": 0.21654862978318307, |
| "learning_rate": 1.7733473242392445e-05, |
| "loss": 0.283, |
| "step": 1443 |
| }, |
| { |
| "epoch": 2.0424628450106157, |
| "grad_norm": 0.2069240069772118, |
| "learning_rate": 1.7707240293809026e-05, |
| "loss": 0.2932, |
| "step": 1444 |
| }, |
| { |
| "epoch": 2.0438782731776364, |
| "grad_norm": 0.20251972837802149, |
| "learning_rate": 1.7681007345225603e-05, |
| "loss": 0.2823, |
| "step": 1445 |
| }, |
| { |
| "epoch": 2.0452937013446566, |
| "grad_norm": 0.1985558114956269, |
| "learning_rate": 1.7654774396642186e-05, |
| "loss": 0.2774, |
| "step": 1446 |
| }, |
| { |
| "epoch": 2.0467091295116773, |
| "grad_norm": 0.21672062844894024, |
| "learning_rate": 1.7628541448058763e-05, |
| "loss": 0.2801, |
| "step": 1447 |
| }, |
| { |
| "epoch": 2.048124557678698, |
| "grad_norm": 0.1868319256602976, |
| "learning_rate": 1.7602308499475343e-05, |
| "loss": 0.285, |
| "step": 1448 |
| }, |
| { |
| "epoch": 2.049539985845718, |
| "grad_norm": 0.20743295673083878, |
| "learning_rate": 1.757607555089192e-05, |
| "loss": 0.278, |
| "step": 1449 |
| }, |
| { |
| "epoch": 2.050955414012739, |
| "grad_norm": 0.22077380379857767, |
| "learning_rate": 1.75498426023085e-05, |
| "loss": 0.3091, |
| "step": 1450 |
| }, |
| { |
| "epoch": 2.0523708421797595, |
| "grad_norm": 0.2034193756295043, |
| "learning_rate": 1.7523609653725078e-05, |
| "loss": 0.2831, |
| "step": 1451 |
| }, |
| { |
| "epoch": 2.0537862703467797, |
| "grad_norm": 0.1977083504359432, |
| "learning_rate": 1.7497376705141658e-05, |
| "loss": 0.2934, |
| "step": 1452 |
| }, |
| { |
| "epoch": 2.0552016985138004, |
| "grad_norm": 0.21295222552781645, |
| "learning_rate": 1.7471143756558238e-05, |
| "loss": 0.2801, |
| "step": 1453 |
| }, |
| { |
| "epoch": 2.056617126680821, |
| "grad_norm": 0.20122015505948554, |
| "learning_rate": 1.744491080797482e-05, |
| "loss": 0.2948, |
| "step": 1454 |
| }, |
| { |
| "epoch": 2.0580325548478413, |
| "grad_norm": 0.19851809324642486, |
| "learning_rate": 1.7418677859391395e-05, |
| "loss": 0.288, |
| "step": 1455 |
| }, |
| { |
| "epoch": 2.059447983014862, |
| "grad_norm": 0.2084094141218526, |
| "learning_rate": 1.7392444910807976e-05, |
| "loss": 0.3048, |
| "step": 1456 |
| }, |
| { |
| "epoch": 2.0608634111818827, |
| "grad_norm": 0.18950859697388175, |
| "learning_rate": 1.7366211962224556e-05, |
| "loss": 0.2781, |
| "step": 1457 |
| }, |
| { |
| "epoch": 2.062278839348903, |
| "grad_norm": 0.17827020683407357, |
| "learning_rate": 1.7339979013641133e-05, |
| "loss": 0.2872, |
| "step": 1458 |
| }, |
| { |
| "epoch": 2.0636942675159236, |
| "grad_norm": 0.20524943163518478, |
| "learning_rate": 1.7313746065057713e-05, |
| "loss": 0.2844, |
| "step": 1459 |
| }, |
| { |
| "epoch": 2.0651096956829442, |
| "grad_norm": 0.20828751157840567, |
| "learning_rate": 1.7287513116474293e-05, |
| "loss": 0.2911, |
| "step": 1460 |
| }, |
| { |
| "epoch": 2.0665251238499645, |
| "grad_norm": 0.19230496490646315, |
| "learning_rate": 1.7261280167890874e-05, |
| "loss": 0.2997, |
| "step": 1461 |
| }, |
| { |
| "epoch": 2.067940552016985, |
| "grad_norm": 0.18348519918314377, |
| "learning_rate": 1.723504721930745e-05, |
| "loss": 0.2772, |
| "step": 1462 |
| }, |
| { |
| "epoch": 2.069355980184006, |
| "grad_norm": 0.194299296268244, |
| "learning_rate": 1.720881427072403e-05, |
| "loss": 0.2987, |
| "step": 1463 |
| }, |
| { |
| "epoch": 2.070771408351026, |
| "grad_norm": 0.19044245083990521, |
| "learning_rate": 1.7182581322140608e-05, |
| "loss": 0.2886, |
| "step": 1464 |
| }, |
| { |
| "epoch": 2.0721868365180467, |
| "grad_norm": 0.209393739696165, |
| "learning_rate": 1.7156348373557188e-05, |
| "loss": 0.2848, |
| "step": 1465 |
| }, |
| { |
| "epoch": 2.0736022646850674, |
| "grad_norm": 0.20147250073789505, |
| "learning_rate": 1.7130115424973768e-05, |
| "loss": 0.2771, |
| "step": 1466 |
| }, |
| { |
| "epoch": 2.0750176928520876, |
| "grad_norm": 0.20558457802376123, |
| "learning_rate": 1.710388247639035e-05, |
| "loss": 0.2948, |
| "step": 1467 |
| }, |
| { |
| "epoch": 2.0764331210191083, |
| "grad_norm": 0.19255025397263117, |
| "learning_rate": 1.7077649527806925e-05, |
| "loss": 0.2833, |
| "step": 1468 |
| }, |
| { |
| "epoch": 2.077848549186129, |
| "grad_norm": 0.19957625475208307, |
| "learning_rate": 1.7051416579223506e-05, |
| "loss": 0.3008, |
| "step": 1469 |
| }, |
| { |
| "epoch": 2.079263977353149, |
| "grad_norm": 0.2166469264979805, |
| "learning_rate": 1.7025183630640086e-05, |
| "loss": 0.2824, |
| "step": 1470 |
| }, |
| { |
| "epoch": 2.08067940552017, |
| "grad_norm": 0.2013825115658045, |
| "learning_rate": 1.6998950682056663e-05, |
| "loss": 0.2886, |
| "step": 1471 |
| }, |
| { |
| "epoch": 2.0820948336871905, |
| "grad_norm": 0.2099458003952584, |
| "learning_rate": 1.6972717733473243e-05, |
| "loss": 0.2835, |
| "step": 1472 |
| }, |
| { |
| "epoch": 2.0835102618542107, |
| "grad_norm": 0.1937562307282105, |
| "learning_rate": 1.694648478488982e-05, |
| "loss": 0.2777, |
| "step": 1473 |
| }, |
| { |
| "epoch": 2.0849256900212314, |
| "grad_norm": 0.22143430661074096, |
| "learning_rate": 1.6920251836306404e-05, |
| "loss": 0.295, |
| "step": 1474 |
| }, |
| { |
| "epoch": 2.086341118188252, |
| "grad_norm": 0.19961487797679497, |
| "learning_rate": 1.689401888772298e-05, |
| "loss": 0.2893, |
| "step": 1475 |
| }, |
| { |
| "epoch": 2.0877565463552723, |
| "grad_norm": 0.21740387957685733, |
| "learning_rate": 1.686778593913956e-05, |
| "loss": 0.2869, |
| "step": 1476 |
| }, |
| { |
| "epoch": 2.089171974522293, |
| "grad_norm": 0.1984607922205683, |
| "learning_rate": 1.6841552990556138e-05, |
| "loss": 0.2914, |
| "step": 1477 |
| }, |
| { |
| "epoch": 2.0905874026893136, |
| "grad_norm": 0.1970620738011852, |
| "learning_rate": 1.6815320041972718e-05, |
| "loss": 0.2872, |
| "step": 1478 |
| }, |
| { |
| "epoch": 2.092002830856334, |
| "grad_norm": 0.20203859593307708, |
| "learning_rate": 1.67890870933893e-05, |
| "loss": 0.2935, |
| "step": 1479 |
| }, |
| { |
| "epoch": 2.0934182590233545, |
| "grad_norm": 0.19345474961814704, |
| "learning_rate": 1.6762854144805875e-05, |
| "loss": 0.2919, |
| "step": 1480 |
| }, |
| { |
| "epoch": 2.094833687190375, |
| "grad_norm": 0.19683053511901935, |
| "learning_rate": 1.6736621196222456e-05, |
| "loss": 0.2902, |
| "step": 1481 |
| }, |
| { |
| "epoch": 2.0962491153573954, |
| "grad_norm": 0.20117757322148833, |
| "learning_rate": 1.6710388247639036e-05, |
| "loss": 0.2907, |
| "step": 1482 |
| }, |
| { |
| "epoch": 2.097664543524416, |
| "grad_norm": 0.19094219547799363, |
| "learning_rate": 1.6684155299055616e-05, |
| "loss": 0.2771, |
| "step": 1483 |
| }, |
| { |
| "epoch": 2.099079971691437, |
| "grad_norm": 0.1917696231454564, |
| "learning_rate": 1.6657922350472193e-05, |
| "loss": 0.2791, |
| "step": 1484 |
| }, |
| { |
| "epoch": 2.100495399858457, |
| "grad_norm": 0.20887738097023587, |
| "learning_rate": 1.6631689401888773e-05, |
| "loss": 0.2895, |
| "step": 1485 |
| }, |
| { |
| "epoch": 2.1019108280254777, |
| "grad_norm": 0.20145195422505788, |
| "learning_rate": 1.660545645330535e-05, |
| "loss": 0.277, |
| "step": 1486 |
| }, |
| { |
| "epoch": 2.1033262561924984, |
| "grad_norm": 0.21906519913947312, |
| "learning_rate": 1.657922350472193e-05, |
| "loss": 0.3038, |
| "step": 1487 |
| }, |
| { |
| "epoch": 2.1047416843595186, |
| "grad_norm": 0.2032232846451326, |
| "learning_rate": 1.655299055613851e-05, |
| "loss": 0.2772, |
| "step": 1488 |
| }, |
| { |
| "epoch": 2.1061571125265393, |
| "grad_norm": 0.19785341115033714, |
| "learning_rate": 1.652675760755509e-05, |
| "loss": 0.2867, |
| "step": 1489 |
| }, |
| { |
| "epoch": 2.10757254069356, |
| "grad_norm": 0.2049090742872932, |
| "learning_rate": 1.6500524658971668e-05, |
| "loss": 0.2868, |
| "step": 1490 |
| }, |
| { |
| "epoch": 2.10898796886058, |
| "grad_norm": 0.2026792528862842, |
| "learning_rate": 1.647429171038825e-05, |
| "loss": 0.2923, |
| "step": 1491 |
| }, |
| { |
| "epoch": 2.110403397027601, |
| "grad_norm": 0.21064805818044663, |
| "learning_rate": 1.644805876180483e-05, |
| "loss": 0.2933, |
| "step": 1492 |
| }, |
| { |
| "epoch": 2.1118188251946215, |
| "grad_norm": 0.21631163550771182, |
| "learning_rate": 1.6421825813221406e-05, |
| "loss": 0.2794, |
| "step": 1493 |
| }, |
| { |
| "epoch": 2.1132342533616417, |
| "grad_norm": 0.20710600992144443, |
| "learning_rate": 1.6395592864637986e-05, |
| "loss": 0.2731, |
| "step": 1494 |
| }, |
| { |
| "epoch": 2.1146496815286624, |
| "grad_norm": 0.2030260738287538, |
| "learning_rate": 1.6369359916054566e-05, |
| "loss": 0.2753, |
| "step": 1495 |
| }, |
| { |
| "epoch": 2.116065109695683, |
| "grad_norm": 0.2384668341939414, |
| "learning_rate": 1.6343126967471146e-05, |
| "loss": 0.2786, |
| "step": 1496 |
| }, |
| { |
| "epoch": 2.1174805378627033, |
| "grad_norm": 0.2151123848622728, |
| "learning_rate": 1.6316894018887723e-05, |
| "loss": 0.2877, |
| "step": 1497 |
| }, |
| { |
| "epoch": 2.118895966029724, |
| "grad_norm": 0.21168635358673377, |
| "learning_rate": 1.6290661070304304e-05, |
| "loss": 0.2972, |
| "step": 1498 |
| }, |
| { |
| "epoch": 2.1203113941967446, |
| "grad_norm": 0.19550048749813878, |
| "learning_rate": 1.626442812172088e-05, |
| "loss": 0.2818, |
| "step": 1499 |
| }, |
| { |
| "epoch": 2.121726822363765, |
| "grad_norm": 0.19624090199655572, |
| "learning_rate": 1.623819517313746e-05, |
| "loss": 0.285, |
| "step": 1500 |
| }, |
| { |
| "epoch": 2.1231422505307855, |
| "grad_norm": 0.2089549740401407, |
| "learning_rate": 1.621196222455404e-05, |
| "loss": 0.294, |
| "step": 1501 |
| }, |
| { |
| "epoch": 2.124557678697806, |
| "grad_norm": 0.21856180217421553, |
| "learning_rate": 1.618572927597062e-05, |
| "loss": 0.2834, |
| "step": 1502 |
| }, |
| { |
| "epoch": 2.1259731068648264, |
| "grad_norm": 0.19946569492829458, |
| "learning_rate": 1.6159496327387198e-05, |
| "loss": 0.2988, |
| "step": 1503 |
| }, |
| { |
| "epoch": 2.127388535031847, |
| "grad_norm": 0.18882579304258193, |
| "learning_rate": 1.613326337880378e-05, |
| "loss": 0.2768, |
| "step": 1504 |
| }, |
| { |
| "epoch": 2.1288039631988678, |
| "grad_norm": 0.21404486282736535, |
| "learning_rate": 1.610703043022036e-05, |
| "loss": 0.2804, |
| "step": 1505 |
| }, |
| { |
| "epoch": 2.130219391365888, |
| "grad_norm": 0.20090952321056332, |
| "learning_rate": 1.6080797481636936e-05, |
| "loss": 0.2756, |
| "step": 1506 |
| }, |
| { |
| "epoch": 2.1316348195329087, |
| "grad_norm": 0.1943995402386731, |
| "learning_rate": 1.6054564533053516e-05, |
| "loss": 0.2937, |
| "step": 1507 |
| }, |
| { |
| "epoch": 2.1330502476999293, |
| "grad_norm": 0.21000063080660478, |
| "learning_rate": 1.6028331584470096e-05, |
| "loss": 0.2989, |
| "step": 1508 |
| }, |
| { |
| "epoch": 2.1344656758669496, |
| "grad_norm": 0.19677351524392991, |
| "learning_rate": 1.6002098635886677e-05, |
| "loss": 0.2794, |
| "step": 1509 |
| }, |
| { |
| "epoch": 2.1358811040339702, |
| "grad_norm": 0.18794381684145336, |
| "learning_rate": 1.5975865687303253e-05, |
| "loss": 0.2789, |
| "step": 1510 |
| }, |
| { |
| "epoch": 2.137296532200991, |
| "grad_norm": 0.20567790184577053, |
| "learning_rate": 1.5949632738719834e-05, |
| "loss": 0.2872, |
| "step": 1511 |
| }, |
| { |
| "epoch": 2.138711960368011, |
| "grad_norm": 0.18201871625247118, |
| "learning_rate": 1.592339979013641e-05, |
| "loss": 0.2771, |
| "step": 1512 |
| }, |
| { |
| "epoch": 2.140127388535032, |
| "grad_norm": 0.19584814842799103, |
| "learning_rate": 1.589716684155299e-05, |
| "loss": 0.2831, |
| "step": 1513 |
| }, |
| { |
| "epoch": 2.1415428167020525, |
| "grad_norm": 0.1903199666447714, |
| "learning_rate": 1.5870933892969568e-05, |
| "loss": 0.2929, |
| "step": 1514 |
| }, |
| { |
| "epoch": 2.1429582448690727, |
| "grad_norm": 0.18679516628993567, |
| "learning_rate": 1.584470094438615e-05, |
| "loss": 0.2925, |
| "step": 1515 |
| }, |
| { |
| "epoch": 2.1443736730360934, |
| "grad_norm": 0.18996749381846695, |
| "learning_rate": 1.581846799580273e-05, |
| "loss": 0.2944, |
| "step": 1516 |
| }, |
| { |
| "epoch": 2.145789101203114, |
| "grad_norm": 0.19897930810530637, |
| "learning_rate": 1.579223504721931e-05, |
| "loss": 0.2916, |
| "step": 1517 |
| }, |
| { |
| "epoch": 2.1472045293701343, |
| "grad_norm": 0.19099277382658197, |
| "learning_rate": 1.576600209863589e-05, |
| "loss": 0.2924, |
| "step": 1518 |
| }, |
| { |
| "epoch": 2.148619957537155, |
| "grad_norm": 0.18179013442370356, |
| "learning_rate": 1.5739769150052466e-05, |
| "loss": 0.2847, |
| "step": 1519 |
| }, |
| { |
| "epoch": 2.1500353857041756, |
| "grad_norm": 0.191157987934032, |
| "learning_rate": 1.5713536201469046e-05, |
| "loss": 0.2788, |
| "step": 1520 |
| }, |
| { |
| "epoch": 2.151450813871196, |
| "grad_norm": 0.20928856949707, |
| "learning_rate": 1.5687303252885623e-05, |
| "loss": 0.281, |
| "step": 1521 |
| }, |
| { |
| "epoch": 2.1528662420382165, |
| "grad_norm": 0.18497058614375653, |
| "learning_rate": 1.5661070304302207e-05, |
| "loss": 0.2795, |
| "step": 1522 |
| }, |
| { |
| "epoch": 2.154281670205237, |
| "grad_norm": 0.18326141099307156, |
| "learning_rate": 1.5634837355718784e-05, |
| "loss": 0.2886, |
| "step": 1523 |
| }, |
| { |
| "epoch": 2.1556970983722574, |
| "grad_norm": 0.20853024897514022, |
| "learning_rate": 1.5608604407135364e-05, |
| "loss": 0.2889, |
| "step": 1524 |
| }, |
| { |
| "epoch": 2.157112526539278, |
| "grad_norm": 0.20387139051690897, |
| "learning_rate": 1.558237145855194e-05, |
| "loss": 0.2975, |
| "step": 1525 |
| }, |
| { |
| "epoch": 2.1585279547062988, |
| "grad_norm": 0.18843544363156378, |
| "learning_rate": 1.555613850996852e-05, |
| "loss": 0.2839, |
| "step": 1526 |
| }, |
| { |
| "epoch": 2.159943382873319, |
| "grad_norm": 0.18707200958840547, |
| "learning_rate": 1.55299055613851e-05, |
| "loss": 0.2687, |
| "step": 1527 |
| }, |
| { |
| "epoch": 2.1613588110403397, |
| "grad_norm": 0.21495673801240114, |
| "learning_rate": 1.5503672612801678e-05, |
| "loss": 0.2852, |
| "step": 1528 |
| }, |
| { |
| "epoch": 2.1627742392073603, |
| "grad_norm": 0.18758479870398373, |
| "learning_rate": 1.547743966421826e-05, |
| "loss": 0.289, |
| "step": 1529 |
| }, |
| { |
| "epoch": 2.1641896673743806, |
| "grad_norm": 0.19202383784848073, |
| "learning_rate": 1.545120671563484e-05, |
| "loss": 0.2847, |
| "step": 1530 |
| }, |
| { |
| "epoch": 2.1656050955414012, |
| "grad_norm": 0.20312171765896048, |
| "learning_rate": 1.542497376705142e-05, |
| "loss": 0.2901, |
| "step": 1531 |
| }, |
| { |
| "epoch": 2.167020523708422, |
| "grad_norm": 0.20618143284667947, |
| "learning_rate": 1.5398740818467996e-05, |
| "loss": 0.2831, |
| "step": 1532 |
| }, |
| { |
| "epoch": 2.168435951875442, |
| "grad_norm": 0.196538483127226, |
| "learning_rate": 1.5372507869884576e-05, |
| "loss": 0.2787, |
| "step": 1533 |
| }, |
| { |
| "epoch": 2.169851380042463, |
| "grad_norm": 0.19945378439158584, |
| "learning_rate": 1.5346274921301153e-05, |
| "loss": 0.2894, |
| "step": 1534 |
| }, |
| { |
| "epoch": 2.1712668082094835, |
| "grad_norm": 0.2097338644680216, |
| "learning_rate": 1.5320041972717733e-05, |
| "loss": 0.2855, |
| "step": 1535 |
| }, |
| { |
| "epoch": 2.1726822363765037, |
| "grad_norm": 0.20428088695183094, |
| "learning_rate": 1.5293809024134314e-05, |
| "loss": 0.2785, |
| "step": 1536 |
| }, |
| { |
| "epoch": 2.1740976645435244, |
| "grad_norm": 0.19921228761845436, |
| "learning_rate": 1.5267576075550894e-05, |
| "loss": 0.2858, |
| "step": 1537 |
| }, |
| { |
| "epoch": 2.175513092710545, |
| "grad_norm": 0.2019443429253189, |
| "learning_rate": 1.5241343126967473e-05, |
| "loss": 0.2841, |
| "step": 1538 |
| }, |
| { |
| "epoch": 2.1769285208775653, |
| "grad_norm": 0.21353560230811622, |
| "learning_rate": 1.5215110178384051e-05, |
| "loss": 0.2857, |
| "step": 1539 |
| }, |
| { |
| "epoch": 2.178343949044586, |
| "grad_norm": 0.20643770177726536, |
| "learning_rate": 1.518887722980063e-05, |
| "loss": 0.2835, |
| "step": 1540 |
| }, |
| { |
| "epoch": 2.1797593772116066, |
| "grad_norm": 0.20839208391175837, |
| "learning_rate": 1.5162644281217208e-05, |
| "loss": 0.2777, |
| "step": 1541 |
| }, |
| { |
| "epoch": 2.181174805378627, |
| "grad_norm": 0.19473202944524465, |
| "learning_rate": 1.5136411332633787e-05, |
| "loss": 0.283, |
| "step": 1542 |
| }, |
| { |
| "epoch": 2.1825902335456475, |
| "grad_norm": 0.22478890469445573, |
| "learning_rate": 1.5110178384050369e-05, |
| "loss": 0.2784, |
| "step": 1543 |
| }, |
| { |
| "epoch": 2.184005661712668, |
| "grad_norm": 0.20242980114206965, |
| "learning_rate": 1.5083945435466948e-05, |
| "loss": 0.287, |
| "step": 1544 |
| }, |
| { |
| "epoch": 2.1854210898796884, |
| "grad_norm": 0.20043047241395306, |
| "learning_rate": 1.5057712486883526e-05, |
| "loss": 0.2873, |
| "step": 1545 |
| }, |
| { |
| "epoch": 2.186836518046709, |
| "grad_norm": 0.20468599869509715, |
| "learning_rate": 1.5031479538300106e-05, |
| "loss": 0.292, |
| "step": 1546 |
| }, |
| { |
| "epoch": 2.1882519462137298, |
| "grad_norm": 0.20662087487734626, |
| "learning_rate": 1.5005246589716685e-05, |
| "loss": 0.2941, |
| "step": 1547 |
| }, |
| { |
| "epoch": 2.18966737438075, |
| "grad_norm": 0.21542200419312985, |
| "learning_rate": 1.4979013641133264e-05, |
| "loss": 0.2834, |
| "step": 1548 |
| }, |
| { |
| "epoch": 2.1910828025477707, |
| "grad_norm": 0.1863274592133216, |
| "learning_rate": 1.4952780692549842e-05, |
| "loss": 0.2863, |
| "step": 1549 |
| }, |
| { |
| "epoch": 2.1924982307147913, |
| "grad_norm": 0.19340686279433708, |
| "learning_rate": 1.4926547743966424e-05, |
| "loss": 0.2985, |
| "step": 1550 |
| }, |
| { |
| "epoch": 2.1939136588818116, |
| "grad_norm": 0.19711186712331613, |
| "learning_rate": 1.4900314795383003e-05, |
| "loss": 0.2883, |
| "step": 1551 |
| }, |
| { |
| "epoch": 2.1953290870488322, |
| "grad_norm": 0.21712207731412586, |
| "learning_rate": 1.4874081846799581e-05, |
| "loss": 0.279, |
| "step": 1552 |
| }, |
| { |
| "epoch": 2.196744515215853, |
| "grad_norm": 0.2052315554511649, |
| "learning_rate": 1.484784889821616e-05, |
| "loss": 0.2965, |
| "step": 1553 |
| }, |
| { |
| "epoch": 2.198159943382873, |
| "grad_norm": 0.23380152085394898, |
| "learning_rate": 1.4821615949632739e-05, |
| "loss": 0.2723, |
| "step": 1554 |
| }, |
| { |
| "epoch": 2.199575371549894, |
| "grad_norm": 0.19439749822742092, |
| "learning_rate": 1.4795383001049317e-05, |
| "loss": 0.2885, |
| "step": 1555 |
| }, |
| { |
| "epoch": 2.2009907997169145, |
| "grad_norm": 0.2208178838349699, |
| "learning_rate": 1.4769150052465897e-05, |
| "loss": 0.2922, |
| "step": 1556 |
| }, |
| { |
| "epoch": 2.2024062278839347, |
| "grad_norm": 0.18946801359847978, |
| "learning_rate": 1.4742917103882478e-05, |
| "loss": 0.2816, |
| "step": 1557 |
| }, |
| { |
| "epoch": 2.2038216560509554, |
| "grad_norm": 0.24766768111708512, |
| "learning_rate": 1.4716684155299056e-05, |
| "loss": 0.2803, |
| "step": 1558 |
| }, |
| { |
| "epoch": 2.205237084217976, |
| "grad_norm": 0.19010237210609238, |
| "learning_rate": 1.4690451206715637e-05, |
| "loss": 0.2882, |
| "step": 1559 |
| }, |
| { |
| "epoch": 2.2066525123849963, |
| "grad_norm": 0.2079249719794975, |
| "learning_rate": 1.4664218258132215e-05, |
| "loss": 0.2882, |
| "step": 1560 |
| }, |
| { |
| "epoch": 2.208067940552017, |
| "grad_norm": 0.1886838074370487, |
| "learning_rate": 1.4637985309548794e-05, |
| "loss": 0.3127, |
| "step": 1561 |
| }, |
| { |
| "epoch": 2.2094833687190376, |
| "grad_norm": 0.20139912485284628, |
| "learning_rate": 1.4611752360965372e-05, |
| "loss": 0.313, |
| "step": 1562 |
| }, |
| { |
| "epoch": 2.210898796886058, |
| "grad_norm": 0.19938539644589226, |
| "learning_rate": 1.4585519412381951e-05, |
| "loss": 0.2859, |
| "step": 1563 |
| }, |
| { |
| "epoch": 2.2123142250530785, |
| "grad_norm": 0.191817556837941, |
| "learning_rate": 1.4559286463798533e-05, |
| "loss": 0.2841, |
| "step": 1564 |
| }, |
| { |
| "epoch": 2.213729653220099, |
| "grad_norm": 0.19815427633012597, |
| "learning_rate": 1.4533053515215112e-05, |
| "loss": 0.2806, |
| "step": 1565 |
| }, |
| { |
| "epoch": 2.2151450813871194, |
| "grad_norm": 0.18164242679367584, |
| "learning_rate": 1.450682056663169e-05, |
| "loss": 0.2743, |
| "step": 1566 |
| }, |
| { |
| "epoch": 2.21656050955414, |
| "grad_norm": 0.2032830666017277, |
| "learning_rate": 1.4480587618048269e-05, |
| "loss": 0.2957, |
| "step": 1567 |
| }, |
| { |
| "epoch": 2.2179759377211608, |
| "grad_norm": 0.19073119961731047, |
| "learning_rate": 1.4454354669464847e-05, |
| "loss": 0.2887, |
| "step": 1568 |
| }, |
| { |
| "epoch": 2.219391365888181, |
| "grad_norm": 0.1847595315159744, |
| "learning_rate": 1.4428121720881428e-05, |
| "loss": 0.2919, |
| "step": 1569 |
| }, |
| { |
| "epoch": 2.2208067940552016, |
| "grad_norm": 0.1917366937540336, |
| "learning_rate": 1.4401888772298008e-05, |
| "loss": 0.294, |
| "step": 1570 |
| }, |
| { |
| "epoch": 2.2222222222222223, |
| "grad_norm": 0.20226926426518502, |
| "learning_rate": 1.4375655823714586e-05, |
| "loss": 0.2882, |
| "step": 1571 |
| }, |
| { |
| "epoch": 2.2236376503892425, |
| "grad_norm": 0.201197899603766, |
| "learning_rate": 1.4349422875131167e-05, |
| "loss": 0.2828, |
| "step": 1572 |
| }, |
| { |
| "epoch": 2.225053078556263, |
| "grad_norm": 0.18603936110751054, |
| "learning_rate": 1.4323189926547745e-05, |
| "loss": 0.2827, |
| "step": 1573 |
| }, |
| { |
| "epoch": 2.226468506723284, |
| "grad_norm": 0.19669786047984944, |
| "learning_rate": 1.4296956977964324e-05, |
| "loss": 0.288, |
| "step": 1574 |
| }, |
| { |
| "epoch": 2.227883934890304, |
| "grad_norm": 0.19406508734392347, |
| "learning_rate": 1.4270724029380902e-05, |
| "loss": 0.2812, |
| "step": 1575 |
| }, |
| { |
| "epoch": 2.229299363057325, |
| "grad_norm": 0.18691275882541675, |
| "learning_rate": 1.4244491080797481e-05, |
| "loss": 0.2815, |
| "step": 1576 |
| }, |
| { |
| "epoch": 2.2307147912243455, |
| "grad_norm": 0.2178364402165408, |
| "learning_rate": 1.4218258132214063e-05, |
| "loss": 0.3096, |
| "step": 1577 |
| }, |
| { |
| "epoch": 2.2321302193913657, |
| "grad_norm": 0.19373188157453802, |
| "learning_rate": 1.4192025183630642e-05, |
| "loss": 0.301, |
| "step": 1578 |
| }, |
| { |
| "epoch": 2.2335456475583864, |
| "grad_norm": 0.2056928076228888, |
| "learning_rate": 1.416579223504722e-05, |
| "loss": 0.2857, |
| "step": 1579 |
| }, |
| { |
| "epoch": 2.234961075725407, |
| "grad_norm": 0.18382361352956086, |
| "learning_rate": 1.4139559286463799e-05, |
| "loss": 0.2864, |
| "step": 1580 |
| }, |
| { |
| "epoch": 2.2363765038924273, |
| "grad_norm": 0.20618476503814526, |
| "learning_rate": 1.4113326337880377e-05, |
| "loss": 0.2878, |
| "step": 1581 |
| }, |
| { |
| "epoch": 2.237791932059448, |
| "grad_norm": 0.1937950024125009, |
| "learning_rate": 1.4087093389296958e-05, |
| "loss": 0.268, |
| "step": 1582 |
| }, |
| { |
| "epoch": 2.2392073602264686, |
| "grad_norm": 0.19092954796711575, |
| "learning_rate": 1.4060860440713536e-05, |
| "loss": 0.288, |
| "step": 1583 |
| }, |
| { |
| "epoch": 2.240622788393489, |
| "grad_norm": 0.19717194852662742, |
| "learning_rate": 1.4034627492130117e-05, |
| "loss": 0.3031, |
| "step": 1584 |
| }, |
| { |
| "epoch": 2.2420382165605095, |
| "grad_norm": 0.1902434038668803, |
| "learning_rate": 1.4008394543546697e-05, |
| "loss": 0.2884, |
| "step": 1585 |
| }, |
| { |
| "epoch": 2.24345364472753, |
| "grad_norm": 0.18215832190034617, |
| "learning_rate": 1.3982161594963275e-05, |
| "loss": 0.2759, |
| "step": 1586 |
| }, |
| { |
| "epoch": 2.2448690728945504, |
| "grad_norm": 0.18167957909640053, |
| "learning_rate": 1.3955928646379854e-05, |
| "loss": 0.2719, |
| "step": 1587 |
| }, |
| { |
| "epoch": 2.246284501061571, |
| "grad_norm": 0.20532991110898174, |
| "learning_rate": 1.3929695697796433e-05, |
| "loss": 0.2805, |
| "step": 1588 |
| }, |
| { |
| "epoch": 2.2476999292285917, |
| "grad_norm": 0.20280748433545553, |
| "learning_rate": 1.3903462749213011e-05, |
| "loss": 0.2945, |
| "step": 1589 |
| }, |
| { |
| "epoch": 2.249115357395612, |
| "grad_norm": 0.2102414685159178, |
| "learning_rate": 1.387722980062959e-05, |
| "loss": 0.2879, |
| "step": 1590 |
| }, |
| { |
| "epoch": 2.2505307855626326, |
| "grad_norm": 0.2069270659647197, |
| "learning_rate": 1.3850996852046172e-05, |
| "loss": 0.2771, |
| "step": 1591 |
| }, |
| { |
| "epoch": 2.2519462137296533, |
| "grad_norm": 0.21114683188258085, |
| "learning_rate": 1.382476390346275e-05, |
| "loss": 0.2864, |
| "step": 1592 |
| }, |
| { |
| "epoch": 2.2533616418966735, |
| "grad_norm": 0.1907649784482529, |
| "learning_rate": 1.3798530954879329e-05, |
| "loss": 0.2933, |
| "step": 1593 |
| }, |
| { |
| "epoch": 2.254777070063694, |
| "grad_norm": 0.18513807889980596, |
| "learning_rate": 1.3772298006295908e-05, |
| "loss": 0.2662, |
| "step": 1594 |
| }, |
| { |
| "epoch": 2.256192498230715, |
| "grad_norm": 0.20949970921326494, |
| "learning_rate": 1.3746065057712488e-05, |
| "loss": 0.2854, |
| "step": 1595 |
| }, |
| { |
| "epoch": 2.2576079263977356, |
| "grad_norm": 0.20091183098409918, |
| "learning_rate": 1.3719832109129066e-05, |
| "loss": 0.2994, |
| "step": 1596 |
| }, |
| { |
| "epoch": 2.259023354564756, |
| "grad_norm": 0.1859277391528771, |
| "learning_rate": 1.3693599160545645e-05, |
| "loss": 0.2808, |
| "step": 1597 |
| }, |
| { |
| "epoch": 2.2604387827317765, |
| "grad_norm": 0.18520117324638943, |
| "learning_rate": 1.3667366211962227e-05, |
| "loss": 0.2831, |
| "step": 1598 |
| }, |
| { |
| "epoch": 2.2618542108987967, |
| "grad_norm": 0.18833415298828882, |
| "learning_rate": 1.3641133263378806e-05, |
| "loss": 0.2871, |
| "step": 1599 |
| }, |
| { |
| "epoch": 2.2632696390658174, |
| "grad_norm": 0.18848821621206596, |
| "learning_rate": 1.3614900314795384e-05, |
| "loss": 0.2766, |
| "step": 1600 |
| }, |
| { |
| "epoch": 2.264685067232838, |
| "grad_norm": 0.2024115235104692, |
| "learning_rate": 1.3588667366211963e-05, |
| "loss": 0.2922, |
| "step": 1601 |
| }, |
| { |
| "epoch": 2.2661004953998587, |
| "grad_norm": 0.2008973713899374, |
| "learning_rate": 1.3562434417628541e-05, |
| "loss": 0.2962, |
| "step": 1602 |
| }, |
| { |
| "epoch": 2.267515923566879, |
| "grad_norm": 0.20729689011139363, |
| "learning_rate": 1.353620146904512e-05, |
| "loss": 0.2843, |
| "step": 1603 |
| }, |
| { |
| "epoch": 2.2689313517338996, |
| "grad_norm": 0.21523153306999607, |
| "learning_rate": 1.3509968520461699e-05, |
| "loss": 0.2742, |
| "step": 1604 |
| }, |
| { |
| "epoch": 2.27034677990092, |
| "grad_norm": 0.18912304205705838, |
| "learning_rate": 1.348373557187828e-05, |
| "loss": 0.2778, |
| "step": 1605 |
| }, |
| { |
| "epoch": 2.2717622080679405, |
| "grad_norm": 0.1974380153859812, |
| "learning_rate": 1.3457502623294859e-05, |
| "loss": 0.2868, |
| "step": 1606 |
| }, |
| { |
| "epoch": 2.273177636234961, |
| "grad_norm": 0.20180879535619137, |
| "learning_rate": 1.343126967471144e-05, |
| "loss": 0.2934, |
| "step": 1607 |
| }, |
| { |
| "epoch": 2.274593064401982, |
| "grad_norm": 0.19163665717120043, |
| "learning_rate": 1.3405036726128018e-05, |
| "loss": 0.2856, |
| "step": 1608 |
| }, |
| { |
| "epoch": 2.276008492569002, |
| "grad_norm": 0.22024265924133277, |
| "learning_rate": 1.3378803777544597e-05, |
| "loss": 0.2899, |
| "step": 1609 |
| }, |
| { |
| "epoch": 2.2774239207360227, |
| "grad_norm": 0.19289156194429993, |
| "learning_rate": 1.3352570828961175e-05, |
| "loss": 0.2793, |
| "step": 1610 |
| }, |
| { |
| "epoch": 2.278839348903043, |
| "grad_norm": 0.19600954795335204, |
| "learning_rate": 1.3326337880377754e-05, |
| "loss": 0.2766, |
| "step": 1611 |
| }, |
| { |
| "epoch": 2.2802547770700636, |
| "grad_norm": 0.20924587152335908, |
| "learning_rate": 1.3300104931794336e-05, |
| "loss": 0.2948, |
| "step": 1612 |
| }, |
| { |
| "epoch": 2.2816702052370843, |
| "grad_norm": 0.19653291000591008, |
| "learning_rate": 1.3273871983210914e-05, |
| "loss": 0.2939, |
| "step": 1613 |
| }, |
| { |
| "epoch": 2.283085633404105, |
| "grad_norm": 0.1910918798595604, |
| "learning_rate": 1.3247639034627493e-05, |
| "loss": 0.2743, |
| "step": 1614 |
| }, |
| { |
| "epoch": 2.284501061571125, |
| "grad_norm": 0.20205394590303571, |
| "learning_rate": 1.3221406086044072e-05, |
| "loss": 0.3006, |
| "step": 1615 |
| }, |
| { |
| "epoch": 2.285916489738146, |
| "grad_norm": 0.2060297460654065, |
| "learning_rate": 1.319517313746065e-05, |
| "loss": 0.2865, |
| "step": 1616 |
| }, |
| { |
| "epoch": 2.287331917905166, |
| "grad_norm": 0.18498862483625148, |
| "learning_rate": 1.3168940188877229e-05, |
| "loss": 0.2745, |
| "step": 1617 |
| }, |
| { |
| "epoch": 2.2887473460721868, |
| "grad_norm": 0.19030723917171546, |
| "learning_rate": 1.3142707240293809e-05, |
| "loss": 0.2827, |
| "step": 1618 |
| }, |
| { |
| "epoch": 2.2901627742392074, |
| "grad_norm": 0.18388847175027578, |
| "learning_rate": 1.311647429171039e-05, |
| "loss": 0.2726, |
| "step": 1619 |
| }, |
| { |
| "epoch": 2.291578202406228, |
| "grad_norm": 0.21625030859649072, |
| "learning_rate": 1.309024134312697e-05, |
| "loss": 0.2859, |
| "step": 1620 |
| }, |
| { |
| "epoch": 2.2929936305732483, |
| "grad_norm": 0.20451720977139845, |
| "learning_rate": 1.3064008394543548e-05, |
| "loss": 0.2933, |
| "step": 1621 |
| }, |
| { |
| "epoch": 2.294409058740269, |
| "grad_norm": 0.20094353353440728, |
| "learning_rate": 1.3037775445960127e-05, |
| "loss": 0.2929, |
| "step": 1622 |
| }, |
| { |
| "epoch": 2.2958244869072892, |
| "grad_norm": 0.19957600133289297, |
| "learning_rate": 1.3011542497376705e-05, |
| "loss": 0.2934, |
| "step": 1623 |
| }, |
| { |
| "epoch": 2.29723991507431, |
| "grad_norm": 0.18883357366458575, |
| "learning_rate": 1.2985309548793284e-05, |
| "loss": 0.2744, |
| "step": 1624 |
| }, |
| { |
| "epoch": 2.2986553432413306, |
| "grad_norm": 0.20674024088486972, |
| "learning_rate": 1.2959076600209863e-05, |
| "loss": 0.2998, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.3000707714083513, |
| "grad_norm": 0.21630882145739538, |
| "learning_rate": 1.2932843651626445e-05, |
| "loss": 0.3008, |
| "step": 1626 |
| }, |
| { |
| "epoch": 2.3014861995753715, |
| "grad_norm": 0.18992728343898063, |
| "learning_rate": 1.2906610703043023e-05, |
| "loss": 0.2934, |
| "step": 1627 |
| }, |
| { |
| "epoch": 2.302901627742392, |
| "grad_norm": 0.19675282780344314, |
| "learning_rate": 1.2880377754459602e-05, |
| "loss": 0.2974, |
| "step": 1628 |
| }, |
| { |
| "epoch": 2.3043170559094124, |
| "grad_norm": 0.23728782889287953, |
| "learning_rate": 1.285414480587618e-05, |
| "loss": 0.2844, |
| "step": 1629 |
| }, |
| { |
| "epoch": 2.305732484076433, |
| "grad_norm": 0.18387211272773954, |
| "learning_rate": 1.282791185729276e-05, |
| "loss": 0.2732, |
| "step": 1630 |
| }, |
| { |
| "epoch": 2.3071479122434537, |
| "grad_norm": 0.18857157563443108, |
| "learning_rate": 1.280167890870934e-05, |
| "loss": 0.284, |
| "step": 1631 |
| }, |
| { |
| "epoch": 2.3085633404104744, |
| "grad_norm": 0.19182401108083405, |
| "learning_rate": 1.2775445960125918e-05, |
| "loss": 0.28, |
| "step": 1632 |
| }, |
| { |
| "epoch": 2.3099787685774946, |
| "grad_norm": 0.18806438910368767, |
| "learning_rate": 1.27492130115425e-05, |
| "loss": 0.2669, |
| "step": 1633 |
| }, |
| { |
| "epoch": 2.3113941967445153, |
| "grad_norm": 0.19240347359549476, |
| "learning_rate": 1.2722980062959078e-05, |
| "loss": 0.2786, |
| "step": 1634 |
| }, |
| { |
| "epoch": 2.3128096249115355, |
| "grad_norm": 0.18641527282755707, |
| "learning_rate": 1.2696747114375657e-05, |
| "loss": 0.2711, |
| "step": 1635 |
| }, |
| { |
| "epoch": 2.314225053078556, |
| "grad_norm": 0.18234644719608498, |
| "learning_rate": 1.2670514165792236e-05, |
| "loss": 0.2801, |
| "step": 1636 |
| }, |
| { |
| "epoch": 2.315640481245577, |
| "grad_norm": 0.20968979176034708, |
| "learning_rate": 1.2644281217208814e-05, |
| "loss": 0.289, |
| "step": 1637 |
| }, |
| { |
| "epoch": 2.3170559094125975, |
| "grad_norm": 0.18806510420148745, |
| "learning_rate": 1.2618048268625393e-05, |
| "loss": 0.2917, |
| "step": 1638 |
| }, |
| { |
| "epoch": 2.3184713375796178, |
| "grad_norm": 0.2029698510668608, |
| "learning_rate": 1.2591815320041971e-05, |
| "loss": 0.2953, |
| "step": 1639 |
| }, |
| { |
| "epoch": 2.3198867657466384, |
| "grad_norm": 0.19660392567887017, |
| "learning_rate": 1.2565582371458553e-05, |
| "loss": 0.2721, |
| "step": 1640 |
| }, |
| { |
| "epoch": 2.3213021939136587, |
| "grad_norm": 0.19707921251142746, |
| "learning_rate": 1.2539349422875132e-05, |
| "loss": 0.2911, |
| "step": 1641 |
| }, |
| { |
| "epoch": 2.3227176220806793, |
| "grad_norm": 0.18014931048280203, |
| "learning_rate": 1.251311647429171e-05, |
| "loss": 0.2837, |
| "step": 1642 |
| }, |
| { |
| "epoch": 2.3241330502477, |
| "grad_norm": 0.20230753017131037, |
| "learning_rate": 1.248688352570829e-05, |
| "loss": 0.2878, |
| "step": 1643 |
| }, |
| { |
| "epoch": 2.3255484784147207, |
| "grad_norm": 0.1986302628772248, |
| "learning_rate": 1.246065057712487e-05, |
| "loss": 0.2833, |
| "step": 1644 |
| }, |
| { |
| "epoch": 2.326963906581741, |
| "grad_norm": 0.19288103271468032, |
| "learning_rate": 1.243441762854145e-05, |
| "loss": 0.2886, |
| "step": 1645 |
| }, |
| { |
| "epoch": 2.3283793347487616, |
| "grad_norm": 0.19483944520797117, |
| "learning_rate": 1.2408184679958028e-05, |
| "loss": 0.2877, |
| "step": 1646 |
| }, |
| { |
| "epoch": 2.329794762915782, |
| "grad_norm": 0.18771203694792818, |
| "learning_rate": 1.2381951731374607e-05, |
| "loss": 0.2875, |
| "step": 1647 |
| }, |
| { |
| "epoch": 2.3312101910828025, |
| "grad_norm": 0.19312612453223374, |
| "learning_rate": 1.2355718782791187e-05, |
| "loss": 0.2855, |
| "step": 1648 |
| }, |
| { |
| "epoch": 2.332625619249823, |
| "grad_norm": 0.18881242383849123, |
| "learning_rate": 1.2329485834207766e-05, |
| "loss": 0.2764, |
| "step": 1649 |
| }, |
| { |
| "epoch": 2.334041047416844, |
| "grad_norm": 0.18098008583019026, |
| "learning_rate": 1.2303252885624344e-05, |
| "loss": 0.2786, |
| "step": 1650 |
| }, |
| { |
| "epoch": 2.335456475583864, |
| "grad_norm": 0.19492465692066005, |
| "learning_rate": 1.2277019937040923e-05, |
| "loss": 0.2912, |
| "step": 1651 |
| }, |
| { |
| "epoch": 2.3368719037508847, |
| "grad_norm": 0.20935236412990635, |
| "learning_rate": 1.2250786988457503e-05, |
| "loss": 0.2895, |
| "step": 1652 |
| }, |
| { |
| "epoch": 2.338287331917905, |
| "grad_norm": 0.18797260869911742, |
| "learning_rate": 1.2224554039874082e-05, |
| "loss": 0.2892, |
| "step": 1653 |
| }, |
| { |
| "epoch": 2.3397027600849256, |
| "grad_norm": 0.1991671401529202, |
| "learning_rate": 1.2198321091290662e-05, |
| "loss": 0.2942, |
| "step": 1654 |
| }, |
| { |
| "epoch": 2.3411181882519463, |
| "grad_norm": 0.18970334539112932, |
| "learning_rate": 1.217208814270724e-05, |
| "loss": 0.2774, |
| "step": 1655 |
| }, |
| { |
| "epoch": 2.342533616418967, |
| "grad_norm": 0.1872611883631897, |
| "learning_rate": 1.2145855194123821e-05, |
| "loss": 0.2861, |
| "step": 1656 |
| }, |
| { |
| "epoch": 2.343949044585987, |
| "grad_norm": 0.20505699637426708, |
| "learning_rate": 1.21196222455404e-05, |
| "loss": 0.3072, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.345364472753008, |
| "grad_norm": 0.19186506797113723, |
| "learning_rate": 1.2093389296956978e-05, |
| "loss": 0.2971, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.346779900920028, |
| "grad_norm": 0.18294014123423893, |
| "learning_rate": 1.2067156348373558e-05, |
| "loss": 0.2836, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.3481953290870488, |
| "grad_norm": 0.18493772369567185, |
| "learning_rate": 1.2040923399790137e-05, |
| "loss": 0.2751, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.3496107572540694, |
| "grad_norm": 0.20501585148695503, |
| "learning_rate": 1.2014690451206716e-05, |
| "loss": 0.2854, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.35102618542109, |
| "grad_norm": 0.18909638186884367, |
| "learning_rate": 1.1988457502623296e-05, |
| "loss": 0.2816, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.3524416135881103, |
| "grad_norm": 0.18298287687612652, |
| "learning_rate": 1.1962224554039874e-05, |
| "loss": 0.2691, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.353857041755131, |
| "grad_norm": 0.195585784316558, |
| "learning_rate": 1.1935991605456453e-05, |
| "loss": 0.2806, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.3552724699221512, |
| "grad_norm": 0.20023242229909904, |
| "learning_rate": 1.1909758656873033e-05, |
| "loss": 0.2874, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.356687898089172, |
| "grad_norm": 0.20043783231352735, |
| "learning_rate": 1.1883525708289612e-05, |
| "loss": 0.292, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.3581033262561926, |
| "grad_norm": 0.21082009566883989, |
| "learning_rate": 1.1857292759706192e-05, |
| "loss": 0.2885, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.3595187544232132, |
| "grad_norm": 0.2045543901472315, |
| "learning_rate": 1.183105981112277e-05, |
| "loss": 0.2948, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.3609341825902335, |
| "grad_norm": 0.19600728280903276, |
| "learning_rate": 1.1804826862539351e-05, |
| "loss": 0.3041, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.362349610757254, |
| "grad_norm": 0.19437448899997456, |
| "learning_rate": 1.177859391395593e-05, |
| "loss": 0.2829, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.3637650389242744, |
| "grad_norm": 0.18823136977189336, |
| "learning_rate": 1.1752360965372508e-05, |
| "loss": 0.2818, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.365180467091295, |
| "grad_norm": 0.20404660050537488, |
| "learning_rate": 1.1726128016789089e-05, |
| "loss": 0.2957, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.3665958952583157, |
| "grad_norm": 0.18792494181919675, |
| "learning_rate": 1.1699895068205667e-05, |
| "loss": 0.2809, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.3680113234253364, |
| "grad_norm": 0.18945851267858987, |
| "learning_rate": 1.1673662119622246e-05, |
| "loss": 0.2946, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.3694267515923566, |
| "grad_norm": 0.1975555826468101, |
| "learning_rate": 1.1647429171038824e-05, |
| "loss": 0.2844, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.3708421797593773, |
| "grad_norm": 0.19905077106155683, |
| "learning_rate": 1.1621196222455405e-05, |
| "loss": 0.2728, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.3722576079263975, |
| "grad_norm": 0.2052488670029528, |
| "learning_rate": 1.1594963273871983e-05, |
| "loss": 0.2962, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.373673036093418, |
| "grad_norm": 0.1899601465728956, |
| "learning_rate": 1.1568730325288562e-05, |
| "loss": 0.2802, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.375088464260439, |
| "grad_norm": 0.18865472944370884, |
| "learning_rate": 1.1542497376705142e-05, |
| "loss": 0.2928, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.3765038924274595, |
| "grad_norm": 0.20141404594053566, |
| "learning_rate": 1.1516264428121722e-05, |
| "loss": 0.2827, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.3779193205944797, |
| "grad_norm": 0.2316866248634522, |
| "learning_rate": 1.1490031479538301e-05, |
| "loss": 0.2864, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.3793347487615004, |
| "grad_norm": 0.189109282917738, |
| "learning_rate": 1.146379853095488e-05, |
| "loss": 0.2947, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.3807501769285206, |
| "grad_norm": 0.18663462596385907, |
| "learning_rate": 1.143756558237146e-05, |
| "loss": 0.2883, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.3821656050955413, |
| "grad_norm": 0.24963661794508527, |
| "learning_rate": 1.1411332633788038e-05, |
| "loss": 0.2976, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.383581033262562, |
| "grad_norm": 0.20908853758881932, |
| "learning_rate": 1.1385099685204617e-05, |
| "loss": 0.3017, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.3849964614295827, |
| "grad_norm": 0.2020369520638962, |
| "learning_rate": 1.1358866736621197e-05, |
| "loss": 0.3039, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.386411889596603, |
| "grad_norm": 0.1841556772303126, |
| "learning_rate": 1.1332633788037776e-05, |
| "loss": 0.2749, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.3878273177636236, |
| "grad_norm": 0.21285164297603593, |
| "learning_rate": 1.1306400839454354e-05, |
| "loss": 0.2887, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.389242745930644, |
| "grad_norm": 0.20583511099567137, |
| "learning_rate": 1.1280167890870933e-05, |
| "loss": 0.2788, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.3906581740976645, |
| "grad_norm": 0.20708969381375833, |
| "learning_rate": 1.1253934942287513e-05, |
| "loss": 0.2765, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.392073602264685, |
| "grad_norm": 0.19666467582284977, |
| "learning_rate": 1.1227701993704094e-05, |
| "loss": 0.2926, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.393489030431706, |
| "grad_norm": 0.20505521755851502, |
| "learning_rate": 1.1201469045120672e-05, |
| "loss": 0.2841, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.394904458598726, |
| "grad_norm": 0.21731096035292308, |
| "learning_rate": 1.1175236096537252e-05, |
| "loss": 0.2868, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.3963198867657467, |
| "grad_norm": 0.18965142028058407, |
| "learning_rate": 1.1149003147953831e-05, |
| "loss": 0.2978, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.397735314932767, |
| "grad_norm": 0.2058365142452598, |
| "learning_rate": 1.112277019937041e-05, |
| "loss": 0.2912, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.3991507430997876, |
| "grad_norm": 0.20940864332061368, |
| "learning_rate": 1.1096537250786988e-05, |
| "loss": 0.3, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.4005661712668083, |
| "grad_norm": 0.20291991491379344, |
| "learning_rate": 1.1070304302203569e-05, |
| "loss": 0.2831, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.401981599433829, |
| "grad_norm": 0.20138578529128853, |
| "learning_rate": 1.1044071353620147e-05, |
| "loss": 0.2968, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.403397027600849, |
| "grad_norm": 0.19224993627720924, |
| "learning_rate": 1.1017838405036726e-05, |
| "loss": 0.2655, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.40481245576787, |
| "grad_norm": 0.19719007475383227, |
| "learning_rate": 1.0991605456453306e-05, |
| "loss": 0.2792, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.40622788393489, |
| "grad_norm": 0.21831494195525009, |
| "learning_rate": 1.0965372507869885e-05, |
| "loss": 0.2985, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.4076433121019107, |
| "grad_norm": 0.19020846891844465, |
| "learning_rate": 1.0939139559286463e-05, |
| "loss": 0.2893, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.4090587402689314, |
| "grad_norm": 0.18289524936418528, |
| "learning_rate": 1.0912906610703043e-05, |
| "loss": 0.2851, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.410474168435952, |
| "grad_norm": 0.1974186204137678, |
| "learning_rate": 1.0886673662119624e-05, |
| "loss": 0.2874, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.4118895966029723, |
| "grad_norm": 0.18629571329619682, |
| "learning_rate": 1.0860440713536202e-05, |
| "loss": 0.2865, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.413305024769993, |
| "grad_norm": 0.19587811877631692, |
| "learning_rate": 1.0834207764952781e-05, |
| "loss": 0.3, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.414720452937013, |
| "grad_norm": 0.19078404188550765, |
| "learning_rate": 1.0807974816369361e-05, |
| "loss": 0.2805, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.416135881104034, |
| "grad_norm": 0.18334271635230978, |
| "learning_rate": 1.078174186778594e-05, |
| "loss": 0.2879, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.4175513092710545, |
| "grad_norm": 0.2099964101240209, |
| "learning_rate": 1.0755508919202518e-05, |
| "loss": 0.2964, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.418966737438075, |
| "grad_norm": 0.18778422256175833, |
| "learning_rate": 1.0729275970619099e-05, |
| "loss": 0.2811, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.4203821656050954, |
| "grad_norm": 0.19030408167368482, |
| "learning_rate": 1.0703043022035677e-05, |
| "loss": 0.2849, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.421797593772116, |
| "grad_norm": 0.3319292426618223, |
| "learning_rate": 1.0676810073452256e-05, |
| "loss": 0.2809, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.4232130219391363, |
| "grad_norm": 0.20408958072566255, |
| "learning_rate": 1.0650577124868834e-05, |
| "loss": 0.2842, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.424628450106157, |
| "grad_norm": 0.20386837831449744, |
| "learning_rate": 1.0624344176285415e-05, |
| "loss": 0.2868, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.4260438782731777, |
| "grad_norm": 0.18784192812808573, |
| "learning_rate": 1.0598111227701995e-05, |
| "loss": 0.2794, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.4274593064401984, |
| "grad_norm": 0.1925027128688008, |
| "learning_rate": 1.0571878279118574e-05, |
| "loss": 0.2854, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.4288747346072186, |
| "grad_norm": 0.1865697450118313, |
| "learning_rate": 1.0545645330535154e-05, |
| "loss": 0.2825, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.4302901627742393, |
| "grad_norm": 0.19960749213574797, |
| "learning_rate": 1.0519412381951733e-05, |
| "loss": 0.2879, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.4317055909412595, |
| "grad_norm": 0.20678638934566496, |
| "learning_rate": 1.0493179433368311e-05, |
| "loss": 0.2828, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.43312101910828, |
| "grad_norm": 0.19170488624048732, |
| "learning_rate": 1.046694648478489e-05, |
| "loss": 0.2825, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.434536447275301, |
| "grad_norm": 0.21365876250706559, |
| "learning_rate": 1.044071353620147e-05, |
| "loss": 0.2855, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.4359518754423215, |
| "grad_norm": 0.20008633956473132, |
| "learning_rate": 1.0414480587618049e-05, |
| "loss": 0.2826, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.4373673036093417, |
| "grad_norm": 0.2033193173356251, |
| "learning_rate": 1.0388247639034627e-05, |
| "loss": 0.2825, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.4387827317763624, |
| "grad_norm": 0.18756102875592565, |
| "learning_rate": 1.0362014690451207e-05, |
| "loss": 0.2983, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.4401981599433826, |
| "grad_norm": 0.2177383215119078, |
| "learning_rate": 1.0335781741867786e-05, |
| "loss": 0.2867, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.4416135881104033, |
| "grad_norm": 0.19994004702139706, |
| "learning_rate": 1.0309548793284365e-05, |
| "loss": 0.2782, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.443029016277424, |
| "grad_norm": 0.19165837212395614, |
| "learning_rate": 1.0283315844700945e-05, |
| "loss": 0.2776, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.4444444444444446, |
| "grad_norm": 0.19445758983775668, |
| "learning_rate": 1.0257082896117525e-05, |
| "loss": 0.3013, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.445859872611465, |
| "grad_norm": 0.2007145447916944, |
| "learning_rate": 1.0230849947534104e-05, |
| "loss": 0.2962, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.4472753007784855, |
| "grad_norm": 0.21240393799613172, |
| "learning_rate": 1.0204616998950682e-05, |
| "loss": 0.2841, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.4486907289455058, |
| "grad_norm": 0.19228717160213668, |
| "learning_rate": 1.0178384050367263e-05, |
| "loss": 0.2866, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.4501061571125264, |
| "grad_norm": 0.1854903433710835, |
| "learning_rate": 1.0152151101783841e-05, |
| "loss": 0.2769, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.451521585279547, |
| "grad_norm": 0.20102508537794403, |
| "learning_rate": 1.012591815320042e-05, |
| "loss": 0.2861, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.452937013446568, |
| "grad_norm": 0.1941395854546883, |
| "learning_rate": 1.0099685204616998e-05, |
| "loss": 0.2836, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.454352441613588, |
| "grad_norm": 0.2028174742597168, |
| "learning_rate": 1.0073452256033579e-05, |
| "loss": 0.294, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.4557678697806087, |
| "grad_norm": 0.1981830261166878, |
| "learning_rate": 1.0047219307450157e-05, |
| "loss": 0.277, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.457183297947629, |
| "grad_norm": 0.20093369675985923, |
| "learning_rate": 1.0020986358866736e-05, |
| "loss": 0.2955, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.4585987261146496, |
| "grad_norm": 0.2001686805200334, |
| "learning_rate": 9.994753410283316e-06, |
| "loss": 0.299, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.4600141542816703, |
| "grad_norm": 0.19054589682313114, |
| "learning_rate": 9.968520461699895e-06, |
| "loss": 0.2722, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.461429582448691, |
| "grad_norm": 0.17379130432862766, |
| "learning_rate": 9.942287513116475e-06, |
| "loss": 0.2694, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.462845010615711, |
| "grad_norm": 0.18743548991095602, |
| "learning_rate": 9.916054564533055e-06, |
| "loss": 0.2811, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.464260438782732, |
| "grad_norm": 0.19612228488550978, |
| "learning_rate": 9.889821615949634e-06, |
| "loss": 0.2828, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.4656758669497525, |
| "grad_norm": 0.19257011195751622, |
| "learning_rate": 9.863588667366213e-06, |
| "loss": 0.2823, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.4670912951167727, |
| "grad_norm": 0.19551678907161937, |
| "learning_rate": 9.837355718782791e-06, |
| "loss": 0.2702, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.4685067232837934, |
| "grad_norm": 0.19172581322744886, |
| "learning_rate": 9.811122770199371e-06, |
| "loss": 0.288, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.469922151450814, |
| "grad_norm": 0.1774729926191194, |
| "learning_rate": 9.78488982161595e-06, |
| "loss": 0.2771, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.4713375796178343, |
| "grad_norm": 0.18709764194534617, |
| "learning_rate": 9.758656873032529e-06, |
| "loss": 0.279, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.472753007784855, |
| "grad_norm": 0.1783885544112287, |
| "learning_rate": 9.732423924449109e-06, |
| "loss": 0.2776, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.4741684359518756, |
| "grad_norm": 0.19330769319927216, |
| "learning_rate": 9.706190975865687e-06, |
| "loss": 0.3033, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.475583864118896, |
| "grad_norm": 0.1951366237517273, |
| "learning_rate": 9.679958027282266e-06, |
| "loss": 0.2829, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.4769992922859165, |
| "grad_norm": 0.18665621657459827, |
| "learning_rate": 9.653725078698846e-06, |
| "loss": 0.2905, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.478414720452937, |
| "grad_norm": 0.18424743703731458, |
| "learning_rate": 9.627492130115427e-06, |
| "loss": 0.2693, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.4798301486199574, |
| "grad_norm": 0.19089555732541547, |
| "learning_rate": 9.601259181532005e-06, |
| "loss": 0.2867, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.481245576786978, |
| "grad_norm": 0.17861854716249345, |
| "learning_rate": 9.575026232948584e-06, |
| "loss": 0.288, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.4826610049539988, |
| "grad_norm": 0.1721315445775327, |
| "learning_rate": 9.548793284365164e-06, |
| "loss": 0.2746, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.484076433121019, |
| "grad_norm": 0.187265615065389, |
| "learning_rate": 9.522560335781743e-06, |
| "loss": 0.2776, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.4854918612880397, |
| "grad_norm": 0.1906652371264402, |
| "learning_rate": 9.496327387198321e-06, |
| "loss": 0.2834, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.4869072894550603, |
| "grad_norm": 0.18973806755256659, |
| "learning_rate": 9.4700944386149e-06, |
| "loss": 0.2734, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.4883227176220806, |
| "grad_norm": 0.18680172089985692, |
| "learning_rate": 9.44386149003148e-06, |
| "loss": 0.278, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.4897381457891012, |
| "grad_norm": 0.18989404461557566, |
| "learning_rate": 9.417628541448059e-06, |
| "loss": 0.282, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.491153573956122, |
| "grad_norm": 0.1947137202882916, |
| "learning_rate": 9.391395592864637e-06, |
| "loss": 0.2874, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.492569002123142, |
| "grad_norm": 0.18019379349151343, |
| "learning_rate": 9.365162644281218e-06, |
| "loss": 0.2825, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.493984430290163, |
| "grad_norm": 0.18529175062442796, |
| "learning_rate": 9.338929695697796e-06, |
| "loss": 0.2697, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.4953998584571835, |
| "grad_norm": 0.19215544617973443, |
| "learning_rate": 9.312696747114377e-06, |
| "loss": 0.2839, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.4968152866242037, |
| "grad_norm": 0.18458485693360022, |
| "learning_rate": 9.286463798530955e-06, |
| "loss": 0.2752, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.4982307147912244, |
| "grad_norm": 0.1947758838586988, |
| "learning_rate": 9.260230849947535e-06, |
| "loss": 0.2835, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.499646142958245, |
| "grad_norm": 0.1989466258135159, |
| "learning_rate": 9.233997901364114e-06, |
| "loss": 0.31, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.5010615711252653, |
| "grad_norm": 0.19676251384745466, |
| "learning_rate": 9.207764952780693e-06, |
| "loss": 0.2937, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.502476999292286, |
| "grad_norm": 0.2013785603803912, |
| "learning_rate": 9.181532004197273e-06, |
| "loss": 0.2927, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.5038924274593066, |
| "grad_norm": 0.1907466490422871, |
| "learning_rate": 9.155299055613851e-06, |
| "loss": 0.2915, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.505307855626327, |
| "grad_norm": 0.18137168182898722, |
| "learning_rate": 9.12906610703043e-06, |
| "loss": 0.2846, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.5067232837933475, |
| "grad_norm": 0.19295147501733936, |
| "learning_rate": 9.10283315844701e-06, |
| "loss": 0.2879, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.5081387119603678, |
| "grad_norm": 0.1956545760836981, |
| "learning_rate": 9.076600209863589e-06, |
| "loss": 0.2784, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.5095541401273884, |
| "grad_norm": 0.1813866948626585, |
| "learning_rate": 9.050367261280168e-06, |
| "loss": 0.275, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.510969568294409, |
| "grad_norm": 0.1889014444987169, |
| "learning_rate": 9.024134312696748e-06, |
| "loss": 0.2794, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.5123849964614298, |
| "grad_norm": 0.17823356530652573, |
| "learning_rate": 8.997901364113328e-06, |
| "loss": 0.2755, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.51380042462845, |
| "grad_norm": 0.20194077440692024, |
| "learning_rate": 8.971668415529907e-06, |
| "loss": 0.2939, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.5152158527954707, |
| "grad_norm": 0.19442054303306802, |
| "learning_rate": 8.945435466946485e-06, |
| "loss": 0.2916, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.516631280962491, |
| "grad_norm": 0.18917905930574533, |
| "learning_rate": 8.919202518363066e-06, |
| "loss": 0.2954, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.5180467091295116, |
| "grad_norm": 0.17858071110586585, |
| "learning_rate": 8.892969569779644e-06, |
| "loss": 0.2915, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.5194621372965322, |
| "grad_norm": 0.1737991334501271, |
| "learning_rate": 8.866736621196223e-06, |
| "loss": 0.2884, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.520877565463553, |
| "grad_norm": 0.1787321629946045, |
| "learning_rate": 8.840503672612801e-06, |
| "loss": 0.2781, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.522292993630573, |
| "grad_norm": 0.18487310998562898, |
| "learning_rate": 8.814270724029382e-06, |
| "loss": 0.2876, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.523708421797594, |
| "grad_norm": 0.19011181620372405, |
| "learning_rate": 8.78803777544596e-06, |
| "loss": 0.2827, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.525123849964614, |
| "grad_norm": 0.18458161214077176, |
| "learning_rate": 8.761804826862539e-06, |
| "loss": 0.2888, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.5265392781316347, |
| "grad_norm": 0.19197191697122068, |
| "learning_rate": 8.735571878279119e-06, |
| "loss": 0.2889, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.5279547062986554, |
| "grad_norm": 0.18520101528729924, |
| "learning_rate": 8.709338929695698e-06, |
| "loss": 0.2923, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.529370134465676, |
| "grad_norm": 0.19625949244610194, |
| "learning_rate": 8.683105981112278e-06, |
| "loss": 0.2921, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.5307855626326963, |
| "grad_norm": 0.18483412184029158, |
| "learning_rate": 8.656873032528857e-06, |
| "loss": 0.2821, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.532200990799717, |
| "grad_norm": 0.1913109404131173, |
| "learning_rate": 8.630640083945437e-06, |
| "loss": 0.2971, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.533616418966737, |
| "grad_norm": 0.18453330441031862, |
| "learning_rate": 8.604407135362015e-06, |
| "loss": 0.2936, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.535031847133758, |
| "grad_norm": 0.18572328873024224, |
| "learning_rate": 8.578174186778594e-06, |
| "loss": 0.2849, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.5364472753007785, |
| "grad_norm": 0.17746223874171665, |
| "learning_rate": 8.551941238195174e-06, |
| "loss": 0.2737, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.537862703467799, |
| "grad_norm": 0.1948047969977976, |
| "learning_rate": 8.525708289611753e-06, |
| "loss": 0.2929, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.5392781316348194, |
| "grad_norm": 0.1812728957254174, |
| "learning_rate": 8.499475341028331e-06, |
| "loss": 0.2831, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.54069355980184, |
| "grad_norm": 0.17664206051208176, |
| "learning_rate": 8.47324239244491e-06, |
| "loss": 0.2932, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.5421089879688603, |
| "grad_norm": 0.19364101658694155, |
| "learning_rate": 8.44700944386149e-06, |
| "loss": 0.2814, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.543524416135881, |
| "grad_norm": 0.19920710210422615, |
| "learning_rate": 8.420776495278069e-06, |
| "loss": 0.2902, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.5449398443029017, |
| "grad_norm": 0.19178070631225227, |
| "learning_rate": 8.39454354669465e-06, |
| "loss": 0.2889, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.5463552724699223, |
| "grad_norm": 0.19420951217064594, |
| "learning_rate": 8.368310598111228e-06, |
| "loss": 0.2986, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.5477707006369426, |
| "grad_norm": 0.20395759310322037, |
| "learning_rate": 8.342077649527808e-06, |
| "loss": 0.3069, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.5491861288039632, |
| "grad_norm": 0.19759484712292424, |
| "learning_rate": 8.315844700944387e-06, |
| "loss": 0.2883, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.5506015569709835, |
| "grad_norm": 0.1790110297887989, |
| "learning_rate": 8.289611752360965e-06, |
| "loss": 0.2837, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.552016985138004, |
| "grad_norm": 0.19674318163208754, |
| "learning_rate": 8.263378803777546e-06, |
| "loss": 0.2864, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.553432413305025, |
| "grad_norm": 0.18444891681361106, |
| "learning_rate": 8.237145855194124e-06, |
| "loss": 0.2925, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.5548478414720455, |
| "grad_norm": 0.17804797600094827, |
| "learning_rate": 8.210912906610703e-06, |
| "loss": 0.294, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.5562632696390657, |
| "grad_norm": 0.18656197397743954, |
| "learning_rate": 8.184679958027283e-06, |
| "loss": 0.2892, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.5576786978060864, |
| "grad_norm": 0.18207746960029497, |
| "learning_rate": 8.158447009443862e-06, |
| "loss": 0.2881, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.5590941259731066, |
| "grad_norm": 0.18558652365935788, |
| "learning_rate": 8.13221406086044e-06, |
| "loss": 0.2925, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.5605095541401273, |
| "grad_norm": 0.1806740707048797, |
| "learning_rate": 8.10598111227702e-06, |
| "loss": 0.2978, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.561924982307148, |
| "grad_norm": 0.18513945362124973, |
| "learning_rate": 8.079748163693599e-06, |
| "loss": 0.2819, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.5633404104741686, |
| "grad_norm": 0.19188799707417797, |
| "learning_rate": 8.05351521511018e-06, |
| "loss": 0.3017, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.564755838641189, |
| "grad_norm": 0.18467584871999182, |
| "learning_rate": 8.027282266526758e-06, |
| "loss": 0.2801, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.5661712668082095, |
| "grad_norm": 0.17336259123772044, |
| "learning_rate": 8.001049317943338e-06, |
| "loss": 0.2818, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.5675866949752297, |
| "grad_norm": 0.18518477739723296, |
| "learning_rate": 7.974816369359917e-06, |
| "loss": 0.2744, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.5690021231422504, |
| "grad_norm": 0.184374472353793, |
| "learning_rate": 7.948583420776495e-06, |
| "loss": 0.2962, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.570417551309271, |
| "grad_norm": 0.18811552381445062, |
| "learning_rate": 7.922350472193076e-06, |
| "loss": 0.286, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.5718329794762917, |
| "grad_norm": 0.1850722906635902, |
| "learning_rate": 7.896117523609654e-06, |
| "loss": 0.2795, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.573248407643312, |
| "grad_norm": 0.1872232911377214, |
| "learning_rate": 7.869884575026233e-06, |
| "loss": 0.2805, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.5746638358103326, |
| "grad_norm": 0.17884119975176793, |
| "learning_rate": 7.843651626442811e-06, |
| "loss": 0.281, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.576079263977353, |
| "grad_norm": 0.18130817208207814, |
| "learning_rate": 7.817418677859392e-06, |
| "loss": 0.2771, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.5774946921443735, |
| "grad_norm": 0.1798684636187942, |
| "learning_rate": 7.79118572927597e-06, |
| "loss": 0.2848, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.578910120311394, |
| "grad_norm": 0.17825232480124747, |
| "learning_rate": 7.76495278069255e-06, |
| "loss": 0.2736, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.580325548478415, |
| "grad_norm": 0.18390615555070572, |
| "learning_rate": 7.73871983210913e-06, |
| "loss": 0.2823, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.581740976645435, |
| "grad_norm": 0.1953773122629701, |
| "learning_rate": 7.71248688352571e-06, |
| "loss": 0.2958, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.583156404812456, |
| "grad_norm": 0.1976335826419702, |
| "learning_rate": 7.686253934942288e-06, |
| "loss": 0.2879, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.5845718329794765, |
| "grad_norm": 0.19169910287214764, |
| "learning_rate": 7.660020986358867e-06, |
| "loss": 0.2873, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.5859872611464967, |
| "grad_norm": 0.1786869386826253, |
| "learning_rate": 7.633788037775447e-06, |
| "loss": 0.2725, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.5874026893135174, |
| "grad_norm": 0.18415450922430598, |
| "learning_rate": 7.607555089192026e-06, |
| "loss": 0.2865, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.588818117480538, |
| "grad_norm": 0.18500654761858334, |
| "learning_rate": 7.581322140608604e-06, |
| "loss": 0.3046, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.5902335456475583, |
| "grad_norm": 0.18940801943582591, |
| "learning_rate": 7.5550891920251845e-06, |
| "loss": 0.2983, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.591648973814579, |
| "grad_norm": 0.19171549447045327, |
| "learning_rate": 7.528856243441763e-06, |
| "loss": 0.2933, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.5930644019815996, |
| "grad_norm": 0.17434424948793265, |
| "learning_rate": 7.5026232948583425e-06, |
| "loss": 0.2847, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.59447983014862, |
| "grad_norm": 0.1956790129381184, |
| "learning_rate": 7.476390346274921e-06, |
| "loss": 0.2908, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.5958952583156405, |
| "grad_norm": 0.191386449253825, |
| "learning_rate": 7.450157397691501e-06, |
| "loss": 0.2866, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.597310686482661, |
| "grad_norm": 0.19813004589544073, |
| "learning_rate": 7.42392444910808e-06, |
| "loss": 0.2841, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.5987261146496814, |
| "grad_norm": 0.1832862502583108, |
| "learning_rate": 7.3976915005246586e-06, |
| "loss": 0.2962, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.600141542816702, |
| "grad_norm": 0.1925834407695736, |
| "learning_rate": 7.371458551941239e-06, |
| "loss": 0.2901, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.6015569709837227, |
| "grad_norm": 0.19516481876501107, |
| "learning_rate": 7.345225603357818e-06, |
| "loss": 0.2819, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.602972399150743, |
| "grad_norm": 0.1766600106048532, |
| "learning_rate": 7.318992654774397e-06, |
| "loss": 0.2691, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.6043878273177636, |
| "grad_norm": 0.19019561454678408, |
| "learning_rate": 7.2927597061909755e-06, |
| "loss": 0.2911, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.6058032554847843, |
| "grad_norm": 0.18281180121904445, |
| "learning_rate": 7.266526757607556e-06, |
| "loss": 0.2992, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.6072186836518045, |
| "grad_norm": 0.18956913138535492, |
| "learning_rate": 7.240293809024134e-06, |
| "loss": 0.2899, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.608634111818825, |
| "grad_norm": 0.18216296469351154, |
| "learning_rate": 7.214060860440714e-06, |
| "loss": 0.2846, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.610049539985846, |
| "grad_norm": 0.1864964375261479, |
| "learning_rate": 7.187827911857293e-06, |
| "loss": 0.2862, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.611464968152866, |
| "grad_norm": 0.17721283274245875, |
| "learning_rate": 7.161594963273873e-06, |
| "loss": 0.2924, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.6128803963198868, |
| "grad_norm": 0.1838086426191061, |
| "learning_rate": 7.135362014690451e-06, |
| "loss": 0.2913, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.6142958244869074, |
| "grad_norm": 0.1913187649654714, |
| "learning_rate": 7.1091290661070315e-06, |
| "loss": 0.2908, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.6157112526539277, |
| "grad_norm": 0.18822206075681172, |
| "learning_rate": 7.08289611752361e-06, |
| "loss": 0.2916, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.6171266808209483, |
| "grad_norm": 0.1827073739968568, |
| "learning_rate": 7.056663168940189e-06, |
| "loss": 0.3065, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.618542108987969, |
| "grad_norm": 0.19522047436385434, |
| "learning_rate": 7.030430220356768e-06, |
| "loss": 0.2955, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.6199575371549892, |
| "grad_norm": 0.18022638550628511, |
| "learning_rate": 7.0041972717733484e-06, |
| "loss": 0.2875, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.62137296532201, |
| "grad_norm": 0.21508080661024662, |
| "learning_rate": 6.977964323189927e-06, |
| "loss": 0.2855, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.6227883934890306, |
| "grad_norm": 0.1747002634441143, |
| "learning_rate": 6.951731374606506e-06, |
| "loss": 0.2793, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.624203821656051, |
| "grad_norm": 0.18125593556978656, |
| "learning_rate": 6.925498426023086e-06, |
| "loss": 0.2849, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.6256192498230715, |
| "grad_norm": 0.18951545885331525, |
| "learning_rate": 6.8992654774396645e-06, |
| "loss": 0.2937, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.627034677990092, |
| "grad_norm": 0.1855323965413897, |
| "learning_rate": 6.873032528856244e-06, |
| "loss": 0.2935, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.6284501061571124, |
| "grad_norm": 0.17210946333714922, |
| "learning_rate": 6.8467995802728225e-06, |
| "loss": 0.2741, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.629865534324133, |
| "grad_norm": 0.18252709891494256, |
| "learning_rate": 6.820566631689403e-06, |
| "loss": 0.2877, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.6312809624911537, |
| "grad_norm": 0.18013341848747635, |
| "learning_rate": 6.794333683105981e-06, |
| "loss": 0.2742, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.632696390658174, |
| "grad_norm": 0.18568461347186016, |
| "learning_rate": 6.76810073452256e-06, |
| "loss": 0.3012, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.6341118188251946, |
| "grad_norm": 0.19734367213602771, |
| "learning_rate": 6.74186778593914e-06, |
| "loss": 0.3028, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.6355272469922153, |
| "grad_norm": 0.18309094784045296, |
| "learning_rate": 6.71563483735572e-06, |
| "loss": 0.2839, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.6369426751592355, |
| "grad_norm": 0.17072703179456888, |
| "learning_rate": 6.689401888772298e-06, |
| "loss": 0.2784, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.638358103326256, |
| "grad_norm": 0.18582678531448524, |
| "learning_rate": 6.663168940188877e-06, |
| "loss": 0.2815, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.639773531493277, |
| "grad_norm": 0.1924396916188198, |
| "learning_rate": 6.636935991605457e-06, |
| "loss": 0.2853, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.641188959660297, |
| "grad_norm": 0.18207577185241333, |
| "learning_rate": 6.610703043022036e-06, |
| "loss": 0.2894, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.6426043878273178, |
| "grad_norm": 0.19551476595141212, |
| "learning_rate": 6.584470094438614e-06, |
| "loss": 0.2873, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.6440198159943384, |
| "grad_norm": 0.16976395480923093, |
| "learning_rate": 6.558237145855195e-06, |
| "loss": 0.2845, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.6454352441613587, |
| "grad_norm": 0.1794667115720783, |
| "learning_rate": 6.532004197271774e-06, |
| "loss": 0.2861, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.6468506723283793, |
| "grad_norm": 0.17986008928354258, |
| "learning_rate": 6.505771248688353e-06, |
| "loss": 0.2781, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.6482661004954, |
| "grad_norm": 0.19873992835420157, |
| "learning_rate": 6.479538300104931e-06, |
| "loss": 0.2813, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.6496815286624202, |
| "grad_norm": 0.21479283680707595, |
| "learning_rate": 6.4533053515215116e-06, |
| "loss": 0.2912, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.651096956829441, |
| "grad_norm": 0.18424082693483232, |
| "learning_rate": 6.42707240293809e-06, |
| "loss": 0.2777, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.6525123849964616, |
| "grad_norm": 0.18232234087078958, |
| "learning_rate": 6.40083945435467e-06, |
| "loss": 0.2985, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.653927813163482, |
| "grad_norm": 0.1848666204181274, |
| "learning_rate": 6.37460650577125e-06, |
| "loss": 0.2823, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.6553432413305025, |
| "grad_norm": 0.18856393761385729, |
| "learning_rate": 6.3483735571878285e-06, |
| "loss": 0.2932, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.656758669497523, |
| "grad_norm": 0.17971858451183984, |
| "learning_rate": 6.322140608604407e-06, |
| "loss": 0.293, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.6581740976645434, |
| "grad_norm": 0.17066349292261504, |
| "learning_rate": 6.295907660020986e-06, |
| "loss": 0.2845, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.659589525831564, |
| "grad_norm": 0.23408224675290515, |
| "learning_rate": 6.269674711437566e-06, |
| "loss": 0.2834, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.6610049539985847, |
| "grad_norm": 0.18778554084997837, |
| "learning_rate": 6.243441762854145e-06, |
| "loss": 0.3077, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.662420382165605, |
| "grad_norm": 0.18402603177297536, |
| "learning_rate": 6.217208814270725e-06, |
| "loss": 0.2923, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.6638358103326256, |
| "grad_norm": 0.1790808754907691, |
| "learning_rate": 6.190975865687303e-06, |
| "loss": 0.2877, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.6652512384996463, |
| "grad_norm": 0.18117254341267883, |
| "learning_rate": 6.164742917103883e-06, |
| "loss": 0.298, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.18133341165545064, |
| "learning_rate": 6.1385099685204614e-06, |
| "loss": 0.285, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.668082094833687, |
| "grad_norm": 0.18694177265565673, |
| "learning_rate": 6.112277019937041e-06, |
| "loss": 0.2888, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.669497523000708, |
| "grad_norm": 0.17536912203009425, |
| "learning_rate": 6.08604407135362e-06, |
| "loss": 0.2879, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.670912951167728, |
| "grad_norm": 0.1776523188094862, |
| "learning_rate": 6.0598111227702e-06, |
| "loss": 0.2815, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.6723283793347488, |
| "grad_norm": 0.17913360450470825, |
| "learning_rate": 6.033578174186779e-06, |
| "loss": 0.2915, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.6737438075017694, |
| "grad_norm": 0.19344378917017407, |
| "learning_rate": 6.007345225603358e-06, |
| "loss": 0.2891, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.6751592356687897, |
| "grad_norm": 0.18365487875162972, |
| "learning_rate": 5.981112277019937e-06, |
| "loss": 0.285, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.6765746638358103, |
| "grad_norm": 0.18008101002043897, |
| "learning_rate": 5.954879328436517e-06, |
| "loss": 0.2919, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.677990092002831, |
| "grad_norm": 0.18236055900255962, |
| "learning_rate": 5.928646379853096e-06, |
| "loss": 0.2879, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.6794055201698512, |
| "grad_norm": 0.1782327309337629, |
| "learning_rate": 5.9024134312696755e-06, |
| "loss": 0.287, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.680820948336872, |
| "grad_norm": 0.18375618710095973, |
| "learning_rate": 5.876180482686254e-06, |
| "loss": 0.2883, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.6822363765038926, |
| "grad_norm": 0.1802953324961345, |
| "learning_rate": 5.8499475341028336e-06, |
| "loss": 0.2815, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.683651804670913, |
| "grad_norm": 0.18838302801802298, |
| "learning_rate": 5.823714585519412e-06, |
| "loss": 0.3063, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.6850672328379335, |
| "grad_norm": 0.1850222907843767, |
| "learning_rate": 5.797481636935992e-06, |
| "loss": 0.2917, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.686482661004954, |
| "grad_norm": 0.17707903423488489, |
| "learning_rate": 5.771248688352571e-06, |
| "loss": 0.2993, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.6878980891719744, |
| "grad_norm": 0.17923211297671582, |
| "learning_rate": 5.7450157397691505e-06, |
| "loss": 0.2841, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.689313517338995, |
| "grad_norm": 0.18554093813240305, |
| "learning_rate": 5.71878279118573e-06, |
| "loss": 0.277, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.6907289455060157, |
| "grad_norm": 0.18768414892060284, |
| "learning_rate": 5.6925498426023085e-06, |
| "loss": 0.2865, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.692144373673036, |
| "grad_norm": 0.1857402006462927, |
| "learning_rate": 5.666316894018888e-06, |
| "loss": 0.2973, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.6935598018400566, |
| "grad_norm": 0.17132007260096363, |
| "learning_rate": 5.6400839454354665e-06, |
| "loss": 0.2776, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.6949752300070773, |
| "grad_norm": 0.17720084601471012, |
| "learning_rate": 5.613850996852047e-06, |
| "loss": 0.2701, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.6963906581740975, |
| "grad_norm": 0.20231472960856142, |
| "learning_rate": 5.587618048268626e-06, |
| "loss": 0.2879, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.697806086341118, |
| "grad_norm": 0.1948076173661505, |
| "learning_rate": 5.561385099685205e-06, |
| "loss": 0.3071, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.699221514508139, |
| "grad_norm": 0.19277975165339786, |
| "learning_rate": 5.535152151101784e-06, |
| "loss": 0.288, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.700636942675159, |
| "grad_norm": 0.18941875379657241, |
| "learning_rate": 5.508919202518363e-06, |
| "loss": 0.3003, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.7020523708421798, |
| "grad_norm": 0.17808309503947614, |
| "learning_rate": 5.482686253934942e-06, |
| "loss": 0.285, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.7034677990092004, |
| "grad_norm": 0.17738627483023298, |
| "learning_rate": 5.456453305351522e-06, |
| "loss": 0.2726, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.7048832271762207, |
| "grad_norm": 0.1870343549277759, |
| "learning_rate": 5.430220356768101e-06, |
| "loss": 0.2847, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.7062986553432413, |
| "grad_norm": 0.18336127786092873, |
| "learning_rate": 5.403987408184681e-06, |
| "loss": 0.2894, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.707714083510262, |
| "grad_norm": 0.18002245752166193, |
| "learning_rate": 5.377754459601259e-06, |
| "loss": 0.2987, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.709129511677282, |
| "grad_norm": 0.17536975786409212, |
| "learning_rate": 5.351521511017839e-06, |
| "loss": 0.2812, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.710544939844303, |
| "grad_norm": 0.17030964776321728, |
| "learning_rate": 5.325288562434417e-06, |
| "loss": 0.2845, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.7119603680113236, |
| "grad_norm": 0.17298737950738244, |
| "learning_rate": 5.2990556138509975e-06, |
| "loss": 0.276, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.713375796178344, |
| "grad_norm": 0.18792212139908102, |
| "learning_rate": 5.272822665267577e-06, |
| "loss": 0.2901, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.7147912243453645, |
| "grad_norm": 0.17302303670598543, |
| "learning_rate": 5.2465897166841556e-06, |
| "loss": 0.2866, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.716206652512385, |
| "grad_norm": 0.29187006960632644, |
| "learning_rate": 5.220356768100735e-06, |
| "loss": 0.3027, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.7176220806794054, |
| "grad_norm": 0.18511802906000652, |
| "learning_rate": 5.194123819517314e-06, |
| "loss": 0.2853, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.719037508846426, |
| "grad_norm": 0.1800424289490244, |
| "learning_rate": 5.167890870933893e-06, |
| "loss": 0.2707, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.7204529370134467, |
| "grad_norm": 0.18131224935839205, |
| "learning_rate": 5.1416579223504725e-06, |
| "loss": 0.2754, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.721868365180467, |
| "grad_norm": 0.1923264676927521, |
| "learning_rate": 5.115424973767052e-06, |
| "loss": 0.2795, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.7232837933474876, |
| "grad_norm": 0.1741853780638869, |
| "learning_rate": 5.089192025183631e-06, |
| "loss": 0.2762, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.7246992215145083, |
| "grad_norm": 0.1872875625453876, |
| "learning_rate": 5.06295907660021e-06, |
| "loss": 0.2895, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.7261146496815285, |
| "grad_norm": 0.17331130432224612, |
| "learning_rate": 5.036726128016789e-06, |
| "loss": 0.2806, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.727530077848549, |
| "grad_norm": 0.18165497532503466, |
| "learning_rate": 5.010493179433368e-06, |
| "loss": 0.2821, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.72894550601557, |
| "grad_norm": 0.18825902755069426, |
| "learning_rate": 4.984260230849947e-06, |
| "loss": 0.2912, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.73036093418259, |
| "grad_norm": 0.1856572192632043, |
| "learning_rate": 4.958027282266528e-06, |
| "loss": 0.2808, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.7317763623496107, |
| "grad_norm": 0.18010845986339513, |
| "learning_rate": 4.931794333683106e-06, |
| "loss": 0.2922, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.7331917905166314, |
| "grad_norm": 0.1766883022479923, |
| "learning_rate": 4.905561385099686e-06, |
| "loss": 0.2875, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.7346072186836516, |
| "grad_norm": 0.18382795448047617, |
| "learning_rate": 4.879328436516264e-06, |
| "loss": 0.302, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.7360226468506723, |
| "grad_norm": 0.1825510343747918, |
| "learning_rate": 4.853095487932844e-06, |
| "loss": 0.2892, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.737438075017693, |
| "grad_norm": 0.17940605284014863, |
| "learning_rate": 4.826862539349423e-06, |
| "loss": 0.2918, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.738853503184713, |
| "grad_norm": 0.18781804698819057, |
| "learning_rate": 4.800629590766003e-06, |
| "loss": 0.2856, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.740268931351734, |
| "grad_norm": 0.17054774170847606, |
| "learning_rate": 4.774396642182582e-06, |
| "loss": 0.2856, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.7416843595187546, |
| "grad_norm": 0.1730177383569187, |
| "learning_rate": 4.748163693599161e-06, |
| "loss": 0.2862, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.743099787685775, |
| "grad_norm": 0.17655836178940787, |
| "learning_rate": 4.72193074501574e-06, |
| "loss": 0.2804, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.7445152158527955, |
| "grad_norm": 0.18955604901001924, |
| "learning_rate": 4.695697796432319e-06, |
| "loss": 0.2867, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.745930644019816, |
| "grad_norm": 0.17533273828919024, |
| "learning_rate": 4.669464847848898e-06, |
| "loss": 0.2855, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.7473460721868364, |
| "grad_norm": 0.18292716308367937, |
| "learning_rate": 4.6432318992654776e-06, |
| "loss": 0.2938, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.748761500353857, |
| "grad_norm": 0.1857497386208829, |
| "learning_rate": 4.616998950682057e-06, |
| "loss": 0.2995, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.7501769285208777, |
| "grad_norm": 0.18732398873952333, |
| "learning_rate": 4.5907660020986364e-06, |
| "loss": 0.2892, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.7515923566878984, |
| "grad_norm": 0.18475276685355677, |
| "learning_rate": 4.564533053515215e-06, |
| "loss": 0.2973, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.7530077848549186, |
| "grad_norm": 0.1838772729638837, |
| "learning_rate": 4.5383001049317945e-06, |
| "loss": 0.2938, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.7544232130219393, |
| "grad_norm": 0.17828112084647768, |
| "learning_rate": 4.512067156348374e-06, |
| "loss": 0.2808, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.7558386411889595, |
| "grad_norm": 0.17472294949009795, |
| "learning_rate": 4.485834207764953e-06, |
| "loss": 0.2855, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.75725406935598, |
| "grad_norm": 0.19968711710429163, |
| "learning_rate": 4.459601259181533e-06, |
| "loss": 0.2859, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.758669497523001, |
| "grad_norm": 0.18481764504677684, |
| "learning_rate": 4.433368310598111e-06, |
| "loss": 0.2875, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.7600849256900215, |
| "grad_norm": 0.18806160443408587, |
| "learning_rate": 4.407135362014691e-06, |
| "loss": 0.2774, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.7615003538570417, |
| "grad_norm": 0.1868899501278574, |
| "learning_rate": 4.380902413431269e-06, |
| "loss": 0.2962, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.7629157820240624, |
| "grad_norm": 0.19836249564818045, |
| "learning_rate": 4.354669464847849e-06, |
| "loss": 0.2958, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.7643312101910826, |
| "grad_norm": 0.18067134134728374, |
| "learning_rate": 4.328436516264428e-06, |
| "loss": 0.2851, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.7657466383581033, |
| "grad_norm": 0.18152054180820446, |
| "learning_rate": 4.302203567681008e-06, |
| "loss": 0.2817, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.767162066525124, |
| "grad_norm": 0.17651808878217853, |
| "learning_rate": 4.275970619097587e-06, |
| "loss": 0.283, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.7685774946921446, |
| "grad_norm": 0.16912999089035635, |
| "learning_rate": 4.249737670514166e-06, |
| "loss": 0.2776, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.769992922859165, |
| "grad_norm": 0.18341343085799097, |
| "learning_rate": 4.223504721930745e-06, |
| "loss": 0.2934, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.7714083510261855, |
| "grad_norm": 0.19078243306578607, |
| "learning_rate": 4.197271773347325e-06, |
| "loss": 0.2841, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.7728237791932058, |
| "grad_norm": 0.17903784125053218, |
| "learning_rate": 4.171038824763904e-06, |
| "loss": 0.2847, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.7742392073602264, |
| "grad_norm": 0.18382338913876073, |
| "learning_rate": 4.144805876180483e-06, |
| "loss": 0.2857, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.775654635527247, |
| "grad_norm": 0.17684932630528774, |
| "learning_rate": 4.118572927597062e-06, |
| "loss": 0.2791, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.777070063694268, |
| "grad_norm": 0.1744776834452007, |
| "learning_rate": 4.0923399790136415e-06, |
| "loss": 0.2786, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.778485491861288, |
| "grad_norm": 0.17909606156140384, |
| "learning_rate": 4.06610703043022e-06, |
| "loss": 0.284, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.7799009200283087, |
| "grad_norm": 0.18050373442345014, |
| "learning_rate": 4.0398740818467995e-06, |
| "loss": 0.2852, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.781316348195329, |
| "grad_norm": 0.17504325599037854, |
| "learning_rate": 4.013641133263379e-06, |
| "loss": 0.2925, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.7827317763623496, |
| "grad_norm": 0.1783461491146812, |
| "learning_rate": 3.9874081846799584e-06, |
| "loss": 0.2859, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.7841472045293703, |
| "grad_norm": 0.1793103082043295, |
| "learning_rate": 3.961175236096538e-06, |
| "loss": 0.2649, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.785562632696391, |
| "grad_norm": 0.1693521387678535, |
| "learning_rate": 3.9349422875131165e-06, |
| "loss": 0.2871, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.786978060863411, |
| "grad_norm": 0.17525165546726384, |
| "learning_rate": 3.908709338929696e-06, |
| "loss": 0.2798, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.788393489030432, |
| "grad_norm": 0.17652384705179844, |
| "learning_rate": 3.882476390346275e-06, |
| "loss": 0.2842, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.789808917197452, |
| "grad_norm": 0.17048800995777114, |
| "learning_rate": 3.856243441762855e-06, |
| "loss": 0.2797, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.7912243453644727, |
| "grad_norm": 0.19232225549379658, |
| "learning_rate": 3.830010493179433e-06, |
| "loss": 0.2949, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.7926397735314934, |
| "grad_norm": 0.18158427098508625, |
| "learning_rate": 3.803777544596013e-06, |
| "loss": 0.2919, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.794055201698514, |
| "grad_norm": 0.17039888361757377, |
| "learning_rate": 3.7775445960125922e-06, |
| "loss": 0.2789, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.7954706298655343, |
| "grad_norm": 0.1710460774216996, |
| "learning_rate": 3.7513116474291713e-06, |
| "loss": 0.283, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.796886058032555, |
| "grad_norm": 0.18678374857417665, |
| "learning_rate": 3.7250786988457507e-06, |
| "loss": 0.2865, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.798301486199575, |
| "grad_norm": 0.17676886545179707, |
| "learning_rate": 3.6988457502623293e-06, |
| "loss": 0.2928, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.799716914366596, |
| "grad_norm": 0.17821278496624815, |
| "learning_rate": 3.672612801678909e-06, |
| "loss": 0.2825, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.8011323425336165, |
| "grad_norm": 0.1807914249066901, |
| "learning_rate": 3.6463798530954877e-06, |
| "loss": 0.2713, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.802547770700637, |
| "grad_norm": 0.18384108470044716, |
| "learning_rate": 3.620146904512067e-06, |
| "loss": 0.273, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.8039631988676574, |
| "grad_norm": 0.17353001772169335, |
| "learning_rate": 3.5939139559286466e-06, |
| "loss": 0.2825, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.805378627034678, |
| "grad_norm": 0.1840707179807425, |
| "learning_rate": 3.5676810073452256e-06, |
| "loss": 0.2872, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.8067940552016983, |
| "grad_norm": 0.17514367701031278, |
| "learning_rate": 3.541448058761805e-06, |
| "loss": 0.2869, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.808209483368719, |
| "grad_norm": 0.17928029059399161, |
| "learning_rate": 3.515215110178384e-06, |
| "loss": 0.2831, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.8096249115357397, |
| "grad_norm": 0.17848612415415777, |
| "learning_rate": 3.4889821615949635e-06, |
| "loss": 0.2797, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.8110403397027603, |
| "grad_norm": 0.17737711539709516, |
| "learning_rate": 3.462749213011543e-06, |
| "loss": 0.2949, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.8124557678697806, |
| "grad_norm": 0.17399136200106763, |
| "learning_rate": 3.436516264428122e-06, |
| "loss": 0.2937, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.8138711960368012, |
| "grad_norm": 0.18242700749152266, |
| "learning_rate": 3.4102833158447014e-06, |
| "loss": 0.2868, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.8152866242038215, |
| "grad_norm": 0.1771122346213806, |
| "learning_rate": 3.38405036726128e-06, |
| "loss": 0.2884, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.816702052370842, |
| "grad_norm": 0.17652808422220528, |
| "learning_rate": 3.35781741867786e-06, |
| "loss": 0.2836, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.818117480537863, |
| "grad_norm": 0.17830076323575594, |
| "learning_rate": 3.3315844700944385e-06, |
| "loss": 0.2817, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.8195329087048835, |
| "grad_norm": 0.18223032359971642, |
| "learning_rate": 3.305351521511018e-06, |
| "loss": 0.2943, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.8209483368719037, |
| "grad_norm": 0.17657096233286845, |
| "learning_rate": 3.2791185729275973e-06, |
| "loss": 0.2907, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.8223637650389244, |
| "grad_norm": 0.17075541903115724, |
| "learning_rate": 3.2528856243441763e-06, |
| "loss": 0.2749, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.8237791932059446, |
| "grad_norm": 0.17384883418147812, |
| "learning_rate": 3.2266526757607558e-06, |
| "loss": 0.294, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.8251946213729653, |
| "grad_norm": 0.18292539916744055, |
| "learning_rate": 3.200419727177335e-06, |
| "loss": 0.2781, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.826610049539986, |
| "grad_norm": 0.17339033962692266, |
| "learning_rate": 3.1741867785939142e-06, |
| "loss": 0.2765, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.8280254777070066, |
| "grad_norm": 0.17691626215998835, |
| "learning_rate": 3.147953830010493e-06, |
| "loss": 0.2962, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.829440905874027, |
| "grad_norm": 0.1766712989401266, |
| "learning_rate": 3.1217208814270727e-06, |
| "loss": 0.2796, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.8308563340410475, |
| "grad_norm": 0.18888976742039038, |
| "learning_rate": 3.0954879328436517e-06, |
| "loss": 0.2871, |
| "step": 2001 |
| }, |
| { |
| "epoch": 2.8322717622080678, |
| "grad_norm": 0.18792724511405656, |
| "learning_rate": 3.0692549842602307e-06, |
| "loss": 0.2902, |
| "step": 2002 |
| }, |
| { |
| "epoch": 2.8336871903750884, |
| "grad_norm": 0.16425281618847395, |
| "learning_rate": 3.04302203567681e-06, |
| "loss": 0.273, |
| "step": 2003 |
| }, |
| { |
| "epoch": 2.835102618542109, |
| "grad_norm": 0.1746146438734197, |
| "learning_rate": 3.0167890870933896e-06, |
| "loss": 0.2826, |
| "step": 2004 |
| }, |
| { |
| "epoch": 2.8365180467091298, |
| "grad_norm": 0.19294227092682006, |
| "learning_rate": 2.9905561385099686e-06, |
| "loss": 0.2871, |
| "step": 2005 |
| }, |
| { |
| "epoch": 2.83793347487615, |
| "grad_norm": 0.1776014193267871, |
| "learning_rate": 2.964323189926548e-06, |
| "loss": 0.2886, |
| "step": 2006 |
| }, |
| { |
| "epoch": 2.8393489030431707, |
| "grad_norm": 0.1798288722394478, |
| "learning_rate": 2.938090241343127e-06, |
| "loss": 0.2929, |
| "step": 2007 |
| }, |
| { |
| "epoch": 2.840764331210191, |
| "grad_norm": 0.18006032181040652, |
| "learning_rate": 2.911857292759706e-06, |
| "loss": 0.2716, |
| "step": 2008 |
| }, |
| { |
| "epoch": 2.8421797593772116, |
| "grad_norm": 0.18136764338845685, |
| "learning_rate": 2.8856243441762855e-06, |
| "loss": 0.2804, |
| "step": 2009 |
| }, |
| { |
| "epoch": 2.8435951875442322, |
| "grad_norm": 0.1702942902210057, |
| "learning_rate": 2.859391395592865e-06, |
| "loss": 0.283, |
| "step": 2010 |
| }, |
| { |
| "epoch": 2.845010615711253, |
| "grad_norm": 0.16858535738863575, |
| "learning_rate": 2.833158447009444e-06, |
| "loss": 0.2715, |
| "step": 2011 |
| }, |
| { |
| "epoch": 2.846426043878273, |
| "grad_norm": 0.17249587169123212, |
| "learning_rate": 2.8069254984260234e-06, |
| "loss": 0.2727, |
| "step": 2012 |
| }, |
| { |
| "epoch": 2.847841472045294, |
| "grad_norm": 0.1854098483141046, |
| "learning_rate": 2.7806925498426024e-06, |
| "loss": 0.2958, |
| "step": 2013 |
| }, |
| { |
| "epoch": 2.849256900212314, |
| "grad_norm": 0.18003906868221864, |
| "learning_rate": 2.7544596012591814e-06, |
| "loss": 0.2899, |
| "step": 2014 |
| }, |
| { |
| "epoch": 2.8506723283793347, |
| "grad_norm": 0.1848795205341624, |
| "learning_rate": 2.728226652675761e-06, |
| "loss": 0.2906, |
| "step": 2015 |
| }, |
| { |
| "epoch": 2.8520877565463554, |
| "grad_norm": 0.18063285127831286, |
| "learning_rate": 2.7019937040923403e-06, |
| "loss": 0.2775, |
| "step": 2016 |
| }, |
| { |
| "epoch": 2.853503184713376, |
| "grad_norm": 0.17428351528968997, |
| "learning_rate": 2.6757607555089193e-06, |
| "loss": 0.2847, |
| "step": 2017 |
| }, |
| { |
| "epoch": 2.8549186128803963, |
| "grad_norm": 0.1948698715579363, |
| "learning_rate": 2.6495278069254988e-06, |
| "loss": 0.2971, |
| "step": 2018 |
| }, |
| { |
| "epoch": 2.856334041047417, |
| "grad_norm": 0.17185235351608957, |
| "learning_rate": 2.6232948583420778e-06, |
| "loss": 0.2815, |
| "step": 2019 |
| }, |
| { |
| "epoch": 2.857749469214437, |
| "grad_norm": 0.16970422017649608, |
| "learning_rate": 2.597061909758657e-06, |
| "loss": 0.271, |
| "step": 2020 |
| }, |
| { |
| "epoch": 2.859164897381458, |
| "grad_norm": 0.18560638216370043, |
| "learning_rate": 2.5708289611752362e-06, |
| "loss": 0.2865, |
| "step": 2021 |
| }, |
| { |
| "epoch": 2.8605803255484785, |
| "grad_norm": 0.18118828426589775, |
| "learning_rate": 2.5445960125918157e-06, |
| "loss": 0.2957, |
| "step": 2022 |
| }, |
| { |
| "epoch": 2.861995753715499, |
| "grad_norm": 0.1766585834822367, |
| "learning_rate": 2.5183630640083947e-06, |
| "loss": 0.2756, |
| "step": 2023 |
| }, |
| { |
| "epoch": 2.8634111818825194, |
| "grad_norm": 0.1717726611092561, |
| "learning_rate": 2.4921301154249737e-06, |
| "loss": 0.2811, |
| "step": 2024 |
| }, |
| { |
| "epoch": 2.86482661004954, |
| "grad_norm": 0.17868368522308714, |
| "learning_rate": 2.465897166841553e-06, |
| "loss": 0.2955, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.8662420382165603, |
| "grad_norm": 0.17728811150135537, |
| "learning_rate": 2.439664218258132e-06, |
| "loss": 0.2925, |
| "step": 2026 |
| }, |
| { |
| "epoch": 2.867657466383581, |
| "grad_norm": 0.1690730724900412, |
| "learning_rate": 2.4134312696747116e-06, |
| "loss": 0.284, |
| "step": 2027 |
| }, |
| { |
| "epoch": 2.8690728945506017, |
| "grad_norm": 0.17088543157293817, |
| "learning_rate": 2.387198321091291e-06, |
| "loss": 0.2959, |
| "step": 2028 |
| }, |
| { |
| "epoch": 2.8704883227176223, |
| "grad_norm": 0.17345729443031038, |
| "learning_rate": 2.36096537250787e-06, |
| "loss": 0.2806, |
| "step": 2029 |
| }, |
| { |
| "epoch": 2.8719037508846426, |
| "grad_norm": 0.17168864907383077, |
| "learning_rate": 2.334732423924449e-06, |
| "loss": 0.2899, |
| "step": 2030 |
| }, |
| { |
| "epoch": 2.8733191790516632, |
| "grad_norm": 0.16892911484230724, |
| "learning_rate": 2.3084994753410285e-06, |
| "loss": 0.2868, |
| "step": 2031 |
| }, |
| { |
| "epoch": 2.8747346072186835, |
| "grad_norm": 0.1870298401137464, |
| "learning_rate": 2.2822665267576075e-06, |
| "loss": 0.2787, |
| "step": 2032 |
| }, |
| { |
| "epoch": 2.876150035385704, |
| "grad_norm": 0.17388283426752, |
| "learning_rate": 2.256033578174187e-06, |
| "loss": 0.2784, |
| "step": 2033 |
| }, |
| { |
| "epoch": 2.877565463552725, |
| "grad_norm": 0.18035167623450257, |
| "learning_rate": 2.2298006295907664e-06, |
| "loss": 0.2962, |
| "step": 2034 |
| }, |
| { |
| "epoch": 2.8789808917197455, |
| "grad_norm": 0.17859035676811588, |
| "learning_rate": 2.2035676810073454e-06, |
| "loss": 0.2866, |
| "step": 2035 |
| }, |
| { |
| "epoch": 2.8803963198867657, |
| "grad_norm": 0.17367103802023898, |
| "learning_rate": 2.1773347324239244e-06, |
| "loss": 0.295, |
| "step": 2036 |
| }, |
| { |
| "epoch": 2.8818117480537864, |
| "grad_norm": 0.1735270033159891, |
| "learning_rate": 2.151101783840504e-06, |
| "loss": 0.277, |
| "step": 2037 |
| }, |
| { |
| "epoch": 2.8832271762208066, |
| "grad_norm": 0.17076648095639954, |
| "learning_rate": 2.124868835257083e-06, |
| "loss": 0.2779, |
| "step": 2038 |
| }, |
| { |
| "epoch": 2.8846426043878273, |
| "grad_norm": 0.17769982420123154, |
| "learning_rate": 2.0986358866736623e-06, |
| "loss": 0.2773, |
| "step": 2039 |
| }, |
| { |
| "epoch": 2.886058032554848, |
| "grad_norm": 0.17716108684876158, |
| "learning_rate": 2.0724029380902413e-06, |
| "loss": 0.2899, |
| "step": 2040 |
| }, |
| { |
| "epoch": 2.8874734607218686, |
| "grad_norm": 0.20152356634552215, |
| "learning_rate": 2.0461699895068208e-06, |
| "loss": 0.3025, |
| "step": 2041 |
| }, |
| { |
| "epoch": 2.888888888888889, |
| "grad_norm": 0.18105580605108734, |
| "learning_rate": 2.0199370409233998e-06, |
| "loss": 0.2854, |
| "step": 2042 |
| }, |
| { |
| "epoch": 2.8903043170559095, |
| "grad_norm": 0.1701357726201586, |
| "learning_rate": 1.9937040923399792e-06, |
| "loss": 0.2747, |
| "step": 2043 |
| }, |
| { |
| "epoch": 2.8917197452229297, |
| "grad_norm": 0.17670981194170432, |
| "learning_rate": 1.9674711437565582e-06, |
| "loss": 0.2811, |
| "step": 2044 |
| }, |
| { |
| "epoch": 2.8931351733899504, |
| "grad_norm": 0.18421121623096698, |
| "learning_rate": 1.9412381951731377e-06, |
| "loss": 0.2819, |
| "step": 2045 |
| }, |
| { |
| "epoch": 2.894550601556971, |
| "grad_norm": 0.18030333227492373, |
| "learning_rate": 1.9150052465897167e-06, |
| "loss": 0.2819, |
| "step": 2046 |
| }, |
| { |
| "epoch": 2.8959660297239918, |
| "grad_norm": 0.17001697349326303, |
| "learning_rate": 1.8887722980062961e-06, |
| "loss": 0.2768, |
| "step": 2047 |
| }, |
| { |
| "epoch": 2.897381457891012, |
| "grad_norm": 0.17961651630100461, |
| "learning_rate": 1.8625393494228753e-06, |
| "loss": 0.2985, |
| "step": 2048 |
| }, |
| { |
| "epoch": 2.8987968860580327, |
| "grad_norm": 0.17265583195377293, |
| "learning_rate": 1.8363064008394546e-06, |
| "loss": 0.2845, |
| "step": 2049 |
| }, |
| { |
| "epoch": 2.900212314225053, |
| "grad_norm": 0.17269811076363084, |
| "learning_rate": 1.8100734522560336e-06, |
| "loss": 0.2775, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.9016277423920736, |
| "grad_norm": 0.18776579560972845, |
| "learning_rate": 1.7838405036726128e-06, |
| "loss": 0.2838, |
| "step": 2051 |
| }, |
| { |
| "epoch": 2.903043170559094, |
| "grad_norm": 0.17867741388267788, |
| "learning_rate": 1.757607555089192e-06, |
| "loss": 0.2891, |
| "step": 2052 |
| }, |
| { |
| "epoch": 2.904458598726115, |
| "grad_norm": 0.17846859530717932, |
| "learning_rate": 1.7313746065057715e-06, |
| "loss": 0.3017, |
| "step": 2053 |
| }, |
| { |
| "epoch": 2.905874026893135, |
| "grad_norm": 0.18241316656654646, |
| "learning_rate": 1.7051416579223507e-06, |
| "loss": 0.306, |
| "step": 2054 |
| }, |
| { |
| "epoch": 2.907289455060156, |
| "grad_norm": 0.17810706516304647, |
| "learning_rate": 1.67890870933893e-06, |
| "loss": 0.2936, |
| "step": 2055 |
| }, |
| { |
| "epoch": 2.908704883227176, |
| "grad_norm": 0.17545134113009844, |
| "learning_rate": 1.652675760755509e-06, |
| "loss": 0.2759, |
| "step": 2056 |
| }, |
| { |
| "epoch": 2.9101203113941967, |
| "grad_norm": 0.16912469809935698, |
| "learning_rate": 1.6264428121720882e-06, |
| "loss": 0.2697, |
| "step": 2057 |
| }, |
| { |
| "epoch": 2.9115357395612174, |
| "grad_norm": 0.17386103881783685, |
| "learning_rate": 1.6002098635886674e-06, |
| "loss": 0.2802, |
| "step": 2058 |
| }, |
| { |
| "epoch": 2.912951167728238, |
| "grad_norm": 0.16935816411411495, |
| "learning_rate": 1.5739769150052464e-06, |
| "loss": 0.2733, |
| "step": 2059 |
| }, |
| { |
| "epoch": 2.9143665958952583, |
| "grad_norm": 0.17409526578818196, |
| "learning_rate": 1.5477439664218259e-06, |
| "loss": 0.2806, |
| "step": 2060 |
| }, |
| { |
| "epoch": 2.915782024062279, |
| "grad_norm": 0.17528174653087739, |
| "learning_rate": 1.521511017838405e-06, |
| "loss": 0.2805, |
| "step": 2061 |
| }, |
| { |
| "epoch": 2.917197452229299, |
| "grad_norm": 0.17415870629933414, |
| "learning_rate": 1.4952780692549843e-06, |
| "loss": 0.2721, |
| "step": 2062 |
| }, |
| { |
| "epoch": 2.91861288039632, |
| "grad_norm": 0.1674190257626401, |
| "learning_rate": 1.4690451206715635e-06, |
| "loss": 0.2777, |
| "step": 2063 |
| }, |
| { |
| "epoch": 2.9200283085633405, |
| "grad_norm": 0.18890631457900856, |
| "learning_rate": 1.4428121720881428e-06, |
| "loss": 0.2888, |
| "step": 2064 |
| }, |
| { |
| "epoch": 2.921443736730361, |
| "grad_norm": 0.17650409521486077, |
| "learning_rate": 1.416579223504722e-06, |
| "loss": 0.2848, |
| "step": 2065 |
| }, |
| { |
| "epoch": 2.9228591648973814, |
| "grad_norm": 0.17415093004971588, |
| "learning_rate": 1.3903462749213012e-06, |
| "loss": 0.266, |
| "step": 2066 |
| }, |
| { |
| "epoch": 2.924274593064402, |
| "grad_norm": 0.17889620724483848, |
| "learning_rate": 1.3641133263378804e-06, |
| "loss": 0.2856, |
| "step": 2067 |
| }, |
| { |
| "epoch": 2.9256900212314223, |
| "grad_norm": 0.1747134497606467, |
| "learning_rate": 1.3378803777544597e-06, |
| "loss": 0.2939, |
| "step": 2068 |
| }, |
| { |
| "epoch": 2.927105449398443, |
| "grad_norm": 0.16967566671723144, |
| "learning_rate": 1.3116474291710389e-06, |
| "loss": 0.2892, |
| "step": 2069 |
| }, |
| { |
| "epoch": 2.9285208775654636, |
| "grad_norm": 0.17429815642737306, |
| "learning_rate": 1.2854144805876181e-06, |
| "loss": 0.2825, |
| "step": 2070 |
| }, |
| { |
| "epoch": 2.9299363057324843, |
| "grad_norm": 0.17334968710760035, |
| "learning_rate": 1.2591815320041973e-06, |
| "loss": 0.2878, |
| "step": 2071 |
| }, |
| { |
| "epoch": 2.9313517338995045, |
| "grad_norm": 0.17764237449975134, |
| "learning_rate": 1.2329485834207766e-06, |
| "loss": 0.2822, |
| "step": 2072 |
| }, |
| { |
| "epoch": 2.932767162066525, |
| "grad_norm": 0.1742807974773749, |
| "learning_rate": 1.2067156348373558e-06, |
| "loss": 0.2999, |
| "step": 2073 |
| }, |
| { |
| "epoch": 2.9341825902335454, |
| "grad_norm": 0.1720479125811444, |
| "learning_rate": 1.180482686253935e-06, |
| "loss": 0.2843, |
| "step": 2074 |
| }, |
| { |
| "epoch": 2.935598018400566, |
| "grad_norm": 0.17424562122848383, |
| "learning_rate": 1.1542497376705142e-06, |
| "loss": 0.2824, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.937013446567587, |
| "grad_norm": 0.1702802994215502, |
| "learning_rate": 1.1280167890870935e-06, |
| "loss": 0.2827, |
| "step": 2076 |
| }, |
| { |
| "epoch": 2.9384288747346075, |
| "grad_norm": 0.17724938854296238, |
| "learning_rate": 1.1017838405036727e-06, |
| "loss": 0.2665, |
| "step": 2077 |
| }, |
| { |
| "epoch": 2.9398443029016277, |
| "grad_norm": 0.16881102462161476, |
| "learning_rate": 1.075550891920252e-06, |
| "loss": 0.2875, |
| "step": 2078 |
| }, |
| { |
| "epoch": 2.9412597310686484, |
| "grad_norm": 0.17586128182531433, |
| "learning_rate": 1.0493179433368312e-06, |
| "loss": 0.2918, |
| "step": 2079 |
| }, |
| { |
| "epoch": 2.9426751592356686, |
| "grad_norm": 0.17880544066965168, |
| "learning_rate": 1.0230849947534104e-06, |
| "loss": 0.2848, |
| "step": 2080 |
| }, |
| { |
| "epoch": 2.9440905874026893, |
| "grad_norm": 0.17612939411552903, |
| "learning_rate": 9.968520461699896e-07, |
| "loss": 0.2989, |
| "step": 2081 |
| }, |
| { |
| "epoch": 2.94550601556971, |
| "grad_norm": 0.17079837970890843, |
| "learning_rate": 9.706190975865688e-07, |
| "loss": 0.2826, |
| "step": 2082 |
| }, |
| { |
| "epoch": 2.9469214437367306, |
| "grad_norm": 0.17576729615022918, |
| "learning_rate": 9.443861490031481e-07, |
| "loss": 0.294, |
| "step": 2083 |
| }, |
| { |
| "epoch": 2.948336871903751, |
| "grad_norm": 0.1759809699335125, |
| "learning_rate": 9.181532004197273e-07, |
| "loss": 0.2708, |
| "step": 2084 |
| }, |
| { |
| "epoch": 2.9497523000707715, |
| "grad_norm": 0.17217197283212832, |
| "learning_rate": 8.919202518363064e-07, |
| "loss": 0.2884, |
| "step": 2085 |
| }, |
| { |
| "epoch": 2.9511677282377917, |
| "grad_norm": 0.17418729257127177, |
| "learning_rate": 8.656873032528857e-07, |
| "loss": 0.2817, |
| "step": 2086 |
| }, |
| { |
| "epoch": 2.9525831564048124, |
| "grad_norm": 0.17445904450889047, |
| "learning_rate": 8.39454354669465e-07, |
| "loss": 0.2913, |
| "step": 2087 |
| }, |
| { |
| "epoch": 2.953998584571833, |
| "grad_norm": 0.17258868036349787, |
| "learning_rate": 8.132214060860441e-07, |
| "loss": 0.2907, |
| "step": 2088 |
| }, |
| { |
| "epoch": 2.9554140127388537, |
| "grad_norm": 0.17168645858086345, |
| "learning_rate": 7.869884575026232e-07, |
| "loss": 0.2807, |
| "step": 2089 |
| }, |
| { |
| "epoch": 2.956829440905874, |
| "grad_norm": 0.1709694601992102, |
| "learning_rate": 7.607555089192025e-07, |
| "loss": 0.286, |
| "step": 2090 |
| }, |
| { |
| "epoch": 2.9582448690728946, |
| "grad_norm": 0.18076593969351598, |
| "learning_rate": 7.345225603357818e-07, |
| "loss": 0.2935, |
| "step": 2091 |
| }, |
| { |
| "epoch": 2.959660297239915, |
| "grad_norm": 0.16122100248321902, |
| "learning_rate": 7.08289611752361e-07, |
| "loss": 0.2537, |
| "step": 2092 |
| }, |
| { |
| "epoch": 2.9610757254069355, |
| "grad_norm": 0.17460588873108918, |
| "learning_rate": 6.820566631689402e-07, |
| "loss": 0.288, |
| "step": 2093 |
| }, |
| { |
| "epoch": 2.962491153573956, |
| "grad_norm": 0.17928398628197456, |
| "learning_rate": 6.558237145855194e-07, |
| "loss": 0.2734, |
| "step": 2094 |
| }, |
| { |
| "epoch": 2.963906581740977, |
| "grad_norm": 0.17212227085116813, |
| "learning_rate": 6.295907660020987e-07, |
| "loss": 0.271, |
| "step": 2095 |
| }, |
| { |
| "epoch": 2.965322009907997, |
| "grad_norm": 0.16846021432755692, |
| "learning_rate": 6.033578174186779e-07, |
| "loss": 0.2882, |
| "step": 2096 |
| }, |
| { |
| "epoch": 2.9667374380750178, |
| "grad_norm": 0.16842469893670636, |
| "learning_rate": 5.771248688352571e-07, |
| "loss": 0.2709, |
| "step": 2097 |
| }, |
| { |
| "epoch": 2.968152866242038, |
| "grad_norm": 0.17418546182047925, |
| "learning_rate": 5.508919202518364e-07, |
| "loss": 0.2824, |
| "step": 2098 |
| }, |
| { |
| "epoch": 2.9695682944090587, |
| "grad_norm": 0.1755673694382143, |
| "learning_rate": 5.246589716684156e-07, |
| "loss": 0.2913, |
| "step": 2099 |
| }, |
| { |
| "epoch": 2.9709837225760793, |
| "grad_norm": 0.1739443277389086, |
| "learning_rate": 4.984260230849948e-07, |
| "loss": 0.2842, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.9723991507431, |
| "grad_norm": 0.16962826263262715, |
| "learning_rate": 4.7219307450157403e-07, |
| "loss": 0.2786, |
| "step": 2101 |
| }, |
| { |
| "epoch": 2.9738145789101202, |
| "grad_norm": 0.1844668822431492, |
| "learning_rate": 4.459601259181532e-07, |
| "loss": 0.3197, |
| "step": 2102 |
| }, |
| { |
| "epoch": 2.975230007077141, |
| "grad_norm": 0.17038166826214368, |
| "learning_rate": 4.197271773347325e-07, |
| "loss": 0.2863, |
| "step": 2103 |
| }, |
| { |
| "epoch": 2.976645435244161, |
| "grad_norm": 0.1654973461554926, |
| "learning_rate": 3.934942287513116e-07, |
| "loss": 0.2733, |
| "step": 2104 |
| }, |
| { |
| "epoch": 2.978060863411182, |
| "grad_norm": 0.17118209967441136, |
| "learning_rate": 3.672612801678909e-07, |
| "loss": 0.2771, |
| "step": 2105 |
| }, |
| { |
| "epoch": 2.9794762915782025, |
| "grad_norm": 0.17596578468972426, |
| "learning_rate": 3.410283315844701e-07, |
| "loss": 0.2783, |
| "step": 2106 |
| }, |
| { |
| "epoch": 2.980891719745223, |
| "grad_norm": 0.1683430471571756, |
| "learning_rate": 3.1479538300104934e-07, |
| "loss": 0.2735, |
| "step": 2107 |
| }, |
| { |
| "epoch": 2.9823071479122434, |
| "grad_norm": 0.16686184103722593, |
| "learning_rate": 2.8856243441762856e-07, |
| "loss": 0.2687, |
| "step": 2108 |
| }, |
| { |
| "epoch": 2.983722576079264, |
| "grad_norm": 0.17598665900195548, |
| "learning_rate": 2.623294858342078e-07, |
| "loss": 0.2975, |
| "step": 2109 |
| }, |
| { |
| "epoch": 2.9851380042462843, |
| "grad_norm": 0.19097911943519666, |
| "learning_rate": 2.3609653725078701e-07, |
| "loss": 0.3143, |
| "step": 2110 |
| }, |
| { |
| "epoch": 2.986553432413305, |
| "grad_norm": 0.18505478775984915, |
| "learning_rate": 2.0986358866736624e-07, |
| "loss": 0.267, |
| "step": 2111 |
| }, |
| { |
| "epoch": 2.9879688605803256, |
| "grad_norm": 0.16498905166042235, |
| "learning_rate": 1.8363064008394544e-07, |
| "loss": 0.2798, |
| "step": 2112 |
| }, |
| { |
| "epoch": 2.9893842887473463, |
| "grad_norm": 0.17010577207806246, |
| "learning_rate": 1.5739769150052467e-07, |
| "loss": 0.2779, |
| "step": 2113 |
| }, |
| { |
| "epoch": 2.9907997169143665, |
| "grad_norm": 0.1670793382426729, |
| "learning_rate": 1.311647429171039e-07, |
| "loss": 0.2849, |
| "step": 2114 |
| }, |
| { |
| "epoch": 2.992215145081387, |
| "grad_norm": 0.17153417278807456, |
| "learning_rate": 1.0493179433368312e-07, |
| "loss": 0.2767, |
| "step": 2115 |
| }, |
| { |
| "epoch": 2.9936305732484074, |
| "grad_norm": 0.16578146300653185, |
| "learning_rate": 7.869884575026233e-08, |
| "loss": 0.2841, |
| "step": 2116 |
| }, |
| { |
| "epoch": 2.995046001415428, |
| "grad_norm": 0.16854873982311497, |
| "learning_rate": 5.246589716684156e-08, |
| "loss": 0.2929, |
| "step": 2117 |
| }, |
| { |
| "epoch": 2.9964614295824488, |
| "grad_norm": 0.18959798822634213, |
| "learning_rate": 2.623294858342078e-08, |
| "loss": 0.2741, |
| "step": 2118 |
| }, |
| { |
| "epoch": 2.9964614295824488, |
| "step": 2118, |
| "total_flos": 2.3544511201506492e+19, |
| "train_loss": 0.4426687185470511, |
| "train_runtime": 62711.6278, |
| "train_samples_per_second": 0.541, |
| "train_steps_per_second": 0.034 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2118, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.3544511201506492e+19, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |