| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.994311717861206, |
| "eval_steps": 500, |
| "global_step": 2195, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0022753128555176336, |
| "grad_norm": 5.685660859809189, |
| "learning_rate": 1.8181818181818183e-07, |
| "loss": 0.8913, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.004550625711035267, |
| "grad_norm": 5.711076889258604, |
| "learning_rate": 3.6363636363636366e-07, |
| "loss": 0.8872, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.006825938566552901, |
| "grad_norm": 5.704778542595518, |
| "learning_rate": 5.454545454545455e-07, |
| "loss": 0.8818, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.009101251422070534, |
| "grad_norm": 5.822601641411562, |
| "learning_rate": 7.272727272727273e-07, |
| "loss": 0.9099, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.011376564277588168, |
| "grad_norm": 5.779461616420256, |
| "learning_rate": 9.090909090909091e-07, |
| "loss": 0.9119, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.013651877133105802, |
| "grad_norm": 5.729877153470166, |
| "learning_rate": 1.090909090909091e-06, |
| "loss": 0.8553, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.015927189988623434, |
| "grad_norm": 5.281427413434935, |
| "learning_rate": 1.2727272727272728e-06, |
| "loss": 0.8673, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.01820250284414107, |
| "grad_norm": 5.052471409802485, |
| "learning_rate": 1.4545454545454546e-06, |
| "loss": 0.8509, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.020477815699658702, |
| "grad_norm": 4.389244479229915, |
| "learning_rate": 1.6363636363636365e-06, |
| "loss": 0.8566, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.022753128555176336, |
| "grad_norm": 4.094630788604825, |
| "learning_rate": 1.8181818181818183e-06, |
| "loss": 0.8302, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.02502844141069397, |
| "grad_norm": 2.4624238627507267, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.7387, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.027303754266211604, |
| "grad_norm": 2.456278255907781, |
| "learning_rate": 2.181818181818182e-06, |
| "loss": 0.7747, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.02957906712172924, |
| "grad_norm": 2.072663924996278, |
| "learning_rate": 2.363636363636364e-06, |
| "loss": 0.7314, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.03185437997724687, |
| "grad_norm": 2.051452583818761, |
| "learning_rate": 2.5454545454545456e-06, |
| "loss": 0.7661, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.034129692832764506, |
| "grad_norm": 2.837150084118399, |
| "learning_rate": 2.7272727272727272e-06, |
| "loss": 0.8176, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.03640500568828214, |
| "grad_norm": 3.2676216662546578, |
| "learning_rate": 2.9090909090909093e-06, |
| "loss": 0.7332, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.038680318543799774, |
| "grad_norm": 3.478330510465565, |
| "learning_rate": 3.090909090909091e-06, |
| "loss": 0.8074, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.040955631399317405, |
| "grad_norm": 3.3365962406701617, |
| "learning_rate": 3.272727272727273e-06, |
| "loss": 0.7319, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.04323094425483504, |
| "grad_norm": 3.172713062908951, |
| "learning_rate": 3.454545454545455e-06, |
| "loss": 0.7348, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.04550625711035267, |
| "grad_norm": 2.647469572423114, |
| "learning_rate": 3.6363636363636366e-06, |
| "loss": 0.6978, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.04778156996587031, |
| "grad_norm": 1.9123731673559052, |
| "learning_rate": 3.818181818181819e-06, |
| "loss": 0.6676, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.05005688282138794, |
| "grad_norm": 1.5607974881146613, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.6623, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.05233219567690557, |
| "grad_norm": 1.3192020195203946, |
| "learning_rate": 4.181818181818182e-06, |
| "loss": 0.6766, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.05460750853242321, |
| "grad_norm": 1.1681629362926533, |
| "learning_rate": 4.363636363636364e-06, |
| "loss": 0.6282, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.05688282138794084, |
| "grad_norm": 1.4253417702456757, |
| "learning_rate": 4.5454545454545455e-06, |
| "loss": 0.6829, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.05915813424345848, |
| "grad_norm": 1.304707249259586, |
| "learning_rate": 4.727272727272728e-06, |
| "loss": 0.6547, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.06143344709897611, |
| "grad_norm": 1.229005210666558, |
| "learning_rate": 4.90909090909091e-06, |
| "loss": 0.6336, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.06370875995449374, |
| "grad_norm": 1.3050185528089813, |
| "learning_rate": 5.090909090909091e-06, |
| "loss": 0.6571, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.06598407281001138, |
| "grad_norm": 1.1603661646747794, |
| "learning_rate": 5.272727272727273e-06, |
| "loss": 0.6185, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.06825938566552901, |
| "grad_norm": 0.975017107538546, |
| "learning_rate": 5.4545454545454545e-06, |
| "loss": 0.6272, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.07053469852104664, |
| "grad_norm": 0.9775508605964778, |
| "learning_rate": 5.636363636363636e-06, |
| "loss": 0.5826, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.07281001137656427, |
| "grad_norm": 1.1490334435580931, |
| "learning_rate": 5.8181818181818185e-06, |
| "loss": 0.6514, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.07508532423208192, |
| "grad_norm": 1.0553018624073731, |
| "learning_rate": 6e-06, |
| "loss": 0.6011, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.07736063708759955, |
| "grad_norm": 0.6887889050426664, |
| "learning_rate": 6.181818181818182e-06, |
| "loss": 0.589, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.07963594994311718, |
| "grad_norm": 0.6451648576496538, |
| "learning_rate": 6.363636363636364e-06, |
| "loss": 0.5828, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.08191126279863481, |
| "grad_norm": 0.769304174505827, |
| "learning_rate": 6.545454545454546e-06, |
| "loss": 0.5921, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.08418657565415244, |
| "grad_norm": 0.8058219749803411, |
| "learning_rate": 6.7272727272727275e-06, |
| "loss": 0.5817, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.08646188850967008, |
| "grad_norm": 0.6959095038632431, |
| "learning_rate": 6.90909090909091e-06, |
| "loss": 0.5872, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.08873720136518772, |
| "grad_norm": 0.6998025818497815, |
| "learning_rate": 7.0909090909090916e-06, |
| "loss": 0.6163, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.09101251422070535, |
| "grad_norm": 0.6481637742829409, |
| "learning_rate": 7.272727272727273e-06, |
| "loss": 0.596, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.09328782707622298, |
| "grad_norm": 0.6701911156282011, |
| "learning_rate": 7.454545454545456e-06, |
| "loss": 0.5605, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.09556313993174062, |
| "grad_norm": 0.621382473643511, |
| "learning_rate": 7.636363636363638e-06, |
| "loss": 0.5656, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.09783845278725825, |
| "grad_norm": 0.6366513729903114, |
| "learning_rate": 7.81818181818182e-06, |
| "loss": 0.5677, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.10011376564277588, |
| "grad_norm": 0.6530354813151484, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 0.5768, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.10238907849829351, |
| "grad_norm": 0.5250961965862125, |
| "learning_rate": 8.181818181818183e-06, |
| "loss": 0.571, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.10466439135381114, |
| "grad_norm": 0.5862346211984556, |
| "learning_rate": 8.363636363636365e-06, |
| "loss": 0.5502, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.10693970420932879, |
| "grad_norm": 0.5645831108750284, |
| "learning_rate": 8.545454545454546e-06, |
| "loss": 0.5612, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.10921501706484642, |
| "grad_norm": 0.5428516306287078, |
| "learning_rate": 8.727272727272728e-06, |
| "loss": 0.5941, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.11149032992036405, |
| "grad_norm": 0.5470221016318203, |
| "learning_rate": 8.90909090909091e-06, |
| "loss": 0.5478, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.11376564277588168, |
| "grad_norm": 0.5670301758284394, |
| "learning_rate": 9.090909090909091e-06, |
| "loss": 0.568, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.11604095563139932, |
| "grad_norm": 0.5238687690636948, |
| "learning_rate": 9.272727272727273e-06, |
| "loss": 0.5352, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.11831626848691695, |
| "grad_norm": 0.494334648795746, |
| "learning_rate": 9.454545454545456e-06, |
| "loss": 0.573, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.12059158134243458, |
| "grad_norm": 0.6418508884639001, |
| "learning_rate": 9.636363636363638e-06, |
| "loss": 0.5716, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.12286689419795221, |
| "grad_norm": 0.5544219094970814, |
| "learning_rate": 9.81818181818182e-06, |
| "loss": 0.5671, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.12514220705346984, |
| "grad_norm": 0.5044191975148502, |
| "learning_rate": 1e-05, |
| "loss": 0.5649, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.12741751990898748, |
| "grad_norm": 0.5352497333561101, |
| "learning_rate": 1.0181818181818182e-05, |
| "loss": 0.5418, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.1296928327645051, |
| "grad_norm": 0.5683305195263919, |
| "learning_rate": 1.0363636363636364e-05, |
| "loss": 0.5346, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.13196814562002276, |
| "grad_norm": 0.8310946445690662, |
| "learning_rate": 1.0545454545454546e-05, |
| "loss": 0.5503, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.1342434584755404, |
| "grad_norm": 0.5692386390346372, |
| "learning_rate": 1.0727272727272729e-05, |
| "loss": 0.5641, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.13651877133105803, |
| "grad_norm": 0.6552853434740772, |
| "learning_rate": 1.0909090909090909e-05, |
| "loss": 0.5555, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.13879408418657566, |
| "grad_norm": 0.5558986591762781, |
| "learning_rate": 1.1090909090909092e-05, |
| "loss": 0.544, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.1410693970420933, |
| "grad_norm": 0.5898414892780288, |
| "learning_rate": 1.1272727272727272e-05, |
| "loss": 0.5564, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.14334470989761092, |
| "grad_norm": 0.506476126864649, |
| "learning_rate": 1.1454545454545455e-05, |
| "loss": 0.5765, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.14562002275312855, |
| "grad_norm": 0.5291323959711722, |
| "learning_rate": 1.1636363636363637e-05, |
| "loss": 0.5618, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.14789533560864618, |
| "grad_norm": 0.6080011056361084, |
| "learning_rate": 1.181818181818182e-05, |
| "loss": 0.53, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.15017064846416384, |
| "grad_norm": 0.5685623206194781, |
| "learning_rate": 1.2e-05, |
| "loss": 0.5719, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.15244596131968147, |
| "grad_norm": 0.6292150667659716, |
| "learning_rate": 1.2181818181818184e-05, |
| "loss": 0.5512, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.1547212741751991, |
| "grad_norm": 0.5914023995005103, |
| "learning_rate": 1.2363636363636364e-05, |
| "loss": 0.5624, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.15699658703071673, |
| "grad_norm": 0.561334886574656, |
| "learning_rate": 1.2545454545454547e-05, |
| "loss": 0.5481, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.15927189988623436, |
| "grad_norm": 0.6735343236499104, |
| "learning_rate": 1.2727272727272728e-05, |
| "loss": 0.5932, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.161547212741752, |
| "grad_norm": 0.5906206771821041, |
| "learning_rate": 1.2909090909090912e-05, |
| "loss": 0.5337, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.16382252559726962, |
| "grad_norm": 0.6355464197194023, |
| "learning_rate": 1.3090909090909092e-05, |
| "loss": 0.5842, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.16609783845278725, |
| "grad_norm": 0.6048043966163964, |
| "learning_rate": 1.3272727272727275e-05, |
| "loss": 0.5668, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.16837315130830488, |
| "grad_norm": 0.7298080325337586, |
| "learning_rate": 1.3454545454545455e-05, |
| "loss": 0.5796, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.17064846416382254, |
| "grad_norm": 0.6230989396592326, |
| "learning_rate": 1.3636363636363637e-05, |
| "loss": 0.5466, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.17292377701934017, |
| "grad_norm": 0.6218921985816483, |
| "learning_rate": 1.381818181818182e-05, |
| "loss": 0.5137, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.1751990898748578, |
| "grad_norm": 0.5506392277633055, |
| "learning_rate": 1.4e-05, |
| "loss": 0.5271, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.17747440273037543, |
| "grad_norm": 0.709091172729563, |
| "learning_rate": 1.4181818181818183e-05, |
| "loss": 0.5275, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.17974971558589306, |
| "grad_norm": 0.7052601004280128, |
| "learning_rate": 1.4363636363636365e-05, |
| "loss": 0.5706, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.1820250284414107, |
| "grad_norm": 0.5238245701618371, |
| "learning_rate": 1.4545454545454546e-05, |
| "loss": 0.5656, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.18430034129692832, |
| "grad_norm": 0.6687841687625173, |
| "learning_rate": 1.4727272727272728e-05, |
| "loss": 0.5197, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.18657565415244595, |
| "grad_norm": 0.550803915665101, |
| "learning_rate": 1.4909090909090911e-05, |
| "loss": 0.4971, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.18885096700796358, |
| "grad_norm": 0.5247163009385345, |
| "learning_rate": 1.5090909090909091e-05, |
| "loss": 0.5307, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.19112627986348124, |
| "grad_norm": 0.6578165456867408, |
| "learning_rate": 1.5272727272727276e-05, |
| "loss": 0.5316, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.19340159271899887, |
| "grad_norm": 0.6381427629949843, |
| "learning_rate": 1.5454545454545454e-05, |
| "loss": 0.5406, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.1956769055745165, |
| "grad_norm": 0.6201059926158639, |
| "learning_rate": 1.563636363636364e-05, |
| "loss": 0.5359, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.19795221843003413, |
| "grad_norm": 0.7319265448424545, |
| "learning_rate": 1.5818181818181818e-05, |
| "loss": 0.5494, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.20022753128555176, |
| "grad_norm": 0.8254922009620973, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 0.5519, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.2025028441410694, |
| "grad_norm": 0.6596770624786563, |
| "learning_rate": 1.6181818181818184e-05, |
| "loss": 0.5491, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.20477815699658702, |
| "grad_norm": 0.7612010560559346, |
| "learning_rate": 1.6363636363636366e-05, |
| "loss": 0.57, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.20705346985210465, |
| "grad_norm": 0.6052327879452324, |
| "learning_rate": 1.6545454545454548e-05, |
| "loss": 0.5323, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.20932878270762229, |
| "grad_norm": 0.6173795688483125, |
| "learning_rate": 1.672727272727273e-05, |
| "loss": 0.5033, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.21160409556313994, |
| "grad_norm": 0.592019998832286, |
| "learning_rate": 1.690909090909091e-05, |
| "loss": 0.5387, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.21387940841865757, |
| "grad_norm": 0.5933082150936967, |
| "learning_rate": 1.7090909090909092e-05, |
| "loss": 0.4999, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.2161547212741752, |
| "grad_norm": 0.5173687814283479, |
| "learning_rate": 1.7272727272727274e-05, |
| "loss": 0.5254, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.21843003412969283, |
| "grad_norm": 0.6135332216240957, |
| "learning_rate": 1.7454545454545456e-05, |
| "loss": 0.5215, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.22070534698521047, |
| "grad_norm": 0.6176285527248535, |
| "learning_rate": 1.7636363636363637e-05, |
| "loss": 0.5283, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.2229806598407281, |
| "grad_norm": 0.51907123478383, |
| "learning_rate": 1.781818181818182e-05, |
| "loss": 0.5077, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.22525597269624573, |
| "grad_norm": 0.5985820930171443, |
| "learning_rate": 1.8e-05, |
| "loss": 0.5314, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.22753128555176336, |
| "grad_norm": 0.5990332411759242, |
| "learning_rate": 1.8181818181818182e-05, |
| "loss": 0.5608, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.229806598407281, |
| "grad_norm": 0.539437803681028, |
| "learning_rate": 1.8363636363636367e-05, |
| "loss": 0.5294, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.23208191126279865, |
| "grad_norm": 0.6583588524475489, |
| "learning_rate": 1.8545454545454545e-05, |
| "loss": 0.5217, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.23435722411831628, |
| "grad_norm": 0.6082065004111803, |
| "learning_rate": 1.872727272727273e-05, |
| "loss": 0.5735, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.2366325369738339, |
| "grad_norm": 0.6830805339768707, |
| "learning_rate": 1.8909090909090912e-05, |
| "loss": 0.4806, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.23890784982935154, |
| "grad_norm": 0.7127039583647218, |
| "learning_rate": 1.9090909090909094e-05, |
| "loss": 0.5464, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.24118316268486917, |
| "grad_norm": 0.5799258813203299, |
| "learning_rate": 1.9272727272727275e-05, |
| "loss": 0.5456, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.2434584755403868, |
| "grad_norm": 0.626832121785901, |
| "learning_rate": 1.9454545454545457e-05, |
| "loss": 0.5155, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.24573378839590443, |
| "grad_norm": 0.626545364067402, |
| "learning_rate": 1.963636363636364e-05, |
| "loss": 0.5728, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.24800910125142206, |
| "grad_norm": 0.6756228608832343, |
| "learning_rate": 1.981818181818182e-05, |
| "loss": 0.499, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.2502844141069397, |
| "grad_norm": 0.6597283708892084, |
| "learning_rate": 2e-05, |
| "loss": 0.5033, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.2525597269624573, |
| "grad_norm": 0.5982551151532118, |
| "learning_rate": 2.0181818181818183e-05, |
| "loss": 0.5017, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.25483503981797495, |
| "grad_norm": 0.744281625979201, |
| "learning_rate": 2.0363636363636365e-05, |
| "loss": 0.5251, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.2571103526734926, |
| "grad_norm": 0.6016672894534352, |
| "learning_rate": 2.054545454545455e-05, |
| "loss": 0.5551, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.2593856655290102, |
| "grad_norm": 0.587932618261952, |
| "learning_rate": 2.0727272727272728e-05, |
| "loss": 0.5354, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.2616609783845279, |
| "grad_norm": 0.708698769469988, |
| "learning_rate": 2.090909090909091e-05, |
| "loss": 0.5264, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.26393629124004553, |
| "grad_norm": 0.5828643101576894, |
| "learning_rate": 2.109090909090909e-05, |
| "loss": 0.5196, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.26621160409556316, |
| "grad_norm": 0.6139046114610022, |
| "learning_rate": 2.1272727272727276e-05, |
| "loss": 0.5304, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.2684869169510808, |
| "grad_norm": 0.5890111951541503, |
| "learning_rate": 2.1454545454545458e-05, |
| "loss": 0.5259, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.2707622298065984, |
| "grad_norm": 0.5627119961934509, |
| "learning_rate": 2.1636363636363636e-05, |
| "loss": 0.544, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.27303754266211605, |
| "grad_norm": 0.562616066387851, |
| "learning_rate": 2.1818181818181818e-05, |
| "loss": 0.5183, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.2753128555176337, |
| "grad_norm": 0.640542051027252, |
| "learning_rate": 2.2000000000000003e-05, |
| "loss": 0.5378, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.2775881683731513, |
| "grad_norm": 0.8129933150812961, |
| "learning_rate": 2.2181818181818184e-05, |
| "loss": 0.5439, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.27986348122866894, |
| "grad_norm": 0.6024730739925737, |
| "learning_rate": 2.2363636363636366e-05, |
| "loss": 0.5456, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.2821387940841866, |
| "grad_norm": 0.5556244301627785, |
| "learning_rate": 2.2545454545454544e-05, |
| "loss": 0.498, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.2844141069397042, |
| "grad_norm": 0.5805044282934706, |
| "learning_rate": 2.2727272727272733e-05, |
| "loss": 0.4867, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.28668941979522183, |
| "grad_norm": 0.7621407399983073, |
| "learning_rate": 2.290909090909091e-05, |
| "loss": 0.5054, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.28896473265073946, |
| "grad_norm": 0.6072052484471204, |
| "learning_rate": 2.3090909090909093e-05, |
| "loss": 0.5057, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.2912400455062571, |
| "grad_norm": 0.750258183088792, |
| "learning_rate": 2.3272727272727274e-05, |
| "loss": 0.4932, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.2935153583617747, |
| "grad_norm": 0.7769802914476007, |
| "learning_rate": 2.3454545454545456e-05, |
| "loss": 0.5352, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.29579067121729236, |
| "grad_norm": 0.6654153752837011, |
| "learning_rate": 2.363636363636364e-05, |
| "loss": 0.5173, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.29806598407281, |
| "grad_norm": 0.7441767179318566, |
| "learning_rate": 2.381818181818182e-05, |
| "loss": 0.5169, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.3003412969283277, |
| "grad_norm": 0.7967429150652531, |
| "learning_rate": 2.4e-05, |
| "loss": 0.5314, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.3026166097838453, |
| "grad_norm": 0.8000301237971132, |
| "learning_rate": 2.4181818181818182e-05, |
| "loss": 0.5533, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.30489192263936293, |
| "grad_norm": 0.939455311035287, |
| "learning_rate": 2.4363636363636367e-05, |
| "loss": 0.5511, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.30716723549488056, |
| "grad_norm": 0.9480823859443015, |
| "learning_rate": 2.454545454545455e-05, |
| "loss": 0.5384, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.3094425483503982, |
| "grad_norm": 0.7533352675603919, |
| "learning_rate": 2.4727272727272727e-05, |
| "loss": 0.5401, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.3117178612059158, |
| "grad_norm": 0.9474216004746516, |
| "learning_rate": 2.490909090909091e-05, |
| "loss": 0.5068, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.31399317406143346, |
| "grad_norm": 0.8947273707720739, |
| "learning_rate": 2.5090909090909094e-05, |
| "loss": 0.5472, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.3162684869169511, |
| "grad_norm": 0.9068890803831923, |
| "learning_rate": 2.5272727272727275e-05, |
| "loss": 0.5413, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.3185437997724687, |
| "grad_norm": 0.7543198462881926, |
| "learning_rate": 2.5454545454545457e-05, |
| "loss": 0.5188, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.32081911262798635, |
| "grad_norm": 0.7278501214873285, |
| "learning_rate": 2.563636363636364e-05, |
| "loss": 0.4928, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.323094425483504, |
| "grad_norm": 0.8008535696995278, |
| "learning_rate": 2.5818181818181824e-05, |
| "loss": 0.5598, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.3253697383390216, |
| "grad_norm": 0.9543254203313492, |
| "learning_rate": 2.6000000000000002e-05, |
| "loss": 0.5543, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.32764505119453924, |
| "grad_norm": 0.58106713770697, |
| "learning_rate": 2.6181818181818183e-05, |
| "loss": 0.4953, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.32992036405005687, |
| "grad_norm": 0.8674304291572694, |
| "learning_rate": 2.6363636363636365e-05, |
| "loss": 0.5151, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.3321956769055745, |
| "grad_norm": 0.8493822180548894, |
| "learning_rate": 2.654545454545455e-05, |
| "loss": 0.5163, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.33447098976109213, |
| "grad_norm": 0.8187615665950908, |
| "learning_rate": 2.672727272727273e-05, |
| "loss": 0.5566, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.33674630261660976, |
| "grad_norm": 0.9950204016308096, |
| "learning_rate": 2.690909090909091e-05, |
| "loss": 0.5371, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.3390216154721274, |
| "grad_norm": 0.8808677119498094, |
| "learning_rate": 2.709090909090909e-05, |
| "loss": 0.5233, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.3412969283276451, |
| "grad_norm": 0.8457240122266904, |
| "learning_rate": 2.7272727272727273e-05, |
| "loss": 0.516, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.3435722411831627, |
| "grad_norm": 0.8283847683700987, |
| "learning_rate": 2.7454545454545458e-05, |
| "loss": 0.503, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.34584755403868034, |
| "grad_norm": 0.8742562793745727, |
| "learning_rate": 2.763636363636364e-05, |
| "loss": 0.5842, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.34812286689419797, |
| "grad_norm": 0.73368286028384, |
| "learning_rate": 2.781818181818182e-05, |
| "loss": 0.4992, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.3503981797497156, |
| "grad_norm": 0.9115501625619696, |
| "learning_rate": 2.8e-05, |
| "loss": 0.544, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.35267349260523323, |
| "grad_norm": 0.7823471567359226, |
| "learning_rate": 2.8181818181818185e-05, |
| "loss": 0.5255, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.35494880546075086, |
| "grad_norm": 0.8817254934737524, |
| "learning_rate": 2.8363636363636366e-05, |
| "loss": 0.5192, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.3572241183162685, |
| "grad_norm": 0.6669781410669484, |
| "learning_rate": 2.8545454545454548e-05, |
| "loss": 0.4928, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.3594994311717861, |
| "grad_norm": 0.7564492150228576, |
| "learning_rate": 2.872727272727273e-05, |
| "loss": 0.4826, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.36177474402730375, |
| "grad_norm": 0.8455103889650585, |
| "learning_rate": 2.8909090909090914e-05, |
| "loss": 0.4954, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.3640500568828214, |
| "grad_norm": 0.8961703984285883, |
| "learning_rate": 2.9090909090909093e-05, |
| "loss": 0.4845, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.366325369738339, |
| "grad_norm": 0.6730757645383194, |
| "learning_rate": 2.9272727272727274e-05, |
| "loss": 0.5241, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.36860068259385664, |
| "grad_norm": 1.0257851109411285, |
| "learning_rate": 2.9454545454545456e-05, |
| "loss": 0.4895, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.3708759954493743, |
| "grad_norm": 0.9565249938146152, |
| "learning_rate": 2.963636363636364e-05, |
| "loss": 0.5158, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.3731513083048919, |
| "grad_norm": 0.8580475728945989, |
| "learning_rate": 2.9818181818181823e-05, |
| "loss": 0.5131, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.37542662116040953, |
| "grad_norm": 1.0407795891737115, |
| "learning_rate": 3.0000000000000004e-05, |
| "loss": 0.5551, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.37770193401592717, |
| "grad_norm": 0.9461477598875092, |
| "learning_rate": 3.0181818181818182e-05, |
| "loss": 0.5128, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.3799772468714448, |
| "grad_norm": 1.1750520423935205, |
| "learning_rate": 3.0363636363636364e-05, |
| "loss": 0.4988, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.3822525597269625, |
| "grad_norm": 0.9619657535301448, |
| "learning_rate": 3.054545454545455e-05, |
| "loss": 0.5443, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.3845278725824801, |
| "grad_norm": 0.8084615561755145, |
| "learning_rate": 3.072727272727273e-05, |
| "loss": 0.5538, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.38680318543799774, |
| "grad_norm": 0.9166403915982619, |
| "learning_rate": 3.090909090909091e-05, |
| "loss": 0.5025, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.3890784982935154, |
| "grad_norm": 0.706974515649925, |
| "learning_rate": 3.1090909090909094e-05, |
| "loss": 0.5192, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.391353811149033, |
| "grad_norm": 0.744738485367252, |
| "learning_rate": 3.127272727272728e-05, |
| "loss": 0.5452, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.39362912400455063, |
| "grad_norm": 1.185609845688004, |
| "learning_rate": 3.145454545454546e-05, |
| "loss": 0.5392, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.39590443686006827, |
| "grad_norm": 0.6812824017739959, |
| "learning_rate": 3.1636363636363635e-05, |
| "loss": 0.538, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.3981797497155859, |
| "grad_norm": 0.8921095831871404, |
| "learning_rate": 3.181818181818182e-05, |
| "loss": 0.5047, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.4004550625711035, |
| "grad_norm": 1.0272787280734288, |
| "learning_rate": 3.2000000000000005e-05, |
| "loss": 0.5, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.40273037542662116, |
| "grad_norm": 0.7627496866984136, |
| "learning_rate": 3.2181818181818184e-05, |
| "loss": 0.5271, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.4050056882821388, |
| "grad_norm": 0.783437681835504, |
| "learning_rate": 3.236363636363637e-05, |
| "loss": 0.5053, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.4072810011376564, |
| "grad_norm": 0.9198593954163056, |
| "learning_rate": 3.254545454545455e-05, |
| "loss": 0.526, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.40955631399317405, |
| "grad_norm": 0.990543263500737, |
| "learning_rate": 3.272727272727273e-05, |
| "loss": 0.5486, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.4118316268486917, |
| "grad_norm": 0.7674137416061684, |
| "learning_rate": 3.290909090909091e-05, |
| "loss": 0.542, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.4141069397042093, |
| "grad_norm": 0.7097774464178122, |
| "learning_rate": 3.3090909090909095e-05, |
| "loss": 0.5414, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.41638225255972694, |
| "grad_norm": 0.7301988738783638, |
| "learning_rate": 3.327272727272727e-05, |
| "loss": 0.4837, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.41865756541524457, |
| "grad_norm": 0.6586636192320678, |
| "learning_rate": 3.345454545454546e-05, |
| "loss": 0.5704, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.42093287827076226, |
| "grad_norm": 0.7051322380639373, |
| "learning_rate": 3.363636363636364e-05, |
| "loss": 0.5087, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.4232081911262799, |
| "grad_norm": 0.6614159680980038, |
| "learning_rate": 3.381818181818182e-05, |
| "loss": 0.5356, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.4254835039817975, |
| "grad_norm": 0.6616643424557804, |
| "learning_rate": 3.4e-05, |
| "loss": 0.5236, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.42775881683731515, |
| "grad_norm": 0.7001439685159849, |
| "learning_rate": 3.4181818181818185e-05, |
| "loss": 0.4978, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.4300341296928328, |
| "grad_norm": 1.0004953566379289, |
| "learning_rate": 3.436363636363637e-05, |
| "loss": 0.5361, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.4323094425483504, |
| "grad_norm": 0.9763247007839354, |
| "learning_rate": 3.454545454545455e-05, |
| "loss": 0.5032, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.43458475540386804, |
| "grad_norm": 0.7796740868074972, |
| "learning_rate": 3.4727272727272726e-05, |
| "loss": 0.5303, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.43686006825938567, |
| "grad_norm": 0.933786254284513, |
| "learning_rate": 3.490909090909091e-05, |
| "loss": 0.481, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.4391353811149033, |
| "grad_norm": 1.1059297147921487, |
| "learning_rate": 3.5090909090909096e-05, |
| "loss": 0.5371, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.44141069397042093, |
| "grad_norm": 0.9239737001173246, |
| "learning_rate": 3.5272727272727274e-05, |
| "loss": 0.5298, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.44368600682593856, |
| "grad_norm": 0.7224979164330604, |
| "learning_rate": 3.545454545454546e-05, |
| "loss": 0.4723, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.4459613196814562, |
| "grad_norm": 0.8163841859106732, |
| "learning_rate": 3.563636363636364e-05, |
| "loss": 0.5182, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.4482366325369738, |
| "grad_norm": 0.7793873365990543, |
| "learning_rate": 3.581818181818182e-05, |
| "loss": 0.5175, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.45051194539249145, |
| "grad_norm": 0.9106557759854739, |
| "learning_rate": 3.6e-05, |
| "loss": 0.5099, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.4527872582480091, |
| "grad_norm": 0.8616846220679973, |
| "learning_rate": 3.6181818181818186e-05, |
| "loss": 0.5269, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.4550625711035267, |
| "grad_norm": 0.9492125869073801, |
| "learning_rate": 3.6363636363636364e-05, |
| "loss": 0.4923, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.45733788395904434, |
| "grad_norm": 0.9382423826449089, |
| "learning_rate": 3.654545454545455e-05, |
| "loss": 0.5067, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.459613196814562, |
| "grad_norm": 0.853602390271063, |
| "learning_rate": 3.6727272727272734e-05, |
| "loss": 0.5372, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.46188850967007966, |
| "grad_norm": 0.8967497126984583, |
| "learning_rate": 3.690909090909091e-05, |
| "loss": 0.4968, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.4641638225255973, |
| "grad_norm": 0.738267328983595, |
| "learning_rate": 3.709090909090909e-05, |
| "loss": 0.4992, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.4664391353811149, |
| "grad_norm": 0.7188663644275407, |
| "learning_rate": 3.7272727272727276e-05, |
| "loss": 0.4772, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.46871444823663255, |
| "grad_norm": 0.6515238182183732, |
| "learning_rate": 3.745454545454546e-05, |
| "loss": 0.5144, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.4709897610921502, |
| "grad_norm": 0.8468875442175026, |
| "learning_rate": 3.763636363636364e-05, |
| "loss": 0.5449, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.4732650739476678, |
| "grad_norm": 0.9072495234464792, |
| "learning_rate": 3.7818181818181824e-05, |
| "loss": 0.4696, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.47554038680318544, |
| "grad_norm": 0.7954574695888201, |
| "learning_rate": 3.8e-05, |
| "loss": 0.4927, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.4778156996587031, |
| "grad_norm": 0.8327389753927068, |
| "learning_rate": 3.818181818181819e-05, |
| "loss": 0.521, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.4800910125142207, |
| "grad_norm": 0.722228238156621, |
| "learning_rate": 3.8363636363636365e-05, |
| "loss": 0.4953, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.48236632536973834, |
| "grad_norm": 0.8705537435893771, |
| "learning_rate": 3.854545454545455e-05, |
| "loss": 0.5362, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.48464163822525597, |
| "grad_norm": 0.8338745144603515, |
| "learning_rate": 3.872727272727273e-05, |
| "loss": 0.4901, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.4869169510807736, |
| "grad_norm": 1.0078139079315223, |
| "learning_rate": 3.8909090909090914e-05, |
| "loss": 0.5112, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.4891922639362912, |
| "grad_norm": 1.0811992758776074, |
| "learning_rate": 3.909090909090909e-05, |
| "loss": 0.4931, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.49146757679180886, |
| "grad_norm": 0.8803716614397517, |
| "learning_rate": 3.927272727272728e-05, |
| "loss": 0.4868, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.4937428896473265, |
| "grad_norm": 0.803266647344771, |
| "learning_rate": 3.9454545454545455e-05, |
| "loss": 0.4902, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.4960182025028441, |
| "grad_norm": 1.00833874965682, |
| "learning_rate": 3.963636363636364e-05, |
| "loss": 0.464, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.49829351535836175, |
| "grad_norm": 0.96083659528256, |
| "learning_rate": 3.9818181818181825e-05, |
| "loss": 0.4919, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.5005688282138794, |
| "grad_norm": 0.7790449810495192, |
| "learning_rate": 4e-05, |
| "loss": 0.4965, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.502844141069397, |
| "grad_norm": 0.8905458268931085, |
| "learning_rate": 3.9999974697382296e-05, |
| "loss": 0.53, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.5051194539249146, |
| "grad_norm": 0.7625724515485385, |
| "learning_rate": 3.9999898789593194e-05, |
| "loss": 0.5027, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.5073947667804323, |
| "grad_norm": 1.0155453849101541, |
| "learning_rate": 3.999977227682476e-05, |
| "loss": 0.508, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.5096700796359499, |
| "grad_norm": 0.6681764192465899, |
| "learning_rate": 3.99995951593971e-05, |
| "loss": 0.5049, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.5119453924914675, |
| "grad_norm": 0.9099406965338225, |
| "learning_rate": 3.999936743775839e-05, |
| "loss": 0.5252, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.5142207053469852, |
| "grad_norm": 0.6923524004307658, |
| "learning_rate": 3.999908911248481e-05, |
| "loss": 0.4751, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.5164960182025028, |
| "grad_norm": 0.881151320708067, |
| "learning_rate": 3.999876018428059e-05, |
| "loss": 0.5008, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.5187713310580204, |
| "grad_norm": 0.8293172103871226, |
| "learning_rate": 3.999838065397801e-05, |
| "loss": 0.5198, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.5210466439135382, |
| "grad_norm": 0.7096703324834885, |
| "learning_rate": 3.9997950522537385e-05, |
| "loss": 0.5272, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.5233219567690558, |
| "grad_norm": 0.7127876942337593, |
| "learning_rate": 3.999746979104705e-05, |
| "loss": 0.5225, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.5255972696245734, |
| "grad_norm": 0.7792466358382388, |
| "learning_rate": 3.999693846072339e-05, |
| "loss": 0.5182, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.5278725824800911, |
| "grad_norm": 0.779669905296092, |
| "learning_rate": 3.9996356532910814e-05, |
| "loss": 0.4981, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.5301478953356087, |
| "grad_norm": 0.800598136561164, |
| "learning_rate": 3.9995724009081745e-05, |
| "loss": 0.5603, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.5324232081911263, |
| "grad_norm": 0.9619367505895898, |
| "learning_rate": 3.9995040890836635e-05, |
| "loss": 0.5583, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.534698521046644, |
| "grad_norm": 0.9453874433476237, |
| "learning_rate": 3.999430717990395e-05, |
| "loss": 0.5082, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.5369738339021616, |
| "grad_norm": 0.9382827233401629, |
| "learning_rate": 3.9993522878140174e-05, |
| "loss": 0.4989, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.5392491467576792, |
| "grad_norm": 0.78252208920523, |
| "learning_rate": 3.999268798752979e-05, |
| "loss": 0.5234, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.5415244596131968, |
| "grad_norm": 0.8195622765128195, |
| "learning_rate": 3.9991802510185296e-05, |
| "loss": 0.556, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.5437997724687145, |
| "grad_norm": 0.8208206901010974, |
| "learning_rate": 3.999086644834719e-05, |
| "loss": 0.4887, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.5460750853242321, |
| "grad_norm": 0.7551681915194937, |
| "learning_rate": 3.998987980438393e-05, |
| "loss": 0.4936, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.5483503981797497, |
| "grad_norm": 0.7495264868649975, |
| "learning_rate": 3.998884258079199e-05, |
| "loss": 0.4627, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.5506257110352674, |
| "grad_norm": 0.8060313156395092, |
| "learning_rate": 3.998775478019584e-05, |
| "loss": 0.5042, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.552901023890785, |
| "grad_norm": 0.7867672669067826, |
| "learning_rate": 3.998661640534787e-05, |
| "loss": 0.5115, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.5551763367463026, |
| "grad_norm": 0.7557722856697018, |
| "learning_rate": 3.9985427459128496e-05, |
| "loss": 0.5077, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.5574516496018203, |
| "grad_norm": 0.7390570724379956, |
| "learning_rate": 3.998418794454604e-05, |
| "loss": 0.5583, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.5597269624573379, |
| "grad_norm": 0.8305288417970496, |
| "learning_rate": 3.998289786473681e-05, |
| "loss": 0.5342, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.5620022753128555, |
| "grad_norm": 0.7858042344970474, |
| "learning_rate": 3.998155722296504e-05, |
| "loss": 0.5082, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.5642775881683731, |
| "grad_norm": 0.7423633752656493, |
| "learning_rate": 3.99801660226229e-05, |
| "loss": 0.511, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.5665529010238908, |
| "grad_norm": 0.9185613255353122, |
| "learning_rate": 3.9978724267230495e-05, |
| "loss": 0.5457, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.5688282138794084, |
| "grad_norm": 1.0074100879377919, |
| "learning_rate": 3.997723196043585e-05, |
| "loss": 0.5201, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.571103526734926, |
| "grad_norm": 0.8713246831114032, |
| "learning_rate": 3.997568910601489e-05, |
| "loss": 0.541, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.5733788395904437, |
| "grad_norm": 1.0445942432402573, |
| "learning_rate": 3.997409570787144e-05, |
| "loss": 0.5242, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.5756541524459613, |
| "grad_norm": 0.7312036738233352, |
| "learning_rate": 3.997245177003721e-05, |
| "loss": 0.4896, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.5779294653014789, |
| "grad_norm": 0.8072632910129269, |
| "learning_rate": 3.9970757296671795e-05, |
| "loss": 0.4972, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.5802047781569966, |
| "grad_norm": 1.0752550763412985, |
| "learning_rate": 3.9969012292062655e-05, |
| "loss": 0.4786, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.5824800910125142, |
| "grad_norm": 0.7203656254008417, |
| "learning_rate": 3.9967216760625113e-05, |
| "loss": 0.5189, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.5847554038680318, |
| "grad_norm": 0.6613667360116985, |
| "learning_rate": 3.9965370706902324e-05, |
| "loss": 0.4769, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.5870307167235495, |
| "grad_norm": 0.7822122386302286, |
| "learning_rate": 3.99634741355653e-05, |
| "loss": 0.5005, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.5893060295790671, |
| "grad_norm": 0.9385427267856482, |
| "learning_rate": 3.9961527051412854e-05, |
| "loss": 0.5257, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.5915813424345847, |
| "grad_norm": 0.6735881526097904, |
| "learning_rate": 3.9959529459371624e-05, |
| "loss": 0.5049, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.5938566552901023, |
| "grad_norm": 0.8457373957055475, |
| "learning_rate": 3.9957481364496044e-05, |
| "loss": 0.5102, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.59613196814562, |
| "grad_norm": 1.0973039674832767, |
| "learning_rate": 3.9955382771968316e-05, |
| "loss": 0.5099, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.5984072810011376, |
| "grad_norm": 0.8203260675634886, |
| "learning_rate": 3.9953233687098435e-05, |
| "loss": 0.5309, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.6006825938566553, |
| "grad_norm": 0.7874965178599942, |
| "learning_rate": 3.9951034115324156e-05, |
| "loss": 0.568, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.602957906712173, |
| "grad_norm": 1.2512774514947018, |
| "learning_rate": 3.994878406221097e-05, |
| "loss": 0.5319, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.6052332195676906, |
| "grad_norm": 0.8249352132277762, |
| "learning_rate": 3.994648353345208e-05, |
| "loss": 0.5227, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.6075085324232082, |
| "grad_norm": 1.034821442073306, |
| "learning_rate": 3.994413253486846e-05, |
| "loss": 0.4714, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.6097838452787259, |
| "grad_norm": 1.3292869026587193, |
| "learning_rate": 3.994173107240872e-05, |
| "loss": 0.4962, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.6120591581342435, |
| "grad_norm": 0.8212065544130984, |
| "learning_rate": 3.9939279152149216e-05, |
| "loss": 0.5242, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.6143344709897611, |
| "grad_norm": 1.5092198444566256, |
| "learning_rate": 3.993677678029392e-05, |
| "loss": 0.5343, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.6166097838452788, |
| "grad_norm": 0.9359978887113274, |
| "learning_rate": 3.993422396317451e-05, |
| "loss": 0.5203, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.6188850967007964, |
| "grad_norm": 1.4237602720094844, |
| "learning_rate": 3.993162070725027e-05, |
| "loss": 0.5501, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.621160409556314, |
| "grad_norm": 0.9726057260183041, |
| "learning_rate": 3.992896701910813e-05, |
| "loss": 0.5163, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.6234357224118316, |
| "grad_norm": 1.4550093737222574, |
| "learning_rate": 3.99262629054626e-05, |
| "loss": 0.5206, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.6257110352673493, |
| "grad_norm": 0.9555548052797809, |
| "learning_rate": 3.992350837315581e-05, |
| "loss": 0.5023, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.6279863481228669, |
| "grad_norm": 1.369245874866738, |
| "learning_rate": 3.9920703429157436e-05, |
| "loss": 0.4886, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.6302616609783845, |
| "grad_norm": 1.1851345853900797, |
| "learning_rate": 3.991784808056473e-05, |
| "loss": 0.5148, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.6325369738339022, |
| "grad_norm": 1.179793591277137, |
| "learning_rate": 3.9914942334602464e-05, |
| "loss": 0.5318, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.6348122866894198, |
| "grad_norm": 1.199565301581228, |
| "learning_rate": 3.991198619862294e-05, |
| "loss": 0.5194, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.6370875995449374, |
| "grad_norm": 0.9094867274398518, |
| "learning_rate": 3.990897968010596e-05, |
| "loss": 0.5411, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.6393629124004551, |
| "grad_norm": 1.1094913036582985, |
| "learning_rate": 3.99059227866588e-05, |
| "loss": 0.5076, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.6416382252559727, |
| "grad_norm": 0.826685561156455, |
| "learning_rate": 3.9902815526016196e-05, |
| "loss": 0.5031, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.6439135381114903, |
| "grad_norm": 0.9716995105435796, |
| "learning_rate": 3.989965790604033e-05, |
| "loss": 0.4603, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.646188850967008, |
| "grad_norm": 0.900885856862581, |
| "learning_rate": 3.9896449934720814e-05, |
| "loss": 0.5107, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.6484641638225256, |
| "grad_norm": 0.8778940794295121, |
| "learning_rate": 3.989319162017465e-05, |
| "loss": 0.5502, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.6507394766780432, |
| "grad_norm": 0.9622322529521133, |
| "learning_rate": 3.988988297064623e-05, |
| "loss": 0.5083, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.6530147895335608, |
| "grad_norm": 0.7756328265585737, |
| "learning_rate": 3.98865239945073e-05, |
| "loss": 0.5028, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.6552901023890785, |
| "grad_norm": 0.6822282919532825, |
| "learning_rate": 3.988311470025695e-05, |
| "loss": 0.5036, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.6575654152445961, |
| "grad_norm": 0.8959561359324182, |
| "learning_rate": 3.987965509652159e-05, |
| "loss": 0.4908, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.6598407281001137, |
| "grad_norm": 0.6601859038879857, |
| "learning_rate": 3.987614519205493e-05, |
| "loss": 0.478, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.6621160409556314, |
| "grad_norm": 0.9825085087311318, |
| "learning_rate": 3.987258499573792e-05, |
| "loss": 0.5338, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.664391353811149, |
| "grad_norm": 1.022289539818651, |
| "learning_rate": 3.986897451657882e-05, |
| "loss": 0.5124, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 0.659127791072242, |
| "learning_rate": 3.986531376371307e-05, |
| "loss": 0.4858, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.6689419795221843, |
| "grad_norm": 0.9679175230064438, |
| "learning_rate": 3.9861602746403336e-05, |
| "loss": 0.5302, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.6712172923777019, |
| "grad_norm": 0.7180146822347161, |
| "learning_rate": 3.985784147403947e-05, |
| "loss": 0.4594, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.6734926052332195, |
| "grad_norm": 0.8559576954351592, |
| "learning_rate": 3.985402995613847e-05, |
| "loss": 0.4986, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.6757679180887372, |
| "grad_norm": 0.9453576621806624, |
| "learning_rate": 3.985016820234447e-05, |
| "loss": 0.4866, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.6780432309442548, |
| "grad_norm": 0.857442638332296, |
| "learning_rate": 3.984625622242872e-05, |
| "loss": 0.4996, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.6803185437997725, |
| "grad_norm": 0.7396924998396898, |
| "learning_rate": 3.9842294026289565e-05, |
| "loss": 0.4805, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.6825938566552902, |
| "grad_norm": 1.0151633646700187, |
| "learning_rate": 3.983828162395238e-05, |
| "loss": 0.4821, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.6848691695108078, |
| "grad_norm": 0.9979683962499621, |
| "learning_rate": 3.98342190255696e-05, |
| "loss": 0.527, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.6871444823663254, |
| "grad_norm": 0.899885752733397, |
| "learning_rate": 3.9830106241420666e-05, |
| "loss": 0.5062, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.689419795221843, |
| "grad_norm": 0.8778242686510422, |
| "learning_rate": 3.9825943281912005e-05, |
| "loss": 0.5062, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.6916951080773607, |
| "grad_norm": 0.637806414801961, |
| "learning_rate": 3.9821730157576975e-05, |
| "loss": 0.4887, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.6939704209328783, |
| "grad_norm": 0.7447436139595571, |
| "learning_rate": 3.98174668790759e-05, |
| "loss": 0.5404, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.6962457337883959, |
| "grad_norm": 0.5406588274310511, |
| "learning_rate": 3.9813153457195986e-05, |
| "loss": 0.4759, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.6985210466439136, |
| "grad_norm": 0.776438137829388, |
| "learning_rate": 3.980878990285132e-05, |
| "loss": 0.5379, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.7007963594994312, |
| "grad_norm": 0.6179783876468353, |
| "learning_rate": 3.9804376227082834e-05, |
| "loss": 0.5152, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.7030716723549488, |
| "grad_norm": 0.6195544387076716, |
| "learning_rate": 3.9799912441058286e-05, |
| "loss": 0.5232, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.7053469852104665, |
| "grad_norm": 0.6482103136431522, |
| "learning_rate": 3.979539855607222e-05, |
| "loss": 0.5045, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.7076222980659841, |
| "grad_norm": 0.7357097196921872, |
| "learning_rate": 3.9790834583545946e-05, |
| "loss": 0.4881, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.7098976109215017, |
| "grad_norm": 0.6461268041598386, |
| "learning_rate": 3.978622053502751e-05, |
| "loss": 0.4773, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.7121729237770194, |
| "grad_norm": 0.6986308188983689, |
| "learning_rate": 3.978155642219167e-05, |
| "loss": 0.5217, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.714448236632537, |
| "grad_norm": 0.568430245151543, |
| "learning_rate": 3.977684225683984e-05, |
| "loss": 0.5206, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.7167235494880546, |
| "grad_norm": 0.736787180700768, |
| "learning_rate": 3.9772078050900105e-05, |
| "loss": 0.5388, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.7189988623435722, |
| "grad_norm": 0.6623240192179631, |
| "learning_rate": 3.9767263816427146e-05, |
| "loss": 0.4748, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.7212741751990899, |
| "grad_norm": 0.7564926165936612, |
| "learning_rate": 3.9762399565602233e-05, |
| "loss": 0.4889, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.7235494880546075, |
| "grad_norm": 0.6534761363373418, |
| "learning_rate": 3.97574853107332e-05, |
| "loss": 0.5083, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.7258248009101251, |
| "grad_norm": 0.7537961040113483, |
| "learning_rate": 3.97525210642544e-05, |
| "loss": 0.4951, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.7281001137656428, |
| "grad_norm": 0.7578706031427168, |
| "learning_rate": 3.974750683872667e-05, |
| "loss": 0.4905, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.7303754266211604, |
| "grad_norm": 0.7260807303288004, |
| "learning_rate": 3.9742442646837316e-05, |
| "loss": 0.5322, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.732650739476678, |
| "grad_norm": 0.9173353438904912, |
| "learning_rate": 3.973732850140007e-05, |
| "loss": 0.52, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.7349260523321957, |
| "grad_norm": 0.6310646643036116, |
| "learning_rate": 3.973216441535506e-05, |
| "loss": 0.532, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.7372013651877133, |
| "grad_norm": 0.7197572927145501, |
| "learning_rate": 3.972695040176877e-05, |
| "loss": 0.5008, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.7394766780432309, |
| "grad_norm": 0.7374860781338815, |
| "learning_rate": 3.972168647383402e-05, |
| "loss": 0.5578, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.7417519908987485, |
| "grad_norm": 0.6128071895701607, |
| "learning_rate": 3.971637264486993e-05, |
| "loss": 0.5155, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.7440273037542662, |
| "grad_norm": 0.5637948735002289, |
| "learning_rate": 3.971100892832188e-05, |
| "loss": 0.4719, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.7463026166097838, |
| "grad_norm": 0.5424079474042907, |
| "learning_rate": 3.970559533776147e-05, |
| "loss": 0.4922, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.7485779294653014, |
| "grad_norm": 0.6317180275054568, |
| "learning_rate": 3.9700131886886506e-05, |
| "loss": 0.5046, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.7508532423208191, |
| "grad_norm": 0.7049941743535045, |
| "learning_rate": 3.9694618589520945e-05, |
| "loss": 0.5153, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.7531285551763367, |
| "grad_norm": 0.7526126287558034, |
| "learning_rate": 3.968905545961487e-05, |
| "loss": 0.4978, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.7554038680318543, |
| "grad_norm": 0.6914871893079282, |
| "learning_rate": 3.968344251124447e-05, |
| "loss": 0.5221, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.757679180887372, |
| "grad_norm": 0.8497636918088499, |
| "learning_rate": 3.967777975861196e-05, |
| "loss": 0.495, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.7599544937428896, |
| "grad_norm": 0.6810417416080219, |
| "learning_rate": 3.96720672160456e-05, |
| "loss": 0.538, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.7622298065984073, |
| "grad_norm": 0.8161126575125076, |
| "learning_rate": 3.966630489799959e-05, |
| "loss": 0.5085, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.764505119453925, |
| "grad_norm": 0.7738738143828285, |
| "learning_rate": 3.966049281905414e-05, |
| "loss": 0.4873, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.7667804323094426, |
| "grad_norm": 0.8764493288845413, |
| "learning_rate": 3.9654630993915305e-05, |
| "loss": 0.4971, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.7690557451649602, |
| "grad_norm": 0.7805156720414773, |
| "learning_rate": 3.964871943741504e-05, |
| "loss": 0.5122, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.7713310580204779, |
| "grad_norm": 0.7201535220586814, |
| "learning_rate": 3.964275816451115e-05, |
| "loss": 0.5161, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.7736063708759955, |
| "grad_norm": 0.7396481006919741, |
| "learning_rate": 3.96367471902872e-05, |
| "loss": 0.5104, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.7758816837315131, |
| "grad_norm": 0.7123311898098197, |
| "learning_rate": 3.963068652995252e-05, |
| "loss": 0.5148, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.7781569965870307, |
| "grad_norm": 0.6652575492753041, |
| "learning_rate": 3.962457619884218e-05, |
| "loss": 0.471, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.7804323094425484, |
| "grad_norm": 0.8105783570316155, |
| "learning_rate": 3.961841621241692e-05, |
| "loss": 0.5463, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.782707622298066, |
| "grad_norm": 0.606651260022466, |
| "learning_rate": 3.9612206586263104e-05, |
| "loss": 0.5066, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.7849829351535836, |
| "grad_norm": 1.0361515753308552, |
| "learning_rate": 3.960594733609273e-05, |
| "loss": 0.4976, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.7872582480091013, |
| "grad_norm": 0.6631800027893998, |
| "learning_rate": 3.959963847774332e-05, |
| "loss": 0.4864, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.7895335608646189, |
| "grad_norm": 0.748533983699474, |
| "learning_rate": 3.959328002717795e-05, |
| "loss": 0.4753, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.7918088737201365, |
| "grad_norm": 0.5971622138969933, |
| "learning_rate": 3.958687200048516e-05, |
| "loss": 0.4916, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.7940841865756542, |
| "grad_norm": 0.660067189965713, |
| "learning_rate": 3.958041441387894e-05, |
| "loss": 0.5161, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.7963594994311718, |
| "grad_norm": 0.5986830293577159, |
| "learning_rate": 3.957390728369867e-05, |
| "loss": 0.4895, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.7986348122866894, |
| "grad_norm": 0.6004693311643707, |
| "learning_rate": 3.9567350626409094e-05, |
| "loss": 0.4915, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.800910125142207, |
| "grad_norm": 0.6276703295004309, |
| "learning_rate": 3.956074445860027e-05, |
| "loss": 0.487, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.8031854379977247, |
| "grad_norm": 0.6103277468343838, |
| "learning_rate": 3.955408879698753e-05, |
| "loss": 0.507, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.8054607508532423, |
| "grad_norm": 0.6799400586797437, |
| "learning_rate": 3.954738365841144e-05, |
| "loss": 0.5003, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.8077360637087599, |
| "grad_norm": 0.6505285949994575, |
| "learning_rate": 3.9540629059837767e-05, |
| "loss": 0.4729, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.8100113765642776, |
| "grad_norm": 0.6586392259013478, |
| "learning_rate": 3.95338250183574e-05, |
| "loss": 0.5229, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.8122866894197952, |
| "grad_norm": 0.7567869042648286, |
| "learning_rate": 3.952697155118635e-05, |
| "loss": 0.5088, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.8145620022753128, |
| "grad_norm": 0.6347889342427536, |
| "learning_rate": 3.952006867566569e-05, |
| "loss": 0.4869, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.8168373151308305, |
| "grad_norm": 0.6893249132631182, |
| "learning_rate": 3.951311640926148e-05, |
| "loss": 0.5289, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.8191126279863481, |
| "grad_norm": 0.7138269635664551, |
| "learning_rate": 3.95061147695648e-05, |
| "loss": 0.4777, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.8213879408418657, |
| "grad_norm": 0.7649893274903273, |
| "learning_rate": 3.949906377429162e-05, |
| "loss": 0.5008, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.8236632536973834, |
| "grad_norm": 0.7777457016748831, |
| "learning_rate": 3.94919634412828e-05, |
| "loss": 0.5241, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.825938566552901, |
| "grad_norm": 0.7206642217902817, |
| "learning_rate": 3.948481378850405e-05, |
| "loss": 0.4912, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.8282138794084186, |
| "grad_norm": 1.1425011384581647, |
| "learning_rate": 3.947761483404585e-05, |
| "loss": 0.486, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.8304891922639362, |
| "grad_norm": 0.7431045570776554, |
| "learning_rate": 3.947036659612345e-05, |
| "loss": 0.4874, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.8327645051194539, |
| "grad_norm": 0.9260901341723518, |
| "learning_rate": 3.94630690930768e-05, |
| "loss": 0.5323, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.8350398179749715, |
| "grad_norm": 0.6461587891988468, |
| "learning_rate": 3.945572234337046e-05, |
| "loss": 0.4979, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.8373151308304891, |
| "grad_norm": 1.2599484128221388, |
| "learning_rate": 3.944832636559366e-05, |
| "loss": 0.551, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.8395904436860068, |
| "grad_norm": 0.9305399142519398, |
| "learning_rate": 3.944088117846015e-05, |
| "loss": 0.5008, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.8418657565415245, |
| "grad_norm": 1.121526764606612, |
| "learning_rate": 3.94333868008082e-05, |
| "loss": 0.5148, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.8441410693970421, |
| "grad_norm": 0.9052152771663639, |
| "learning_rate": 3.9425843251600555e-05, |
| "loss": 0.491, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.8464163822525598, |
| "grad_norm": 1.0494031846799934, |
| "learning_rate": 3.9418250549924355e-05, |
| "loss": 0.5177, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.8486916951080774, |
| "grad_norm": 0.9888631313574925, |
| "learning_rate": 3.9410608714991136e-05, |
| "loss": 0.5107, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.850967007963595, |
| "grad_norm": 1.1055928703416604, |
| "learning_rate": 3.9402917766136735e-05, |
| "loss": 0.5397, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.8532423208191127, |
| "grad_norm": 1.115912993227309, |
| "learning_rate": 3.939517772282127e-05, |
| "loss": 0.4829, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.8555176336746303, |
| "grad_norm": 0.9099392894966472, |
| "learning_rate": 3.938738860462907e-05, |
| "loss": 0.5003, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.8577929465301479, |
| "grad_norm": 1.0706866454722113, |
| "learning_rate": 3.937955043126864e-05, |
| "loss": 0.5284, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.8600682593856656, |
| "grad_norm": 0.909125878536022, |
| "learning_rate": 3.9371663222572625e-05, |
| "loss": 0.5121, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.8623435722411832, |
| "grad_norm": 0.8423121951286772, |
| "learning_rate": 3.936372699849772e-05, |
| "loss": 0.4996, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.8646188850967008, |
| "grad_norm": 0.8332263490241194, |
| "learning_rate": 3.935574177912465e-05, |
| "loss": 0.5245, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.8668941979522184, |
| "grad_norm": 0.7530064300634705, |
| "learning_rate": 3.93477075846581e-05, |
| "loss": 0.5342, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.8691695108077361, |
| "grad_norm": 0.819669590890119, |
| "learning_rate": 3.93396244354267e-05, |
| "loss": 0.4866, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.8714448236632537, |
| "grad_norm": 0.6333438826999286, |
| "learning_rate": 3.933149235188294e-05, |
| "loss": 0.5043, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.8737201365187713, |
| "grad_norm": 0.7459251590464333, |
| "learning_rate": 3.9323311354603086e-05, |
| "loss": 0.4805, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.875995449374289, |
| "grad_norm": 0.6564470081159227, |
| "learning_rate": 3.931508146428724e-05, |
| "loss": 0.5067, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.8782707622298066, |
| "grad_norm": 0.6998497277576152, |
| "learning_rate": 3.930680270175915e-05, |
| "loss": 0.5271, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.8805460750853242, |
| "grad_norm": 0.5812403223556097, |
| "learning_rate": 3.929847508796628e-05, |
| "loss": 0.5124, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.8828213879408419, |
| "grad_norm": 0.7056380734319831, |
| "learning_rate": 3.9290098643979654e-05, |
| "loss": 0.5205, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.8850967007963595, |
| "grad_norm": 0.6313474071640174, |
| "learning_rate": 3.928167339099387e-05, |
| "loss": 0.503, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.8873720136518771, |
| "grad_norm": 0.6315874701337566, |
| "learning_rate": 3.927319935032703e-05, |
| "loss": 0.4765, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.8896473265073948, |
| "grad_norm": 0.7098100329539186, |
| "learning_rate": 3.926467654342067e-05, |
| "loss": 0.485, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.8919226393629124, |
| "grad_norm": 0.5044092995092366, |
| "learning_rate": 3.9256104991839724e-05, |
| "loss": 0.4695, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.89419795221843, |
| "grad_norm": 0.7272226279219421, |
| "learning_rate": 3.924748471727246e-05, |
| "loss": 0.4862, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.8964732650739476, |
| "grad_norm": 0.5283682029284336, |
| "learning_rate": 3.923881574153043e-05, |
| "loss": 0.4937, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.8987485779294653, |
| "grad_norm": 0.6049934119270775, |
| "learning_rate": 3.9230098086548414e-05, |
| "loss": 0.4581, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.9010238907849829, |
| "grad_norm": 0.6190064097442485, |
| "learning_rate": 3.9221331774384356e-05, |
| "loss": 0.4966, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.9032992036405005, |
| "grad_norm": 0.5778786608492407, |
| "learning_rate": 3.9212516827219323e-05, |
| "loss": 0.4709, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.9055745164960182, |
| "grad_norm": 0.721457490979815, |
| "learning_rate": 3.920365326735745e-05, |
| "loss": 0.4943, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.9078498293515358, |
| "grad_norm": 0.598488555520672, |
| "learning_rate": 3.919474111722585e-05, |
| "loss": 0.4643, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.9101251422070534, |
| "grad_norm": 0.58655862603512, |
| "learning_rate": 3.918578039937459e-05, |
| "loss": 0.4839, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.9124004550625711, |
| "grad_norm": 0.6909733580750689, |
| "learning_rate": 3.917677113647665e-05, |
| "loss": 0.4816, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.9146757679180887, |
| "grad_norm": 0.7561177941628888, |
| "learning_rate": 3.916771335132781e-05, |
| "loss": 0.5565, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.9169510807736063, |
| "grad_norm": 0.6312069877060036, |
| "learning_rate": 3.915860706684664e-05, |
| "loss": 0.5043, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.919226393629124, |
| "grad_norm": 0.6867308599614804, |
| "learning_rate": 3.914945230607443e-05, |
| "loss": 0.4969, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.9215017064846417, |
| "grad_norm": 0.6167566851414519, |
| "learning_rate": 3.914024909217511e-05, |
| "loss": 0.5155, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.9237770193401593, |
| "grad_norm": 0.6703548395838873, |
| "learning_rate": 3.9130997448435235e-05, |
| "loss": 0.4922, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.926052332195677, |
| "grad_norm": 0.6127702650380229, |
| "learning_rate": 3.9121697398263874e-05, |
| "loss": 0.4862, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.9283276450511946, |
| "grad_norm": 0.6245821024752293, |
| "learning_rate": 3.911234896519259e-05, |
| "loss": 0.5229, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.9306029579067122, |
| "grad_norm": 0.6846116775019015, |
| "learning_rate": 3.910295217287537e-05, |
| "loss": 0.5235, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.9328782707622298, |
| "grad_norm": 0.6293963435933653, |
| "learning_rate": 3.909350704508856e-05, |
| "loss": 0.4991, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.9351535836177475, |
| "grad_norm": 0.5096912924686995, |
| "learning_rate": 3.90840136057308e-05, |
| "loss": 0.5074, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.9374288964732651, |
| "grad_norm": 0.6203975419670066, |
| "learning_rate": 3.9074471878822975e-05, |
| "loss": 0.4932, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.9397042093287827, |
| "grad_norm": 0.5517618128678503, |
| "learning_rate": 3.906488188850816e-05, |
| "loss": 0.5003, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.9419795221843004, |
| "grad_norm": 0.6999593057949012, |
| "learning_rate": 3.905524365905153e-05, |
| "loss": 0.4752, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.944254835039818, |
| "grad_norm": 0.6627867781403664, |
| "learning_rate": 3.904555721484034e-05, |
| "loss": 0.4711, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.9465301478953356, |
| "grad_norm": 0.5561928699593454, |
| "learning_rate": 3.903582258038382e-05, |
| "loss": 0.4742, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.9488054607508533, |
| "grad_norm": 0.752856160747916, |
| "learning_rate": 3.902603978031315e-05, |
| "loss": 0.506, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.9510807736063709, |
| "grad_norm": 0.7040158975065405, |
| "learning_rate": 3.901620883938137e-05, |
| "loss": 0.4762, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.9533560864618885, |
| "grad_norm": 0.9078750042793132, |
| "learning_rate": 3.900632978246334e-05, |
| "loss": 0.4787, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.9556313993174061, |
| "grad_norm": 0.6880784393167054, |
| "learning_rate": 3.899640263455566e-05, |
| "loss": 0.4921, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.9579067121729238, |
| "grad_norm": 0.9616382722279428, |
| "learning_rate": 3.8986427420776604e-05, |
| "loss": 0.5105, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.9601820250284414, |
| "grad_norm": 0.7137899331602579, |
| "learning_rate": 3.897640416636608e-05, |
| "loss": 0.4889, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.962457337883959, |
| "grad_norm": 0.8857115772260385, |
| "learning_rate": 3.8966332896685545e-05, |
| "loss": 0.4662, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.9647326507394767, |
| "grad_norm": 0.632315334400379, |
| "learning_rate": 3.895621363721795e-05, |
| "loss": 0.5228, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.9670079635949943, |
| "grad_norm": 0.8832642529331565, |
| "learning_rate": 3.894604641356767e-05, |
| "loss": 0.5191, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.9692832764505119, |
| "grad_norm": 0.6474869552718459, |
| "learning_rate": 3.893583125146043e-05, |
| "loss": 0.4918, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.9715585893060296, |
| "grad_norm": 0.79708282939505, |
| "learning_rate": 3.892556817674328e-05, |
| "loss": 0.5068, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.9738339021615472, |
| "grad_norm": 0.5979739475341149, |
| "learning_rate": 3.8915257215384485e-05, |
| "loss": 0.4915, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.9761092150170648, |
| "grad_norm": 0.7710593720070672, |
| "learning_rate": 3.890489839347347e-05, |
| "loss": 0.4941, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.9783845278725825, |
| "grad_norm": 0.5526776697686373, |
| "learning_rate": 3.889449173722077e-05, |
| "loss": 0.4752, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.9806598407281001, |
| "grad_norm": 0.7049025640472182, |
| "learning_rate": 3.8884037272957936e-05, |
| "loss": 0.5074, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.9829351535836177, |
| "grad_norm": 0.6095293538766196, |
| "learning_rate": 3.887353502713752e-05, |
| "loss": 0.5254, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.9852104664391353, |
| "grad_norm": 0.5384079248688551, |
| "learning_rate": 3.886298502633294e-05, |
| "loss": 0.4802, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.987485779294653, |
| "grad_norm": 0.679283569969809, |
| "learning_rate": 3.885238729723847e-05, |
| "loss": 0.4922, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.9897610921501706, |
| "grad_norm": 0.6518320059827871, |
| "learning_rate": 3.8841741866669126e-05, |
| "loss": 0.5277, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.9920364050056882, |
| "grad_norm": 0.790824660703856, |
| "learning_rate": 3.883104876156064e-05, |
| "loss": 0.5036, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.9943117178612059, |
| "grad_norm": 0.512397127737013, |
| "learning_rate": 3.882030800896937e-05, |
| "loss": 0.4993, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.9965870307167235, |
| "grad_norm": 0.7387832742784267, |
| "learning_rate": 3.880951963607222e-05, |
| "loss": 0.5484, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.9988623435722411, |
| "grad_norm": 0.5472823524136017, |
| "learning_rate": 3.879868367016662e-05, |
| "loss": 0.4769, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.0011376564277588, |
| "grad_norm": 1.1121215401964364, |
| "learning_rate": 3.878780013867038e-05, |
| "loss": 0.722, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.0034129692832765, |
| "grad_norm": 0.5303547489884288, |
| "learning_rate": 3.877686906912168e-05, |
| "loss": 0.3795, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.005688282138794, |
| "grad_norm": 0.6536985280427839, |
| "learning_rate": 3.876589048917901e-05, |
| "loss": 0.3871, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.0079635949943118, |
| "grad_norm": 0.6972091381993293, |
| "learning_rate": 3.8754864426621045e-05, |
| "loss": 0.5004, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.0102389078498293, |
| "grad_norm": 0.9103464551846269, |
| "learning_rate": 3.874379090934659e-05, |
| "loss": 0.448, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.012514220705347, |
| "grad_norm": 0.7346432061843909, |
| "learning_rate": 3.873266996537456e-05, |
| "loss": 0.4681, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.0147895335608645, |
| "grad_norm": 0.8594704129747518, |
| "learning_rate": 3.8721501622843846e-05, |
| "loss": 0.4278, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.0170648464163823, |
| "grad_norm": 0.6520097950263782, |
| "learning_rate": 3.871028591001329e-05, |
| "loss": 0.4239, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.0193401592718998, |
| "grad_norm": 0.9043382585956159, |
| "learning_rate": 3.869902285526157e-05, |
| "loss": 0.4701, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.0216154721274175, |
| "grad_norm": 0.6783134828580611, |
| "learning_rate": 3.868771248708717e-05, |
| "loss": 0.4754, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.023890784982935, |
| "grad_norm": 0.805096661530093, |
| "learning_rate": 3.867635483410827e-05, |
| "loss": 0.421, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.0261660978384528, |
| "grad_norm": 0.6119555640585732, |
| "learning_rate": 3.8664949925062724e-05, |
| "loss": 0.4133, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.0284414106939703, |
| "grad_norm": 0.940012119317623, |
| "learning_rate": 3.8653497788807926e-05, |
| "loss": 0.4146, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.030716723549488, |
| "grad_norm": 0.5910709582131054, |
| "learning_rate": 3.864199845432077e-05, |
| "loss": 0.4616, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.0329920364050056, |
| "grad_norm": 0.7239603105969694, |
| "learning_rate": 3.8630451950697605e-05, |
| "loss": 0.4484, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.0352673492605233, |
| "grad_norm": 0.771047188430309, |
| "learning_rate": 3.8618858307154085e-05, |
| "loss": 0.4237, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.0375426621160408, |
| "grad_norm": 0.5924033036723514, |
| "learning_rate": 3.8607217553025174e-05, |
| "loss": 0.4687, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.0398179749715586, |
| "grad_norm": 0.7867388739225945, |
| "learning_rate": 3.859552971776503e-05, |
| "loss": 0.4366, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.0420932878270763, |
| "grad_norm": 0.5435581352639793, |
| "learning_rate": 3.858379483094693e-05, |
| "loss": 0.3981, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.0443686006825939, |
| "grad_norm": 0.7516548808366503, |
| "learning_rate": 3.857201292226322e-05, |
| "loss": 0.4714, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.0466439135381116, |
| "grad_norm": 0.672487728953046, |
| "learning_rate": 3.8560184021525194e-05, |
| "loss": 0.4314, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.0489192263936291, |
| "grad_norm": 0.721824130030649, |
| "learning_rate": 3.854830815866308e-05, |
| "loss": 0.4752, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.0511945392491469, |
| "grad_norm": 0.554588452712049, |
| "learning_rate": 3.8536385363725914e-05, |
| "loss": 0.4099, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.0534698521046644, |
| "grad_norm": 0.582636238778067, |
| "learning_rate": 3.8524415666881495e-05, |
| "loss": 0.4301, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.0557451649601821, |
| "grad_norm": 0.6063203313474805, |
| "learning_rate": 3.851239909841629e-05, |
| "loss": 0.4351, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.0580204778156996, |
| "grad_norm": 0.6530051032333519, |
| "learning_rate": 3.850033568873536e-05, |
| "loss": 0.4823, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.0602957906712174, |
| "grad_norm": 0.5246299248110554, |
| "learning_rate": 3.8488225468362284e-05, |
| "loss": 0.3294, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.062571103526735, |
| "grad_norm": 0.5622129573629325, |
| "learning_rate": 3.847606846793909e-05, |
| "loss": 0.4526, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.0648464163822526, |
| "grad_norm": 0.5890629032138155, |
| "learning_rate": 3.846386471822618e-05, |
| "loss": 0.3856, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.0671217292377702, |
| "grad_norm": 0.5275742634395962, |
| "learning_rate": 3.8451614250102234e-05, |
| "loss": 0.444, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.069397042093288, |
| "grad_norm": 0.6842481934322107, |
| "learning_rate": 3.843931709456414e-05, |
| "loss": 0.4241, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.0716723549488054, |
| "grad_norm": 0.48419973116649284, |
| "learning_rate": 3.8426973282726915e-05, |
| "loss": 0.3856, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.0739476678043232, |
| "grad_norm": 0.6812528323951617, |
| "learning_rate": 3.841458284582364e-05, |
| "loss": 0.464, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.0762229806598407, |
| "grad_norm": 0.4606611667546373, |
| "learning_rate": 3.8402145815205365e-05, |
| "loss": 0.4649, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.0784982935153584, |
| "grad_norm": 0.6711071802996837, |
| "learning_rate": 3.838966222234104e-05, |
| "loss": 0.4304, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.080773606370876, |
| "grad_norm": 0.5366181070379958, |
| "learning_rate": 3.83771320988174e-05, |
| "loss": 0.3844, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.0830489192263937, |
| "grad_norm": 0.7505606804041826, |
| "learning_rate": 3.836455547633896e-05, |
| "loss": 0.4522, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.0853242320819112, |
| "grad_norm": 0.5556699911110706, |
| "learning_rate": 3.835193238672786e-05, |
| "loss": 0.4654, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.087599544937429, |
| "grad_norm": 0.7648233560530554, |
| "learning_rate": 3.833926286192382e-05, |
| "loss": 0.4287, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.0898748577929465, |
| "grad_norm": 0.5346556144583974, |
| "learning_rate": 3.832654693398404e-05, |
| "loss": 0.4287, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.0921501706484642, |
| "grad_norm": 0.6147545474740819, |
| "learning_rate": 3.831378463508318e-05, |
| "loss": 0.4292, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.0944254835039817, |
| "grad_norm": 0.49199739831216216, |
| "learning_rate": 3.830097599751317e-05, |
| "loss": 0.3876, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.0967007963594995, |
| "grad_norm": 0.6500922100346228, |
| "learning_rate": 3.828812105368323e-05, |
| "loss": 0.4149, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.098976109215017, |
| "grad_norm": 0.46632381947018536, |
| "learning_rate": 3.8275219836119715e-05, |
| "loss": 0.3727, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.1012514220705347, |
| "grad_norm": 0.8163109016449187, |
| "learning_rate": 3.82622723774661e-05, |
| "loss": 0.4258, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.1035267349260522, |
| "grad_norm": 0.5606775924641534, |
| "learning_rate": 3.824927871048284e-05, |
| "loss": 0.4673, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.10580204778157, |
| "grad_norm": 0.7046753680729659, |
| "learning_rate": 3.8236238868047315e-05, |
| "loss": 0.4333, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.1080773606370875, |
| "grad_norm": 0.5036730787338999, |
| "learning_rate": 3.822315288315373e-05, |
| "loss": 0.4384, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.1103526734926052, |
| "grad_norm": 0.6306392041070552, |
| "learning_rate": 3.821002078891307e-05, |
| "loss": 0.4187, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.1126279863481228, |
| "grad_norm": 0.5202241380540945, |
| "learning_rate": 3.8196842618552953e-05, |
| "loss": 0.4482, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.1149032992036405, |
| "grad_norm": 0.8084338237424731, |
| "learning_rate": 3.818361840541761e-05, |
| "loss": 0.4956, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.117178612059158, |
| "grad_norm": 0.5612996846599624, |
| "learning_rate": 3.8170348182967764e-05, |
| "loss": 0.4056, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.1194539249146758, |
| "grad_norm": 0.5924393050486125, |
| "learning_rate": 3.815703198478054e-05, |
| "loss": 0.4334, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.1217292377701935, |
| "grad_norm": 0.5345743904225937, |
| "learning_rate": 3.814366984454941e-05, |
| "loss": 0.3805, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.124004550625711, |
| "grad_norm": 0.5734287248659643, |
| "learning_rate": 3.81302617960841e-05, |
| "loss": 0.4137, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.1262798634812285, |
| "grad_norm": 0.6624763213978735, |
| "learning_rate": 3.811680787331047e-05, |
| "loss": 0.4593, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.1285551763367463, |
| "grad_norm": 0.8598311483755152, |
| "learning_rate": 3.810330811027046e-05, |
| "loss": 0.4163, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.130830489192264, |
| "grad_norm": 0.5811168048723918, |
| "learning_rate": 3.8089762541122016e-05, |
| "loss": 0.3961, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.1331058020477816, |
| "grad_norm": 0.6067424161701718, |
| "learning_rate": 3.807617120013897e-05, |
| "loss": 0.3828, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.1353811149032993, |
| "grad_norm": 0.6027276686195756, |
| "learning_rate": 3.8062534121710974e-05, |
| "loss": 0.4223, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.1376564277588168, |
| "grad_norm": 0.5265528740905795, |
| "learning_rate": 3.80488513403434e-05, |
| "loss": 0.3269, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.1399317406143346, |
| "grad_norm": 0.619718442290863, |
| "learning_rate": 3.8035122890657276e-05, |
| "loss": 0.4754, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.142207053469852, |
| "grad_norm": 0.6342725333529051, |
| "learning_rate": 3.802134880738916e-05, |
| "loss": 0.4512, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.1444823663253698, |
| "grad_norm": 0.530485201522393, |
| "learning_rate": 3.80075291253911e-05, |
| "loss": 0.4104, |
| "step": 503 |
| }, |
| { |
| "epoch": 1.1467576791808873, |
| "grad_norm": 0.6627840837725466, |
| "learning_rate": 3.7993663879630516e-05, |
| "loss": 0.4043, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.149032992036405, |
| "grad_norm": 0.5379023996005294, |
| "learning_rate": 3.797975310519009e-05, |
| "loss": 0.4411, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.1513083048919226, |
| "grad_norm": 0.588214045835851, |
| "learning_rate": 3.796579683726774e-05, |
| "loss": 0.4187, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.1535836177474403, |
| "grad_norm": 0.5678190724068664, |
| "learning_rate": 3.795179511117647e-05, |
| "loss": 0.4289, |
| "step": 507 |
| }, |
| { |
| "epoch": 1.1558589306029579, |
| "grad_norm": 0.688457625822847, |
| "learning_rate": 3.7937747962344295e-05, |
| "loss": 0.4943, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.1581342434584756, |
| "grad_norm": 0.6814989607077934, |
| "learning_rate": 3.792365542631421e-05, |
| "loss": 0.4214, |
| "step": 509 |
| }, |
| { |
| "epoch": 1.1604095563139931, |
| "grad_norm": 0.5942736027664269, |
| "learning_rate": 3.7909517538744e-05, |
| "loss": 0.456, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.1626848691695109, |
| "grad_norm": 0.7903713526208822, |
| "learning_rate": 3.789533433540623e-05, |
| "loss": 0.4608, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.1649601820250284, |
| "grad_norm": 0.5337770537219456, |
| "learning_rate": 3.788110585218811e-05, |
| "loss": 0.4365, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.1672354948805461, |
| "grad_norm": 0.5709386118048865, |
| "learning_rate": 3.7866832125091434e-05, |
| "loss": 0.3608, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.1695108077360636, |
| "grad_norm": 0.6258401342708682, |
| "learning_rate": 3.7852513190232466e-05, |
| "loss": 0.4832, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.1717861205915814, |
| "grad_norm": 0.5105926958234609, |
| "learning_rate": 3.7838149083841856e-05, |
| "loss": 0.4429, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.174061433447099, |
| "grad_norm": 0.5353872958700819, |
| "learning_rate": 3.782373984226456e-05, |
| "loss": 0.4411, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.1763367463026166, |
| "grad_norm": 0.49676244023632943, |
| "learning_rate": 3.7809285501959724e-05, |
| "loss": 0.3666, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.1786120591581342, |
| "grad_norm": 0.5862119954620956, |
| "learning_rate": 3.7794786099500616e-05, |
| "loss": 0.4018, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.180887372013652, |
| "grad_norm": 0.5036301639810965, |
| "learning_rate": 3.778024167157452e-05, |
| "loss": 0.4086, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.1831626848691694, |
| "grad_norm": 0.6783189411364502, |
| "learning_rate": 3.776565225498264e-05, |
| "loss": 0.4399, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.1854379977246872, |
| "grad_norm": 0.6912119898984184, |
| "learning_rate": 3.775101788664003e-05, |
| "loss": 0.4002, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.1877133105802047, |
| "grad_norm": 0.7307828897177655, |
| "learning_rate": 3.773633860357547e-05, |
| "loss": 0.4303, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.1899886234357224, |
| "grad_norm": 0.6451606846442599, |
| "learning_rate": 3.772161444293138e-05, |
| "loss": 0.4027, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.1922639362912402, |
| "grad_norm": 0.7272217599681045, |
| "learning_rate": 3.7706845441963746e-05, |
| "loss": 0.4485, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.1945392491467577, |
| "grad_norm": 0.6728295797405291, |
| "learning_rate": 3.769203163804202e-05, |
| "loss": 0.488, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.1968145620022752, |
| "grad_norm": 0.7295058753840463, |
| "learning_rate": 3.7677173068648976e-05, |
| "loss": 0.4111, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.199089874857793, |
| "grad_norm": 0.5720940993484606, |
| "learning_rate": 3.76622697713807e-05, |
| "loss": 0.457, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.2013651877133107, |
| "grad_norm": 0.8869022231692593, |
| "learning_rate": 3.764732178394645e-05, |
| "loss": 0.4342, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.2036405005688282, |
| "grad_norm": 0.43945315979194277, |
| "learning_rate": 3.763232914416851e-05, |
| "loss": 0.3231, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.2059158134243457, |
| "grad_norm": 0.7635031106439869, |
| "learning_rate": 3.761729188998222e-05, |
| "loss": 0.4597, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.2081911262798635, |
| "grad_norm": 0.6213178936947285, |
| "learning_rate": 3.760221005943575e-05, |
| "loss": 0.4321, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.2104664391353812, |
| "grad_norm": 0.6346015751325879, |
| "learning_rate": 3.758708369069009e-05, |
| "loss": 0.4653, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.2127417519908987, |
| "grad_norm": 0.8056204649209358, |
| "learning_rate": 3.75719128220189e-05, |
| "loss": 0.4568, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.2150170648464165, |
| "grad_norm": 0.6513216153599897, |
| "learning_rate": 3.7556697491808455e-05, |
| "loss": 0.4, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.217292377701934, |
| "grad_norm": 0.5983261337707148, |
| "learning_rate": 3.7541437738557524e-05, |
| "loss": 0.3973, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.2195676905574517, |
| "grad_norm": 0.7107199080738904, |
| "learning_rate": 3.7526133600877275e-05, |
| "loss": 0.4443, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.2218430034129693, |
| "grad_norm": 0.5139651203308719, |
| "learning_rate": 3.751078511749119e-05, |
| "loss": 0.4064, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.224118316268487, |
| "grad_norm": 0.636147405866365, |
| "learning_rate": 3.7495392327234935e-05, |
| "loss": 0.4653, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.2263936291240045, |
| "grad_norm": 0.7480412548964527, |
| "learning_rate": 3.747995526905632e-05, |
| "loss": 0.4806, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.2286689419795223, |
| "grad_norm": 0.5622250258722541, |
| "learning_rate": 3.746447398201512e-05, |
| "loss": 0.4358, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.2309442548350398, |
| "grad_norm": 0.5704468541719319, |
| "learning_rate": 3.744894850528306e-05, |
| "loss": 0.4279, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.2332195676905575, |
| "grad_norm": 0.5392907143564795, |
| "learning_rate": 3.7433378878143654e-05, |
| "loss": 0.4302, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.235494880546075, |
| "grad_norm": 0.6537941034798762, |
| "learning_rate": 3.741776513999214e-05, |
| "loss": 0.5026, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.2377701934015928, |
| "grad_norm": 0.5784636449062069, |
| "learning_rate": 3.7402107330335346e-05, |
| "loss": 0.4013, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.2400455062571103, |
| "grad_norm": 0.612882563229047, |
| "learning_rate": 3.738640548879166e-05, |
| "loss": 0.4175, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.242320819112628, |
| "grad_norm": 0.7047522050978076, |
| "learning_rate": 3.7370659655090815e-05, |
| "loss": 0.4813, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.2445961319681456, |
| "grad_norm": 0.5689652487980504, |
| "learning_rate": 3.7354869869073916e-05, |
| "loss": 0.4219, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.2468714448236633, |
| "grad_norm": 0.48323792892632456, |
| "learning_rate": 3.733903617069325e-05, |
| "loss": 0.4291, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.2491467576791808, |
| "grad_norm": 0.6225866345259096, |
| "learning_rate": 3.732315860001222e-05, |
| "loss": 0.4113, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.2514220705346986, |
| "grad_norm": 0.4521098482775458, |
| "learning_rate": 3.730723719720523e-05, |
| "loss": 0.3784, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.253697383390216, |
| "grad_norm": 0.5624605850411734, |
| "learning_rate": 3.72912720025576e-05, |
| "loss": 0.4234, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.2559726962457338, |
| "grad_norm": 0.6099617567525514, |
| "learning_rate": 3.727526305646546e-05, |
| "loss": 0.4719, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.2582480091012513, |
| "grad_norm": 0.6196907758321264, |
| "learning_rate": 3.725921039943561e-05, |
| "loss": 0.4149, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.260523321956769, |
| "grad_norm": 0.5265431890255476, |
| "learning_rate": 3.72431140720855e-05, |
| "loss": 0.4067, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.2627986348122868, |
| "grad_norm": 0.7141180841081363, |
| "learning_rate": 3.722697411514305e-05, |
| "loss": 0.4111, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.2650739476678043, |
| "grad_norm": 0.7054376658478285, |
| "learning_rate": 3.7210790569446554e-05, |
| "loss": 0.4834, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.2673492605233219, |
| "grad_norm": 0.544200246308691, |
| "learning_rate": 3.7194563475944645e-05, |
| "loss": 0.3724, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.2696245733788396, |
| "grad_norm": 0.6859827142678692, |
| "learning_rate": 3.71782928756961e-05, |
| "loss": 0.4445, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.2718998862343573, |
| "grad_norm": 0.49873706058808576, |
| "learning_rate": 3.7161978809869804e-05, |
| "loss": 0.377, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.2741751990898749, |
| "grad_norm": 0.6905192832820776, |
| "learning_rate": 3.7145621319744614e-05, |
| "loss": 0.4957, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.2764505119453924, |
| "grad_norm": 0.6674819555847167, |
| "learning_rate": 3.712922044670926e-05, |
| "loss": 0.4248, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.2787258248009101, |
| "grad_norm": 0.5531069807879404, |
| "learning_rate": 3.7112776232262244e-05, |
| "loss": 0.4162, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.2810011376564279, |
| "grad_norm": 0.793114505572383, |
| "learning_rate": 3.709628871801173e-05, |
| "loss": 0.4119, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.2832764505119454, |
| "grad_norm": 0.6061615937073604, |
| "learning_rate": 3.7079757945675456e-05, |
| "loss": 0.4509, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.285551763367463, |
| "grad_norm": 0.5801581834214365, |
| "learning_rate": 3.7063183957080594e-05, |
| "loss": 0.4292, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.2878270762229806, |
| "grad_norm": 0.6531156166646916, |
| "learning_rate": 3.704656679416368e-05, |
| "loss": 0.4211, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.2901023890784984, |
| "grad_norm": 0.4738840824742358, |
| "learning_rate": 3.702990649897047e-05, |
| "loss": 0.4046, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.292377701934016, |
| "grad_norm": 0.6127788436356492, |
| "learning_rate": 3.70132031136559e-05, |
| "loss": 0.4367, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.2946530147895334, |
| "grad_norm": 0.4464306345350417, |
| "learning_rate": 3.699645668048388e-05, |
| "loss": 0.407, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.2969283276450512, |
| "grad_norm": 0.5847521547698988, |
| "learning_rate": 3.697966724182729e-05, |
| "loss": 0.443, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.299203640500569, |
| "grad_norm": 0.5520684612319413, |
| "learning_rate": 3.6962834840167783e-05, |
| "loss": 0.4154, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.3014789533560864, |
| "grad_norm": 0.577929816119576, |
| "learning_rate": 3.694595951809576e-05, |
| "loss": 0.4258, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.3037542662116042, |
| "grad_norm": 0.6588178938708844, |
| "learning_rate": 3.6929041318310195e-05, |
| "loss": 0.4715, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.3060295790671217, |
| "grad_norm": 0.5910947193408698, |
| "learning_rate": 3.691208028361857e-05, |
| "loss": 0.4136, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.3083048919226394, |
| "grad_norm": 0.6437280595180613, |
| "learning_rate": 3.689507645693674e-05, |
| "loss": 0.4218, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.310580204778157, |
| "grad_norm": 0.5321019507324688, |
| "learning_rate": 3.6878029881288824e-05, |
| "loss": 0.4413, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.3128555176336747, |
| "grad_norm": 0.5655094990541256, |
| "learning_rate": 3.686094059980714e-05, |
| "loss": 0.4285, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.3151308304891922, |
| "grad_norm": 0.5994309074467306, |
| "learning_rate": 3.684380865573203e-05, |
| "loss": 0.4059, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.31740614334471, |
| "grad_norm": 0.5449339695520942, |
| "learning_rate": 3.6826634092411807e-05, |
| "loss": 0.4255, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.3196814562002275, |
| "grad_norm": 0.6193425203295791, |
| "learning_rate": 3.6809416953302606e-05, |
| "loss": 0.4753, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.3219567690557452, |
| "grad_norm": 0.5409147548110276, |
| "learning_rate": 3.6792157281968295e-05, |
| "loss": 0.3844, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.3242320819112627, |
| "grad_norm": 0.5590317046964576, |
| "learning_rate": 3.677485512208037e-05, |
| "loss": 0.4503, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.3265073947667805, |
| "grad_norm": 0.5496354734029336, |
| "learning_rate": 3.675751051741781e-05, |
| "loss": 0.4277, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.328782707622298, |
| "grad_norm": 0.5723750685999135, |
| "learning_rate": 3.674012351186702e-05, |
| "loss": 0.4235, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.3310580204778157, |
| "grad_norm": 0.5517032292934076, |
| "learning_rate": 3.672269414942166e-05, |
| "loss": 0.4246, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.3333333333333333, |
| "grad_norm": 0.6287202109839667, |
| "learning_rate": 3.670522247418259e-05, |
| "loss": 0.4237, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.335608646188851, |
| "grad_norm": 0.5113344624756003, |
| "learning_rate": 3.6687708530357726e-05, |
| "loss": 0.4012, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.3378839590443685, |
| "grad_norm": 0.5574487232141875, |
| "learning_rate": 3.667015236226191e-05, |
| "loss": 0.4086, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.3401592718998863, |
| "grad_norm": 0.5245768687215431, |
| "learning_rate": 3.665255401431687e-05, |
| "loss": 0.3862, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.342434584755404, |
| "grad_norm": 0.49618557229950566, |
| "learning_rate": 3.663491353105101e-05, |
| "loss": 0.4012, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.3447098976109215, |
| "grad_norm": 0.4983237488761633, |
| "learning_rate": 3.661723095709939e-05, |
| "loss": 0.3976, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.346985210466439, |
| "grad_norm": 0.5376215526462558, |
| "learning_rate": 3.659950633720354e-05, |
| "loss": 0.4458, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.3492605233219568, |
| "grad_norm": 0.5023259197769308, |
| "learning_rate": 3.658173971621139e-05, |
| "loss": 0.458, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.3515358361774745, |
| "grad_norm": 0.4989521415561501, |
| "learning_rate": 3.6563931139077134e-05, |
| "loss": 0.4332, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.353811149032992, |
| "grad_norm": 0.510072444206674, |
| "learning_rate": 3.654608065086115e-05, |
| "loss": 0.4081, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.3560864618885096, |
| "grad_norm": 0.5959408340108707, |
| "learning_rate": 3.652818829672983e-05, |
| "loss": 0.4354, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.3583617747440273, |
| "grad_norm": 0.5844735352057744, |
| "learning_rate": 3.651025412195552e-05, |
| "loss": 0.4262, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.360637087599545, |
| "grad_norm": 0.5677541553759599, |
| "learning_rate": 3.649227817191639e-05, |
| "loss": 0.3893, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.3629124004550626, |
| "grad_norm": 0.5729951500575374, |
| "learning_rate": 3.6474260492096274e-05, |
| "loss": 0.457, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.36518771331058, |
| "grad_norm": 0.7503620931885405, |
| "learning_rate": 3.645620112808464e-05, |
| "loss": 0.4421, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.3674630261660978, |
| "grad_norm": 0.7296856049994408, |
| "learning_rate": 3.643810012557639e-05, |
| "loss": 0.4508, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.3697383390216156, |
| "grad_norm": 0.6002401423186757, |
| "learning_rate": 3.641995753037182e-05, |
| "loss": 0.3956, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.372013651877133, |
| "grad_norm": 0.6180465269627985, |
| "learning_rate": 3.640177338837641e-05, |
| "loss": 0.4014, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.3742889647326506, |
| "grad_norm": 0.6333745280579856, |
| "learning_rate": 3.638354774560084e-05, |
| "loss": 0.497, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.3765642775881684, |
| "grad_norm": 0.6115951533043017, |
| "learning_rate": 3.636528064816073e-05, |
| "loss": 0.3805, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.378839590443686, |
| "grad_norm": 0.5930980745694563, |
| "learning_rate": 3.6346972142276625e-05, |
| "loss": 0.4619, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.3811149032992036, |
| "grad_norm": 0.634649972147651, |
| "learning_rate": 3.632862227427384e-05, |
| "loss": 0.4665, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.3833902161547214, |
| "grad_norm": 0.6272867545095703, |
| "learning_rate": 3.631023109058235e-05, |
| "loss": 0.4541, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.3856655290102389, |
| "grad_norm": 0.6317307195256453, |
| "learning_rate": 3.629179863773665e-05, |
| "loss": 0.4183, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.3879408418657566, |
| "grad_norm": 0.5434046371600441, |
| "learning_rate": 3.6273324962375676e-05, |
| "loss": 0.3855, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.3902161547212741, |
| "grad_norm": 0.7065598662541726, |
| "learning_rate": 3.625481011124267e-05, |
| "loss": 0.4573, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.3924914675767919, |
| "grad_norm": 0.5852587577597338, |
| "learning_rate": 3.6236254131185046e-05, |
| "loss": 0.4478, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.3947667804323094, |
| "grad_norm": 0.5653752038889909, |
| "learning_rate": 3.621765706915428e-05, |
| "loss": 0.4694, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.3970420932878271, |
| "grad_norm": 0.5162422369201265, |
| "learning_rate": 3.6199018972205836e-05, |
| "loss": 0.3594, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.3993174061433447, |
| "grad_norm": 0.6598966136350053, |
| "learning_rate": 3.6180339887498953e-05, |
| "loss": 0.5066, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.4015927189988624, |
| "grad_norm": 0.5817663831618288, |
| "learning_rate": 3.616161986229661e-05, |
| "loss": 0.375, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.40386803185438, |
| "grad_norm": 0.5876775258558006, |
| "learning_rate": 3.614285894396538e-05, |
| "loss": 0.4934, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.4061433447098977, |
| "grad_norm": 0.5385625038563776, |
| "learning_rate": 3.612405717997529e-05, |
| "loss": 0.4172, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.4084186575654152, |
| "grad_norm": 0.6459700228914548, |
| "learning_rate": 3.610521461789972e-05, |
| "loss": 0.4522, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.410693970420933, |
| "grad_norm": 0.4936465756853313, |
| "learning_rate": 3.60863313054153e-05, |
| "loss": 0.3558, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.4129692832764504, |
| "grad_norm": 0.5480391312766864, |
| "learning_rate": 3.606740729030174e-05, |
| "loss": 0.4218, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.4152445961319682, |
| "grad_norm": 0.5907103782399543, |
| "learning_rate": 3.6048442620441754e-05, |
| "loss": 0.4813, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.4175199089874857, |
| "grad_norm": 0.45735368041109864, |
| "learning_rate": 3.6029437343820925e-05, |
| "loss": 0.3461, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.4197952218430034, |
| "grad_norm": 0.5431566682948396, |
| "learning_rate": 3.601039150852758e-05, |
| "loss": 0.4243, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.4220705346985212, |
| "grad_norm": 0.5183924879891563, |
| "learning_rate": 3.599130516275266e-05, |
| "loss": 0.4595, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.4243458475540387, |
| "grad_norm": 0.5720004646057972, |
| "learning_rate": 3.597217835478962e-05, |
| "loss": 0.4515, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.4266211604095562, |
| "grad_norm": 0.47687964955658974, |
| "learning_rate": 3.595301113303429e-05, |
| "loss": 0.4063, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.428896473265074, |
| "grad_norm": 0.5493519780479028, |
| "learning_rate": 3.593380354598476e-05, |
| "loss": 0.3963, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.4311717861205917, |
| "grad_norm": 0.5656763414743938, |
| "learning_rate": 3.591455564224126e-05, |
| "loss": 0.417, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.4334470989761092, |
| "grad_norm": 0.6174302910133038, |
| "learning_rate": 3.589526747050601e-05, |
| "loss": 0.4714, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.4357224118316267, |
| "grad_norm": 0.5314357744461629, |
| "learning_rate": 3.587593907958314e-05, |
| "loss": 0.409, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.4379977246871445, |
| "grad_norm": 0.5959045877414539, |
| "learning_rate": 3.585657051837855e-05, |
| "loss": 0.3917, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.4402730375426622, |
| "grad_norm": 0.5925951139677259, |
| "learning_rate": 3.583716183589975e-05, |
| "loss": 0.4563, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.4425483503981797, |
| "grad_norm": 0.5461579158393477, |
| "learning_rate": 3.58177130812558e-05, |
| "loss": 0.4687, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.4448236632536973, |
| "grad_norm": 0.5705089648405642, |
| "learning_rate": 3.579822430365714e-05, |
| "loss": 0.4327, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.447098976109215, |
| "grad_norm": 0.645475831271653, |
| "learning_rate": 3.577869555241548e-05, |
| "loss": 0.4536, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.4493742889647327, |
| "grad_norm": 0.537595180808403, |
| "learning_rate": 3.5759126876943665e-05, |
| "loss": 0.4213, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.4516496018202503, |
| "grad_norm": 0.5476762610652368, |
| "learning_rate": 3.573951832675557e-05, |
| "loss": 0.3823, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.4539249146757678, |
| "grad_norm": 0.5303957445839604, |
| "learning_rate": 3.571986995146596e-05, |
| "loss": 0.451, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.4562002275312855, |
| "grad_norm": 0.5134907930348338, |
| "learning_rate": 3.570018180079037e-05, |
| "loss": 0.449, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.4584755403868033, |
| "grad_norm": 0.55728918847765, |
| "learning_rate": 3.568045392454498e-05, |
| "loss": 0.4201, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.4607508532423208, |
| "grad_norm": 0.48252879342351945, |
| "learning_rate": 3.566068637264647e-05, |
| "loss": 0.3453, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.4630261660978385, |
| "grad_norm": 0.6602277308990888, |
| "learning_rate": 3.564087919511193e-05, |
| "loss": 0.4534, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.465301478953356, |
| "grad_norm": 0.6004785281379483, |
| "learning_rate": 3.562103244205869e-05, |
| "loss": 0.4352, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.4675767918088738, |
| "grad_norm": 0.6718736260115368, |
| "learning_rate": 3.560114616370425e-05, |
| "loss": 0.4343, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.4698521046643913, |
| "grad_norm": 0.7520904707301957, |
| "learning_rate": 3.558122041036608e-05, |
| "loss": 0.5003, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.472127417519909, |
| "grad_norm": 0.589426691913399, |
| "learning_rate": 3.556125523246157e-05, |
| "loss": 0.3845, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.4744027303754266, |
| "grad_norm": 0.8524632940765311, |
| "learning_rate": 3.554125068050783e-05, |
| "loss": 0.4957, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.4766780432309443, |
| "grad_norm": 0.5164519133696035, |
| "learning_rate": 3.5521206805121626e-05, |
| "loss": 0.3878, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.4789533560864618, |
| "grad_norm": 0.6042089039732726, |
| "learning_rate": 3.550112365701921e-05, |
| "loss": 0.4325, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.4812286689419796, |
| "grad_norm": 0.5470513351715313, |
| "learning_rate": 3.548100128701619e-05, |
| "loss": 0.3819, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.483503981797497, |
| "grad_norm": 0.5931353822631468, |
| "learning_rate": 3.546083974602745e-05, |
| "loss": 0.4815, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.4857792946530148, |
| "grad_norm": 0.5390692476906889, |
| "learning_rate": 3.5440639085066944e-05, |
| "loss": 0.4074, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.4880546075085324, |
| "grad_norm": 0.5454314551786908, |
| "learning_rate": 3.542039935524765e-05, |
| "loss": 0.434, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.49032992036405, |
| "grad_norm": 0.6505407285781365, |
| "learning_rate": 3.540012060778137e-05, |
| "loss": 0.4386, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.4926052332195676, |
| "grad_norm": 0.5343082656896795, |
| "learning_rate": 3.537980289397866e-05, |
| "loss": 0.3717, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.4948805460750854, |
| "grad_norm": 0.641941733145819, |
| "learning_rate": 3.535944626524863e-05, |
| "loss": 0.4496, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.4971558589306029, |
| "grad_norm": 0.5002958448477494, |
| "learning_rate": 3.53390507730989e-05, |
| "loss": 0.439, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.4994311717861206, |
| "grad_norm": 0.7431898707356385, |
| "learning_rate": 3.53186164691354e-05, |
| "loss": 0.4425, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.5017064846416384, |
| "grad_norm": 0.5625161811682998, |
| "learning_rate": 3.529814340506226e-05, |
| "loss": 0.3554, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.5039817974971559, |
| "grad_norm": 0.7165022209644997, |
| "learning_rate": 3.52776316326817e-05, |
| "loss": 0.4264, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.5062571103526734, |
| "grad_norm": 0.5406684891638924, |
| "learning_rate": 3.525708120389387e-05, |
| "loss": 0.471, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.5085324232081911, |
| "grad_norm": 0.6878653332939912, |
| "learning_rate": 3.523649217069673e-05, |
| "loss": 0.4019, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.5108077360637089, |
| "grad_norm": 0.7435693445914054, |
| "learning_rate": 3.521586458518593e-05, |
| "loss": 0.5242, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.5130830489192264, |
| "grad_norm": 0.7244141769516639, |
| "learning_rate": 3.519519849955466e-05, |
| "loss": 0.3751, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.515358361774744, |
| "grad_norm": 0.9823209050438462, |
| "learning_rate": 3.517449396609353e-05, |
| "loss": 0.5272, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.5176336746302617, |
| "grad_norm": 0.9331506658748052, |
| "learning_rate": 3.515375103719042e-05, |
| "loss": 0.3914, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.5199089874857794, |
| "grad_norm": 0.9043246185269522, |
| "learning_rate": 3.5132969765330384e-05, |
| "loss": 0.5045, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.522184300341297, |
| "grad_norm": 0.8318586391712912, |
| "learning_rate": 3.5112150203095464e-05, |
| "loss": 0.4076, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.5244596131968144, |
| "grad_norm": 0.6027291468147227, |
| "learning_rate": 3.509129240316461e-05, |
| "loss": 0.4436, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.5267349260523322, |
| "grad_norm": 0.6823966178652825, |
| "learning_rate": 3.507039641831351e-05, |
| "loss": 0.43, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.52901023890785, |
| "grad_norm": 0.49401985184456687, |
| "learning_rate": 3.5049462301414485e-05, |
| "loss": 0.3709, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.5312855517633674, |
| "grad_norm": 0.7521950945695709, |
| "learning_rate": 3.502849010543633e-05, |
| "loss": 0.4482, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.533560864618885, |
| "grad_norm": 0.4865749920113331, |
| "learning_rate": 3.500747988344418e-05, |
| "loss": 0.3553, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.5358361774744027, |
| "grad_norm": 0.7833212324594446, |
| "learning_rate": 3.498643168859941e-05, |
| "loss": 0.4586, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.5381114903299204, |
| "grad_norm": 0.6615305501265996, |
| "learning_rate": 3.496534557415945e-05, |
| "loss": 0.4285, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.540386803185438, |
| "grad_norm": 0.9007038838265798, |
| "learning_rate": 3.49442215934777e-05, |
| "loss": 0.4671, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.5426621160409555, |
| "grad_norm": 0.6800624160939918, |
| "learning_rate": 3.492305980000336e-05, |
| "loss": 0.3994, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.5449374288964732, |
| "grad_norm": 0.7969418939760616, |
| "learning_rate": 3.49018602472813e-05, |
| "loss": 0.5462, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.547212741751991, |
| "grad_norm": 0.8878652763061456, |
| "learning_rate": 3.488062298895194e-05, |
| "loss": 0.4025, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.5494880546075085, |
| "grad_norm": 0.659068502289326, |
| "learning_rate": 3.4859348078751104e-05, |
| "loss": 0.4465, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.551763367463026, |
| "grad_norm": 1.0400850492606901, |
| "learning_rate": 3.483803557050989e-05, |
| "loss": 0.4568, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.5540386803185438, |
| "grad_norm": 0.5459464999028117, |
| "learning_rate": 3.481668551815451e-05, |
| "loss": 0.3927, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.5563139931740615, |
| "grad_norm": 0.9155796738843736, |
| "learning_rate": 3.47952979757062e-05, |
| "loss": 0.4053, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.558589306029579, |
| "grad_norm": 0.6377418061495952, |
| "learning_rate": 3.4773872997281026e-05, |
| "loss": 0.4688, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.5608646188850968, |
| "grad_norm": 0.7829625967353625, |
| "learning_rate": 3.47524106370898e-05, |
| "loss": 0.4194, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.5631399317406145, |
| "grad_norm": 0.5929924700099798, |
| "learning_rate": 3.473091094943791e-05, |
| "loss": 0.4175, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.565415244596132, |
| "grad_norm": 0.6712086411389321, |
| "learning_rate": 3.47093739887252e-05, |
| "loss": 0.4458, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.5676905574516495, |
| "grad_norm": 0.583321245931635, |
| "learning_rate": 3.468779980944581e-05, |
| "loss": 0.4219, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.5699658703071673, |
| "grad_norm": 0.6812907714793384, |
| "learning_rate": 3.466618846618806e-05, |
| "loss": 0.406, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.572241183162685, |
| "grad_norm": 0.5237917183630804, |
| "learning_rate": 3.4644540013634316e-05, |
| "loss": 0.3952, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.5745164960182025, |
| "grad_norm": 0.6517368564518353, |
| "learning_rate": 3.4622854506560815e-05, |
| "loss": 0.4399, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.57679180887372, |
| "grad_norm": 0.7673454679891567, |
| "learning_rate": 3.460113199983758e-05, |
| "loss": 0.4697, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.5790671217292378, |
| "grad_norm": 0.5095156725474326, |
| "learning_rate": 3.457937254842823e-05, |
| "loss": 0.4047, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.5813424345847555, |
| "grad_norm": 0.6908052191108716, |
| "learning_rate": 3.455757620738989e-05, |
| "loss": 0.4363, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.583617747440273, |
| "grad_norm": 0.4786305700260121, |
| "learning_rate": 3.453574303187298e-05, |
| "loss": 0.3975, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.5858930602957906, |
| "grad_norm": 0.7527935841962348, |
| "learning_rate": 3.451387307712117e-05, |
| "loss": 0.4569, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.5881683731513083, |
| "grad_norm": 0.550349132523598, |
| "learning_rate": 3.4491966398471175e-05, |
| "loss": 0.4999, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.590443686006826, |
| "grad_norm": 0.626223454214479, |
| "learning_rate": 3.447002305135261e-05, |
| "loss": 0.4665, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.5927189988623436, |
| "grad_norm": 0.44085264091799026, |
| "learning_rate": 3.444804309128789e-05, |
| "loss": 0.3904, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.594994311717861, |
| "grad_norm": 0.5320242737175317, |
| "learning_rate": 3.442602657389208e-05, |
| "loss": 0.4558, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.5972696245733788, |
| "grad_norm": 0.512740152485577, |
| "learning_rate": 3.440397355487272e-05, |
| "loss": 0.3967, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.5995449374288966, |
| "grad_norm": 0.586264919926901, |
| "learning_rate": 3.438188409002972e-05, |
| "loss": 0.4306, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.601820250284414, |
| "grad_norm": 0.5613994999872779, |
| "learning_rate": 3.435975823525523e-05, |
| "loss": 0.4682, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.6040955631399316, |
| "grad_norm": 0.48315507057673795, |
| "learning_rate": 3.4337596046533426e-05, |
| "loss": 0.3671, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.6063708759954494, |
| "grad_norm": 0.6819301484592192, |
| "learning_rate": 3.4315397579940466e-05, |
| "loss": 0.4571, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.608646188850967, |
| "grad_norm": 0.43527686529515514, |
| "learning_rate": 3.429316289164426e-05, |
| "loss": 0.4577, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.6109215017064846, |
| "grad_norm": 0.6082554343422415, |
| "learning_rate": 3.427089203790442e-05, |
| "loss": 0.4239, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.6131968145620021, |
| "grad_norm": 0.529694601924603, |
| "learning_rate": 3.424858507507202e-05, |
| "loss": 0.436, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.6154721274175199, |
| "grad_norm": 0.4767337527915039, |
| "learning_rate": 3.42262420595895e-05, |
| "loss": 0.3503, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.6177474402730376, |
| "grad_norm": 0.7631748467117545, |
| "learning_rate": 3.420386304799057e-05, |
| "loss": 0.4506, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.6200227531285551, |
| "grad_norm": 0.5475209507603683, |
| "learning_rate": 3.418144809689996e-05, |
| "loss": 0.3797, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.6222980659840727, |
| "grad_norm": 0.7219843622127368, |
| "learning_rate": 3.4158997263033384e-05, |
| "loss": 0.397, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.6245733788395904, |
| "grad_norm": 0.6229913232346951, |
| "learning_rate": 3.413651060319732e-05, |
| "loss": 0.4915, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.6268486916951082, |
| "grad_norm": 0.6116243374881276, |
| "learning_rate": 3.411398817428889e-05, |
| "loss": 0.4495, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.6291240045506257, |
| "grad_norm": 0.6874023779192933, |
| "learning_rate": 3.409143003329575e-05, |
| "loss": 0.4577, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.6313993174061432, |
| "grad_norm": 0.5331865010181484, |
| "learning_rate": 3.406883623729591e-05, |
| "loss": 0.389, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.633674630261661, |
| "grad_norm": 0.541208507385322, |
| "learning_rate": 3.4046206843457576e-05, |
| "loss": 0.425, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.6359499431171787, |
| "grad_norm": 0.5574749097907555, |
| "learning_rate": 3.4023541909039035e-05, |
| "loss": 0.4125, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.6382252559726962, |
| "grad_norm": 0.525379595046595, |
| "learning_rate": 3.400084149138851e-05, |
| "loss": 0.4011, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.640500568828214, |
| "grad_norm": 0.6349069920640864, |
| "learning_rate": 3.3978105647944e-05, |
| "loss": 0.5174, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.6427758816837317, |
| "grad_norm": 0.6791512167624842, |
| "learning_rate": 3.3955334436233145e-05, |
| "loss": 0.4595, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.6450511945392492, |
| "grad_norm": 0.5564269917656663, |
| "learning_rate": 3.393252791387306e-05, |
| "loss": 0.397, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.6473265073947667, |
| "grad_norm": 0.5391279772289733, |
| "learning_rate": 3.3909686138570226e-05, |
| "loss": 0.4144, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.6496018202502845, |
| "grad_norm": 0.6738091867356918, |
| "learning_rate": 3.388680916812031e-05, |
| "loss": 0.4434, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.6518771331058022, |
| "grad_norm": 0.6126156918760496, |
| "learning_rate": 3.3863897060408036e-05, |
| "loss": 0.4501, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.6541524459613197, |
| "grad_norm": 0.5590786991354324, |
| "learning_rate": 3.384094987340703e-05, |
| "loss": 0.4424, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.6564277588168372, |
| "grad_norm": 0.5726606756254216, |
| "learning_rate": 3.3817967665179687e-05, |
| "loss": 0.4023, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.658703071672355, |
| "grad_norm": 0.5308251504814927, |
| "learning_rate": 3.3794950493877014e-05, |
| "loss": 0.4373, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.6609783845278727, |
| "grad_norm": 0.48318908441400044, |
| "learning_rate": 3.377189841773848e-05, |
| "loss": 0.4133, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.6632536973833902, |
| "grad_norm": 0.6708760404159205, |
| "learning_rate": 3.374881149509186e-05, |
| "loss": 0.4377, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.6655290102389078, |
| "grad_norm": 0.5173893189818557, |
| "learning_rate": 3.372568978435312e-05, |
| "loss": 0.4582, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.6678043230944255, |
| "grad_norm": 0.5770282850563427, |
| "learning_rate": 3.370253334402624e-05, |
| "loss": 0.3747, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.6700796359499432, |
| "grad_norm": 0.6804621006166472, |
| "learning_rate": 3.367934223270308e-05, |
| "loss": 0.4032, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.6723549488054608, |
| "grad_norm": 0.5324344157986772, |
| "learning_rate": 3.365611650906321e-05, |
| "loss": 0.4575, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.6746302616609783, |
| "grad_norm": 0.7892336868643832, |
| "learning_rate": 3.3632856231873806e-05, |
| "loss": 0.4448, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.676905574516496, |
| "grad_norm": 0.4660395278953551, |
| "learning_rate": 3.3609561459989446e-05, |
| "loss": 0.3867, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.6791808873720138, |
| "grad_norm": 0.6220290351552069, |
| "learning_rate": 3.358623225235201e-05, |
| "loss": 0.3934, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.6814562002275313, |
| "grad_norm": 0.5003423448965053, |
| "learning_rate": 3.3562868667990487e-05, |
| "loss": 0.3917, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.6837315130830488, |
| "grad_norm": 0.5023405186553996, |
| "learning_rate": 3.353947076602088e-05, |
| "loss": 0.4664, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.6860068259385665, |
| "grad_norm": 0.4546077197981959, |
| "learning_rate": 3.3516038605645985e-05, |
| "loss": 0.432, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.6882821387940843, |
| "grad_norm": 0.5142825872996363, |
| "learning_rate": 3.349257224615532e-05, |
| "loss": 0.4528, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.6905574516496018, |
| "grad_norm": 0.5261812581068472, |
| "learning_rate": 3.346907174692491e-05, |
| "loss": 0.382, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.6928327645051193, |
| "grad_norm": 0.48306162195031704, |
| "learning_rate": 3.3445537167417165e-05, |
| "loss": 0.4824, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.695108077360637, |
| "grad_norm": 0.5253247800677288, |
| "learning_rate": 3.342196856718074e-05, |
| "loss": 0.4223, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.6973833902161548, |
| "grad_norm": 0.501305461897868, |
| "learning_rate": 3.339836600585036e-05, |
| "loss": 0.4359, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.6996587030716723, |
| "grad_norm": 0.4732093099233727, |
| "learning_rate": 3.3374729543146674e-05, |
| "loss": 0.3959, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.7019340159271898, |
| "grad_norm": 0.4478272616866317, |
| "learning_rate": 3.335105923887614e-05, |
| "loss": 0.4071, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.7042093287827076, |
| "grad_norm": 0.48865136328489367, |
| "learning_rate": 3.332735515293081e-05, |
| "loss": 0.3944, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.7064846416382253, |
| "grad_norm": 0.5259957045989833, |
| "learning_rate": 3.330361734528823e-05, |
| "loss": 0.4227, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.7087599544937428, |
| "grad_norm": 0.5497563708361985, |
| "learning_rate": 3.327984587601127e-05, |
| "loss": 0.4332, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.7110352673492604, |
| "grad_norm": 0.5843800718238974, |
| "learning_rate": 3.325604080524796e-05, |
| "loss": 0.4286, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.713310580204778, |
| "grad_norm": 0.45291954410142116, |
| "learning_rate": 3.323220219323137e-05, |
| "loss": 0.4476, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.7155858930602959, |
| "grad_norm": 0.5160193476732419, |
| "learning_rate": 3.3208330100279425e-05, |
| "loss": 0.3927, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.7178612059158134, |
| "grad_norm": 0.4931895088903224, |
| "learning_rate": 3.318442458679477e-05, |
| "loss": 0.4866, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.7201365187713311, |
| "grad_norm": 0.5236604180952761, |
| "learning_rate": 3.316048571326461e-05, |
| "loss": 0.3987, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.7224118316268489, |
| "grad_norm": 0.514426955191737, |
| "learning_rate": 3.313651354026057e-05, |
| "loss": 0.4476, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.7246871444823664, |
| "grad_norm": 0.5196465533309423, |
| "learning_rate": 3.311250812843851e-05, |
| "loss": 0.5314, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.726962457337884, |
| "grad_norm": 0.5443372341119862, |
| "learning_rate": 3.308846953853842e-05, |
| "loss": 0.4224, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.7292377701934016, |
| "grad_norm": 0.5250443676651882, |
| "learning_rate": 3.306439783138421e-05, |
| "loss": 0.4563, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.7315130830489194, |
| "grad_norm": 0.4465626870840488, |
| "learning_rate": 3.304029306788361e-05, |
| "loss": 0.4088, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.733788395904437, |
| "grad_norm": 0.6510435095336619, |
| "learning_rate": 3.3016155309027985e-05, |
| "loss": 0.4655, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.7360637087599544, |
| "grad_norm": 0.38834294904495403, |
| "learning_rate": 3.299198461589217e-05, |
| "loss": 0.4174, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.7383390216154722, |
| "grad_norm": 0.5264237824600192, |
| "learning_rate": 3.2967781049634356e-05, |
| "loss": 0.3874, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.74061433447099, |
| "grad_norm": 0.5879282657629377, |
| "learning_rate": 3.29435446714959e-05, |
| "loss": 0.4231, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.7428896473265074, |
| "grad_norm": 0.5393876135695119, |
| "learning_rate": 3.291927554280118e-05, |
| "loss": 0.4339, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.745164960182025, |
| "grad_norm": 0.49399211357173956, |
| "learning_rate": 3.289497372495744e-05, |
| "loss": 0.4163, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.7474402730375427, |
| "grad_norm": 0.47019460412562336, |
| "learning_rate": 3.2870639279454665e-05, |
| "loss": 0.4354, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.7497155858930604, |
| "grad_norm": 0.5685785048850405, |
| "learning_rate": 3.284627226786534e-05, |
| "loss": 0.4498, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.751990898748578, |
| "grad_norm": 0.4753852695722544, |
| "learning_rate": 3.28218727518444e-05, |
| "loss": 0.3912, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.7542662116040955, |
| "grad_norm": 0.562602566827557, |
| "learning_rate": 3.279744079312901e-05, |
| "loss": 0.4488, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.7565415244596132, |
| "grad_norm": 0.5094239312668742, |
| "learning_rate": 3.277297645353842e-05, |
| "loss": 0.3791, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.758816837315131, |
| "grad_norm": 0.620494439748899, |
| "learning_rate": 3.27484797949738e-05, |
| "loss": 0.3924, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.7610921501706485, |
| "grad_norm": 0.6628863050037046, |
| "learning_rate": 3.272395087941812e-05, |
| "loss": 0.4488, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.763367463026166, |
| "grad_norm": 0.637786898326395, |
| "learning_rate": 3.269938976893595e-05, |
| "loss": 0.4429, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.7656427758816837, |
| "grad_norm": 0.510130999535126, |
| "learning_rate": 3.267479652567334e-05, |
| "loss": 0.4216, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.7679180887372015, |
| "grad_norm": 0.5586834514509081, |
| "learning_rate": 3.2650171211857626e-05, |
| "loss": 0.4123, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.770193401592719, |
| "grad_norm": 0.5411046048656755, |
| "learning_rate": 3.2625513889797307e-05, |
| "loss": 0.4633, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.7724687144482365, |
| "grad_norm": 0.5448383040301249, |
| "learning_rate": 3.2600824621881846e-05, |
| "loss": 0.3844, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.7747440273037542, |
| "grad_norm": 0.589811003209115, |
| "learning_rate": 3.2576103470581564e-05, |
| "loss": 0.4563, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.777019340159272, |
| "grad_norm": 0.5584343787626713, |
| "learning_rate": 3.2551350498447446e-05, |
| "loss": 0.4303, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.7792946530147895, |
| "grad_norm": 0.4810202970686725, |
| "learning_rate": 3.252656576811099e-05, |
| "loss": 0.3706, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.781569965870307, |
| "grad_norm": 0.6156367306416939, |
| "learning_rate": 3.2501749342284044e-05, |
| "loss": 0.4231, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.7838452787258248, |
| "grad_norm": 0.5204945060263102, |
| "learning_rate": 3.247690128375867e-05, |
| "loss": 0.4114, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.7861205915813425, |
| "grad_norm": 0.7271145649976276, |
| "learning_rate": 3.245202165540697e-05, |
| "loss": 0.4466, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.78839590443686, |
| "grad_norm": 0.4763873903676693, |
| "learning_rate": 3.24271105201809e-05, |
| "loss": 0.4805, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.7906712172923775, |
| "grad_norm": 0.5144449537309139, |
| "learning_rate": 3.240216794111215e-05, |
| "loss": 0.374, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.7929465301478953, |
| "grad_norm": 0.4718447371813775, |
| "learning_rate": 3.237719398131198e-05, |
| "loss": 0.4204, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.795221843003413, |
| "grad_norm": 0.47782509060055806, |
| "learning_rate": 3.235218870397105e-05, |
| "loss": 0.4451, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.7974971558589306, |
| "grad_norm": 0.5025689132907557, |
| "learning_rate": 3.232715217235927e-05, |
| "loss": 0.4412, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.799772468714448, |
| "grad_norm": 0.4952509085855213, |
| "learning_rate": 3.2302084449825595e-05, |
| "loss": 0.4193, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.802047781569966, |
| "grad_norm": 0.4720298981099759, |
| "learning_rate": 3.227698559979794e-05, |
| "loss": 0.3944, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.8043230944254836, |
| "grad_norm": 0.6208004955993673, |
| "learning_rate": 3.225185568578295e-05, |
| "loss": 0.4374, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.806598407281001, |
| "grad_norm": 0.43207309687869283, |
| "learning_rate": 3.2226694771365906e-05, |
| "loss": 0.3812, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.8088737201365188, |
| "grad_norm": 0.6654873071833073, |
| "learning_rate": 3.220150292021049e-05, |
| "loss": 0.4411, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.8111490329920366, |
| "grad_norm": 0.5760081189256475, |
| "learning_rate": 3.217628019605869e-05, |
| "loss": 0.4424, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.813424345847554, |
| "grad_norm": 0.5018027239654591, |
| "learning_rate": 3.21510266627306e-05, |
| "loss": 0.4175, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.8156996587030716, |
| "grad_norm": 0.539232908897014, |
| "learning_rate": 3.212574238412427e-05, |
| "loss": 0.443, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.8179749715585893, |
| "grad_norm": 0.8883330790380443, |
| "learning_rate": 3.2100427424215536e-05, |
| "loss": 0.451, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.820250284414107, |
| "grad_norm": 0.4607329856252554, |
| "learning_rate": 3.2075081847057886e-05, |
| "loss": 0.4077, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.8225255972696246, |
| "grad_norm": 0.45881098461656095, |
| "learning_rate": 3.204970571678225e-05, |
| "loss": 0.3898, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.8248009101251421, |
| "grad_norm": 0.4950046026315809, |
| "learning_rate": 3.20242990975969e-05, |
| "loss": 0.4913, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.8270762229806599, |
| "grad_norm": 0.4284019412488292, |
| "learning_rate": 3.1998862053787214e-05, |
| "loss": 0.3605, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.8293515358361776, |
| "grad_norm": 0.523837409188779, |
| "learning_rate": 3.197339464971558e-05, |
| "loss": 0.4482, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.8316268486916951, |
| "grad_norm": 0.49142234581824934, |
| "learning_rate": 3.194789694982119e-05, |
| "loss": 0.4118, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.8339021615472126, |
| "grad_norm": 0.49685520725633503, |
| "learning_rate": 3.192236901861992e-05, |
| "loss": 0.4309, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.8361774744027304, |
| "grad_norm": 0.5783313475386348, |
| "learning_rate": 3.1896810920704095e-05, |
| "loss": 0.4543, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.8384527872582481, |
| "grad_norm": 0.4663307446637632, |
| "learning_rate": 3.18712227207424e-05, |
| "loss": 0.3722, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.8407281001137656, |
| "grad_norm": 0.5581189544180488, |
| "learning_rate": 3.184560448347969e-05, |
| "loss": 0.4143, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.8430034129692832, |
| "grad_norm": 0.5691345254440217, |
| "learning_rate": 3.181995627373679e-05, |
| "loss": 0.4441, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.845278725824801, |
| "grad_norm": 0.5066854792282622, |
| "learning_rate": 3.179427815641041e-05, |
| "loss": 0.3638, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.8475540386803186, |
| "grad_norm": 0.7011362779760515, |
| "learning_rate": 3.176857019647289e-05, |
| "loss": 0.4866, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.8498293515358362, |
| "grad_norm": 0.6094416012430611, |
| "learning_rate": 3.17428324589721e-05, |
| "loss": 0.4246, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.8521046643913537, |
| "grad_norm": 0.48464772590754135, |
| "learning_rate": 3.171706500903126e-05, |
| "loss": 0.4035, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.8543799772468714, |
| "grad_norm": 0.5786053028777935, |
| "learning_rate": 3.1691267911848765e-05, |
| "loss": 0.3657, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.8566552901023892, |
| "grad_norm": 0.5861507724160404, |
| "learning_rate": 3.166544123269801e-05, |
| "loss": 0.4289, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.8589306029579067, |
| "grad_norm": 0.6029563372781788, |
| "learning_rate": 3.163958503692727e-05, |
| "loss": 0.4721, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.8612059158134242, |
| "grad_norm": 0.6129610821268773, |
| "learning_rate": 3.1613699389959484e-05, |
| "loss": 0.3952, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.863481228668942, |
| "grad_norm": 0.7662158217676397, |
| "learning_rate": 3.158778435729211e-05, |
| "loss": 0.4303, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.8657565415244597, |
| "grad_norm": 0.5796796368147477, |
| "learning_rate": 3.156184000449697e-05, |
| "loss": 0.4499, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.8680318543799772, |
| "grad_norm": 0.7996782495584651, |
| "learning_rate": 3.153586639722006e-05, |
| "loss": 0.4245, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.8703071672354947, |
| "grad_norm": 0.43525007544954886, |
| "learning_rate": 3.1509863601181424e-05, |
| "loss": 0.3829, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.8725824800910125, |
| "grad_norm": 0.7527664756780577, |
| "learning_rate": 3.1483831682174914e-05, |
| "loss": 0.4542, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.8748577929465302, |
| "grad_norm": 0.46604172701732616, |
| "learning_rate": 3.1457770706068125e-05, |
| "loss": 0.4255, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.8771331058020477, |
| "grad_norm": 0.621671324916588, |
| "learning_rate": 3.143168073880214e-05, |
| "loss": 0.4562, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.8794084186575652, |
| "grad_norm": 0.5812533736282571, |
| "learning_rate": 3.14055618463914e-05, |
| "loss": 0.4408, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.8816837315130832, |
| "grad_norm": 0.48593039499357055, |
| "learning_rate": 3.1379414094923545e-05, |
| "loss": 0.3911, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.8839590443686007, |
| "grad_norm": 0.5222388909677681, |
| "learning_rate": 3.135323755055923e-05, |
| "loss": 0.4699, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.8862343572241183, |
| "grad_norm": 0.6048601049513829, |
| "learning_rate": 3.1327032279531966e-05, |
| "loss": 0.4389, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.888509670079636, |
| "grad_norm": 0.49919715491157246, |
| "learning_rate": 3.1300798348147954e-05, |
| "loss": 0.4262, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.8907849829351537, |
| "grad_norm": 0.6678286159773057, |
| "learning_rate": 3.127453582278589e-05, |
| "loss": 0.4593, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.8930602957906713, |
| "grad_norm": 0.5906564126737536, |
| "learning_rate": 3.124824476989686e-05, |
| "loss": 0.3977, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.8953356086461888, |
| "grad_norm": 0.5832034135440843, |
| "learning_rate": 3.122192525600409e-05, |
| "loss": 0.463, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.8976109215017065, |
| "grad_norm": 0.7135832527092052, |
| "learning_rate": 3.119557734770285e-05, |
| "loss": 0.3795, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.8998862343572243, |
| "grad_norm": 0.5621917499754232, |
| "learning_rate": 3.116920111166025e-05, |
| "loss": 0.4112, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.9021615472127418, |
| "grad_norm": 0.7181751851282073, |
| "learning_rate": 3.114279661461506e-05, |
| "loss": 0.3811, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.9044368600682593, |
| "grad_norm": 0.6743148704065208, |
| "learning_rate": 3.1116363923377576e-05, |
| "loss": 0.447, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.906712172923777, |
| "grad_norm": 0.7349281938472108, |
| "learning_rate": 3.108990310482943e-05, |
| "loss": 0.4133, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.9089874857792948, |
| "grad_norm": 0.6436988476892611, |
| "learning_rate": 3.1063414225923416e-05, |
| "loss": 0.4606, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.9112627986348123, |
| "grad_norm": 1.0022956970748371, |
| "learning_rate": 3.103689735368333e-05, |
| "loss": 0.4597, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.9135381114903298, |
| "grad_norm": 0.5116706876398635, |
| "learning_rate": 3.10103525552038e-05, |
| "loss": 0.4274, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.9158134243458476, |
| "grad_norm": 0.8062209245544874, |
| "learning_rate": 3.098377989765011e-05, |
| "loss": 0.3849, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.9180887372013653, |
| "grad_norm": 0.5590307923149567, |
| "learning_rate": 3.0957179448258053e-05, |
| "loss": 0.4467, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.9203640500568828, |
| "grad_norm": 0.7522537029457251, |
| "learning_rate": 3.0930551274333715e-05, |
| "loss": 0.4432, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.9226393629124003, |
| "grad_norm": 0.5051335750383762, |
| "learning_rate": 3.090389544325335e-05, |
| "loss": 0.3911, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.924914675767918, |
| "grad_norm": 0.7913606783246964, |
| "learning_rate": 3.087721202246321e-05, |
| "loss": 0.4444, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.9271899886234358, |
| "grad_norm": 0.4753111648665716, |
| "learning_rate": 3.08505010794793e-05, |
| "loss": 0.4163, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.9294653014789533, |
| "grad_norm": 0.7035924858849998, |
| "learning_rate": 3.082376268188731e-05, |
| "loss": 0.4377, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.9317406143344709, |
| "grad_norm": 0.5117975652669959, |
| "learning_rate": 3.0796996897342394e-05, |
| "loss": 0.4447, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.9340159271899886, |
| "grad_norm": 0.5513898035344333, |
| "learning_rate": 3.0770203793568994e-05, |
| "loss": 0.3837, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.9362912400455063, |
| "grad_norm": 0.4764564187002147, |
| "learning_rate": 3.0743383438360666e-05, |
| "loss": 0.4389, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.9385665529010239, |
| "grad_norm": 0.5121515231941229, |
| "learning_rate": 3.0716535899579936e-05, |
| "loss": 0.417, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.9408418657565414, |
| "grad_norm": 0.7254375435748747, |
| "learning_rate": 3.06896612451581e-05, |
| "loss": 0.4904, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.9431171786120591, |
| "grad_norm": 0.5312185931920812, |
| "learning_rate": 3.066275954309507e-05, |
| "loss": 0.4833, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.9453924914675769, |
| "grad_norm": 0.3906691207947801, |
| "learning_rate": 3.0635830861459204e-05, |
| "loss": 0.4172, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.9476678043230944, |
| "grad_norm": 0.5490567237256371, |
| "learning_rate": 3.06088752683871e-05, |
| "loss": 0.4367, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.949943117178612, |
| "grad_norm": 0.48830649429397904, |
| "learning_rate": 3.0581892832083484e-05, |
| "loss": 0.4098, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.9522184300341296, |
| "grad_norm": 0.5668417266386482, |
| "learning_rate": 3.0554883620820955e-05, |
| "loss": 0.4232, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.9544937428896474, |
| "grad_norm": 0.5490790985251669, |
| "learning_rate": 3.0527847702939915e-05, |
| "loss": 0.4491, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.956769055745165, |
| "grad_norm": 0.6622999931743726, |
| "learning_rate": 3.0500785146848303e-05, |
| "loss": 0.486, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.9590443686006824, |
| "grad_norm": 0.5954178521700658, |
| "learning_rate": 3.0473696021021466e-05, |
| "loss": 0.4428, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.9613196814562004, |
| "grad_norm": 0.5210378657968507, |
| "learning_rate": 3.0446580394001986e-05, |
| "loss": 0.3706, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.963594994311718, |
| "grad_norm": 0.47230222545638934, |
| "learning_rate": 3.04194383343995e-05, |
| "loss": 0.4372, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.9658703071672354, |
| "grad_norm": 0.550474067886411, |
| "learning_rate": 3.0392269910890523e-05, |
| "loss": 0.4556, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.9681456200227532, |
| "grad_norm": 0.5540535643108356, |
| "learning_rate": 3.0365075192218278e-05, |
| "loss": 0.4529, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.970420932878271, |
| "grad_norm": 0.4896945623597814, |
| "learning_rate": 3.033785424719252e-05, |
| "loss": 0.3787, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.9726962457337884, |
| "grad_norm": 0.5500626404394998, |
| "learning_rate": 3.0310607144689372e-05, |
| "loss": 0.4299, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.974971558589306, |
| "grad_norm": 0.5546330084531744, |
| "learning_rate": 3.0283333953651124e-05, |
| "loss": 0.4132, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.9772468714448237, |
| "grad_norm": 0.6542031906231642, |
| "learning_rate": 3.0256034743086103e-05, |
| "loss": 0.461, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.9795221843003414, |
| "grad_norm": 0.46270201346529116, |
| "learning_rate": 3.022870958206845e-05, |
| "loss": 0.366, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.981797497155859, |
| "grad_norm": 0.6868482518027066, |
| "learning_rate": 3.0201358539737976e-05, |
| "loss": 0.4794, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.9840728100113765, |
| "grad_norm": 0.48697359161478826, |
| "learning_rate": 3.017398168529997e-05, |
| "loss": 0.3659, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.9863481228668942, |
| "grad_norm": 0.7213911255238944, |
| "learning_rate": 3.0146579088025053e-05, |
| "loss": 0.4517, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.988623435722412, |
| "grad_norm": 0.5315352661478399, |
| "learning_rate": 3.0119150817248966e-05, |
| "loss": 0.459, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.9908987485779295, |
| "grad_norm": 0.5241257258878973, |
| "learning_rate": 3.0091696942372412e-05, |
| "loss": 0.4103, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.993174061433447, |
| "grad_norm": 0.5019935569702207, |
| "learning_rate": 3.006421753286088e-05, |
| "loss": 0.4114, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.9954493742889647, |
| "grad_norm": 0.5572188267356125, |
| "learning_rate": 3.0036712658244475e-05, |
| "loss": 0.4606, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.9977246871444825, |
| "grad_norm": 0.4729138466778679, |
| "learning_rate": 3.0009182388117718e-05, |
| "loss": 0.4495, |
| "step": 878 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.8733140039301539, |
| "learning_rate": 2.9981626792139414e-05, |
| "loss": 0.6445, |
| "step": 879 |
| }, |
| { |
| "epoch": 2.0022753128555175, |
| "grad_norm": 0.7126446895469605, |
| "learning_rate": 2.9954045940032423e-05, |
| "loss": 0.3322, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.0045506257110355, |
| "grad_norm": 0.5799465206761989, |
| "learning_rate": 2.9926439901583524e-05, |
| "loss": 0.3348, |
| "step": 881 |
| }, |
| { |
| "epoch": 2.006825938566553, |
| "grad_norm": 0.7521511832974017, |
| "learning_rate": 2.9898808746643223e-05, |
| "loss": 0.3437, |
| "step": 882 |
| }, |
| { |
| "epoch": 2.0091012514220705, |
| "grad_norm": 0.634315449511539, |
| "learning_rate": 2.987115254512557e-05, |
| "loss": 0.3449, |
| "step": 883 |
| }, |
| { |
| "epoch": 2.011376564277588, |
| "grad_norm": 0.6844751831529167, |
| "learning_rate": 2.9843471367008003e-05, |
| "loss": 0.3331, |
| "step": 884 |
| }, |
| { |
| "epoch": 2.013651877133106, |
| "grad_norm": 0.4814286536478195, |
| "learning_rate": 2.981576528233114e-05, |
| "loss": 0.3264, |
| "step": 885 |
| }, |
| { |
| "epoch": 2.0159271899886235, |
| "grad_norm": 0.5467934829900717, |
| "learning_rate": 2.9788034361198633e-05, |
| "loss": 0.2875, |
| "step": 886 |
| }, |
| { |
| "epoch": 2.018202502844141, |
| "grad_norm": 0.5431794266712123, |
| "learning_rate": 2.976027867377697e-05, |
| "loss": 0.3433, |
| "step": 887 |
| }, |
| { |
| "epoch": 2.0204778156996586, |
| "grad_norm": 0.5683506352212406, |
| "learning_rate": 2.9732498290295296e-05, |
| "loss": 0.3436, |
| "step": 888 |
| }, |
| { |
| "epoch": 2.0227531285551765, |
| "grad_norm": 0.5409437069222102, |
| "learning_rate": 2.9704693281045272e-05, |
| "loss": 0.348, |
| "step": 889 |
| }, |
| { |
| "epoch": 2.025028441410694, |
| "grad_norm": 0.5595974396490498, |
| "learning_rate": 2.9676863716380845e-05, |
| "loss": 0.3002, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.0273037542662116, |
| "grad_norm": 0.5072666611966352, |
| "learning_rate": 2.9649009666718086e-05, |
| "loss": 0.3334, |
| "step": 891 |
| }, |
| { |
| "epoch": 2.029579067121729, |
| "grad_norm": 0.44330222985587886, |
| "learning_rate": 2.9621131202535042e-05, |
| "loss": 0.308, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.031854379977247, |
| "grad_norm": 0.5306386567420092, |
| "learning_rate": 2.9593228394371527e-05, |
| "loss": 0.3104, |
| "step": 893 |
| }, |
| { |
| "epoch": 2.0341296928327646, |
| "grad_norm": 0.4019692515169545, |
| "learning_rate": 2.9565301312828945e-05, |
| "loss": 0.3237, |
| "step": 894 |
| }, |
| { |
| "epoch": 2.036405005688282, |
| "grad_norm": 0.5227451720787013, |
| "learning_rate": 2.9537350028570126e-05, |
| "loss": 0.3499, |
| "step": 895 |
| }, |
| { |
| "epoch": 2.0386803185437996, |
| "grad_norm": 0.42757714521426105, |
| "learning_rate": 2.950937461231913e-05, |
| "loss": 0.3273, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.0409556313993176, |
| "grad_norm": 0.47745497100808654, |
| "learning_rate": 2.948137513486109e-05, |
| "loss": 0.3053, |
| "step": 897 |
| }, |
| { |
| "epoch": 2.043230944254835, |
| "grad_norm": 0.5325164876648694, |
| "learning_rate": 2.9453351667042008e-05, |
| "loss": 0.3448, |
| "step": 898 |
| }, |
| { |
| "epoch": 2.0455062571103526, |
| "grad_norm": 0.47210366776632884, |
| "learning_rate": 2.94253042797686e-05, |
| "loss": 0.293, |
| "step": 899 |
| }, |
| { |
| "epoch": 2.04778156996587, |
| "grad_norm": 0.466854126612842, |
| "learning_rate": 2.9397233044008092e-05, |
| "loss": 0.2955, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.050056882821388, |
| "grad_norm": 0.5005964391349274, |
| "learning_rate": 2.936913803078806e-05, |
| "loss": 0.3343, |
| "step": 901 |
| }, |
| { |
| "epoch": 2.0523321956769056, |
| "grad_norm": 0.4651126962723272, |
| "learning_rate": 2.9341019311196247e-05, |
| "loss": 0.3216, |
| "step": 902 |
| }, |
| { |
| "epoch": 2.054607508532423, |
| "grad_norm": 0.5278052326421759, |
| "learning_rate": 2.931287695638037e-05, |
| "loss": 0.3174, |
| "step": 903 |
| }, |
| { |
| "epoch": 2.0568828213879407, |
| "grad_norm": 0.4055235656855877, |
| "learning_rate": 2.9284711037547945e-05, |
| "loss": 0.3232, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.0591581342434586, |
| "grad_norm": 0.557342098786895, |
| "learning_rate": 2.925652162596613e-05, |
| "loss": 0.3338, |
| "step": 905 |
| }, |
| { |
| "epoch": 2.061433447098976, |
| "grad_norm": 0.4535620497514134, |
| "learning_rate": 2.9228308792961523e-05, |
| "loss": 0.3455, |
| "step": 906 |
| }, |
| { |
| "epoch": 2.0637087599544937, |
| "grad_norm": 0.509067194030193, |
| "learning_rate": 2.9200072609919957e-05, |
| "loss": 0.3469, |
| "step": 907 |
| }, |
| { |
| "epoch": 2.065984072810011, |
| "grad_norm": 0.446324127596061, |
| "learning_rate": 2.917181314828638e-05, |
| "loss": 0.3484, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.068259385665529, |
| "grad_norm": 0.5670420846161346, |
| "learning_rate": 2.914353047956463e-05, |
| "loss": 0.3565, |
| "step": 909 |
| }, |
| { |
| "epoch": 2.0705346985210467, |
| "grad_norm": 0.48304097423967896, |
| "learning_rate": 2.9115224675317252e-05, |
| "loss": 0.3396, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.072810011376564, |
| "grad_norm": 0.4596146351472507, |
| "learning_rate": 2.908689580716534e-05, |
| "loss": 0.3507, |
| "step": 911 |
| }, |
| { |
| "epoch": 2.0750853242320817, |
| "grad_norm": 0.5183922247868473, |
| "learning_rate": 2.9058543946788357e-05, |
| "loss": 0.3166, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.0773606370875997, |
| "grad_norm": 0.6011178626020073, |
| "learning_rate": 2.9030169165923926e-05, |
| "loss": 0.3329, |
| "step": 913 |
| }, |
| { |
| "epoch": 2.079635949943117, |
| "grad_norm": 0.4313193879440631, |
| "learning_rate": 2.9001771536367667e-05, |
| "loss": 0.2962, |
| "step": 914 |
| }, |
| { |
| "epoch": 2.0819112627986347, |
| "grad_norm": 0.5766905270202347, |
| "learning_rate": 2.897335112997302e-05, |
| "loss": 0.3343, |
| "step": 915 |
| }, |
| { |
| "epoch": 2.0841865756541527, |
| "grad_norm": 0.4304243154040951, |
| "learning_rate": 2.8944908018651055e-05, |
| "loss": 0.3284, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.08646188850967, |
| "grad_norm": 0.534020213474336, |
| "learning_rate": 2.8916442274370283e-05, |
| "loss": 0.332, |
| "step": 917 |
| }, |
| { |
| "epoch": 2.0887372013651877, |
| "grad_norm": 0.4993577826939032, |
| "learning_rate": 2.88879539691565e-05, |
| "loss": 0.3085, |
| "step": 918 |
| }, |
| { |
| "epoch": 2.091012514220705, |
| "grad_norm": 0.4339374501373621, |
| "learning_rate": 2.8859443175092553e-05, |
| "loss": 0.3409, |
| "step": 919 |
| }, |
| { |
| "epoch": 2.093287827076223, |
| "grad_norm": 0.4476486984150351, |
| "learning_rate": 2.8830909964318242e-05, |
| "loss": 0.3524, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.0955631399317407, |
| "grad_norm": 0.4915992580427168, |
| "learning_rate": 2.880235440903004e-05, |
| "loss": 0.3565, |
| "step": 921 |
| }, |
| { |
| "epoch": 2.0978384527872582, |
| "grad_norm": 0.4347367058608959, |
| "learning_rate": 2.8773776581480986e-05, |
| "loss": 0.346, |
| "step": 922 |
| }, |
| { |
| "epoch": 2.1001137656427757, |
| "grad_norm": 0.4023685534536312, |
| "learning_rate": 2.8745176553980454e-05, |
| "loss": 0.3116, |
| "step": 923 |
| }, |
| { |
| "epoch": 2.1023890784982937, |
| "grad_norm": 0.48203785089108603, |
| "learning_rate": 2.8716554398894015e-05, |
| "loss": 0.3392, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.1046643913538112, |
| "grad_norm": 0.40290034314805556, |
| "learning_rate": 2.868791018864321e-05, |
| "loss": 0.3332, |
| "step": 925 |
| }, |
| { |
| "epoch": 2.1069397042093287, |
| "grad_norm": 0.48397626782125486, |
| "learning_rate": 2.865924399570538e-05, |
| "loss": 0.3565, |
| "step": 926 |
| }, |
| { |
| "epoch": 2.1092150170648463, |
| "grad_norm": 0.4737904462011868, |
| "learning_rate": 2.86305558926135e-05, |
| "loss": 0.3649, |
| "step": 927 |
| }, |
| { |
| "epoch": 2.1114903299203642, |
| "grad_norm": 0.4571656530020779, |
| "learning_rate": 2.860184595195599e-05, |
| "loss": 0.3334, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.1137656427758817, |
| "grad_norm": 0.5089308015525444, |
| "learning_rate": 2.8573114246376508e-05, |
| "loss": 0.3304, |
| "step": 929 |
| }, |
| { |
| "epoch": 2.1160409556313993, |
| "grad_norm": 0.516422270099582, |
| "learning_rate": 2.854436084857379e-05, |
| "loss": 0.3197, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.118316268486917, |
| "grad_norm": 0.42936517494553617, |
| "learning_rate": 2.8515585831301456e-05, |
| "loss": 0.3103, |
| "step": 931 |
| }, |
| { |
| "epoch": 2.1205915813424348, |
| "grad_norm": 0.4208030605335549, |
| "learning_rate": 2.848678926736784e-05, |
| "loss": 0.2928, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.1228668941979523, |
| "grad_norm": 0.40310708599010403, |
| "learning_rate": 2.845797122963578e-05, |
| "loss": 0.296, |
| "step": 933 |
| }, |
| { |
| "epoch": 2.12514220705347, |
| "grad_norm": 0.3926937116571276, |
| "learning_rate": 2.8429131791022467e-05, |
| "loss": 0.2983, |
| "step": 934 |
| }, |
| { |
| "epoch": 2.1274175199089873, |
| "grad_norm": 0.4675033572182952, |
| "learning_rate": 2.8400271024499212e-05, |
| "loss": 0.3568, |
| "step": 935 |
| }, |
| { |
| "epoch": 2.1296928327645053, |
| "grad_norm": 0.3828039852443616, |
| "learning_rate": 2.837138900309133e-05, |
| "loss": 0.3506, |
| "step": 936 |
| }, |
| { |
| "epoch": 2.131968145620023, |
| "grad_norm": 0.47258551556235256, |
| "learning_rate": 2.834248579987787e-05, |
| "loss": 0.3265, |
| "step": 937 |
| }, |
| { |
| "epoch": 2.1342434584755403, |
| "grad_norm": 0.4112834564475498, |
| "learning_rate": 2.8313561487991527e-05, |
| "loss": 0.3123, |
| "step": 938 |
| }, |
| { |
| "epoch": 2.136518771331058, |
| "grad_norm": 0.4382940323504779, |
| "learning_rate": 2.8284616140618366e-05, |
| "loss": 0.3205, |
| "step": 939 |
| }, |
| { |
| "epoch": 2.138794084186576, |
| "grad_norm": 0.4005806317738167, |
| "learning_rate": 2.8255649830997704e-05, |
| "loss": 0.3504, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.1410693970420933, |
| "grad_norm": 0.44219833189801894, |
| "learning_rate": 2.8226662632421874e-05, |
| "loss": 0.3387, |
| "step": 941 |
| }, |
| { |
| "epoch": 2.143344709897611, |
| "grad_norm": 0.39306548433110394, |
| "learning_rate": 2.8197654618236084e-05, |
| "loss": 0.3191, |
| "step": 942 |
| }, |
| { |
| "epoch": 2.1456200227531284, |
| "grad_norm": 0.43320014160920517, |
| "learning_rate": 2.8168625861838204e-05, |
| "loss": 0.3314, |
| "step": 943 |
| }, |
| { |
| "epoch": 2.1478953356086463, |
| "grad_norm": 0.4462810447120457, |
| "learning_rate": 2.813957643667859e-05, |
| "loss": 0.3615, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.150170648464164, |
| "grad_norm": 0.5306187676101278, |
| "learning_rate": 2.811050641625988e-05, |
| "loss": 0.3316, |
| "step": 945 |
| }, |
| { |
| "epoch": 2.1524459613196814, |
| "grad_norm": 0.42718488947869915, |
| "learning_rate": 2.8081415874136846e-05, |
| "loss": 0.3358, |
| "step": 946 |
| }, |
| { |
| "epoch": 2.1547212741751993, |
| "grad_norm": 0.4008754463916256, |
| "learning_rate": 2.805230488391617e-05, |
| "loss": 0.3516, |
| "step": 947 |
| }, |
| { |
| "epoch": 2.156996587030717, |
| "grad_norm": 0.46697523347894887, |
| "learning_rate": 2.8023173519256278e-05, |
| "loss": 0.3283, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.1592718998862344, |
| "grad_norm": 0.4814370655105727, |
| "learning_rate": 2.799402185386715e-05, |
| "loss": 0.348, |
| "step": 949 |
| }, |
| { |
| "epoch": 2.161547212741752, |
| "grad_norm": 0.40967124598455246, |
| "learning_rate": 2.796484996151013e-05, |
| "loss": 0.3429, |
| "step": 950 |
| }, |
| { |
| "epoch": 2.1638225255972694, |
| "grad_norm": 0.5135630313746052, |
| "learning_rate": 2.7935657915997746e-05, |
| "loss": 0.3305, |
| "step": 951 |
| }, |
| { |
| "epoch": 2.1660978384527874, |
| "grad_norm": 0.3787931785217832, |
| "learning_rate": 2.7906445791193504e-05, |
| "loss": 0.3069, |
| "step": 952 |
| }, |
| { |
| "epoch": 2.168373151308305, |
| "grad_norm": 0.4569615318817335, |
| "learning_rate": 2.787721366101173e-05, |
| "loss": 0.3118, |
| "step": 953 |
| }, |
| { |
| "epoch": 2.1706484641638224, |
| "grad_norm": 0.4481188682798795, |
| "learning_rate": 2.7847961599417375e-05, |
| "loss": 0.3252, |
| "step": 954 |
| }, |
| { |
| "epoch": 2.1729237770193404, |
| "grad_norm": 0.48221819740301913, |
| "learning_rate": 2.7818689680425807e-05, |
| "loss": 0.3387, |
| "step": 955 |
| }, |
| { |
| "epoch": 2.175199089874858, |
| "grad_norm": 0.48264529866841904, |
| "learning_rate": 2.7789397978102643e-05, |
| "loss": 0.3377, |
| "step": 956 |
| }, |
| { |
| "epoch": 2.1774744027303754, |
| "grad_norm": 0.41290712374130284, |
| "learning_rate": 2.7760086566563547e-05, |
| "loss": 0.3895, |
| "step": 957 |
| }, |
| { |
| "epoch": 2.179749715585893, |
| "grad_norm": 0.45132639708493355, |
| "learning_rate": 2.773075551997408e-05, |
| "loss": 0.3398, |
| "step": 958 |
| }, |
| { |
| "epoch": 2.182025028441411, |
| "grad_norm": 0.4444127278026595, |
| "learning_rate": 2.7701404912549465e-05, |
| "loss": 0.3029, |
| "step": 959 |
| }, |
| { |
| "epoch": 2.1843003412969284, |
| "grad_norm": 0.4282293855511231, |
| "learning_rate": 2.767203481855441e-05, |
| "loss": 0.3293, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.186575654152446, |
| "grad_norm": 0.4222918245129199, |
| "learning_rate": 2.764264531230295e-05, |
| "loss": 0.3321, |
| "step": 961 |
| }, |
| { |
| "epoch": 2.1888509670079634, |
| "grad_norm": 0.4651754035829156, |
| "learning_rate": 2.7613236468158227e-05, |
| "loss": 0.3412, |
| "step": 962 |
| }, |
| { |
| "epoch": 2.1911262798634814, |
| "grad_norm": 0.4429753682559861, |
| "learning_rate": 2.758380836053232e-05, |
| "loss": 0.334, |
| "step": 963 |
| }, |
| { |
| "epoch": 2.193401592718999, |
| "grad_norm": 0.4506792671473799, |
| "learning_rate": 2.7554361063886042e-05, |
| "loss": 0.3802, |
| "step": 964 |
| }, |
| { |
| "epoch": 2.1956769055745164, |
| "grad_norm": 0.402698452950612, |
| "learning_rate": 2.7524894652728754e-05, |
| "loss": 0.3245, |
| "step": 965 |
| }, |
| { |
| "epoch": 2.197952218430034, |
| "grad_norm": 0.4514698831658316, |
| "learning_rate": 2.7495409201618204e-05, |
| "loss": 0.3699, |
| "step": 966 |
| }, |
| { |
| "epoch": 2.200227531285552, |
| "grad_norm": 0.44071254548311, |
| "learning_rate": 2.7465904785160286e-05, |
| "loss": 0.3095, |
| "step": 967 |
| }, |
| { |
| "epoch": 2.2025028441410694, |
| "grad_norm": 0.3974526433290124, |
| "learning_rate": 2.743638147800891e-05, |
| "loss": 0.3238, |
| "step": 968 |
| }, |
| { |
| "epoch": 2.204778156996587, |
| "grad_norm": 0.47712259703662935, |
| "learning_rate": 2.7406839354865768e-05, |
| "loss": 0.3298, |
| "step": 969 |
| }, |
| { |
| "epoch": 2.2070534698521045, |
| "grad_norm": 0.46205936519251023, |
| "learning_rate": 2.7377278490480157e-05, |
| "loss": 0.3389, |
| "step": 970 |
| }, |
| { |
| "epoch": 2.2093287827076225, |
| "grad_norm": 0.47965062020649035, |
| "learning_rate": 2.7347698959648817e-05, |
| "loss": 0.354, |
| "step": 971 |
| }, |
| { |
| "epoch": 2.21160409556314, |
| "grad_norm": 0.44557008156382216, |
| "learning_rate": 2.7318100837215688e-05, |
| "loss": 0.3425, |
| "step": 972 |
| }, |
| { |
| "epoch": 2.2138794084186575, |
| "grad_norm": 0.4269333534718295, |
| "learning_rate": 2.7288484198071778e-05, |
| "loss": 0.349, |
| "step": 973 |
| }, |
| { |
| "epoch": 2.216154721274175, |
| "grad_norm": 0.38466010268113765, |
| "learning_rate": 2.7258849117154934e-05, |
| "loss": 0.3247, |
| "step": 974 |
| }, |
| { |
| "epoch": 2.218430034129693, |
| "grad_norm": 0.5246724481013021, |
| "learning_rate": 2.7229195669449667e-05, |
| "loss": 0.3296, |
| "step": 975 |
| }, |
| { |
| "epoch": 2.2207053469852105, |
| "grad_norm": 0.4336227929999983, |
| "learning_rate": 2.7199523929986972e-05, |
| "loss": 0.3245, |
| "step": 976 |
| }, |
| { |
| "epoch": 2.222980659840728, |
| "grad_norm": 0.4608049394537325, |
| "learning_rate": 2.7169833973844095e-05, |
| "loss": 0.3348, |
| "step": 977 |
| }, |
| { |
| "epoch": 2.2252559726962455, |
| "grad_norm": 0.4335780434069244, |
| "learning_rate": 2.7140125876144416e-05, |
| "loss": 0.3621, |
| "step": 978 |
| }, |
| { |
| "epoch": 2.2275312855517635, |
| "grad_norm": 0.42545851610183605, |
| "learning_rate": 2.7110399712057195e-05, |
| "loss": 0.3233, |
| "step": 979 |
| }, |
| { |
| "epoch": 2.229806598407281, |
| "grad_norm": 0.41656534471160633, |
| "learning_rate": 2.7080655556797406e-05, |
| "loss": 0.3259, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.2320819112627985, |
| "grad_norm": 0.5495549115860415, |
| "learning_rate": 2.7050893485625547e-05, |
| "loss": 0.3298, |
| "step": 981 |
| }, |
| { |
| "epoch": 2.234357224118316, |
| "grad_norm": 0.338719254100158, |
| "learning_rate": 2.7021113573847454e-05, |
| "loss": 0.3127, |
| "step": 982 |
| }, |
| { |
| "epoch": 2.236632536973834, |
| "grad_norm": 0.575628509160119, |
| "learning_rate": 2.6991315896814098e-05, |
| "loss": 0.3127, |
| "step": 983 |
| }, |
| { |
| "epoch": 2.2389078498293515, |
| "grad_norm": 0.4018107807328143, |
| "learning_rate": 2.6961500529921395e-05, |
| "loss": 0.3189, |
| "step": 984 |
| }, |
| { |
| "epoch": 2.241183162684869, |
| "grad_norm": 0.5309139349913046, |
| "learning_rate": 2.693166754861003e-05, |
| "loss": 0.4735, |
| "step": 985 |
| }, |
| { |
| "epoch": 2.243458475540387, |
| "grad_norm": 0.40738353010271605, |
| "learning_rate": 2.6901817028365263e-05, |
| "loss": 0.3144, |
| "step": 986 |
| }, |
| { |
| "epoch": 2.2457337883959045, |
| "grad_norm": 0.49702330123818667, |
| "learning_rate": 2.6871949044716723e-05, |
| "loss": 0.3382, |
| "step": 987 |
| }, |
| { |
| "epoch": 2.248009101251422, |
| "grad_norm": 0.47987163769127383, |
| "learning_rate": 2.6842063673238223e-05, |
| "loss": 0.3334, |
| "step": 988 |
| }, |
| { |
| "epoch": 2.2502844141069396, |
| "grad_norm": 0.45620904242825305, |
| "learning_rate": 2.681216098954757e-05, |
| "loss": 0.3373, |
| "step": 989 |
| }, |
| { |
| "epoch": 2.252559726962457, |
| "grad_norm": 0.44644654191757066, |
| "learning_rate": 2.6782241069306395e-05, |
| "loss": 0.3163, |
| "step": 990 |
| }, |
| { |
| "epoch": 2.254835039817975, |
| "grad_norm": 0.4068960377449261, |
| "learning_rate": 2.6752303988219915e-05, |
| "loss": 0.3197, |
| "step": 991 |
| }, |
| { |
| "epoch": 2.2571103526734926, |
| "grad_norm": 0.45285913624947255, |
| "learning_rate": 2.6722349822036796e-05, |
| "loss": 0.3497, |
| "step": 992 |
| }, |
| { |
| "epoch": 2.25938566552901, |
| "grad_norm": 0.40760134014949057, |
| "learning_rate": 2.6692378646548904e-05, |
| "loss": 0.3426, |
| "step": 993 |
| }, |
| { |
| "epoch": 2.261660978384528, |
| "grad_norm": 0.38386299423561715, |
| "learning_rate": 2.6662390537591175e-05, |
| "loss": 0.3494, |
| "step": 994 |
| }, |
| { |
| "epoch": 2.2639362912400456, |
| "grad_norm": 0.42369854867815504, |
| "learning_rate": 2.663238557104136e-05, |
| "loss": 0.3418, |
| "step": 995 |
| }, |
| { |
| "epoch": 2.266211604095563, |
| "grad_norm": 0.41909488435243325, |
| "learning_rate": 2.6602363822819887e-05, |
| "loss": 0.3225, |
| "step": 996 |
| }, |
| { |
| "epoch": 2.2684869169510806, |
| "grad_norm": 0.3994565368867542, |
| "learning_rate": 2.6572325368889633e-05, |
| "loss": 0.3045, |
| "step": 997 |
| }, |
| { |
| "epoch": 2.2707622298065986, |
| "grad_norm": 0.388146229645343, |
| "learning_rate": 2.6542270285255756e-05, |
| "loss": 0.2995, |
| "step": 998 |
| }, |
| { |
| "epoch": 2.273037542662116, |
| "grad_norm": 0.44156111760441435, |
| "learning_rate": 2.6512198647965478e-05, |
| "loss": 0.3505, |
| "step": 999 |
| }, |
| { |
| "epoch": 2.2753128555176336, |
| "grad_norm": 0.42089597928299294, |
| "learning_rate": 2.648211053310792e-05, |
| "loss": 0.3493, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.277588168373151, |
| "grad_norm": 0.45712285437388794, |
| "learning_rate": 2.6452006016813882e-05, |
| "loss": 0.3366, |
| "step": 1001 |
| }, |
| { |
| "epoch": 2.279863481228669, |
| "grad_norm": 0.4501732377023807, |
| "learning_rate": 2.6421885175255675e-05, |
| "loss": 0.344, |
| "step": 1002 |
| }, |
| { |
| "epoch": 2.2821387940841866, |
| "grad_norm": 0.4229283632947017, |
| "learning_rate": 2.6391748084646914e-05, |
| "loss": 0.3562, |
| "step": 1003 |
| }, |
| { |
| "epoch": 2.284414106939704, |
| "grad_norm": 0.4347442412553129, |
| "learning_rate": 2.6361594821242324e-05, |
| "loss": 0.3509, |
| "step": 1004 |
| }, |
| { |
| "epoch": 2.2866894197952217, |
| "grad_norm": 0.3931610774927613, |
| "learning_rate": 2.6331425461337557e-05, |
| "loss": 0.3199, |
| "step": 1005 |
| }, |
| { |
| "epoch": 2.2889647326507396, |
| "grad_norm": 0.408117532581314, |
| "learning_rate": 2.6301240081268987e-05, |
| "loss": 0.3504, |
| "step": 1006 |
| }, |
| { |
| "epoch": 2.291240045506257, |
| "grad_norm": 0.39819784875619857, |
| "learning_rate": 2.6271038757413535e-05, |
| "loss": 0.344, |
| "step": 1007 |
| }, |
| { |
| "epoch": 2.2935153583617747, |
| "grad_norm": 0.39159211366569074, |
| "learning_rate": 2.624082156618845e-05, |
| "loss": 0.331, |
| "step": 1008 |
| }, |
| { |
| "epoch": 2.295790671217292, |
| "grad_norm": 0.4014550533237517, |
| "learning_rate": 2.621058858405113e-05, |
| "loss": 0.3268, |
| "step": 1009 |
| }, |
| { |
| "epoch": 2.29806598407281, |
| "grad_norm": 0.4243208085102565, |
| "learning_rate": 2.618033988749895e-05, |
| "loss": 0.3068, |
| "step": 1010 |
| }, |
| { |
| "epoch": 2.3003412969283277, |
| "grad_norm": 0.4076296649805579, |
| "learning_rate": 2.615007555306902e-05, |
| "loss": 0.3571, |
| "step": 1011 |
| }, |
| { |
| "epoch": 2.302616609783845, |
| "grad_norm": 0.4686341511962635, |
| "learning_rate": 2.6119795657338028e-05, |
| "loss": 0.3403, |
| "step": 1012 |
| }, |
| { |
| "epoch": 2.3048919226393627, |
| "grad_norm": 0.4336300175984805, |
| "learning_rate": 2.6089500276922038e-05, |
| "loss": 0.3534, |
| "step": 1013 |
| }, |
| { |
| "epoch": 2.3071672354948807, |
| "grad_norm": 0.4230808109642132, |
| "learning_rate": 2.60591894884763e-05, |
| "loss": 0.3202, |
| "step": 1014 |
| }, |
| { |
| "epoch": 2.309442548350398, |
| "grad_norm": 0.40936239516570677, |
| "learning_rate": 2.602886336869503e-05, |
| "loss": 0.3003, |
| "step": 1015 |
| }, |
| { |
| "epoch": 2.3117178612059157, |
| "grad_norm": 0.408650560420394, |
| "learning_rate": 2.5998521994311264e-05, |
| "loss": 0.3318, |
| "step": 1016 |
| }, |
| { |
| "epoch": 2.3139931740614337, |
| "grad_norm": 0.3858722141552388, |
| "learning_rate": 2.5968165442096603e-05, |
| "loss": 0.3194, |
| "step": 1017 |
| }, |
| { |
| "epoch": 2.316268486916951, |
| "grad_norm": 0.3982980138390431, |
| "learning_rate": 2.593779378886109e-05, |
| "loss": 0.3201, |
| "step": 1018 |
| }, |
| { |
| "epoch": 2.3185437997724687, |
| "grad_norm": 0.4551971424360926, |
| "learning_rate": 2.5907407111452942e-05, |
| "loss": 0.3546, |
| "step": 1019 |
| }, |
| { |
| "epoch": 2.3208191126279862, |
| "grad_norm": 0.401481106912197, |
| "learning_rate": 2.5877005486758424e-05, |
| "loss": 0.3405, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.3230944254835038, |
| "grad_norm": 0.4136507043623277, |
| "learning_rate": 2.5846588991701585e-05, |
| "loss": 0.3313, |
| "step": 1021 |
| }, |
| { |
| "epoch": 2.3253697383390217, |
| "grad_norm": 0.39821934668137265, |
| "learning_rate": 2.5816157703244142e-05, |
| "loss": 0.3224, |
| "step": 1022 |
| }, |
| { |
| "epoch": 2.3276450511945392, |
| "grad_norm": 0.4242575172743546, |
| "learning_rate": 2.5785711698385204e-05, |
| "loss": 0.303, |
| "step": 1023 |
| }, |
| { |
| "epoch": 2.3299203640500568, |
| "grad_norm": 0.4126931267650357, |
| "learning_rate": 2.5755251054161136e-05, |
| "loss": 0.3496, |
| "step": 1024 |
| }, |
| { |
| "epoch": 2.3321956769055747, |
| "grad_norm": 0.4289455241021369, |
| "learning_rate": 2.5724775847645345e-05, |
| "loss": 0.3159, |
| "step": 1025 |
| }, |
| { |
| "epoch": 2.3344709897610922, |
| "grad_norm": 0.4102737686333417, |
| "learning_rate": 2.569428615594808e-05, |
| "loss": 0.3412, |
| "step": 1026 |
| }, |
| { |
| "epoch": 2.3367463026166098, |
| "grad_norm": 0.4498395660714342, |
| "learning_rate": 2.5663782056216242e-05, |
| "loss": 0.3309, |
| "step": 1027 |
| }, |
| { |
| "epoch": 2.3390216154721273, |
| "grad_norm": 0.4098791457012732, |
| "learning_rate": 2.5633263625633188e-05, |
| "loss": 0.3073, |
| "step": 1028 |
| }, |
| { |
| "epoch": 2.3412969283276452, |
| "grad_norm": 0.41319480076430887, |
| "learning_rate": 2.560273094141854e-05, |
| "loss": 0.3351, |
| "step": 1029 |
| }, |
| { |
| "epoch": 2.3435722411831628, |
| "grad_norm": 0.3883968682027695, |
| "learning_rate": 2.557218408082798e-05, |
| "loss": 0.3176, |
| "step": 1030 |
| }, |
| { |
| "epoch": 2.3458475540386803, |
| "grad_norm": 0.4411653876987364, |
| "learning_rate": 2.5541623121153053e-05, |
| "loss": 0.3256, |
| "step": 1031 |
| }, |
| { |
| "epoch": 2.348122866894198, |
| "grad_norm": 0.45631384309745954, |
| "learning_rate": 2.5511048139720993e-05, |
| "loss": 0.3578, |
| "step": 1032 |
| }, |
| { |
| "epoch": 2.3503981797497158, |
| "grad_norm": 0.39850839240719127, |
| "learning_rate": 2.5480459213894514e-05, |
| "loss": 0.3396, |
| "step": 1033 |
| }, |
| { |
| "epoch": 2.3526734926052333, |
| "grad_norm": 0.4207268711095779, |
| "learning_rate": 2.5449856421071603e-05, |
| "loss": 0.3104, |
| "step": 1034 |
| }, |
| { |
| "epoch": 2.354948805460751, |
| "grad_norm": 0.35535500820002625, |
| "learning_rate": 2.5419239838685325e-05, |
| "loss": 0.3425, |
| "step": 1035 |
| }, |
| { |
| "epoch": 2.3572241183162683, |
| "grad_norm": 0.4101822274977704, |
| "learning_rate": 2.5388609544203653e-05, |
| "loss": 0.3667, |
| "step": 1036 |
| }, |
| { |
| "epoch": 2.3594994311717863, |
| "grad_norm": 0.4104696919475045, |
| "learning_rate": 2.535796561512926e-05, |
| "loss": 0.3478, |
| "step": 1037 |
| }, |
| { |
| "epoch": 2.361774744027304, |
| "grad_norm": 0.4190560418201998, |
| "learning_rate": 2.5327308128999302e-05, |
| "loss": 0.3573, |
| "step": 1038 |
| }, |
| { |
| "epoch": 2.3640500568828213, |
| "grad_norm": 0.3606297237914685, |
| "learning_rate": 2.529663716338524e-05, |
| "loss": 0.3338, |
| "step": 1039 |
| }, |
| { |
| "epoch": 2.366325369738339, |
| "grad_norm": 0.474284737988354, |
| "learning_rate": 2.526595279589265e-05, |
| "loss": 0.3285, |
| "step": 1040 |
| }, |
| { |
| "epoch": 2.368600682593857, |
| "grad_norm": 0.3933117024944243, |
| "learning_rate": 2.5235255104161013e-05, |
| "loss": 0.3131, |
| "step": 1041 |
| }, |
| { |
| "epoch": 2.3708759954493743, |
| "grad_norm": 0.45174667825198905, |
| "learning_rate": 2.5204544165863528e-05, |
| "loss": 0.2943, |
| "step": 1042 |
| }, |
| { |
| "epoch": 2.373151308304892, |
| "grad_norm": 0.3950012310464477, |
| "learning_rate": 2.5173820058706902e-05, |
| "loss": 0.3455, |
| "step": 1043 |
| }, |
| { |
| "epoch": 2.3754266211604094, |
| "grad_norm": 0.4622525221261252, |
| "learning_rate": 2.5143082860431173e-05, |
| "loss": 0.3455, |
| "step": 1044 |
| }, |
| { |
| "epoch": 2.3777019340159273, |
| "grad_norm": 0.46850316974346173, |
| "learning_rate": 2.5112332648809495e-05, |
| "loss": 0.3464, |
| "step": 1045 |
| }, |
| { |
| "epoch": 2.379977246871445, |
| "grad_norm": 0.4148884520885508, |
| "learning_rate": 2.508156950164796e-05, |
| "loss": 0.3103, |
| "step": 1046 |
| }, |
| { |
| "epoch": 2.3822525597269624, |
| "grad_norm": 0.470572029624604, |
| "learning_rate": 2.505079349678538e-05, |
| "loss": 0.3459, |
| "step": 1047 |
| }, |
| { |
| "epoch": 2.3845278725824803, |
| "grad_norm": 0.4695289856062143, |
| "learning_rate": 2.50200047120931e-05, |
| "loss": 0.3265, |
| "step": 1048 |
| }, |
| { |
| "epoch": 2.386803185437998, |
| "grad_norm": 0.4100169364168893, |
| "learning_rate": 2.4989203225474807e-05, |
| "loss": 0.3053, |
| "step": 1049 |
| }, |
| { |
| "epoch": 2.3890784982935154, |
| "grad_norm": 0.5321799194490549, |
| "learning_rate": 2.4958389114866326e-05, |
| "loss": 0.3325, |
| "step": 1050 |
| }, |
| { |
| "epoch": 2.391353811149033, |
| "grad_norm": 0.4159936423428224, |
| "learning_rate": 2.492756245823542e-05, |
| "loss": 0.3399, |
| "step": 1051 |
| }, |
| { |
| "epoch": 2.3936291240045504, |
| "grad_norm": 0.4266756865011224, |
| "learning_rate": 2.4896723333581607e-05, |
| "loss": 0.365, |
| "step": 1052 |
| }, |
| { |
| "epoch": 2.3959044368600684, |
| "grad_norm": 0.4407767461713438, |
| "learning_rate": 2.4865871818935935e-05, |
| "loss": 0.3496, |
| "step": 1053 |
| }, |
| { |
| "epoch": 2.398179749715586, |
| "grad_norm": 0.39165662572831933, |
| "learning_rate": 2.4835007992360817e-05, |
| "loss": 0.3469, |
| "step": 1054 |
| }, |
| { |
| "epoch": 2.4004550625711034, |
| "grad_norm": 0.446346752400498, |
| "learning_rate": 2.4804131931949823e-05, |
| "loss": 0.3126, |
| "step": 1055 |
| }, |
| { |
| "epoch": 2.4027303754266214, |
| "grad_norm": 0.43032428277877155, |
| "learning_rate": 2.4773243715827452e-05, |
| "loss": 0.3185, |
| "step": 1056 |
| }, |
| { |
| "epoch": 2.405005688282139, |
| "grad_norm": 0.4507009034591805, |
| "learning_rate": 2.4742343422148986e-05, |
| "loss": 0.3161, |
| "step": 1057 |
| }, |
| { |
| "epoch": 2.4072810011376564, |
| "grad_norm": 0.4236196606036691, |
| "learning_rate": 2.4711431129100252e-05, |
| "loss": 0.322, |
| "step": 1058 |
| }, |
| { |
| "epoch": 2.409556313993174, |
| "grad_norm": 0.5011939156733887, |
| "learning_rate": 2.4680506914897456e-05, |
| "loss": 0.3487, |
| "step": 1059 |
| }, |
| { |
| "epoch": 2.4118316268486915, |
| "grad_norm": 0.431184104034181, |
| "learning_rate": 2.4649570857786928e-05, |
| "loss": 0.3408, |
| "step": 1060 |
| }, |
| { |
| "epoch": 2.4141069397042094, |
| "grad_norm": 0.46561161383770455, |
| "learning_rate": 2.461862303604502e-05, |
| "loss": 0.3393, |
| "step": 1061 |
| }, |
| { |
| "epoch": 2.416382252559727, |
| "grad_norm": 0.46307110671585516, |
| "learning_rate": 2.4587663527977802e-05, |
| "loss": 0.3212, |
| "step": 1062 |
| }, |
| { |
| "epoch": 2.4186575654152445, |
| "grad_norm": 0.38832680834240807, |
| "learning_rate": 2.455669241192095e-05, |
| "loss": 0.3281, |
| "step": 1063 |
| }, |
| { |
| "epoch": 2.4209328782707624, |
| "grad_norm": 0.5036416899794153, |
| "learning_rate": 2.452570976623948e-05, |
| "loss": 0.3463, |
| "step": 1064 |
| }, |
| { |
| "epoch": 2.42320819112628, |
| "grad_norm": 0.36768104349960495, |
| "learning_rate": 2.449471566932761e-05, |
| "loss": 0.3265, |
| "step": 1065 |
| }, |
| { |
| "epoch": 2.4254835039817975, |
| "grad_norm": 0.4706364182344478, |
| "learning_rate": 2.4463710199608503e-05, |
| "loss": 0.3456, |
| "step": 1066 |
| }, |
| { |
| "epoch": 2.427758816837315, |
| "grad_norm": 0.5005365274468633, |
| "learning_rate": 2.4432693435534128e-05, |
| "loss": 0.3667, |
| "step": 1067 |
| }, |
| { |
| "epoch": 2.430034129692833, |
| "grad_norm": 0.4230800225468822, |
| "learning_rate": 2.4401665455585004e-05, |
| "loss": 0.3327, |
| "step": 1068 |
| }, |
| { |
| "epoch": 2.4323094425483505, |
| "grad_norm": 0.4669416782022287, |
| "learning_rate": 2.4370626338270052e-05, |
| "loss": 0.3316, |
| "step": 1069 |
| }, |
| { |
| "epoch": 2.434584755403868, |
| "grad_norm": 0.4327164651401064, |
| "learning_rate": 2.4339576162126362e-05, |
| "loss": 0.3356, |
| "step": 1070 |
| }, |
| { |
| "epoch": 2.4368600682593855, |
| "grad_norm": 0.39994880861025606, |
| "learning_rate": 2.430851500571901e-05, |
| "loss": 0.3566, |
| "step": 1071 |
| }, |
| { |
| "epoch": 2.4391353811149035, |
| "grad_norm": 0.45847337574158525, |
| "learning_rate": 2.4277442947640845e-05, |
| "loss": 0.3484, |
| "step": 1072 |
| }, |
| { |
| "epoch": 2.441410693970421, |
| "grad_norm": 0.4744507089590578, |
| "learning_rate": 2.4246360066512316e-05, |
| "loss": 0.3315, |
| "step": 1073 |
| }, |
| { |
| "epoch": 2.4436860068259385, |
| "grad_norm": 0.3833324983793694, |
| "learning_rate": 2.4215266440981245e-05, |
| "loss": 0.3307, |
| "step": 1074 |
| }, |
| { |
| "epoch": 2.445961319681456, |
| "grad_norm": 0.50740565370416, |
| "learning_rate": 2.418416214972265e-05, |
| "loss": 0.3287, |
| "step": 1075 |
| }, |
| { |
| "epoch": 2.448236632536974, |
| "grad_norm": 0.46729101607621365, |
| "learning_rate": 2.415304727143851e-05, |
| "loss": 0.3284, |
| "step": 1076 |
| }, |
| { |
| "epoch": 2.4505119453924915, |
| "grad_norm": 0.42570906698675304, |
| "learning_rate": 2.4121921884857638e-05, |
| "loss": 0.3249, |
| "step": 1077 |
| }, |
| { |
| "epoch": 2.452787258248009, |
| "grad_norm": 0.4597186479797924, |
| "learning_rate": 2.4090786068735387e-05, |
| "loss": 0.3527, |
| "step": 1078 |
| }, |
| { |
| "epoch": 2.4550625711035265, |
| "grad_norm": 0.4425804779532458, |
| "learning_rate": 2.4059639901853552e-05, |
| "loss": 0.332, |
| "step": 1079 |
| }, |
| { |
| "epoch": 2.4573378839590445, |
| "grad_norm": 0.40481512380156626, |
| "learning_rate": 2.4028483463020053e-05, |
| "loss": 0.3341, |
| "step": 1080 |
| }, |
| { |
| "epoch": 2.459613196814562, |
| "grad_norm": 0.4030421736394406, |
| "learning_rate": 2.3997316831068858e-05, |
| "loss": 0.3066, |
| "step": 1081 |
| }, |
| { |
| "epoch": 2.4618885096700796, |
| "grad_norm": 0.43379563589411485, |
| "learning_rate": 2.3966140084859698e-05, |
| "loss": 0.371, |
| "step": 1082 |
| }, |
| { |
| "epoch": 2.464163822525597, |
| "grad_norm": 0.3959467073732541, |
| "learning_rate": 2.3934953303277912e-05, |
| "loss": 0.3271, |
| "step": 1083 |
| }, |
| { |
| "epoch": 2.466439135381115, |
| "grad_norm": 0.3831467603215834, |
| "learning_rate": 2.39037565652342e-05, |
| "loss": 0.3133, |
| "step": 1084 |
| }, |
| { |
| "epoch": 2.4687144482366326, |
| "grad_norm": 0.38031635551424137, |
| "learning_rate": 2.3872549949664493e-05, |
| "loss": 0.3427, |
| "step": 1085 |
| }, |
| { |
| "epoch": 2.47098976109215, |
| "grad_norm": 0.38672451087371595, |
| "learning_rate": 2.3841333535529695e-05, |
| "loss": 0.3223, |
| "step": 1086 |
| }, |
| { |
| "epoch": 2.473265073947668, |
| "grad_norm": 0.3887032117399429, |
| "learning_rate": 2.3810107401815507e-05, |
| "loss": 0.2995, |
| "step": 1087 |
| }, |
| { |
| "epoch": 2.4755403868031856, |
| "grad_norm": 0.3840230443404789, |
| "learning_rate": 2.3778871627532207e-05, |
| "loss": 0.3063, |
| "step": 1088 |
| }, |
| { |
| "epoch": 2.477815699658703, |
| "grad_norm": 0.41811193328879326, |
| "learning_rate": 2.37476262917145e-05, |
| "loss": 0.3278, |
| "step": 1089 |
| }, |
| { |
| "epoch": 2.4800910125142206, |
| "grad_norm": 0.3903229827000629, |
| "learning_rate": 2.3716371473421242e-05, |
| "loss": 0.3341, |
| "step": 1090 |
| }, |
| { |
| "epoch": 2.482366325369738, |
| "grad_norm": 0.43928485970506703, |
| "learning_rate": 2.3685107251735336e-05, |
| "loss": 0.3299, |
| "step": 1091 |
| }, |
| { |
| "epoch": 2.484641638225256, |
| "grad_norm": 0.4118161474697317, |
| "learning_rate": 2.3653833705763417e-05, |
| "loss": 0.3105, |
| "step": 1092 |
| }, |
| { |
| "epoch": 2.4869169510807736, |
| "grad_norm": 0.4089962324637304, |
| "learning_rate": 2.3622550914635758e-05, |
| "loss": 0.3171, |
| "step": 1093 |
| }, |
| { |
| "epoch": 2.489192263936291, |
| "grad_norm": 0.4075036878096672, |
| "learning_rate": 2.3591258957506007e-05, |
| "loss": 0.3277, |
| "step": 1094 |
| }, |
| { |
| "epoch": 2.491467576791809, |
| "grad_norm": 0.4130842827382354, |
| "learning_rate": 2.3559957913551014e-05, |
| "loss": 0.3551, |
| "step": 1095 |
| }, |
| { |
| "epoch": 2.4937428896473266, |
| "grad_norm": 0.38827396600299435, |
| "learning_rate": 2.3528647861970596e-05, |
| "loss": 0.308, |
| "step": 1096 |
| }, |
| { |
| "epoch": 2.496018202502844, |
| "grad_norm": 0.3765239077400001, |
| "learning_rate": 2.349732888198739e-05, |
| "loss": 0.3047, |
| "step": 1097 |
| }, |
| { |
| "epoch": 2.4982935153583616, |
| "grad_norm": 0.42562109897231937, |
| "learning_rate": 2.3466001052846617e-05, |
| "loss": 0.3813, |
| "step": 1098 |
| }, |
| { |
| "epoch": 2.500568828213879, |
| "grad_norm": 0.43075331823376906, |
| "learning_rate": 2.3434664453815885e-05, |
| "loss": 0.3577, |
| "step": 1099 |
| }, |
| { |
| "epoch": 2.502844141069397, |
| "grad_norm": 0.4280771817374536, |
| "learning_rate": 2.3403319164184986e-05, |
| "loss": 0.3217, |
| "step": 1100 |
| }, |
| { |
| "epoch": 2.5051194539249146, |
| "grad_norm": 0.37875391419931537, |
| "learning_rate": 2.3371965263265714e-05, |
| "loss": 0.3036, |
| "step": 1101 |
| }, |
| { |
| "epoch": 2.507394766780432, |
| "grad_norm": 0.4286854105083951, |
| "learning_rate": 2.334060283039164e-05, |
| "loss": 0.3041, |
| "step": 1102 |
| }, |
| { |
| "epoch": 2.50967007963595, |
| "grad_norm": 0.42423899167875767, |
| "learning_rate": 2.3309231944917937e-05, |
| "loss": 0.3486, |
| "step": 1103 |
| }, |
| { |
| "epoch": 2.5119453924914676, |
| "grad_norm": 0.39380880826140496, |
| "learning_rate": 2.327785268622115e-05, |
| "loss": 0.3279, |
| "step": 1104 |
| }, |
| { |
| "epoch": 2.514220705346985, |
| "grad_norm": 0.44149400886970624, |
| "learning_rate": 2.3246465133699024e-05, |
| "loss": 0.2983, |
| "step": 1105 |
| }, |
| { |
| "epoch": 2.5164960182025027, |
| "grad_norm": 0.4220494995967289, |
| "learning_rate": 2.321506936677027e-05, |
| "loss": 0.3545, |
| "step": 1106 |
| }, |
| { |
| "epoch": 2.51877133105802, |
| "grad_norm": 0.39004032618224976, |
| "learning_rate": 2.318366546487442e-05, |
| "loss": 0.3285, |
| "step": 1107 |
| }, |
| { |
| "epoch": 2.521046643913538, |
| "grad_norm": 0.4293755406970271, |
| "learning_rate": 2.3152253507471543e-05, |
| "loss": 0.3224, |
| "step": 1108 |
| }, |
| { |
| "epoch": 2.5233219567690557, |
| "grad_norm": 0.4534148070646945, |
| "learning_rate": 2.312083357404213e-05, |
| "loss": 0.3233, |
| "step": 1109 |
| }, |
| { |
| "epoch": 2.5255972696245736, |
| "grad_norm": 0.4218339483415786, |
| "learning_rate": 2.3089405744086823e-05, |
| "loss": 0.3648, |
| "step": 1110 |
| }, |
| { |
| "epoch": 2.527872582480091, |
| "grad_norm": 0.4868020320366218, |
| "learning_rate": 2.305797009712628e-05, |
| "loss": 0.32, |
| "step": 1111 |
| }, |
| { |
| "epoch": 2.5301478953356087, |
| "grad_norm": 0.4063169680463677, |
| "learning_rate": 2.3026526712700893e-05, |
| "loss": 0.3233, |
| "step": 1112 |
| }, |
| { |
| "epoch": 2.532423208191126, |
| "grad_norm": 0.42980601020754616, |
| "learning_rate": 2.299507567037067e-05, |
| "loss": 0.3311, |
| "step": 1113 |
| }, |
| { |
| "epoch": 2.5346985210466437, |
| "grad_norm": 0.4881371234939724, |
| "learning_rate": 2.2963617049714974e-05, |
| "loss": 0.3578, |
| "step": 1114 |
| }, |
| { |
| "epoch": 2.5369738339021617, |
| "grad_norm": 0.4494329234196991, |
| "learning_rate": 2.2932150930332363e-05, |
| "loss": 0.3349, |
| "step": 1115 |
| }, |
| { |
| "epoch": 2.539249146757679, |
| "grad_norm": 0.5396261834748108, |
| "learning_rate": 2.2900677391840338e-05, |
| "loss": 0.3604, |
| "step": 1116 |
| }, |
| { |
| "epoch": 2.5415244596131967, |
| "grad_norm": 0.48364432647591565, |
| "learning_rate": 2.28691965138752e-05, |
| "loss": 0.3472, |
| "step": 1117 |
| }, |
| { |
| "epoch": 2.5437997724687147, |
| "grad_norm": 0.46639259119824894, |
| "learning_rate": 2.283770837609181e-05, |
| "loss": 0.3263, |
| "step": 1118 |
| }, |
| { |
| "epoch": 2.546075085324232, |
| "grad_norm": 0.4154978640587029, |
| "learning_rate": 2.28062130581634e-05, |
| "loss": 0.3114, |
| "step": 1119 |
| }, |
| { |
| "epoch": 2.5483503981797497, |
| "grad_norm": 0.5293824526466654, |
| "learning_rate": 2.277471063978137e-05, |
| "loss": 0.3668, |
| "step": 1120 |
| }, |
| { |
| "epoch": 2.5506257110352673, |
| "grad_norm": 0.4241598383229602, |
| "learning_rate": 2.2743201200655077e-05, |
| "loss": 0.3326, |
| "step": 1121 |
| }, |
| { |
| "epoch": 2.5529010238907848, |
| "grad_norm": 0.40894333863788457, |
| "learning_rate": 2.2711684820511662e-05, |
| "loss": 0.3624, |
| "step": 1122 |
| }, |
| { |
| "epoch": 2.5551763367463027, |
| "grad_norm": 0.43884989380427636, |
| "learning_rate": 2.268016157909581e-05, |
| "loss": 0.3501, |
| "step": 1123 |
| }, |
| { |
| "epoch": 2.5574516496018203, |
| "grad_norm": 0.3790395735860823, |
| "learning_rate": 2.2648631556169575e-05, |
| "loss": 0.3183, |
| "step": 1124 |
| }, |
| { |
| "epoch": 2.5597269624573378, |
| "grad_norm": 0.442061991986524, |
| "learning_rate": 2.2617094831512167e-05, |
| "loss": 0.3253, |
| "step": 1125 |
| }, |
| { |
| "epoch": 2.5620022753128557, |
| "grad_norm": 0.41881109276377154, |
| "learning_rate": 2.258555148491975e-05, |
| "loss": 0.3381, |
| "step": 1126 |
| }, |
| { |
| "epoch": 2.5642775881683733, |
| "grad_norm": 0.4437153525163446, |
| "learning_rate": 2.2554001596205262e-05, |
| "loss": 0.3525, |
| "step": 1127 |
| }, |
| { |
| "epoch": 2.5665529010238908, |
| "grad_norm": 0.47642938536248186, |
| "learning_rate": 2.2522445245198172e-05, |
| "loss": 0.331, |
| "step": 1128 |
| }, |
| { |
| "epoch": 2.5688282138794083, |
| "grad_norm": 0.4286456745031337, |
| "learning_rate": 2.249088251174431e-05, |
| "loss": 0.3145, |
| "step": 1129 |
| }, |
| { |
| "epoch": 2.571103526734926, |
| "grad_norm": 0.49626610344590827, |
| "learning_rate": 2.2459313475705645e-05, |
| "loss": 0.3579, |
| "step": 1130 |
| }, |
| { |
| "epoch": 2.573378839590444, |
| "grad_norm": 0.4400110077306319, |
| "learning_rate": 2.2427738216960116e-05, |
| "loss": 0.3362, |
| "step": 1131 |
| }, |
| { |
| "epoch": 2.5756541524459613, |
| "grad_norm": 0.40840187148347123, |
| "learning_rate": 2.239615681540139e-05, |
| "loss": 0.3255, |
| "step": 1132 |
| }, |
| { |
| "epoch": 2.577929465301479, |
| "grad_norm": 0.403734345115153, |
| "learning_rate": 2.236456935093867e-05, |
| "loss": 0.296, |
| "step": 1133 |
| }, |
| { |
| "epoch": 2.580204778156997, |
| "grad_norm": 0.4007096524305456, |
| "learning_rate": 2.233297590349652e-05, |
| "loss": 0.3683, |
| "step": 1134 |
| }, |
| { |
| "epoch": 2.5824800910125143, |
| "grad_norm": 0.43563774745733447, |
| "learning_rate": 2.2301376553014625e-05, |
| "loss": 0.3511, |
| "step": 1135 |
| }, |
| { |
| "epoch": 2.584755403868032, |
| "grad_norm": 0.42235690826665484, |
| "learning_rate": 2.226977137944762e-05, |
| "loss": 0.3442, |
| "step": 1136 |
| }, |
| { |
| "epoch": 2.5870307167235493, |
| "grad_norm": 0.41315770540383245, |
| "learning_rate": 2.223816046276487e-05, |
| "loss": 0.3364, |
| "step": 1137 |
| }, |
| { |
| "epoch": 2.589306029579067, |
| "grad_norm": 0.3999109408063849, |
| "learning_rate": 2.2206543882950257e-05, |
| "loss": 0.3463, |
| "step": 1138 |
| }, |
| { |
| "epoch": 2.591581342434585, |
| "grad_norm": 0.46969421630938196, |
| "learning_rate": 2.2174921720002018e-05, |
| "loss": 0.3375, |
| "step": 1139 |
| }, |
| { |
| "epoch": 2.5938566552901023, |
| "grad_norm": 0.41838610421310074, |
| "learning_rate": 2.214329405393249e-05, |
| "loss": 0.3319, |
| "step": 1140 |
| }, |
| { |
| "epoch": 2.59613196814562, |
| "grad_norm": 0.40684873288050794, |
| "learning_rate": 2.211166096476796e-05, |
| "loss": 0.3253, |
| "step": 1141 |
| }, |
| { |
| "epoch": 2.598407281001138, |
| "grad_norm": 0.4373340577549481, |
| "learning_rate": 2.208002253254841e-05, |
| "loss": 0.3209, |
| "step": 1142 |
| }, |
| { |
| "epoch": 2.6006825938566553, |
| "grad_norm": 0.4034129811630071, |
| "learning_rate": 2.204837883732737e-05, |
| "loss": 0.3126, |
| "step": 1143 |
| }, |
| { |
| "epoch": 2.602957906712173, |
| "grad_norm": 0.3918569162074108, |
| "learning_rate": 2.201672995917166e-05, |
| "loss": 0.3397, |
| "step": 1144 |
| }, |
| { |
| "epoch": 2.6052332195676904, |
| "grad_norm": 0.4467241134900881, |
| "learning_rate": 2.1985075978161236e-05, |
| "loss": 0.3191, |
| "step": 1145 |
| }, |
| { |
| "epoch": 2.6075085324232083, |
| "grad_norm": 0.4505057754744431, |
| "learning_rate": 2.1953416974388948e-05, |
| "loss": 0.2992, |
| "step": 1146 |
| }, |
| { |
| "epoch": 2.609783845278726, |
| "grad_norm": 0.4146029371414084, |
| "learning_rate": 2.192175302796037e-05, |
| "loss": 0.3285, |
| "step": 1147 |
| }, |
| { |
| "epoch": 2.6120591581342434, |
| "grad_norm": 0.4292927317289128, |
| "learning_rate": 2.1890084218993577e-05, |
| "loss": 0.3262, |
| "step": 1148 |
| }, |
| { |
| "epoch": 2.6143344709897613, |
| "grad_norm": 0.44875236167205973, |
| "learning_rate": 2.185841062761893e-05, |
| "loss": 0.3299, |
| "step": 1149 |
| }, |
| { |
| "epoch": 2.616609783845279, |
| "grad_norm": 0.38374058776773967, |
| "learning_rate": 2.1826732333978924e-05, |
| "loss": 0.3214, |
| "step": 1150 |
| }, |
| { |
| "epoch": 2.6188850967007964, |
| "grad_norm": 0.40845814985241696, |
| "learning_rate": 2.179504941822793e-05, |
| "loss": 0.2941, |
| "step": 1151 |
| }, |
| { |
| "epoch": 2.621160409556314, |
| "grad_norm": 0.4245944997869959, |
| "learning_rate": 2.1763361960532014e-05, |
| "loss": 0.3632, |
| "step": 1152 |
| }, |
| { |
| "epoch": 2.6234357224118314, |
| "grad_norm": 0.4124890178533331, |
| "learning_rate": 2.173167004106874e-05, |
| "loss": 0.3517, |
| "step": 1153 |
| }, |
| { |
| "epoch": 2.6257110352673494, |
| "grad_norm": 0.4703164987096634, |
| "learning_rate": 2.1699973740026962e-05, |
| "loss": 0.3438, |
| "step": 1154 |
| }, |
| { |
| "epoch": 2.627986348122867, |
| "grad_norm": 0.46930203738129295, |
| "learning_rate": 2.166827313760662e-05, |
| "loss": 0.3491, |
| "step": 1155 |
| }, |
| { |
| "epoch": 2.6302616609783844, |
| "grad_norm": 0.4391271679349783, |
| "learning_rate": 2.1636568314018534e-05, |
| "loss": 0.299, |
| "step": 1156 |
| }, |
| { |
| "epoch": 2.6325369738339024, |
| "grad_norm": 0.4299397208820665, |
| "learning_rate": 2.1604859349484203e-05, |
| "loss": 0.366, |
| "step": 1157 |
| }, |
| { |
| "epoch": 2.63481228668942, |
| "grad_norm": 0.4180615865892131, |
| "learning_rate": 2.1573146324235618e-05, |
| "loss": 0.3353, |
| "step": 1158 |
| }, |
| { |
| "epoch": 2.6370875995449374, |
| "grad_norm": 0.48576423446336753, |
| "learning_rate": 2.1541429318515026e-05, |
| "loss": 0.3575, |
| "step": 1159 |
| }, |
| { |
| "epoch": 2.639362912400455, |
| "grad_norm": 0.3848453312354331, |
| "learning_rate": 2.150970841257476e-05, |
| "loss": 0.3458, |
| "step": 1160 |
| }, |
| { |
| "epoch": 2.6416382252559725, |
| "grad_norm": 0.3907750301355957, |
| "learning_rate": 2.1477983686677003e-05, |
| "loss": 0.3027, |
| "step": 1161 |
| }, |
| { |
| "epoch": 2.6439135381114904, |
| "grad_norm": 0.3974183662564141, |
| "learning_rate": 2.1446255221093636e-05, |
| "loss": 0.338, |
| "step": 1162 |
| }, |
| { |
| "epoch": 2.646188850967008, |
| "grad_norm": 0.3845848149906779, |
| "learning_rate": 2.141452309610597e-05, |
| "loss": 0.3481, |
| "step": 1163 |
| }, |
| { |
| "epoch": 2.6484641638225255, |
| "grad_norm": 0.4373210160414838, |
| "learning_rate": 2.138278739200459e-05, |
| "loss": 0.345, |
| "step": 1164 |
| }, |
| { |
| "epoch": 2.6507394766780434, |
| "grad_norm": 0.3824463733252028, |
| "learning_rate": 2.135104818908913e-05, |
| "loss": 0.3208, |
| "step": 1165 |
| }, |
| { |
| "epoch": 2.653014789533561, |
| "grad_norm": 0.4528584606435942, |
| "learning_rate": 2.131930556766809e-05, |
| "loss": 0.3226, |
| "step": 1166 |
| }, |
| { |
| "epoch": 2.6552901023890785, |
| "grad_norm": 0.4058424243479965, |
| "learning_rate": 2.1287559608058607e-05, |
| "loss": 0.3113, |
| "step": 1167 |
| }, |
| { |
| "epoch": 2.657565415244596, |
| "grad_norm": 0.4202388416175268, |
| "learning_rate": 2.125581039058627e-05, |
| "loss": 0.3404, |
| "step": 1168 |
| }, |
| { |
| "epoch": 2.6598407281001135, |
| "grad_norm": 0.42098415966269187, |
| "learning_rate": 2.122405799558491e-05, |
| "loss": 0.3406, |
| "step": 1169 |
| }, |
| { |
| "epoch": 2.6621160409556315, |
| "grad_norm": 0.4094431284142787, |
| "learning_rate": 2.11923025033964e-05, |
| "loss": 0.3483, |
| "step": 1170 |
| }, |
| { |
| "epoch": 2.664391353811149, |
| "grad_norm": 0.3761811585723381, |
| "learning_rate": 2.116054399437044e-05, |
| "loss": 0.2999, |
| "step": 1171 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.37488136915012654, |
| "learning_rate": 2.112878254886439e-05, |
| "loss": 0.3414, |
| "step": 1172 |
| }, |
| { |
| "epoch": 2.6689419795221845, |
| "grad_norm": 0.40620205902134443, |
| "learning_rate": 2.1097018247243e-05, |
| "loss": 0.3159, |
| "step": 1173 |
| }, |
| { |
| "epoch": 2.671217292377702, |
| "grad_norm": 0.4038563460802831, |
| "learning_rate": 2.106525116987828e-05, |
| "loss": 0.3459, |
| "step": 1174 |
| }, |
| { |
| "epoch": 2.6734926052332195, |
| "grad_norm": 0.41926081828150746, |
| "learning_rate": 2.103348139714925e-05, |
| "loss": 0.3623, |
| "step": 1175 |
| }, |
| { |
| "epoch": 2.675767918088737, |
| "grad_norm": 0.4616141172796574, |
| "learning_rate": 2.1001709009441748e-05, |
| "loss": 0.328, |
| "step": 1176 |
| }, |
| { |
| "epoch": 2.6780432309442546, |
| "grad_norm": 0.4362223873012969, |
| "learning_rate": 2.096993408714823e-05, |
| "loss": 0.3581, |
| "step": 1177 |
| }, |
| { |
| "epoch": 2.6803185437997725, |
| "grad_norm": 0.44938021129645267, |
| "learning_rate": 2.0938156710667577e-05, |
| "loss": 0.3368, |
| "step": 1178 |
| }, |
| { |
| "epoch": 2.68259385665529, |
| "grad_norm": 0.3971849821913564, |
| "learning_rate": 2.090637696040486e-05, |
| "loss": 0.3143, |
| "step": 1179 |
| }, |
| { |
| "epoch": 2.684869169510808, |
| "grad_norm": 0.48071925305593577, |
| "learning_rate": 2.0874594916771174e-05, |
| "loss": 0.3384, |
| "step": 1180 |
| }, |
| { |
| "epoch": 2.6871444823663255, |
| "grad_norm": 0.4381908076397643, |
| "learning_rate": 2.0842810660183402e-05, |
| "loss": 0.3455, |
| "step": 1181 |
| }, |
| { |
| "epoch": 2.689419795221843, |
| "grad_norm": 0.41950514466306293, |
| "learning_rate": 2.0811024271064038e-05, |
| "loss": 0.3389, |
| "step": 1182 |
| }, |
| { |
| "epoch": 2.6916951080773606, |
| "grad_norm": 0.4563487817057512, |
| "learning_rate": 2.0779235829840962e-05, |
| "loss": 0.3111, |
| "step": 1183 |
| }, |
| { |
| "epoch": 2.693970420932878, |
| "grad_norm": 0.4075398413704911, |
| "learning_rate": 2.074744541694726e-05, |
| "loss": 0.3253, |
| "step": 1184 |
| }, |
| { |
| "epoch": 2.696245733788396, |
| "grad_norm": 0.4024468884794903, |
| "learning_rate": 2.071565311282099e-05, |
| "loss": 0.3174, |
| "step": 1185 |
| }, |
| { |
| "epoch": 2.6985210466439136, |
| "grad_norm": 0.41381064381022864, |
| "learning_rate": 2.068385899790501e-05, |
| "loss": 0.3371, |
| "step": 1186 |
| }, |
| { |
| "epoch": 2.700796359499431, |
| "grad_norm": 0.45174817528660066, |
| "learning_rate": 2.065206315264675e-05, |
| "loss": 0.3092, |
| "step": 1187 |
| }, |
| { |
| "epoch": 2.703071672354949, |
| "grad_norm": 0.4081105192132397, |
| "learning_rate": 2.0620265657498027e-05, |
| "loss": 0.3376, |
| "step": 1188 |
| }, |
| { |
| "epoch": 2.7053469852104666, |
| "grad_norm": 0.46840498959393, |
| "learning_rate": 2.0588466592914814e-05, |
| "loss": 0.3236, |
| "step": 1189 |
| }, |
| { |
| "epoch": 2.707622298065984, |
| "grad_norm": 0.4219057713870426, |
| "learning_rate": 2.0556666039357084e-05, |
| "loss": 0.3657, |
| "step": 1190 |
| }, |
| { |
| "epoch": 2.7098976109215016, |
| "grad_norm": 0.4788694048573619, |
| "learning_rate": 2.052486407728855e-05, |
| "loss": 0.3299, |
| "step": 1191 |
| }, |
| { |
| "epoch": 2.712172923777019, |
| "grad_norm": 0.433904805616125, |
| "learning_rate": 2.0493060787176513e-05, |
| "loss": 0.3581, |
| "step": 1192 |
| }, |
| { |
| "epoch": 2.714448236632537, |
| "grad_norm": 0.3737957765771606, |
| "learning_rate": 2.046125624949161e-05, |
| "loss": 0.3351, |
| "step": 1193 |
| }, |
| { |
| "epoch": 2.7167235494880546, |
| "grad_norm": 0.4636212723722085, |
| "learning_rate": 2.042945054470766e-05, |
| "loss": 0.3623, |
| "step": 1194 |
| }, |
| { |
| "epoch": 2.718998862343572, |
| "grad_norm": 0.3880127067244525, |
| "learning_rate": 2.0397643753301403e-05, |
| "loss": 0.3585, |
| "step": 1195 |
| }, |
| { |
| "epoch": 2.72127417519909, |
| "grad_norm": 0.36654974143294056, |
| "learning_rate": 2.036583595575237e-05, |
| "loss": 0.3284, |
| "step": 1196 |
| }, |
| { |
| "epoch": 2.7235494880546076, |
| "grad_norm": 0.48034755356053316, |
| "learning_rate": 2.0334027232542595e-05, |
| "loss": 0.3599, |
| "step": 1197 |
| }, |
| { |
| "epoch": 2.725824800910125, |
| "grad_norm": 0.4418794637066644, |
| "learning_rate": 2.030221766415649e-05, |
| "loss": 0.3327, |
| "step": 1198 |
| }, |
| { |
| "epoch": 2.7281001137656427, |
| "grad_norm": 0.444200319161712, |
| "learning_rate": 2.0270407331080575e-05, |
| "loss": 0.3303, |
| "step": 1199 |
| }, |
| { |
| "epoch": 2.73037542662116, |
| "grad_norm": 0.4035643207657577, |
| "learning_rate": 2.0238596313803337e-05, |
| "loss": 0.3108, |
| "step": 1200 |
| }, |
| { |
| "epoch": 2.732650739476678, |
| "grad_norm": 0.3571362184850684, |
| "learning_rate": 2.0206784692814957e-05, |
| "loss": 0.3387, |
| "step": 1201 |
| }, |
| { |
| "epoch": 2.7349260523321957, |
| "grad_norm": 0.35134568741895283, |
| "learning_rate": 2.0174972548607183e-05, |
| "loss": 0.354, |
| "step": 1202 |
| }, |
| { |
| "epoch": 2.737201365187713, |
| "grad_norm": 0.41167143435227466, |
| "learning_rate": 2.014315996167305e-05, |
| "loss": 0.31, |
| "step": 1203 |
| }, |
| { |
| "epoch": 2.739476678043231, |
| "grad_norm": 0.3419684364181005, |
| "learning_rate": 2.0111347012506753e-05, |
| "loss": 0.3078, |
| "step": 1204 |
| }, |
| { |
| "epoch": 2.7417519908987487, |
| "grad_norm": 0.37900537937003825, |
| "learning_rate": 2.0079533781603352e-05, |
| "loss": 0.3555, |
| "step": 1205 |
| }, |
| { |
| "epoch": 2.744027303754266, |
| "grad_norm": 0.3933678389303421, |
| "learning_rate": 2.004772034945867e-05, |
| "loss": 0.3435, |
| "step": 1206 |
| }, |
| { |
| "epoch": 2.7463026166097837, |
| "grad_norm": 0.3721686834336099, |
| "learning_rate": 2.001590679656901e-05, |
| "loss": 0.333, |
| "step": 1207 |
| }, |
| { |
| "epoch": 2.748577929465301, |
| "grad_norm": 0.3885080139471526, |
| "learning_rate": 1.9984093203430996e-05, |
| "loss": 0.3395, |
| "step": 1208 |
| }, |
| { |
| "epoch": 2.750853242320819, |
| "grad_norm": 0.406519404842218, |
| "learning_rate": 1.9952279650541332e-05, |
| "loss": 0.3624, |
| "step": 1209 |
| }, |
| { |
| "epoch": 2.7531285551763367, |
| "grad_norm": 0.37505712731585505, |
| "learning_rate": 1.9920466218396655e-05, |
| "loss": 0.3239, |
| "step": 1210 |
| }, |
| { |
| "epoch": 2.755403868031854, |
| "grad_norm": 0.36610554514055926, |
| "learning_rate": 1.988865298749326e-05, |
| "loss": 0.3095, |
| "step": 1211 |
| }, |
| { |
| "epoch": 2.757679180887372, |
| "grad_norm": 0.39302726430386103, |
| "learning_rate": 1.985684003832695e-05, |
| "loss": 0.3196, |
| "step": 1212 |
| }, |
| { |
| "epoch": 2.7599544937428897, |
| "grad_norm": 0.39747897923075504, |
| "learning_rate": 1.982502745139282e-05, |
| "loss": 0.3199, |
| "step": 1213 |
| }, |
| { |
| "epoch": 2.7622298065984072, |
| "grad_norm": 0.3595503828398732, |
| "learning_rate": 1.9793215307185053e-05, |
| "loss": 0.305, |
| "step": 1214 |
| }, |
| { |
| "epoch": 2.7645051194539247, |
| "grad_norm": 0.396177560368738, |
| "learning_rate": 1.976140368619667e-05, |
| "loss": 0.3618, |
| "step": 1215 |
| }, |
| { |
| "epoch": 2.7667804323094427, |
| "grad_norm": 0.4676467159822378, |
| "learning_rate": 1.972959266891943e-05, |
| "loss": 0.3432, |
| "step": 1216 |
| }, |
| { |
| "epoch": 2.7690557451649602, |
| "grad_norm": 0.41532924076950134, |
| "learning_rate": 1.9697782335843512e-05, |
| "loss": 0.3342, |
| "step": 1217 |
| }, |
| { |
| "epoch": 2.7713310580204777, |
| "grad_norm": 0.386754144273086, |
| "learning_rate": 1.966597276745741e-05, |
| "loss": 0.3074, |
| "step": 1218 |
| }, |
| { |
| "epoch": 2.7736063708759957, |
| "grad_norm": 0.41603270499755085, |
| "learning_rate": 1.963416404424764e-05, |
| "loss": 0.3534, |
| "step": 1219 |
| }, |
| { |
| "epoch": 2.7758816837315132, |
| "grad_norm": 0.3754548451201784, |
| "learning_rate": 1.9602356246698597e-05, |
| "loss": 0.3066, |
| "step": 1220 |
| }, |
| { |
| "epoch": 2.7781569965870307, |
| "grad_norm": 0.40805461049030567, |
| "learning_rate": 1.9570549455292348e-05, |
| "loss": 0.3183, |
| "step": 1221 |
| }, |
| { |
| "epoch": 2.7804323094425483, |
| "grad_norm": 0.4006941290488259, |
| "learning_rate": 1.9538743750508394e-05, |
| "loss": 0.3018, |
| "step": 1222 |
| }, |
| { |
| "epoch": 2.782707622298066, |
| "grad_norm": 0.4156799110873144, |
| "learning_rate": 1.950693921282349e-05, |
| "loss": 0.3476, |
| "step": 1223 |
| }, |
| { |
| "epoch": 2.7849829351535837, |
| "grad_norm": 0.4697145256845964, |
| "learning_rate": 1.9475135922711457e-05, |
| "loss": 0.3574, |
| "step": 1224 |
| }, |
| { |
| "epoch": 2.7872582480091013, |
| "grad_norm": 0.3893570385324192, |
| "learning_rate": 1.944333396064292e-05, |
| "loss": 0.3426, |
| "step": 1225 |
| }, |
| { |
| "epoch": 2.789533560864619, |
| "grad_norm": 0.43090362034832597, |
| "learning_rate": 1.9411533407085192e-05, |
| "loss": 0.3393, |
| "step": 1226 |
| }, |
| { |
| "epoch": 2.7918088737201368, |
| "grad_norm": 0.42054757043063234, |
| "learning_rate": 1.9379734342501987e-05, |
| "loss": 0.3237, |
| "step": 1227 |
| }, |
| { |
| "epoch": 2.7940841865756543, |
| "grad_norm": 0.38221543475601394, |
| "learning_rate": 1.9347936847353254e-05, |
| "loss": 0.3028, |
| "step": 1228 |
| }, |
| { |
| "epoch": 2.796359499431172, |
| "grad_norm": 0.37763704583230345, |
| "learning_rate": 1.9316141002094995e-05, |
| "loss": 0.3236, |
| "step": 1229 |
| }, |
| { |
| "epoch": 2.7986348122866893, |
| "grad_norm": 0.4206697615376536, |
| "learning_rate": 1.9284346887179016e-05, |
| "loss": 0.3384, |
| "step": 1230 |
| }, |
| { |
| "epoch": 2.800910125142207, |
| "grad_norm": 0.3953685229204783, |
| "learning_rate": 1.9252554583052744e-05, |
| "loss": 0.3592, |
| "step": 1231 |
| }, |
| { |
| "epoch": 2.803185437997725, |
| "grad_norm": 0.3915164783946858, |
| "learning_rate": 1.9220764170159045e-05, |
| "loss": 0.3337, |
| "step": 1232 |
| }, |
| { |
| "epoch": 2.8054607508532423, |
| "grad_norm": 0.5187383959053034, |
| "learning_rate": 1.9188975728935965e-05, |
| "loss": 0.3502, |
| "step": 1233 |
| }, |
| { |
| "epoch": 2.80773606370876, |
| "grad_norm": 0.4636595479793326, |
| "learning_rate": 1.9157189339816608e-05, |
| "loss": 0.3574, |
| "step": 1234 |
| }, |
| { |
| "epoch": 2.810011376564278, |
| "grad_norm": 0.3752458006501017, |
| "learning_rate": 1.9125405083228833e-05, |
| "loss": 0.3211, |
| "step": 1235 |
| }, |
| { |
| "epoch": 2.8122866894197953, |
| "grad_norm": 0.422860682847553, |
| "learning_rate": 1.9093623039595144e-05, |
| "loss": 0.3473, |
| "step": 1236 |
| }, |
| { |
| "epoch": 2.814562002275313, |
| "grad_norm": 0.43453944438849534, |
| "learning_rate": 1.906184328933243e-05, |
| "loss": 0.3265, |
| "step": 1237 |
| }, |
| { |
| "epoch": 2.8168373151308304, |
| "grad_norm": 0.449663210415, |
| "learning_rate": 1.9030065912851775e-05, |
| "loss": 0.3485, |
| "step": 1238 |
| }, |
| { |
| "epoch": 2.819112627986348, |
| "grad_norm": 0.45165695123021987, |
| "learning_rate": 1.8998290990558255e-05, |
| "loss": 0.3074, |
| "step": 1239 |
| }, |
| { |
| "epoch": 2.821387940841866, |
| "grad_norm": 0.4446006954201955, |
| "learning_rate": 1.896651860285076e-05, |
| "loss": 0.3645, |
| "step": 1240 |
| }, |
| { |
| "epoch": 2.8236632536973834, |
| "grad_norm": 0.39348141345452986, |
| "learning_rate": 1.8934748830121722e-05, |
| "loss": 0.3201, |
| "step": 1241 |
| }, |
| { |
| "epoch": 2.825938566552901, |
| "grad_norm": 0.5341345156314935, |
| "learning_rate": 1.8902981752757004e-05, |
| "loss": 0.3291, |
| "step": 1242 |
| }, |
| { |
| "epoch": 2.828213879408419, |
| "grad_norm": 0.4285735656951308, |
| "learning_rate": 1.887121745113562e-05, |
| "loss": 0.332, |
| "step": 1243 |
| }, |
| { |
| "epoch": 2.8304891922639364, |
| "grad_norm": 0.4229697688971152, |
| "learning_rate": 1.8839456005629558e-05, |
| "loss": 0.3115, |
| "step": 1244 |
| }, |
| { |
| "epoch": 2.832764505119454, |
| "grad_norm": 0.4318299780335706, |
| "learning_rate": 1.8807697496603604e-05, |
| "loss": 0.4012, |
| "step": 1245 |
| }, |
| { |
| "epoch": 2.8350398179749714, |
| "grad_norm": 0.42178408290930636, |
| "learning_rate": 1.8775942004415094e-05, |
| "loss": 0.3323, |
| "step": 1246 |
| }, |
| { |
| "epoch": 2.837315130830489, |
| "grad_norm": 0.3723509665815239, |
| "learning_rate": 1.8744189609413733e-05, |
| "loss": 0.3237, |
| "step": 1247 |
| }, |
| { |
| "epoch": 2.839590443686007, |
| "grad_norm": 0.4305482774531959, |
| "learning_rate": 1.8712440391941396e-05, |
| "loss": 0.3472, |
| "step": 1248 |
| }, |
| { |
| "epoch": 2.8418657565415244, |
| "grad_norm": 0.46366347580937944, |
| "learning_rate": 1.8680694432331914e-05, |
| "loss": 0.3668, |
| "step": 1249 |
| }, |
| { |
| "epoch": 2.8441410693970424, |
| "grad_norm": 0.3705622266403713, |
| "learning_rate": 1.8648951810910878e-05, |
| "loss": 0.3143, |
| "step": 1250 |
| }, |
| { |
| "epoch": 2.84641638225256, |
| "grad_norm": 0.49100142288919724, |
| "learning_rate": 1.8617212607995423e-05, |
| "loss": 0.3751, |
| "step": 1251 |
| }, |
| { |
| "epoch": 2.8486916951080774, |
| "grad_norm": 0.3514112339230659, |
| "learning_rate": 1.8585476903894036e-05, |
| "loss": 0.2869, |
| "step": 1252 |
| }, |
| { |
| "epoch": 2.850967007963595, |
| "grad_norm": 0.4722074620404091, |
| "learning_rate": 1.855374477890637e-05, |
| "loss": 0.3468, |
| "step": 1253 |
| }, |
| { |
| "epoch": 2.8532423208191124, |
| "grad_norm": 0.45737612006959516, |
| "learning_rate": 1.8522016313323e-05, |
| "loss": 0.3847, |
| "step": 1254 |
| }, |
| { |
| "epoch": 2.8555176336746304, |
| "grad_norm": 0.4049743949427917, |
| "learning_rate": 1.849029158742525e-05, |
| "loss": 0.3611, |
| "step": 1255 |
| }, |
| { |
| "epoch": 2.857792946530148, |
| "grad_norm": 0.43874858432410946, |
| "learning_rate": 1.845857068148498e-05, |
| "loss": 0.3354, |
| "step": 1256 |
| }, |
| { |
| "epoch": 2.8600682593856654, |
| "grad_norm": 0.40492277366109236, |
| "learning_rate": 1.8426853675764386e-05, |
| "loss": 0.3296, |
| "step": 1257 |
| }, |
| { |
| "epoch": 2.8623435722411834, |
| "grad_norm": 0.3472960988318782, |
| "learning_rate": 1.8395140650515803e-05, |
| "loss": 0.3263, |
| "step": 1258 |
| }, |
| { |
| "epoch": 2.864618885096701, |
| "grad_norm": 0.44332665688391126, |
| "learning_rate": 1.8363431685981476e-05, |
| "loss": 0.331, |
| "step": 1259 |
| }, |
| { |
| "epoch": 2.8668941979522184, |
| "grad_norm": 0.42360608795436905, |
| "learning_rate": 1.8331726862393385e-05, |
| "loss": 0.3614, |
| "step": 1260 |
| }, |
| { |
| "epoch": 2.869169510807736, |
| "grad_norm": 0.38736693003836803, |
| "learning_rate": 1.830002625997304e-05, |
| "loss": 0.3335, |
| "step": 1261 |
| }, |
| { |
| "epoch": 2.8714448236632535, |
| "grad_norm": 0.4562383953624614, |
| "learning_rate": 1.8268329958931267e-05, |
| "loss": 0.3251, |
| "step": 1262 |
| }, |
| { |
| "epoch": 2.8737201365187715, |
| "grad_norm": 0.40212087556281695, |
| "learning_rate": 1.823663803946799e-05, |
| "loss": 0.3506, |
| "step": 1263 |
| }, |
| { |
| "epoch": 2.875995449374289, |
| "grad_norm": 0.4729884947349188, |
| "learning_rate": 1.8204950581772073e-05, |
| "loss": 0.3489, |
| "step": 1264 |
| }, |
| { |
| "epoch": 2.8782707622298065, |
| "grad_norm": 0.41992748172255673, |
| "learning_rate": 1.8173267666021075e-05, |
| "loss": 0.3485, |
| "step": 1265 |
| }, |
| { |
| "epoch": 2.8805460750853245, |
| "grad_norm": 0.4043140099911277, |
| "learning_rate": 1.8141589372381073e-05, |
| "loss": 0.3169, |
| "step": 1266 |
| }, |
| { |
| "epoch": 2.882821387940842, |
| "grad_norm": 0.40756018663020205, |
| "learning_rate": 1.8109915781006436e-05, |
| "loss": 0.3203, |
| "step": 1267 |
| }, |
| { |
| "epoch": 2.8850967007963595, |
| "grad_norm": 0.4684773385227538, |
| "learning_rate": 1.8078246972039633e-05, |
| "loss": 0.3073, |
| "step": 1268 |
| }, |
| { |
| "epoch": 2.887372013651877, |
| "grad_norm": 0.3723068919285976, |
| "learning_rate": 1.804658302561106e-05, |
| "loss": 0.3302, |
| "step": 1269 |
| }, |
| { |
| "epoch": 2.8896473265073945, |
| "grad_norm": 0.3673828907740494, |
| "learning_rate": 1.8014924021838774e-05, |
| "loss": 0.3176, |
| "step": 1270 |
| }, |
| { |
| "epoch": 2.8919226393629125, |
| "grad_norm": 0.4056625374216179, |
| "learning_rate": 1.7983270040828346e-05, |
| "loss": 0.3295, |
| "step": 1271 |
| }, |
| { |
| "epoch": 2.89419795221843, |
| "grad_norm": 0.32625489285710857, |
| "learning_rate": 1.795162116267264e-05, |
| "loss": 0.3181, |
| "step": 1272 |
| }, |
| { |
| "epoch": 2.8964732650739475, |
| "grad_norm": 0.3576177266546512, |
| "learning_rate": 1.7919977467451593e-05, |
| "loss": 0.3178, |
| "step": 1273 |
| }, |
| { |
| "epoch": 2.8987485779294655, |
| "grad_norm": 0.3681071605577923, |
| "learning_rate": 1.7888339035232047e-05, |
| "loss": 0.3428, |
| "step": 1274 |
| }, |
| { |
| "epoch": 2.901023890784983, |
| "grad_norm": 0.34539372920147526, |
| "learning_rate": 1.785670594606752e-05, |
| "loss": 0.333, |
| "step": 1275 |
| }, |
| { |
| "epoch": 2.9032992036405005, |
| "grad_norm": 0.37170066511097377, |
| "learning_rate": 1.782507827999799e-05, |
| "loss": 0.3254, |
| "step": 1276 |
| }, |
| { |
| "epoch": 2.905574516496018, |
| "grad_norm": 0.39952313728459865, |
| "learning_rate": 1.7793456117049746e-05, |
| "loss": 0.3253, |
| "step": 1277 |
| }, |
| { |
| "epoch": 2.9078498293515356, |
| "grad_norm": 0.354210897652707, |
| "learning_rate": 1.776183953723514e-05, |
| "loss": 0.3317, |
| "step": 1278 |
| }, |
| { |
| "epoch": 2.9101251422070535, |
| "grad_norm": 0.3652645740006678, |
| "learning_rate": 1.773022862055238e-05, |
| "loss": 0.3138, |
| "step": 1279 |
| }, |
| { |
| "epoch": 2.912400455062571, |
| "grad_norm": 0.3888678131084162, |
| "learning_rate": 1.7698623446985378e-05, |
| "loss": 0.3177, |
| "step": 1280 |
| }, |
| { |
| "epoch": 2.9146757679180886, |
| "grad_norm": 0.3861332065384473, |
| "learning_rate": 1.7667024096503485e-05, |
| "loss": 0.3341, |
| "step": 1281 |
| }, |
| { |
| "epoch": 2.9169510807736065, |
| "grad_norm": 0.3968878850605964, |
| "learning_rate": 1.7635430649061333e-05, |
| "loss": 0.3119, |
| "step": 1282 |
| }, |
| { |
| "epoch": 2.919226393629124, |
| "grad_norm": 0.3692324698525607, |
| "learning_rate": 1.7603843184598623e-05, |
| "loss": 0.303, |
| "step": 1283 |
| }, |
| { |
| "epoch": 2.9215017064846416, |
| "grad_norm": 0.3809675871957472, |
| "learning_rate": 1.757226178303989e-05, |
| "loss": 0.319, |
| "step": 1284 |
| }, |
| { |
| "epoch": 2.923777019340159, |
| "grad_norm": 0.38478746620596943, |
| "learning_rate": 1.754068652429436e-05, |
| "loss": 0.3243, |
| "step": 1285 |
| }, |
| { |
| "epoch": 2.926052332195677, |
| "grad_norm": 0.46139888708407795, |
| "learning_rate": 1.75091174882557e-05, |
| "loss": 0.3146, |
| "step": 1286 |
| }, |
| { |
| "epoch": 2.9283276450511946, |
| "grad_norm": 0.39783512322413067, |
| "learning_rate": 1.7477554754801835e-05, |
| "loss": 0.3275, |
| "step": 1287 |
| }, |
| { |
| "epoch": 2.930602957906712, |
| "grad_norm": 0.48084823548951655, |
| "learning_rate": 1.7445998403794744e-05, |
| "loss": 0.3486, |
| "step": 1288 |
| }, |
| { |
| "epoch": 2.93287827076223, |
| "grad_norm": 0.40567899003023666, |
| "learning_rate": 1.7414448515080253e-05, |
| "loss": 0.3692, |
| "step": 1289 |
| }, |
| { |
| "epoch": 2.9351535836177476, |
| "grad_norm": 0.383209141100548, |
| "learning_rate": 1.738290516848784e-05, |
| "loss": 0.3181, |
| "step": 1290 |
| }, |
| { |
| "epoch": 2.937428896473265, |
| "grad_norm": 0.43877459749694897, |
| "learning_rate": 1.7351368443830438e-05, |
| "loss": 0.3048, |
| "step": 1291 |
| }, |
| { |
| "epoch": 2.9397042093287826, |
| "grad_norm": 0.40759673361135945, |
| "learning_rate": 1.7319838420904194e-05, |
| "loss": 0.3534, |
| "step": 1292 |
| }, |
| { |
| "epoch": 2.9419795221843, |
| "grad_norm": 0.39045373617754325, |
| "learning_rate": 1.7288315179488348e-05, |
| "loss": 0.3732, |
| "step": 1293 |
| }, |
| { |
| "epoch": 2.944254835039818, |
| "grad_norm": 0.408840585877605, |
| "learning_rate": 1.725679879934493e-05, |
| "loss": 0.3264, |
| "step": 1294 |
| }, |
| { |
| "epoch": 2.9465301478953356, |
| "grad_norm": 0.3816064165282624, |
| "learning_rate": 1.722528936021864e-05, |
| "loss": 0.3316, |
| "step": 1295 |
| }, |
| { |
| "epoch": 2.948805460750853, |
| "grad_norm": 0.40041531131659197, |
| "learning_rate": 1.7193786941836608e-05, |
| "loss": 0.344, |
| "step": 1296 |
| }, |
| { |
| "epoch": 2.951080773606371, |
| "grad_norm": 0.41058583751790734, |
| "learning_rate": 1.7162291623908194e-05, |
| "loss": 0.2957, |
| "step": 1297 |
| }, |
| { |
| "epoch": 2.9533560864618886, |
| "grad_norm": 0.3798093592582125, |
| "learning_rate": 1.7130803486124806e-05, |
| "loss": 0.3269, |
| "step": 1298 |
| }, |
| { |
| "epoch": 2.955631399317406, |
| "grad_norm": 0.4104075641164096, |
| "learning_rate": 1.7099322608159676e-05, |
| "loss": 0.3102, |
| "step": 1299 |
| }, |
| { |
| "epoch": 2.9579067121729237, |
| "grad_norm": 0.40594869779106296, |
| "learning_rate": 1.7067849069667644e-05, |
| "loss": 0.3479, |
| "step": 1300 |
| }, |
| { |
| "epoch": 2.960182025028441, |
| "grad_norm": 0.3826029772595492, |
| "learning_rate": 1.703638295028503e-05, |
| "loss": 0.3491, |
| "step": 1301 |
| }, |
| { |
| "epoch": 2.962457337883959, |
| "grad_norm": 0.3721508444462159, |
| "learning_rate": 1.7004924329629335e-05, |
| "loss": 0.3328, |
| "step": 1302 |
| }, |
| { |
| "epoch": 2.9647326507394767, |
| "grad_norm": 0.4020437609324411, |
| "learning_rate": 1.6973473287299114e-05, |
| "loss": 0.3334, |
| "step": 1303 |
| }, |
| { |
| "epoch": 2.967007963594994, |
| "grad_norm": 0.393328296256328, |
| "learning_rate": 1.694202990287373e-05, |
| "loss": 0.307, |
| "step": 1304 |
| }, |
| { |
| "epoch": 2.969283276450512, |
| "grad_norm": 0.3841804775756949, |
| "learning_rate": 1.6910594255913177e-05, |
| "loss": 0.3198, |
| "step": 1305 |
| }, |
| { |
| "epoch": 2.9715585893060297, |
| "grad_norm": 0.41379994210505555, |
| "learning_rate": 1.687916642595788e-05, |
| "loss": 0.3159, |
| "step": 1306 |
| }, |
| { |
| "epoch": 2.973833902161547, |
| "grad_norm": 0.3975357129785672, |
| "learning_rate": 1.6847746492528467e-05, |
| "loss": 0.3228, |
| "step": 1307 |
| }, |
| { |
| "epoch": 2.9761092150170647, |
| "grad_norm": 0.35636419669271596, |
| "learning_rate": 1.6816334535125583e-05, |
| "loss": 0.3512, |
| "step": 1308 |
| }, |
| { |
| "epoch": 2.9783845278725822, |
| "grad_norm": 0.38436643906411494, |
| "learning_rate": 1.6784930633229734e-05, |
| "loss": 0.3247, |
| "step": 1309 |
| }, |
| { |
| "epoch": 2.9806598407281, |
| "grad_norm": 0.339825615244889, |
| "learning_rate": 1.6753534866300983e-05, |
| "loss": 0.3105, |
| "step": 1310 |
| }, |
| { |
| "epoch": 2.9829351535836177, |
| "grad_norm": 0.36635896773075227, |
| "learning_rate": 1.6722147313778855e-05, |
| "loss": 0.2991, |
| "step": 1311 |
| }, |
| { |
| "epoch": 2.9852104664391352, |
| "grad_norm": 0.3696431495703968, |
| "learning_rate": 1.669076805508207e-05, |
| "loss": 0.324, |
| "step": 1312 |
| }, |
| { |
| "epoch": 2.987485779294653, |
| "grad_norm": 0.3931605856930635, |
| "learning_rate": 1.6659397169608363e-05, |
| "loss": 0.333, |
| "step": 1313 |
| }, |
| { |
| "epoch": 2.9897610921501707, |
| "grad_norm": 0.4032923555871362, |
| "learning_rate": 1.6628034736734293e-05, |
| "loss": 0.3486, |
| "step": 1314 |
| }, |
| { |
| "epoch": 2.9920364050056882, |
| "grad_norm": 0.37102792124122086, |
| "learning_rate": 1.6596680835815024e-05, |
| "loss": 0.325, |
| "step": 1315 |
| }, |
| { |
| "epoch": 2.9943117178612058, |
| "grad_norm": 0.3843008555876691, |
| "learning_rate": 1.6565335546184118e-05, |
| "loss": 0.3367, |
| "step": 1316 |
| }, |
| { |
| "epoch": 2.9965870307167233, |
| "grad_norm": 0.3416313482353009, |
| "learning_rate": 1.6533998947153393e-05, |
| "loss": 0.3025, |
| "step": 1317 |
| }, |
| { |
| "epoch": 2.9988623435722412, |
| "grad_norm": 0.4127951133201824, |
| "learning_rate": 1.650267111801261e-05, |
| "loss": 0.3555, |
| "step": 1318 |
| }, |
| { |
| "epoch": 3.0011376564277588, |
| "grad_norm": 0.6665390187415182, |
| "learning_rate": 1.6471352138029414e-05, |
| "loss": 0.388, |
| "step": 1319 |
| }, |
| { |
| "epoch": 3.0034129692832763, |
| "grad_norm": 0.5122750406971657, |
| "learning_rate": 1.6440042086449e-05, |
| "loss": 0.2487, |
| "step": 1320 |
| }, |
| { |
| "epoch": 3.0056882821387942, |
| "grad_norm": 0.42385291143597537, |
| "learning_rate": 1.6408741042493996e-05, |
| "loss": 0.2772, |
| "step": 1321 |
| }, |
| { |
| "epoch": 3.0079635949943118, |
| "grad_norm": 0.650654703167525, |
| "learning_rate": 1.6377449085364246e-05, |
| "loss": 0.2536, |
| "step": 1322 |
| }, |
| { |
| "epoch": 3.0102389078498293, |
| "grad_norm": 0.4489795360940919, |
| "learning_rate": 1.6346166294236593e-05, |
| "loss": 0.2202, |
| "step": 1323 |
| }, |
| { |
| "epoch": 3.012514220705347, |
| "grad_norm": 0.49251096342350414, |
| "learning_rate": 1.631489274826467e-05, |
| "loss": 0.2449, |
| "step": 1324 |
| }, |
| { |
| "epoch": 3.0147895335608648, |
| "grad_norm": 0.5871177283470034, |
| "learning_rate": 1.628362852657876e-05, |
| "loss": 0.2506, |
| "step": 1325 |
| }, |
| { |
| "epoch": 3.0170648464163823, |
| "grad_norm": 0.45158096813826065, |
| "learning_rate": 1.6252373708285505e-05, |
| "loss": 0.2425, |
| "step": 1326 |
| }, |
| { |
| "epoch": 3.0193401592719, |
| "grad_norm": 0.395446791937916, |
| "learning_rate": 1.62211283724678e-05, |
| "loss": 0.2401, |
| "step": 1327 |
| }, |
| { |
| "epoch": 3.0216154721274173, |
| "grad_norm": 0.5740116419937004, |
| "learning_rate": 1.6189892598184504e-05, |
| "loss": 0.2741, |
| "step": 1328 |
| }, |
| { |
| "epoch": 3.0238907849829353, |
| "grad_norm": 0.47098133857587854, |
| "learning_rate": 1.615866646447031e-05, |
| "loss": 0.2535, |
| "step": 1329 |
| }, |
| { |
| "epoch": 3.026166097838453, |
| "grad_norm": 0.4120517637411914, |
| "learning_rate": 1.612745005033551e-05, |
| "loss": 0.2304, |
| "step": 1330 |
| }, |
| { |
| "epoch": 3.0284414106939703, |
| "grad_norm": 0.4875077502609603, |
| "learning_rate": 1.6096243434765808e-05, |
| "loss": 0.2388, |
| "step": 1331 |
| }, |
| { |
| "epoch": 3.030716723549488, |
| "grad_norm": 0.3888715470441953, |
| "learning_rate": 1.6065046696722094e-05, |
| "loss": 0.1889, |
| "step": 1332 |
| }, |
| { |
| "epoch": 3.032992036405006, |
| "grad_norm": 0.4404723669577395, |
| "learning_rate": 1.603385991514031e-05, |
| "loss": 0.3357, |
| "step": 1333 |
| }, |
| { |
| "epoch": 3.0352673492605233, |
| "grad_norm": 0.3986029461724571, |
| "learning_rate": 1.600268316893114e-05, |
| "loss": 0.2574, |
| "step": 1334 |
| }, |
| { |
| "epoch": 3.037542662116041, |
| "grad_norm": 0.37205035189042457, |
| "learning_rate": 1.597151653697995e-05, |
| "loss": 0.2527, |
| "step": 1335 |
| }, |
| { |
| "epoch": 3.039817974971559, |
| "grad_norm": 0.3889281620378717, |
| "learning_rate": 1.594036009814646e-05, |
| "loss": 0.2165, |
| "step": 1336 |
| }, |
| { |
| "epoch": 3.0420932878270763, |
| "grad_norm": 0.4038841594833209, |
| "learning_rate": 1.590921393126461e-05, |
| "loss": 0.2385, |
| "step": 1337 |
| }, |
| { |
| "epoch": 3.044368600682594, |
| "grad_norm": 0.3538892185732872, |
| "learning_rate": 1.5878078115142372e-05, |
| "loss": 0.2484, |
| "step": 1338 |
| }, |
| { |
| "epoch": 3.0466439135381114, |
| "grad_norm": 0.4581473851645579, |
| "learning_rate": 1.5846952728561496e-05, |
| "loss": 0.2906, |
| "step": 1339 |
| }, |
| { |
| "epoch": 3.0489192263936293, |
| "grad_norm": 0.3344402981437661, |
| "learning_rate": 1.581583785027736e-05, |
| "loss": 0.2073, |
| "step": 1340 |
| }, |
| { |
| "epoch": 3.051194539249147, |
| "grad_norm": 0.4125810966065969, |
| "learning_rate": 1.5784733559018762e-05, |
| "loss": 0.2686, |
| "step": 1341 |
| }, |
| { |
| "epoch": 3.0534698521046644, |
| "grad_norm": 0.37740963579799397, |
| "learning_rate": 1.5753639933487684e-05, |
| "loss": 0.2303, |
| "step": 1342 |
| }, |
| { |
| "epoch": 3.055745164960182, |
| "grad_norm": 0.3212141400616197, |
| "learning_rate": 1.572255705235916e-05, |
| "loss": 0.203, |
| "step": 1343 |
| }, |
| { |
| "epoch": 3.0580204778157, |
| "grad_norm": 0.3867866079306578, |
| "learning_rate": 1.5691484994280996e-05, |
| "loss": 0.2369, |
| "step": 1344 |
| }, |
| { |
| "epoch": 3.0602957906712174, |
| "grad_norm": 0.34234519810414454, |
| "learning_rate": 1.566042383787364e-05, |
| "loss": 0.2245, |
| "step": 1345 |
| }, |
| { |
| "epoch": 3.062571103526735, |
| "grad_norm": 0.37375415604623985, |
| "learning_rate": 1.562937366172995e-05, |
| "loss": 0.2603, |
| "step": 1346 |
| }, |
| { |
| "epoch": 3.0648464163822524, |
| "grad_norm": 0.3658935669421099, |
| "learning_rate": 1.5598334544415e-05, |
| "loss": 0.2506, |
| "step": 1347 |
| }, |
| { |
| "epoch": 3.0671217292377704, |
| "grad_norm": 0.3435077709797847, |
| "learning_rate": 1.556730656446588e-05, |
| "loss": 0.2396, |
| "step": 1348 |
| }, |
| { |
| "epoch": 3.069397042093288, |
| "grad_norm": 0.3594998561611038, |
| "learning_rate": 1.5536289800391504e-05, |
| "loss": 0.2223, |
| "step": 1349 |
| }, |
| { |
| "epoch": 3.0716723549488054, |
| "grad_norm": 0.34511347334216075, |
| "learning_rate": 1.550528433067239e-05, |
| "loss": 0.2382, |
| "step": 1350 |
| }, |
| { |
| "epoch": 3.073947667804323, |
| "grad_norm": 0.3397290693234507, |
| "learning_rate": 1.5474290233760522e-05, |
| "loss": 0.2775, |
| "step": 1351 |
| }, |
| { |
| "epoch": 3.076222980659841, |
| "grad_norm": 0.36915212461759794, |
| "learning_rate": 1.544330758807906e-05, |
| "loss": 0.243, |
| "step": 1352 |
| }, |
| { |
| "epoch": 3.0784982935153584, |
| "grad_norm": 0.3343502303670519, |
| "learning_rate": 1.5412336472022198e-05, |
| "loss": 0.2081, |
| "step": 1353 |
| }, |
| { |
| "epoch": 3.080773606370876, |
| "grad_norm": 0.3612524770320426, |
| "learning_rate": 1.5381376963954986e-05, |
| "loss": 0.2513, |
| "step": 1354 |
| }, |
| { |
| "epoch": 3.0830489192263935, |
| "grad_norm": 0.3609904278246174, |
| "learning_rate": 1.5350429142213075e-05, |
| "loss": 0.2527, |
| "step": 1355 |
| }, |
| { |
| "epoch": 3.0853242320819114, |
| "grad_norm": 0.34163028966822784, |
| "learning_rate": 1.531949308510255e-05, |
| "loss": 0.2375, |
| "step": 1356 |
| }, |
| { |
| "epoch": 3.087599544937429, |
| "grad_norm": 0.43019417255945536, |
| "learning_rate": 1.5288568870899754e-05, |
| "loss": 0.2815, |
| "step": 1357 |
| }, |
| { |
| "epoch": 3.0898748577929465, |
| "grad_norm": 0.37950466145542605, |
| "learning_rate": 1.5257656577851014e-05, |
| "loss": 0.2347, |
| "step": 1358 |
| }, |
| { |
| "epoch": 3.092150170648464, |
| "grad_norm": 0.6184686430893538, |
| "learning_rate": 1.5226756284172555e-05, |
| "loss": 0.2191, |
| "step": 1359 |
| }, |
| { |
| "epoch": 3.094425483503982, |
| "grad_norm": 0.34092582499384705, |
| "learning_rate": 1.5195868068050185e-05, |
| "loss": 0.238, |
| "step": 1360 |
| }, |
| { |
| "epoch": 3.0967007963594995, |
| "grad_norm": 0.37356227814658555, |
| "learning_rate": 1.5164992007639183e-05, |
| "loss": 0.2783, |
| "step": 1361 |
| }, |
| { |
| "epoch": 3.098976109215017, |
| "grad_norm": 0.33800457672474493, |
| "learning_rate": 1.513412818106407e-05, |
| "loss": 0.2092, |
| "step": 1362 |
| }, |
| { |
| "epoch": 3.1012514220705345, |
| "grad_norm": 0.3345134707481074, |
| "learning_rate": 1.5103276666418404e-05, |
| "loss": 0.2522, |
| "step": 1363 |
| }, |
| { |
| "epoch": 3.1035267349260525, |
| "grad_norm": 0.35279615944744536, |
| "learning_rate": 1.5072437541764585e-05, |
| "loss": 0.201, |
| "step": 1364 |
| }, |
| { |
| "epoch": 3.10580204778157, |
| "grad_norm": 0.390946887337508, |
| "learning_rate": 1.5041610885133681e-05, |
| "loss": 0.293, |
| "step": 1365 |
| }, |
| { |
| "epoch": 3.1080773606370875, |
| "grad_norm": 0.33124360951531784, |
| "learning_rate": 1.5010796774525197e-05, |
| "loss": 0.2424, |
| "step": 1366 |
| }, |
| { |
| "epoch": 3.110352673492605, |
| "grad_norm": 0.3687294463727865, |
| "learning_rate": 1.497999528790691e-05, |
| "loss": 0.25, |
| "step": 1367 |
| }, |
| { |
| "epoch": 3.112627986348123, |
| "grad_norm": 0.3592837699433852, |
| "learning_rate": 1.4949206503214631e-05, |
| "loss": 0.2348, |
| "step": 1368 |
| }, |
| { |
| "epoch": 3.1149032992036405, |
| "grad_norm": 0.32555904443408795, |
| "learning_rate": 1.4918430498352043e-05, |
| "loss": 0.2153, |
| "step": 1369 |
| }, |
| { |
| "epoch": 3.117178612059158, |
| "grad_norm": 0.3604675192479075, |
| "learning_rate": 1.4887667351190508e-05, |
| "loss": 0.2582, |
| "step": 1370 |
| }, |
| { |
| "epoch": 3.1194539249146755, |
| "grad_norm": 0.34282164163989276, |
| "learning_rate": 1.4856917139568836e-05, |
| "loss": 0.2364, |
| "step": 1371 |
| }, |
| { |
| "epoch": 3.1217292377701935, |
| "grad_norm": 0.3370745107292784, |
| "learning_rate": 1.4826179941293104e-05, |
| "loss": 0.2358, |
| "step": 1372 |
| }, |
| { |
| "epoch": 3.124004550625711, |
| "grad_norm": 0.3347171832918944, |
| "learning_rate": 1.479545583413648e-05, |
| "loss": 0.2577, |
| "step": 1373 |
| }, |
| { |
| "epoch": 3.1262798634812285, |
| "grad_norm": 0.3828614334838476, |
| "learning_rate": 1.4764744895838989e-05, |
| "loss": 0.227, |
| "step": 1374 |
| }, |
| { |
| "epoch": 3.1285551763367465, |
| "grad_norm": 0.3428856010232504, |
| "learning_rate": 1.4734047204107358e-05, |
| "loss": 0.2004, |
| "step": 1375 |
| }, |
| { |
| "epoch": 3.130830489192264, |
| "grad_norm": 0.369809547171071, |
| "learning_rate": 1.4703362836614767e-05, |
| "loss": 0.2382, |
| "step": 1376 |
| }, |
| { |
| "epoch": 3.1331058020477816, |
| "grad_norm": 0.35765496336791786, |
| "learning_rate": 1.4672691871000706e-05, |
| "loss": 0.2573, |
| "step": 1377 |
| }, |
| { |
| "epoch": 3.135381114903299, |
| "grad_norm": 0.35443189050242446, |
| "learning_rate": 1.4642034384870747e-05, |
| "loss": 0.2508, |
| "step": 1378 |
| }, |
| { |
| "epoch": 3.137656427758817, |
| "grad_norm": 0.405676939837812, |
| "learning_rate": 1.461139045579635e-05, |
| "loss": 0.2552, |
| "step": 1379 |
| }, |
| { |
| "epoch": 3.1399317406143346, |
| "grad_norm": 0.31010535796515454, |
| "learning_rate": 1.458076016131468e-05, |
| "loss": 0.2213, |
| "step": 1380 |
| }, |
| { |
| "epoch": 3.142207053469852, |
| "grad_norm": 0.36707703136955583, |
| "learning_rate": 1.4550143578928406e-05, |
| "loss": 0.2364, |
| "step": 1381 |
| }, |
| { |
| "epoch": 3.1444823663253696, |
| "grad_norm": 0.4133050899168252, |
| "learning_rate": 1.4519540786105486e-05, |
| "loss": 0.2687, |
| "step": 1382 |
| }, |
| { |
| "epoch": 3.1467576791808876, |
| "grad_norm": 0.3381717821117065, |
| "learning_rate": 1.448895186027901e-05, |
| "loss": 0.2262, |
| "step": 1383 |
| }, |
| { |
| "epoch": 3.149032992036405, |
| "grad_norm": 0.38820845950823396, |
| "learning_rate": 1.4458376878846956e-05, |
| "loss": 0.2675, |
| "step": 1384 |
| }, |
| { |
| "epoch": 3.1513083048919226, |
| "grad_norm": 0.34061659959440704, |
| "learning_rate": 1.442781591917203e-05, |
| "loss": 0.2155, |
| "step": 1385 |
| }, |
| { |
| "epoch": 3.15358361774744, |
| "grad_norm": 0.3603746448529638, |
| "learning_rate": 1.4397269058581467e-05, |
| "loss": 0.2396, |
| "step": 1386 |
| }, |
| { |
| "epoch": 3.155858930602958, |
| "grad_norm": 0.3534849537926216, |
| "learning_rate": 1.4366736374366817e-05, |
| "loss": 0.2371, |
| "step": 1387 |
| }, |
| { |
| "epoch": 3.1581342434584756, |
| "grad_norm": 0.40457147006129407, |
| "learning_rate": 1.4336217943783761e-05, |
| "loss": 0.2599, |
| "step": 1388 |
| }, |
| { |
| "epoch": 3.160409556313993, |
| "grad_norm": 0.3666157730553102, |
| "learning_rate": 1.4305713844051925e-05, |
| "loss": 0.2114, |
| "step": 1389 |
| }, |
| { |
| "epoch": 3.1626848691695106, |
| "grad_norm": 0.3294560042638884, |
| "learning_rate": 1.4275224152354658e-05, |
| "loss": 0.2038, |
| "step": 1390 |
| }, |
| { |
| "epoch": 3.1649601820250286, |
| "grad_norm": 0.3631745191632101, |
| "learning_rate": 1.4244748945838869e-05, |
| "loss": 0.2696, |
| "step": 1391 |
| }, |
| { |
| "epoch": 3.167235494880546, |
| "grad_norm": 0.37962273786727296, |
| "learning_rate": 1.4214288301614806e-05, |
| "loss": 0.2634, |
| "step": 1392 |
| }, |
| { |
| "epoch": 3.1695108077360636, |
| "grad_norm": 0.35320895287602805, |
| "learning_rate": 1.4183842296755865e-05, |
| "loss": 0.2376, |
| "step": 1393 |
| }, |
| { |
| "epoch": 3.171786120591581, |
| "grad_norm": 0.3259971048184194, |
| "learning_rate": 1.4153411008298416e-05, |
| "loss": 0.2567, |
| "step": 1394 |
| }, |
| { |
| "epoch": 3.174061433447099, |
| "grad_norm": 0.35611087287793414, |
| "learning_rate": 1.4122994513241587e-05, |
| "loss": 0.2534, |
| "step": 1395 |
| }, |
| { |
| "epoch": 3.1763367463026166, |
| "grad_norm": 0.3148922392334282, |
| "learning_rate": 1.4092592888547061e-05, |
| "loss": 0.2313, |
| "step": 1396 |
| }, |
| { |
| "epoch": 3.178612059158134, |
| "grad_norm": 0.326721621928511, |
| "learning_rate": 1.4062206211138917e-05, |
| "loss": 0.2369, |
| "step": 1397 |
| }, |
| { |
| "epoch": 3.1808873720136517, |
| "grad_norm": 0.3471825310924632, |
| "learning_rate": 1.4031834557903399e-05, |
| "loss": 0.2141, |
| "step": 1398 |
| }, |
| { |
| "epoch": 3.1831626848691696, |
| "grad_norm": 0.3290346572256407, |
| "learning_rate": 1.4001478005688744e-05, |
| "loss": 0.2045, |
| "step": 1399 |
| }, |
| { |
| "epoch": 3.185437997724687, |
| "grad_norm": 0.36785750709054627, |
| "learning_rate": 1.3971136631304978e-05, |
| "loss": 0.24, |
| "step": 1400 |
| }, |
| { |
| "epoch": 3.1877133105802047, |
| "grad_norm": 0.3360590731380939, |
| "learning_rate": 1.3940810511523708e-05, |
| "loss": 0.2346, |
| "step": 1401 |
| }, |
| { |
| "epoch": 3.189988623435722, |
| "grad_norm": 0.33235200603477055, |
| "learning_rate": 1.3910499723077965e-05, |
| "loss": 0.213, |
| "step": 1402 |
| }, |
| { |
| "epoch": 3.19226393629124, |
| "grad_norm": 0.3554694875952007, |
| "learning_rate": 1.3880204342661979e-05, |
| "loss": 0.2308, |
| "step": 1403 |
| }, |
| { |
| "epoch": 3.1945392491467577, |
| "grad_norm": 0.3368960682666124, |
| "learning_rate": 1.3849924446930985e-05, |
| "loss": 0.2642, |
| "step": 1404 |
| }, |
| { |
| "epoch": 3.196814562002275, |
| "grad_norm": 0.34050458371850023, |
| "learning_rate": 1.3819660112501054e-05, |
| "loss": 0.271, |
| "step": 1405 |
| }, |
| { |
| "epoch": 3.199089874857793, |
| "grad_norm": 0.3240902133031251, |
| "learning_rate": 1.3789411415948868e-05, |
| "loss": 0.2089, |
| "step": 1406 |
| }, |
| { |
| "epoch": 3.2013651877133107, |
| "grad_norm": 0.34732714520787034, |
| "learning_rate": 1.3759178433811554e-05, |
| "loss": 0.2398, |
| "step": 1407 |
| }, |
| { |
| "epoch": 3.203640500568828, |
| "grad_norm": 0.3619506043549804, |
| "learning_rate": 1.3728961242586474e-05, |
| "loss": 0.2597, |
| "step": 1408 |
| }, |
| { |
| "epoch": 3.2059158134243457, |
| "grad_norm": 0.32982359532241645, |
| "learning_rate": 1.3698759918731018e-05, |
| "loss": 0.2488, |
| "step": 1409 |
| }, |
| { |
| "epoch": 3.2081911262798632, |
| "grad_norm": 0.36132873965131257, |
| "learning_rate": 1.3668574538662451e-05, |
| "loss": 0.2369, |
| "step": 1410 |
| }, |
| { |
| "epoch": 3.210466439135381, |
| "grad_norm": 0.34382437372925284, |
| "learning_rate": 1.3638405178757682e-05, |
| "loss": 0.2507, |
| "step": 1411 |
| }, |
| { |
| "epoch": 3.2127417519908987, |
| "grad_norm": 0.33668161325420715, |
| "learning_rate": 1.3608251915353092e-05, |
| "loss": 0.2482, |
| "step": 1412 |
| }, |
| { |
| "epoch": 3.2150170648464163, |
| "grad_norm": 0.3340776386733362, |
| "learning_rate": 1.357811482474433e-05, |
| "loss": 0.2056, |
| "step": 1413 |
| }, |
| { |
| "epoch": 3.217292377701934, |
| "grad_norm": 0.35745487006805743, |
| "learning_rate": 1.3547993983186122e-05, |
| "loss": 0.2439, |
| "step": 1414 |
| }, |
| { |
| "epoch": 3.2195676905574517, |
| "grad_norm": 0.32125462699455243, |
| "learning_rate": 1.3517889466892085e-05, |
| "loss": 0.2188, |
| "step": 1415 |
| }, |
| { |
| "epoch": 3.2218430034129693, |
| "grad_norm": 0.31510471304694576, |
| "learning_rate": 1.348780135203453e-05, |
| "loss": 0.2128, |
| "step": 1416 |
| }, |
| { |
| "epoch": 3.2241183162684868, |
| "grad_norm": 0.3405797577146921, |
| "learning_rate": 1.3457729714744247e-05, |
| "loss": 0.2204, |
| "step": 1417 |
| }, |
| { |
| "epoch": 3.2263936291240047, |
| "grad_norm": 0.3435449909933618, |
| "learning_rate": 1.3427674631110374e-05, |
| "loss": 0.2549, |
| "step": 1418 |
| }, |
| { |
| "epoch": 3.2286689419795223, |
| "grad_norm": 0.368268378202336, |
| "learning_rate": 1.3397636177180125e-05, |
| "loss": 0.2903, |
| "step": 1419 |
| }, |
| { |
| "epoch": 3.2309442548350398, |
| "grad_norm": 0.292427137855411, |
| "learning_rate": 1.3367614428958646e-05, |
| "loss": 0.1921, |
| "step": 1420 |
| }, |
| { |
| "epoch": 3.2332195676905573, |
| "grad_norm": 0.38271334568666937, |
| "learning_rate": 1.3337609462408834e-05, |
| "loss": 0.2656, |
| "step": 1421 |
| }, |
| { |
| "epoch": 3.2354948805460753, |
| "grad_norm": 0.31727091050560885, |
| "learning_rate": 1.3307621353451096e-05, |
| "loss": 0.231, |
| "step": 1422 |
| }, |
| { |
| "epoch": 3.2377701934015928, |
| "grad_norm": 0.36004182056870804, |
| "learning_rate": 1.3277650177963208e-05, |
| "loss": 0.2786, |
| "step": 1423 |
| }, |
| { |
| "epoch": 3.2400455062571103, |
| "grad_norm": 0.31907622977065264, |
| "learning_rate": 1.3247696011780091e-05, |
| "loss": 0.2327, |
| "step": 1424 |
| }, |
| { |
| "epoch": 3.242320819112628, |
| "grad_norm": 0.33736643952931383, |
| "learning_rate": 1.3217758930693608e-05, |
| "loss": 0.2309, |
| "step": 1425 |
| }, |
| { |
| "epoch": 3.244596131968146, |
| "grad_norm": 0.31832255649693225, |
| "learning_rate": 1.3187839010452437e-05, |
| "loss": 0.2307, |
| "step": 1426 |
| }, |
| { |
| "epoch": 3.2468714448236633, |
| "grad_norm": 0.3593354547706998, |
| "learning_rate": 1.3157936326761789e-05, |
| "loss": 0.2506, |
| "step": 1427 |
| }, |
| { |
| "epoch": 3.249146757679181, |
| "grad_norm": 0.33616548685144265, |
| "learning_rate": 1.312805095528328e-05, |
| "loss": 0.2205, |
| "step": 1428 |
| }, |
| { |
| "epoch": 3.2514220705346983, |
| "grad_norm": 0.3654579069097713, |
| "learning_rate": 1.3098182971634738e-05, |
| "loss": 0.2531, |
| "step": 1429 |
| }, |
| { |
| "epoch": 3.2536973833902163, |
| "grad_norm": 0.345959063129829, |
| "learning_rate": 1.3068332451389969e-05, |
| "loss": 0.2346, |
| "step": 1430 |
| }, |
| { |
| "epoch": 3.255972696245734, |
| "grad_norm": 0.3661676468618493, |
| "learning_rate": 1.3038499470078611e-05, |
| "loss": 0.2536, |
| "step": 1431 |
| }, |
| { |
| "epoch": 3.2582480091012513, |
| "grad_norm": 0.3420232850373675, |
| "learning_rate": 1.3008684103185915e-05, |
| "loss": 0.25, |
| "step": 1432 |
| }, |
| { |
| "epoch": 3.260523321956769, |
| "grad_norm": 0.3506690952818245, |
| "learning_rate": 1.2978886426152549e-05, |
| "loss": 0.2198, |
| "step": 1433 |
| }, |
| { |
| "epoch": 3.262798634812287, |
| "grad_norm": 0.35765375060393567, |
| "learning_rate": 1.2949106514374458e-05, |
| "loss": 0.2154, |
| "step": 1434 |
| }, |
| { |
| "epoch": 3.2650739476678043, |
| "grad_norm": 0.3514554551432673, |
| "learning_rate": 1.2919344443202602e-05, |
| "loss": 0.2188, |
| "step": 1435 |
| }, |
| { |
| "epoch": 3.267349260523322, |
| "grad_norm": 0.33219201290281225, |
| "learning_rate": 1.2889600287942809e-05, |
| "loss": 0.2124, |
| "step": 1436 |
| }, |
| { |
| "epoch": 3.26962457337884, |
| "grad_norm": 0.3547778577433831, |
| "learning_rate": 1.285987412385559e-05, |
| "loss": 0.2386, |
| "step": 1437 |
| }, |
| { |
| "epoch": 3.2718998862343573, |
| "grad_norm": 0.374012599713748, |
| "learning_rate": 1.2830166026155908e-05, |
| "loss": 0.2844, |
| "step": 1438 |
| }, |
| { |
| "epoch": 3.274175199089875, |
| "grad_norm": 0.32390850806717336, |
| "learning_rate": 1.2800476070013038e-05, |
| "loss": 0.2303, |
| "step": 1439 |
| }, |
| { |
| "epoch": 3.2764505119453924, |
| "grad_norm": 0.31711858818464106, |
| "learning_rate": 1.277080433055034e-05, |
| "loss": 0.2287, |
| "step": 1440 |
| }, |
| { |
| "epoch": 3.27872582480091, |
| "grad_norm": 0.30594860363235177, |
| "learning_rate": 1.2741150882845066e-05, |
| "loss": 0.2016, |
| "step": 1441 |
| }, |
| { |
| "epoch": 3.281001137656428, |
| "grad_norm": 0.35211838532005957, |
| "learning_rate": 1.2711515801928229e-05, |
| "loss": 0.2915, |
| "step": 1442 |
| }, |
| { |
| "epoch": 3.2832764505119454, |
| "grad_norm": 0.3070214436924776, |
| "learning_rate": 1.2681899162784318e-05, |
| "loss": 0.2281, |
| "step": 1443 |
| }, |
| { |
| "epoch": 3.285551763367463, |
| "grad_norm": 0.34635081909766463, |
| "learning_rate": 1.2652301040351191e-05, |
| "loss": 0.2296, |
| "step": 1444 |
| }, |
| { |
| "epoch": 3.287827076222981, |
| "grad_norm": 0.3643612010569894, |
| "learning_rate": 1.2622721509519846e-05, |
| "loss": 0.2802, |
| "step": 1445 |
| }, |
| { |
| "epoch": 3.2901023890784984, |
| "grad_norm": 0.3387586387244522, |
| "learning_rate": 1.2593160645134237e-05, |
| "loss": 0.2057, |
| "step": 1446 |
| }, |
| { |
| "epoch": 3.292377701934016, |
| "grad_norm": 0.35441126738561124, |
| "learning_rate": 1.2563618521991093e-05, |
| "loss": 0.2627, |
| "step": 1447 |
| }, |
| { |
| "epoch": 3.2946530147895334, |
| "grad_norm": 0.3270523514329843, |
| "learning_rate": 1.2534095214839721e-05, |
| "loss": 0.2328, |
| "step": 1448 |
| }, |
| { |
| "epoch": 3.296928327645051, |
| "grad_norm": 0.3485054867825353, |
| "learning_rate": 1.2504590798381799e-05, |
| "loss": 0.2654, |
| "step": 1449 |
| }, |
| { |
| "epoch": 3.299203640500569, |
| "grad_norm": 0.33361561605989926, |
| "learning_rate": 1.247510534727125e-05, |
| "loss": 0.2233, |
| "step": 1450 |
| }, |
| { |
| "epoch": 3.3014789533560864, |
| "grad_norm": 0.3332124162613073, |
| "learning_rate": 1.2445638936113968e-05, |
| "loss": 0.254, |
| "step": 1451 |
| }, |
| { |
| "epoch": 3.303754266211604, |
| "grad_norm": 0.33396306209540555, |
| "learning_rate": 1.2416191639467683e-05, |
| "loss": 0.2813, |
| "step": 1452 |
| }, |
| { |
| "epoch": 3.306029579067122, |
| "grad_norm": 0.3459951135041794, |
| "learning_rate": 1.2386763531841776e-05, |
| "loss": 0.2358, |
| "step": 1453 |
| }, |
| { |
| "epoch": 3.3083048919226394, |
| "grad_norm": 0.33833889844367776, |
| "learning_rate": 1.2357354687697055e-05, |
| "loss": 0.2552, |
| "step": 1454 |
| }, |
| { |
| "epoch": 3.310580204778157, |
| "grad_norm": 0.3826153444150679, |
| "learning_rate": 1.2327965181445593e-05, |
| "loss": 0.2411, |
| "step": 1455 |
| }, |
| { |
| "epoch": 3.3128555176336745, |
| "grad_norm": 0.3267318396869323, |
| "learning_rate": 1.2298595087450549e-05, |
| "loss": 0.2396, |
| "step": 1456 |
| }, |
| { |
| "epoch": 3.3151308304891924, |
| "grad_norm": 0.3728421172188106, |
| "learning_rate": 1.2269244480025919e-05, |
| "loss": 0.2389, |
| "step": 1457 |
| }, |
| { |
| "epoch": 3.31740614334471, |
| "grad_norm": 0.34763878883093824, |
| "learning_rate": 1.223991343343646e-05, |
| "loss": 0.2276, |
| "step": 1458 |
| }, |
| { |
| "epoch": 3.3196814562002275, |
| "grad_norm": 0.338468563084109, |
| "learning_rate": 1.2210602021897369e-05, |
| "loss": 0.233, |
| "step": 1459 |
| }, |
| { |
| "epoch": 3.321956769055745, |
| "grad_norm": 0.3323678746338327, |
| "learning_rate": 1.21813103195742e-05, |
| "loss": 0.2561, |
| "step": 1460 |
| }, |
| { |
| "epoch": 3.324232081911263, |
| "grad_norm": 0.3318887505578868, |
| "learning_rate": 1.2152038400582628e-05, |
| "loss": 0.254, |
| "step": 1461 |
| }, |
| { |
| "epoch": 3.3265073947667805, |
| "grad_norm": 0.3284576201359955, |
| "learning_rate": 1.2122786338988268e-05, |
| "loss": 0.2174, |
| "step": 1462 |
| }, |
| { |
| "epoch": 3.328782707622298, |
| "grad_norm": 0.3213797717013886, |
| "learning_rate": 1.20935542088065e-05, |
| "loss": 0.253, |
| "step": 1463 |
| }, |
| { |
| "epoch": 3.3310580204778155, |
| "grad_norm": 0.3461505829660388, |
| "learning_rate": 1.2064342084002264e-05, |
| "loss": 0.2435, |
| "step": 1464 |
| }, |
| { |
| "epoch": 3.3333333333333335, |
| "grad_norm": 0.352184817284991, |
| "learning_rate": 1.203515003848987e-05, |
| "loss": 0.2267, |
| "step": 1465 |
| }, |
| { |
| "epoch": 3.335608646188851, |
| "grad_norm": 0.32028901572619467, |
| "learning_rate": 1.2005978146132855e-05, |
| "loss": 0.2434, |
| "step": 1466 |
| }, |
| { |
| "epoch": 3.3378839590443685, |
| "grad_norm": 0.2982423511962225, |
| "learning_rate": 1.1976826480743729e-05, |
| "loss": 0.2111, |
| "step": 1467 |
| }, |
| { |
| "epoch": 3.3401592718998865, |
| "grad_norm": 0.3770540532242069, |
| "learning_rate": 1.1947695116083835e-05, |
| "loss": 0.278, |
| "step": 1468 |
| }, |
| { |
| "epoch": 3.342434584755404, |
| "grad_norm": 0.3281814863566095, |
| "learning_rate": 1.1918584125863163e-05, |
| "loss": 0.2202, |
| "step": 1469 |
| }, |
| { |
| "epoch": 3.3447098976109215, |
| "grad_norm": 0.3388970029576235, |
| "learning_rate": 1.1889493583740124e-05, |
| "loss": 0.2699, |
| "step": 1470 |
| }, |
| { |
| "epoch": 3.346985210466439, |
| "grad_norm": 0.33285749805546927, |
| "learning_rate": 1.1860423563321417e-05, |
| "loss": 0.2385, |
| "step": 1471 |
| }, |
| { |
| "epoch": 3.3492605233219566, |
| "grad_norm": 0.37062291383489476, |
| "learning_rate": 1.18313741381618e-05, |
| "loss": 0.2523, |
| "step": 1472 |
| }, |
| { |
| "epoch": 3.3515358361774745, |
| "grad_norm": 0.304526701299871, |
| "learning_rate": 1.1802345381763918e-05, |
| "loss": 0.2197, |
| "step": 1473 |
| }, |
| { |
| "epoch": 3.353811149032992, |
| "grad_norm": 0.35064431333493157, |
| "learning_rate": 1.1773337367578134e-05, |
| "loss": 0.2346, |
| "step": 1474 |
| }, |
| { |
| "epoch": 3.3560864618885096, |
| "grad_norm": 0.38588974905676426, |
| "learning_rate": 1.1744350169002308e-05, |
| "loss": 0.2433, |
| "step": 1475 |
| }, |
| { |
| "epoch": 3.3583617747440275, |
| "grad_norm": 0.34525997525679775, |
| "learning_rate": 1.1715383859381639e-05, |
| "loss": 0.224, |
| "step": 1476 |
| }, |
| { |
| "epoch": 3.360637087599545, |
| "grad_norm": 0.5393672418195411, |
| "learning_rate": 1.168643851200848e-05, |
| "loss": 0.278, |
| "step": 1477 |
| }, |
| { |
| "epoch": 3.3629124004550626, |
| "grad_norm": 0.3344984797575095, |
| "learning_rate": 1.1657514200122133e-05, |
| "loss": 0.2277, |
| "step": 1478 |
| }, |
| { |
| "epoch": 3.36518771331058, |
| "grad_norm": 0.3807232368526946, |
| "learning_rate": 1.1628610996908685e-05, |
| "loss": 0.247, |
| "step": 1479 |
| }, |
| { |
| "epoch": 3.3674630261660976, |
| "grad_norm": 0.34520588066288926, |
| "learning_rate": 1.159972897550079e-05, |
| "loss": 0.2553, |
| "step": 1480 |
| }, |
| { |
| "epoch": 3.3697383390216156, |
| "grad_norm": 0.31387145207674094, |
| "learning_rate": 1.157086820897754e-05, |
| "loss": 0.2114, |
| "step": 1481 |
| }, |
| { |
| "epoch": 3.372013651877133, |
| "grad_norm": 0.3459709809459857, |
| "learning_rate": 1.1542028770364225e-05, |
| "loss": 0.2086, |
| "step": 1482 |
| }, |
| { |
| "epoch": 3.3742889647326506, |
| "grad_norm": 0.3248421852769576, |
| "learning_rate": 1.1513210732632165e-05, |
| "loss": 0.1794, |
| "step": 1483 |
| }, |
| { |
| "epoch": 3.3765642775881686, |
| "grad_norm": 0.38897925073848516, |
| "learning_rate": 1.1484414168698547e-05, |
| "loss": 0.2945, |
| "step": 1484 |
| }, |
| { |
| "epoch": 3.378839590443686, |
| "grad_norm": 0.3795025010628272, |
| "learning_rate": 1.145563915142622e-05, |
| "loss": 0.2186, |
| "step": 1485 |
| }, |
| { |
| "epoch": 3.3811149032992036, |
| "grad_norm": 0.3985729069914676, |
| "learning_rate": 1.14268857536235e-05, |
| "loss": 0.2607, |
| "step": 1486 |
| }, |
| { |
| "epoch": 3.383390216154721, |
| "grad_norm": 0.3437196809151993, |
| "learning_rate": 1.1398154048044022e-05, |
| "loss": 0.2123, |
| "step": 1487 |
| }, |
| { |
| "epoch": 3.385665529010239, |
| "grad_norm": 0.3289103296082285, |
| "learning_rate": 1.1369444107386505e-05, |
| "loss": 0.2253, |
| "step": 1488 |
| }, |
| { |
| "epoch": 3.3879408418657566, |
| "grad_norm": 0.38051780478879027, |
| "learning_rate": 1.1340756004294626e-05, |
| "loss": 0.2673, |
| "step": 1489 |
| }, |
| { |
| "epoch": 3.390216154721274, |
| "grad_norm": 0.3060080157181269, |
| "learning_rate": 1.1312089811356803e-05, |
| "loss": 0.2191, |
| "step": 1490 |
| }, |
| { |
| "epoch": 3.3924914675767917, |
| "grad_norm": 0.31770551000551334, |
| "learning_rate": 1.1283445601105988e-05, |
| "loss": 0.2286, |
| "step": 1491 |
| }, |
| { |
| "epoch": 3.3947667804323096, |
| "grad_norm": 0.36426252820361144, |
| "learning_rate": 1.1254823446019544e-05, |
| "loss": 0.2482, |
| "step": 1492 |
| }, |
| { |
| "epoch": 3.397042093287827, |
| "grad_norm": 0.33824561418896854, |
| "learning_rate": 1.1226223418519024e-05, |
| "loss": 0.2309, |
| "step": 1493 |
| }, |
| { |
| "epoch": 3.3993174061433447, |
| "grad_norm": 0.34268422073166216, |
| "learning_rate": 1.1197645590969965e-05, |
| "loss": 0.2557, |
| "step": 1494 |
| }, |
| { |
| "epoch": 3.401592718998862, |
| "grad_norm": 0.33591817107983135, |
| "learning_rate": 1.1169090035681772e-05, |
| "loss": 0.2784, |
| "step": 1495 |
| }, |
| { |
| "epoch": 3.40386803185438, |
| "grad_norm": 0.3114963695621851, |
| "learning_rate": 1.114055682490745e-05, |
| "loss": 0.22, |
| "step": 1496 |
| }, |
| { |
| "epoch": 3.4061433447098977, |
| "grad_norm": 0.35637555406825994, |
| "learning_rate": 1.111204603084351e-05, |
| "loss": 0.2992, |
| "step": 1497 |
| }, |
| { |
| "epoch": 3.408418657565415, |
| "grad_norm": 0.30938826214986764, |
| "learning_rate": 1.1083557725629723e-05, |
| "loss": 0.2179, |
| "step": 1498 |
| }, |
| { |
| "epoch": 3.4106939704209327, |
| "grad_norm": 0.33999235461053073, |
| "learning_rate": 1.105509198134895e-05, |
| "loss": 0.2293, |
| "step": 1499 |
| }, |
| { |
| "epoch": 3.4129692832764507, |
| "grad_norm": 0.3346137284958368, |
| "learning_rate": 1.102664887002698e-05, |
| "loss": 0.2321, |
| "step": 1500 |
| }, |
| { |
| "epoch": 3.415244596131968, |
| "grad_norm": 0.35961820157939406, |
| "learning_rate": 1.099822846363234e-05, |
| "loss": 0.2671, |
| "step": 1501 |
| }, |
| { |
| "epoch": 3.4175199089874857, |
| "grad_norm": 0.3032687812693232, |
| "learning_rate": 1.0969830834076079e-05, |
| "loss": 0.2435, |
| "step": 1502 |
| }, |
| { |
| "epoch": 3.419795221843003, |
| "grad_norm": 0.3299085763321149, |
| "learning_rate": 1.0941456053211643e-05, |
| "loss": 0.2227, |
| "step": 1503 |
| }, |
| { |
| "epoch": 3.422070534698521, |
| "grad_norm": 0.35062052767564167, |
| "learning_rate": 1.0913104192834665e-05, |
| "loss": 0.2577, |
| "step": 1504 |
| }, |
| { |
| "epoch": 3.4243458475540387, |
| "grad_norm": 0.3171905569045706, |
| "learning_rate": 1.0884775324682755e-05, |
| "loss": 0.2714, |
| "step": 1505 |
| }, |
| { |
| "epoch": 3.426621160409556, |
| "grad_norm": 0.3050583814158292, |
| "learning_rate": 1.085646952043538e-05, |
| "loss": 0.2076, |
| "step": 1506 |
| }, |
| { |
| "epoch": 3.428896473265074, |
| "grad_norm": 0.31824113240968743, |
| "learning_rate": 1.0828186851713625e-05, |
| "loss": 0.2311, |
| "step": 1507 |
| }, |
| { |
| "epoch": 3.4311717861205917, |
| "grad_norm": 0.3258202780170239, |
| "learning_rate": 1.0799927390080045e-05, |
| "loss": 0.2339, |
| "step": 1508 |
| }, |
| { |
| "epoch": 3.4334470989761092, |
| "grad_norm": 0.32618685833040023, |
| "learning_rate": 1.0771691207038489e-05, |
| "loss": 0.2507, |
| "step": 1509 |
| }, |
| { |
| "epoch": 3.4357224118316267, |
| "grad_norm": 0.3469821193351217, |
| "learning_rate": 1.074347837403387e-05, |
| "loss": 0.2596, |
| "step": 1510 |
| }, |
| { |
| "epoch": 3.4379977246871443, |
| "grad_norm": 0.32569711419010833, |
| "learning_rate": 1.0715288962452055e-05, |
| "loss": 0.2089, |
| "step": 1511 |
| }, |
| { |
| "epoch": 3.4402730375426622, |
| "grad_norm": 0.3561430085177732, |
| "learning_rate": 1.0687123043619643e-05, |
| "loss": 0.3069, |
| "step": 1512 |
| }, |
| { |
| "epoch": 3.4425483503981797, |
| "grad_norm": 0.33589253239111483, |
| "learning_rate": 1.065898068880376e-05, |
| "loss": 0.2299, |
| "step": 1513 |
| }, |
| { |
| "epoch": 3.4448236632536973, |
| "grad_norm": 0.3289052416730758, |
| "learning_rate": 1.0630861969211948e-05, |
| "loss": 0.2578, |
| "step": 1514 |
| }, |
| { |
| "epoch": 3.4470989761092152, |
| "grad_norm": 0.29329682255766953, |
| "learning_rate": 1.0602766955991913e-05, |
| "loss": 0.2069, |
| "step": 1515 |
| }, |
| { |
| "epoch": 3.4493742889647327, |
| "grad_norm": 0.34344762978311627, |
| "learning_rate": 1.0574695720231402e-05, |
| "loss": 0.2597, |
| "step": 1516 |
| }, |
| { |
| "epoch": 3.4516496018202503, |
| "grad_norm": 0.33783465976482363, |
| "learning_rate": 1.0546648332957999e-05, |
| "loss": 0.2619, |
| "step": 1517 |
| }, |
| { |
| "epoch": 3.453924914675768, |
| "grad_norm": 0.3038166722942614, |
| "learning_rate": 1.0518624865138915e-05, |
| "loss": 0.2057, |
| "step": 1518 |
| }, |
| { |
| "epoch": 3.4562002275312853, |
| "grad_norm": 0.34704986882471467, |
| "learning_rate": 1.0490625387680872e-05, |
| "loss": 0.2656, |
| "step": 1519 |
| }, |
| { |
| "epoch": 3.4584755403868033, |
| "grad_norm": 0.28763258753357995, |
| "learning_rate": 1.0462649971429884e-05, |
| "loss": 0.2081, |
| "step": 1520 |
| }, |
| { |
| "epoch": 3.460750853242321, |
| "grad_norm": 0.3539165701148113, |
| "learning_rate": 1.0434698687171059e-05, |
| "loss": 0.2499, |
| "step": 1521 |
| }, |
| { |
| "epoch": 3.4630261660978383, |
| "grad_norm": 0.2983963903646923, |
| "learning_rate": 1.040677160562848e-05, |
| "loss": 0.2042, |
| "step": 1522 |
| }, |
| { |
| "epoch": 3.4653014789533563, |
| "grad_norm": 0.33886381721871905, |
| "learning_rate": 1.0378868797464961e-05, |
| "loss": 0.2539, |
| "step": 1523 |
| }, |
| { |
| "epoch": 3.467576791808874, |
| "grad_norm": 0.33131937800817474, |
| "learning_rate": 1.0350990333281915e-05, |
| "loss": 0.183, |
| "step": 1524 |
| }, |
| { |
| "epoch": 3.4698521046643913, |
| "grad_norm": 0.3334157312213503, |
| "learning_rate": 1.0323136283619167e-05, |
| "loss": 0.2291, |
| "step": 1525 |
| }, |
| { |
| "epoch": 3.472127417519909, |
| "grad_norm": 0.36557655183280097, |
| "learning_rate": 1.029530671895473e-05, |
| "loss": 0.2592, |
| "step": 1526 |
| }, |
| { |
| "epoch": 3.474402730375427, |
| "grad_norm": 0.3098710582941038, |
| "learning_rate": 1.02675017097047e-05, |
| "loss": 0.2216, |
| "step": 1527 |
| }, |
| { |
| "epoch": 3.4766780432309443, |
| "grad_norm": 0.33798309183034975, |
| "learning_rate": 1.023972132622304e-05, |
| "loss": 0.2486, |
| "step": 1528 |
| }, |
| { |
| "epoch": 3.478953356086462, |
| "grad_norm": 0.29037782431225956, |
| "learning_rate": 1.0211965638801374e-05, |
| "loss": 0.1917, |
| "step": 1529 |
| }, |
| { |
| "epoch": 3.4812286689419794, |
| "grad_norm": 0.33565460827318755, |
| "learning_rate": 1.0184234717668867e-05, |
| "loss": 0.2528, |
| "step": 1530 |
| }, |
| { |
| "epoch": 3.4835039817974973, |
| "grad_norm": 0.33527755448329066, |
| "learning_rate": 1.0156528632992004e-05, |
| "loss": 0.2312, |
| "step": 1531 |
| }, |
| { |
| "epoch": 3.485779294653015, |
| "grad_norm": 0.34743533992096876, |
| "learning_rate": 1.012884745487443e-05, |
| "loss": 0.2828, |
| "step": 1532 |
| }, |
| { |
| "epoch": 3.4880546075085324, |
| "grad_norm": 0.3029170690482156, |
| "learning_rate": 1.0101191253356783e-05, |
| "loss": 0.2052, |
| "step": 1533 |
| }, |
| { |
| "epoch": 3.49032992036405, |
| "grad_norm": 0.31040382592429244, |
| "learning_rate": 1.0073560098416479e-05, |
| "loss": 0.2323, |
| "step": 1534 |
| }, |
| { |
| "epoch": 3.492605233219568, |
| "grad_norm": 0.3291743879414811, |
| "learning_rate": 1.0045954059967577e-05, |
| "loss": 0.2515, |
| "step": 1535 |
| }, |
| { |
| "epoch": 3.4948805460750854, |
| "grad_norm": 0.3030205392683074, |
| "learning_rate": 1.0018373207860593e-05, |
| "loss": 0.2239, |
| "step": 1536 |
| }, |
| { |
| "epoch": 3.497155858930603, |
| "grad_norm": 0.34698271644365286, |
| "learning_rate": 9.990817611882284e-06, |
| "loss": 0.2902, |
| "step": 1537 |
| }, |
| { |
| "epoch": 3.4994311717861204, |
| "grad_norm": 0.3131439916281456, |
| "learning_rate": 9.963287341755537e-06, |
| "loss": 0.1915, |
| "step": 1538 |
| }, |
| { |
| "epoch": 3.5017064846416384, |
| "grad_norm": 0.36945832587752353, |
| "learning_rate": 9.935782467139126e-06, |
| "loss": 0.306, |
| "step": 1539 |
| }, |
| { |
| "epoch": 3.503981797497156, |
| "grad_norm": 0.3339534854776466, |
| "learning_rate": 9.908303057627591e-06, |
| "loss": 0.2465, |
| "step": 1540 |
| }, |
| { |
| "epoch": 3.5062571103526734, |
| "grad_norm": 0.2856094747480189, |
| "learning_rate": 9.880849182751042e-06, |
| "loss": 0.2048, |
| "step": 1541 |
| }, |
| { |
| "epoch": 3.508532423208191, |
| "grad_norm": 0.3074938623502293, |
| "learning_rate": 9.85342091197495e-06, |
| "loss": 0.2517, |
| "step": 1542 |
| }, |
| { |
| "epoch": 3.510807736063709, |
| "grad_norm": 0.3341468790362437, |
| "learning_rate": 9.826018314700033e-06, |
| "loss": 0.2492, |
| "step": 1543 |
| }, |
| { |
| "epoch": 3.5130830489192264, |
| "grad_norm": 0.33344807328931014, |
| "learning_rate": 9.798641460262037e-06, |
| "loss": 0.2431, |
| "step": 1544 |
| }, |
| { |
| "epoch": 3.515358361774744, |
| "grad_norm": 0.31979928441877864, |
| "learning_rate": 9.771290417931559e-06, |
| "loss": 0.2353, |
| "step": 1545 |
| }, |
| { |
| "epoch": 3.517633674630262, |
| "grad_norm": 0.319428974933737, |
| "learning_rate": 9.743965256913908e-06, |
| "loss": 0.2378, |
| "step": 1546 |
| }, |
| { |
| "epoch": 3.5199089874857794, |
| "grad_norm": 0.3283945511768955, |
| "learning_rate": 9.716666046348872e-06, |
| "loss": 0.2313, |
| "step": 1547 |
| }, |
| { |
| "epoch": 3.522184300341297, |
| "grad_norm": 0.34531998696589944, |
| "learning_rate": 9.689392855310633e-06, |
| "loss": 0.2617, |
| "step": 1548 |
| }, |
| { |
| "epoch": 3.5244596131968144, |
| "grad_norm": 0.31043476202292597, |
| "learning_rate": 9.662145752807488e-06, |
| "loss": 0.2135, |
| "step": 1549 |
| }, |
| { |
| "epoch": 3.526734926052332, |
| "grad_norm": 0.33406451563957484, |
| "learning_rate": 9.634924807781729e-06, |
| "loss": 0.226, |
| "step": 1550 |
| }, |
| { |
| "epoch": 3.52901023890785, |
| "grad_norm": 0.3365790815539411, |
| "learning_rate": 9.607730089109479e-06, |
| "loss": 0.2166, |
| "step": 1551 |
| }, |
| { |
| "epoch": 3.5312855517633674, |
| "grad_norm": 0.34723707276145616, |
| "learning_rate": 9.580561665600507e-06, |
| "loss": 0.2038, |
| "step": 1552 |
| }, |
| { |
| "epoch": 3.533560864618885, |
| "grad_norm": 0.3537401937413241, |
| "learning_rate": 9.55341960599802e-06, |
| "loss": 0.2231, |
| "step": 1553 |
| }, |
| { |
| "epoch": 3.535836177474403, |
| "grad_norm": 0.33721120314003905, |
| "learning_rate": 9.526303978978546e-06, |
| "loss": 0.2631, |
| "step": 1554 |
| }, |
| { |
| "epoch": 3.5381114903299204, |
| "grad_norm": 0.3302043329828681, |
| "learning_rate": 9.499214853151699e-06, |
| "loss": 0.2437, |
| "step": 1555 |
| }, |
| { |
| "epoch": 3.540386803185438, |
| "grad_norm": 0.3826580704046, |
| "learning_rate": 9.472152297060088e-06, |
| "loss": 0.2351, |
| "step": 1556 |
| }, |
| { |
| "epoch": 3.5426621160409555, |
| "grad_norm": 0.3489687845389196, |
| "learning_rate": 9.445116379179052e-06, |
| "loss": 0.2463, |
| "step": 1557 |
| }, |
| { |
| "epoch": 3.544937428896473, |
| "grad_norm": 0.3270717426834041, |
| "learning_rate": 9.418107167916526e-06, |
| "loss": 0.2356, |
| "step": 1558 |
| }, |
| { |
| "epoch": 3.547212741751991, |
| "grad_norm": 0.35109088802465127, |
| "learning_rate": 9.3911247316129e-06, |
| "loss": 0.2142, |
| "step": 1559 |
| }, |
| { |
| "epoch": 3.5494880546075085, |
| "grad_norm": 0.4163412510419317, |
| "learning_rate": 9.364169138540805e-06, |
| "loss": 0.2296, |
| "step": 1560 |
| }, |
| { |
| "epoch": 3.551763367463026, |
| "grad_norm": 0.3808250685074194, |
| "learning_rate": 9.33724045690493e-06, |
| "loss": 0.2646, |
| "step": 1561 |
| }, |
| { |
| "epoch": 3.554038680318544, |
| "grad_norm": 0.3357674933739998, |
| "learning_rate": 9.310338754841907e-06, |
| "loss": 0.2462, |
| "step": 1562 |
| }, |
| { |
| "epoch": 3.5563139931740615, |
| "grad_norm": 0.31994156406414276, |
| "learning_rate": 9.283464100420064e-06, |
| "loss": 0.2236, |
| "step": 1563 |
| }, |
| { |
| "epoch": 3.558589306029579, |
| "grad_norm": 0.35903123661267816, |
| "learning_rate": 9.256616561639337e-06, |
| "loss": 0.2279, |
| "step": 1564 |
| }, |
| { |
| "epoch": 3.5608646188850965, |
| "grad_norm": 0.35744852928869253, |
| "learning_rate": 9.229796206431015e-06, |
| "loss": 0.233, |
| "step": 1565 |
| }, |
| { |
| "epoch": 3.5631399317406145, |
| "grad_norm": 0.34937225764885005, |
| "learning_rate": 9.203003102657608e-06, |
| "loss": 0.2473, |
| "step": 1566 |
| }, |
| { |
| "epoch": 3.565415244596132, |
| "grad_norm": 0.32826327293032775, |
| "learning_rate": 9.176237318112691e-06, |
| "loss": 0.2095, |
| "step": 1567 |
| }, |
| { |
| "epoch": 3.5676905574516495, |
| "grad_norm": 0.34233653160590005, |
| "learning_rate": 9.149498920520712e-06, |
| "loss": 0.2295, |
| "step": 1568 |
| }, |
| { |
| "epoch": 3.5699658703071675, |
| "grad_norm": 0.40351720299178445, |
| "learning_rate": 9.122787977536802e-06, |
| "loss": 0.2963, |
| "step": 1569 |
| }, |
| { |
| "epoch": 3.572241183162685, |
| "grad_norm": 0.3003693453078882, |
| "learning_rate": 9.096104556746654e-06, |
| "loss": 0.2107, |
| "step": 1570 |
| }, |
| { |
| "epoch": 3.5745164960182025, |
| "grad_norm": 0.39003272892477975, |
| "learning_rate": 9.069448725666285e-06, |
| "loss": 0.2816, |
| "step": 1571 |
| }, |
| { |
| "epoch": 3.57679180887372, |
| "grad_norm": 0.3273815427887145, |
| "learning_rate": 9.042820551741955e-06, |
| "loss": 0.2348, |
| "step": 1572 |
| }, |
| { |
| "epoch": 3.5790671217292376, |
| "grad_norm": 0.3629427404955351, |
| "learning_rate": 9.0162201023499e-06, |
| "loss": 0.2429, |
| "step": 1573 |
| }, |
| { |
| "epoch": 3.5813424345847555, |
| "grad_norm": 0.33919168356346674, |
| "learning_rate": 8.98964744479621e-06, |
| "loss": 0.2513, |
| "step": 1574 |
| }, |
| { |
| "epoch": 3.583617747440273, |
| "grad_norm": 0.33498714294273985, |
| "learning_rate": 8.963102646316677e-06, |
| "loss": 0.2161, |
| "step": 1575 |
| }, |
| { |
| "epoch": 3.5858930602957906, |
| "grad_norm": 0.40300529209904284, |
| "learning_rate": 8.936585774076594e-06, |
| "loss": 0.2622, |
| "step": 1576 |
| }, |
| { |
| "epoch": 3.5881683731513085, |
| "grad_norm": 0.324525449514623, |
| "learning_rate": 8.910096895170572e-06, |
| "loss": 0.2033, |
| "step": 1577 |
| }, |
| { |
| "epoch": 3.590443686006826, |
| "grad_norm": 0.3418244524758691, |
| "learning_rate": 8.88363607662243e-06, |
| "loss": 0.2596, |
| "step": 1578 |
| }, |
| { |
| "epoch": 3.5927189988623436, |
| "grad_norm": 0.3813231405164915, |
| "learning_rate": 8.857203385384938e-06, |
| "loss": 0.2493, |
| "step": 1579 |
| }, |
| { |
| "epoch": 3.594994311717861, |
| "grad_norm": 0.3544543624874025, |
| "learning_rate": 8.830798888339756e-06, |
| "loss": 0.2406, |
| "step": 1580 |
| }, |
| { |
| "epoch": 3.5972696245733786, |
| "grad_norm": 0.3679759598835115, |
| "learning_rate": 8.804422652297157e-06, |
| "loss": 0.2328, |
| "step": 1581 |
| }, |
| { |
| "epoch": 3.5995449374288966, |
| "grad_norm": 0.3523367839674939, |
| "learning_rate": 8.778074743995916e-06, |
| "loss": 0.2556, |
| "step": 1582 |
| }, |
| { |
| "epoch": 3.601820250284414, |
| "grad_norm": 0.3183567073970889, |
| "learning_rate": 8.75175523010315e-06, |
| "loss": 0.1958, |
| "step": 1583 |
| }, |
| { |
| "epoch": 3.6040955631399316, |
| "grad_norm": 0.37485870319862513, |
| "learning_rate": 8.725464177214119e-06, |
| "loss": 0.2611, |
| "step": 1584 |
| }, |
| { |
| "epoch": 3.6063708759954496, |
| "grad_norm": 0.35712252822034435, |
| "learning_rate": 8.699201651852056e-06, |
| "loss": 0.2352, |
| "step": 1585 |
| }, |
| { |
| "epoch": 3.608646188850967, |
| "grad_norm": 0.37283944559977766, |
| "learning_rate": 8.672967720468044e-06, |
| "loss": 0.2844, |
| "step": 1586 |
| }, |
| { |
| "epoch": 3.6109215017064846, |
| "grad_norm": 0.311035508065256, |
| "learning_rate": 8.646762449440768e-06, |
| "loss": 0.2201, |
| "step": 1587 |
| }, |
| { |
| "epoch": 3.613196814562002, |
| "grad_norm": 0.2988780921453698, |
| "learning_rate": 8.620585905076462e-06, |
| "loss": 0.2186, |
| "step": 1588 |
| }, |
| { |
| "epoch": 3.6154721274175197, |
| "grad_norm": 0.4240158000247216, |
| "learning_rate": 8.594438153608611e-06, |
| "loss": 0.3004, |
| "step": 1589 |
| }, |
| { |
| "epoch": 3.6177474402730376, |
| "grad_norm": 0.32568214186763933, |
| "learning_rate": 8.56831926119787e-06, |
| "loss": 0.2174, |
| "step": 1590 |
| }, |
| { |
| "epoch": 3.620022753128555, |
| "grad_norm": 0.30055241402531796, |
| "learning_rate": 8.542229293931878e-06, |
| "loss": 0.2077, |
| "step": 1591 |
| }, |
| { |
| "epoch": 3.6222980659840727, |
| "grad_norm": 0.3528261607678874, |
| "learning_rate": 8.51616831782509e-06, |
| "loss": 0.2505, |
| "step": 1592 |
| }, |
| { |
| "epoch": 3.6245733788395906, |
| "grad_norm": 0.3366924619649751, |
| "learning_rate": 8.490136398818584e-06, |
| "loss": 0.2288, |
| "step": 1593 |
| }, |
| { |
| "epoch": 3.626848691695108, |
| "grad_norm": 0.32639722472827926, |
| "learning_rate": 8.464133602779937e-06, |
| "loss": 0.2159, |
| "step": 1594 |
| }, |
| { |
| "epoch": 3.6291240045506257, |
| "grad_norm": 0.335990588461995, |
| "learning_rate": 8.43815999550303e-06, |
| "loss": 0.2318, |
| "step": 1595 |
| }, |
| { |
| "epoch": 3.631399317406143, |
| "grad_norm": 0.3170153764669358, |
| "learning_rate": 8.412215642707892e-06, |
| "loss": 0.2235, |
| "step": 1596 |
| }, |
| { |
| "epoch": 3.6336746302616607, |
| "grad_norm": 0.3499875495179629, |
| "learning_rate": 8.386300610040523e-06, |
| "loss": 0.2622, |
| "step": 1597 |
| }, |
| { |
| "epoch": 3.6359499431171787, |
| "grad_norm": 0.31930183238712134, |
| "learning_rate": 8.360414963072731e-06, |
| "loss": 0.2304, |
| "step": 1598 |
| }, |
| { |
| "epoch": 3.638225255972696, |
| "grad_norm": 0.3062417178184185, |
| "learning_rate": 8.33455876730199e-06, |
| "loss": 0.2244, |
| "step": 1599 |
| }, |
| { |
| "epoch": 3.640500568828214, |
| "grad_norm": 0.3128935206442823, |
| "learning_rate": 8.308732088151245e-06, |
| "loss": 0.2047, |
| "step": 1600 |
| }, |
| { |
| "epoch": 3.6427758816837317, |
| "grad_norm": 0.315330561097062, |
| "learning_rate": 8.282934990968742e-06, |
| "loss": 0.2143, |
| "step": 1601 |
| }, |
| { |
| "epoch": 3.645051194539249, |
| "grad_norm": 0.3649351233173286, |
| "learning_rate": 8.257167541027901e-06, |
| "loss": 0.2447, |
| "step": 1602 |
| }, |
| { |
| "epoch": 3.6473265073947667, |
| "grad_norm": 0.3590062283162223, |
| "learning_rate": 8.231429803527114e-06, |
| "loss": 0.2331, |
| "step": 1603 |
| }, |
| { |
| "epoch": 3.6496018202502842, |
| "grad_norm": 0.3070921237937883, |
| "learning_rate": 8.205721843589597e-06, |
| "loss": 0.2066, |
| "step": 1604 |
| }, |
| { |
| "epoch": 3.651877133105802, |
| "grad_norm": 0.3182414091477948, |
| "learning_rate": 8.180043726263216e-06, |
| "loss": 0.2238, |
| "step": 1605 |
| }, |
| { |
| "epoch": 3.6541524459613197, |
| "grad_norm": 0.2883644587362991, |
| "learning_rate": 8.15439551652032e-06, |
| "loss": 0.1942, |
| "step": 1606 |
| }, |
| { |
| "epoch": 3.6564277588168372, |
| "grad_norm": 0.35235761141896277, |
| "learning_rate": 8.1287772792576e-06, |
| "loss": 0.285, |
| "step": 1607 |
| }, |
| { |
| "epoch": 3.658703071672355, |
| "grad_norm": 0.3066178498743668, |
| "learning_rate": 8.103189079295911e-06, |
| "loss": 0.2116, |
| "step": 1608 |
| }, |
| { |
| "epoch": 3.6609783845278727, |
| "grad_norm": 0.31320618542805817, |
| "learning_rate": 8.077630981380087e-06, |
| "loss": 0.2585, |
| "step": 1609 |
| }, |
| { |
| "epoch": 3.6632536973833902, |
| "grad_norm": 0.30635364571870777, |
| "learning_rate": 8.052103050178806e-06, |
| "loss": 0.2032, |
| "step": 1610 |
| }, |
| { |
| "epoch": 3.6655290102389078, |
| "grad_norm": 0.34555526775527085, |
| "learning_rate": 8.02660535028442e-06, |
| "loss": 0.2762, |
| "step": 1611 |
| }, |
| { |
| "epoch": 3.6678043230944253, |
| "grad_norm": 0.3478375071781361, |
| "learning_rate": 8.001137946212792e-06, |
| "loss": 0.264, |
| "step": 1612 |
| }, |
| { |
| "epoch": 3.6700796359499432, |
| "grad_norm": 0.32259979937173106, |
| "learning_rate": 7.975700902403112e-06, |
| "loss": 0.2322, |
| "step": 1613 |
| }, |
| { |
| "epoch": 3.6723549488054608, |
| "grad_norm": 0.30054960499588285, |
| "learning_rate": 7.950294283217752e-06, |
| "loss": 0.2176, |
| "step": 1614 |
| }, |
| { |
| "epoch": 3.6746302616609783, |
| "grad_norm": 0.334280726764928, |
| "learning_rate": 7.924918152942117e-06, |
| "loss": 0.2472, |
| "step": 1615 |
| }, |
| { |
| "epoch": 3.6769055745164962, |
| "grad_norm": 0.3334232858921928, |
| "learning_rate": 7.89957257578447e-06, |
| "loss": 0.2476, |
| "step": 1616 |
| }, |
| { |
| "epoch": 3.6791808873720138, |
| "grad_norm": 0.3350423889743071, |
| "learning_rate": 7.874257615875737e-06, |
| "loss": 0.249, |
| "step": 1617 |
| }, |
| { |
| "epoch": 3.6814562002275313, |
| "grad_norm": 0.31257388107369793, |
| "learning_rate": 7.8489733372694e-06, |
| "loss": 0.2477, |
| "step": 1618 |
| }, |
| { |
| "epoch": 3.683731513083049, |
| "grad_norm": 0.3241796725065618, |
| "learning_rate": 7.823719803941308e-06, |
| "loss": 0.207, |
| "step": 1619 |
| }, |
| { |
| "epoch": 3.6860068259385663, |
| "grad_norm": 0.3678066881401646, |
| "learning_rate": 7.798497079789513e-06, |
| "loss": 0.239, |
| "step": 1620 |
| }, |
| { |
| "epoch": 3.6882821387940843, |
| "grad_norm": 0.3072202237563532, |
| "learning_rate": 7.773305228634105e-06, |
| "loss": 0.241, |
| "step": 1621 |
| }, |
| { |
| "epoch": 3.690557451649602, |
| "grad_norm": 0.3171033622346416, |
| "learning_rate": 7.748144314217055e-06, |
| "loss": 0.2508, |
| "step": 1622 |
| }, |
| { |
| "epoch": 3.6928327645051193, |
| "grad_norm": 0.35675741351576, |
| "learning_rate": 7.723014400202067e-06, |
| "loss": 0.2364, |
| "step": 1623 |
| }, |
| { |
| "epoch": 3.6951080773606373, |
| "grad_norm": 0.32339638486968514, |
| "learning_rate": 7.697915550174414e-06, |
| "loss": 0.2606, |
| "step": 1624 |
| }, |
| { |
| "epoch": 3.697383390216155, |
| "grad_norm": 0.3277199881918983, |
| "learning_rate": 7.672847827640735e-06, |
| "loss": 0.2524, |
| "step": 1625 |
| }, |
| { |
| "epoch": 3.6996587030716723, |
| "grad_norm": 0.3325613261956722, |
| "learning_rate": 7.647811296028943e-06, |
| "loss": 0.2434, |
| "step": 1626 |
| }, |
| { |
| "epoch": 3.70193401592719, |
| "grad_norm": 0.3235937045048768, |
| "learning_rate": 7.622806018688016e-06, |
| "loss": 0.2226, |
| "step": 1627 |
| }, |
| { |
| "epoch": 3.7042093287827074, |
| "grad_norm": 0.31644979052423133, |
| "learning_rate": 7.5978320588878595e-06, |
| "loss": 0.2689, |
| "step": 1628 |
| }, |
| { |
| "epoch": 3.7064846416382253, |
| "grad_norm": 0.30401544252127216, |
| "learning_rate": 7.572889479819117e-06, |
| "loss": 0.2134, |
| "step": 1629 |
| }, |
| { |
| "epoch": 3.708759954493743, |
| "grad_norm": 0.3247293577698998, |
| "learning_rate": 7.5479783445930414e-06, |
| "loss": 0.2431, |
| "step": 1630 |
| }, |
| { |
| "epoch": 3.7110352673492604, |
| "grad_norm": 0.3343087436377371, |
| "learning_rate": 7.523098716241331e-06, |
| "loss": 0.256, |
| "step": 1631 |
| }, |
| { |
| "epoch": 3.7133105802047783, |
| "grad_norm": 0.29014471473935294, |
| "learning_rate": 7.498250657715962e-06, |
| "loss": 0.2437, |
| "step": 1632 |
| }, |
| { |
| "epoch": 3.715585893060296, |
| "grad_norm": 0.30810596917320315, |
| "learning_rate": 7.473434231889018e-06, |
| "loss": 0.2602, |
| "step": 1633 |
| }, |
| { |
| "epoch": 3.7178612059158134, |
| "grad_norm": 0.3193602943821451, |
| "learning_rate": 7.448649501552559e-06, |
| "loss": 0.2353, |
| "step": 1634 |
| }, |
| { |
| "epoch": 3.720136518771331, |
| "grad_norm": 0.34473063758045885, |
| "learning_rate": 7.4238965294184374e-06, |
| "loss": 0.2464, |
| "step": 1635 |
| }, |
| { |
| "epoch": 3.722411831626849, |
| "grad_norm": 0.2870067699187315, |
| "learning_rate": 7.39917537811816e-06, |
| "loss": 0.2133, |
| "step": 1636 |
| }, |
| { |
| "epoch": 3.7246871444823664, |
| "grad_norm": 0.3509161389314907, |
| "learning_rate": 7.374486110202705e-06, |
| "loss": 0.2372, |
| "step": 1637 |
| }, |
| { |
| "epoch": 3.726962457337884, |
| "grad_norm": 0.3382312400661472, |
| "learning_rate": 7.349828788142377e-06, |
| "loss": 0.2763, |
| "step": 1638 |
| }, |
| { |
| "epoch": 3.729237770193402, |
| "grad_norm": 0.31679908169558146, |
| "learning_rate": 7.325203474326663e-06, |
| "loss": 0.2134, |
| "step": 1639 |
| }, |
| { |
| "epoch": 3.7315130830489194, |
| "grad_norm": 0.2817679233954677, |
| "learning_rate": 7.300610231064056e-06, |
| "loss": 0.2025, |
| "step": 1640 |
| }, |
| { |
| "epoch": 3.733788395904437, |
| "grad_norm": 0.3301349553684187, |
| "learning_rate": 7.276049120581889e-06, |
| "loss": 0.2618, |
| "step": 1641 |
| }, |
| { |
| "epoch": 3.7360637087599544, |
| "grad_norm": 0.3047477589901083, |
| "learning_rate": 7.251520205026206e-06, |
| "loss": 0.2176, |
| "step": 1642 |
| }, |
| { |
| "epoch": 3.738339021615472, |
| "grad_norm": 0.3243528934414049, |
| "learning_rate": 7.2270235464615865e-06, |
| "loss": 0.2796, |
| "step": 1643 |
| }, |
| { |
| "epoch": 3.74061433447099, |
| "grad_norm": 0.31843745378436267, |
| "learning_rate": 7.202559206870994e-06, |
| "loss": 0.2259, |
| "step": 1644 |
| }, |
| { |
| "epoch": 3.7428896473265074, |
| "grad_norm": 0.3455587389606911, |
| "learning_rate": 7.178127248155604e-06, |
| "loss": 0.2699, |
| "step": 1645 |
| }, |
| { |
| "epoch": 3.745164960182025, |
| "grad_norm": 0.31124739072799124, |
| "learning_rate": 7.153727732134656e-06, |
| "loss": 0.2582, |
| "step": 1646 |
| }, |
| { |
| "epoch": 3.747440273037543, |
| "grad_norm": 0.3294380869999005, |
| "learning_rate": 7.12936072054534e-06, |
| "loss": 0.236, |
| "step": 1647 |
| }, |
| { |
| "epoch": 3.7497155858930604, |
| "grad_norm": 0.291163440361664, |
| "learning_rate": 7.1050262750425595e-06, |
| "loss": 0.2117, |
| "step": 1648 |
| }, |
| { |
| "epoch": 3.751990898748578, |
| "grad_norm": 0.3114191632293124, |
| "learning_rate": 7.080724457198824e-06, |
| "loss": 0.2324, |
| "step": 1649 |
| }, |
| { |
| "epoch": 3.7542662116040955, |
| "grad_norm": 0.3552526236860238, |
| "learning_rate": 7.056455328504104e-06, |
| "loss": 0.2511, |
| "step": 1650 |
| }, |
| { |
| "epoch": 3.756541524459613, |
| "grad_norm": 0.3400004570124406, |
| "learning_rate": 7.0322189503656435e-06, |
| "loss": 0.2529, |
| "step": 1651 |
| }, |
| { |
| "epoch": 3.758816837315131, |
| "grad_norm": 0.3310169176404239, |
| "learning_rate": 7.008015384107836e-06, |
| "loss": 0.2114, |
| "step": 1652 |
| }, |
| { |
| "epoch": 3.7610921501706485, |
| "grad_norm": 0.3413549271745786, |
| "learning_rate": 6.983844690972026e-06, |
| "loss": 0.2544, |
| "step": 1653 |
| }, |
| { |
| "epoch": 3.763367463026166, |
| "grad_norm": 0.3167657265637229, |
| "learning_rate": 6.959706932116388e-06, |
| "loss": 0.2192, |
| "step": 1654 |
| }, |
| { |
| "epoch": 3.765642775881684, |
| "grad_norm": 0.32633386745815, |
| "learning_rate": 6.935602168615792e-06, |
| "loss": 0.2766, |
| "step": 1655 |
| }, |
| { |
| "epoch": 3.7679180887372015, |
| "grad_norm": 0.3072647148220716, |
| "learning_rate": 6.911530461461588e-06, |
| "loss": 0.2063, |
| "step": 1656 |
| }, |
| { |
| "epoch": 3.770193401592719, |
| "grad_norm": 0.34161134198142956, |
| "learning_rate": 6.887491871561492e-06, |
| "loss": 0.2454, |
| "step": 1657 |
| }, |
| { |
| "epoch": 3.7724687144482365, |
| "grad_norm": 0.32664499006330927, |
| "learning_rate": 6.863486459739432e-06, |
| "loss": 0.2649, |
| "step": 1658 |
| }, |
| { |
| "epoch": 3.774744027303754, |
| "grad_norm": 0.2910768160518619, |
| "learning_rate": 6.839514286735387e-06, |
| "loss": 0.2164, |
| "step": 1659 |
| }, |
| { |
| "epoch": 3.777019340159272, |
| "grad_norm": 0.3188256092091695, |
| "learning_rate": 6.815575413205235e-06, |
| "loss": 0.2289, |
| "step": 1660 |
| }, |
| { |
| "epoch": 3.7792946530147895, |
| "grad_norm": 0.3218510980676637, |
| "learning_rate": 6.791669899720585e-06, |
| "loss": 0.244, |
| "step": 1661 |
| }, |
| { |
| "epoch": 3.781569965870307, |
| "grad_norm": 0.3166561361853286, |
| "learning_rate": 6.767797806768632e-06, |
| "loss": 0.2283, |
| "step": 1662 |
| }, |
| { |
| "epoch": 3.783845278725825, |
| "grad_norm": 0.30145917906713504, |
| "learning_rate": 6.743959194752045e-06, |
| "loss": 0.2155, |
| "step": 1663 |
| }, |
| { |
| "epoch": 3.7861205915813425, |
| "grad_norm": 0.2983439047380337, |
| "learning_rate": 6.720154123988736e-06, |
| "loss": 0.2092, |
| "step": 1664 |
| }, |
| { |
| "epoch": 3.78839590443686, |
| "grad_norm": 0.36687788457985926, |
| "learning_rate": 6.696382654711777e-06, |
| "loss": 0.2517, |
| "step": 1665 |
| }, |
| { |
| "epoch": 3.7906712172923775, |
| "grad_norm": 0.3666781923402803, |
| "learning_rate": 6.672644847069194e-06, |
| "loss": 0.2592, |
| "step": 1666 |
| }, |
| { |
| "epoch": 3.792946530147895, |
| "grad_norm": 0.29020154536513587, |
| "learning_rate": 6.648940761123861e-06, |
| "loss": 0.2371, |
| "step": 1667 |
| }, |
| { |
| "epoch": 3.795221843003413, |
| "grad_norm": 0.3113324715095254, |
| "learning_rate": 6.6252704568533276e-06, |
| "loss": 0.2205, |
| "step": 1668 |
| }, |
| { |
| "epoch": 3.7974971558589306, |
| "grad_norm": 0.31286782876307456, |
| "learning_rate": 6.6016339941496545e-06, |
| "loss": 0.1956, |
| "step": 1669 |
| }, |
| { |
| "epoch": 3.799772468714448, |
| "grad_norm": 0.34208468192794533, |
| "learning_rate": 6.578031432819263e-06, |
| "loss": 0.2232, |
| "step": 1670 |
| }, |
| { |
| "epoch": 3.802047781569966, |
| "grad_norm": 0.32683834732407896, |
| "learning_rate": 6.5544628325828395e-06, |
| "loss": 0.2547, |
| "step": 1671 |
| }, |
| { |
| "epoch": 3.8043230944254836, |
| "grad_norm": 0.31381099568132315, |
| "learning_rate": 6.530928253075095e-06, |
| "loss": 0.2558, |
| "step": 1672 |
| }, |
| { |
| "epoch": 3.806598407281001, |
| "grad_norm": 0.32223668259824584, |
| "learning_rate": 6.507427753844686e-06, |
| "loss": 0.2058, |
| "step": 1673 |
| }, |
| { |
| "epoch": 3.8088737201365186, |
| "grad_norm": 0.349394908203502, |
| "learning_rate": 6.483961394354017e-06, |
| "loss": 0.2578, |
| "step": 1674 |
| }, |
| { |
| "epoch": 3.8111490329920366, |
| "grad_norm": 0.30275090569381213, |
| "learning_rate": 6.460529233979127e-06, |
| "loss": 0.2182, |
| "step": 1675 |
| }, |
| { |
| "epoch": 3.813424345847554, |
| "grad_norm": 0.6374668756457942, |
| "learning_rate": 6.4371313320095184e-06, |
| "loss": 0.2717, |
| "step": 1676 |
| }, |
| { |
| "epoch": 3.8156996587030716, |
| "grad_norm": 0.3346189406584071, |
| "learning_rate": 6.413767747648003e-06, |
| "loss": 0.2278, |
| "step": 1677 |
| }, |
| { |
| "epoch": 3.8179749715585896, |
| "grad_norm": 0.29711987988698924, |
| "learning_rate": 6.390438540010555e-06, |
| "loss": 0.2525, |
| "step": 1678 |
| }, |
| { |
| "epoch": 3.820250284414107, |
| "grad_norm": 0.3191024033166926, |
| "learning_rate": 6.3671437681262e-06, |
| "loss": 0.2334, |
| "step": 1679 |
| }, |
| { |
| "epoch": 3.8225255972696246, |
| "grad_norm": 0.32524532366123116, |
| "learning_rate": 6.343883490936791e-06, |
| "loss": 0.243, |
| "step": 1680 |
| }, |
| { |
| "epoch": 3.824800910125142, |
| "grad_norm": 0.3062541748746329, |
| "learning_rate": 6.3206577672969275e-06, |
| "loss": 0.2294, |
| "step": 1681 |
| }, |
| { |
| "epoch": 3.8270762229806596, |
| "grad_norm": 0.2989301976941865, |
| "learning_rate": 6.297466655973763e-06, |
| "loss": 0.2118, |
| "step": 1682 |
| }, |
| { |
| "epoch": 3.8293515358361776, |
| "grad_norm": 0.32480587573609554, |
| "learning_rate": 6.27431021564688e-06, |
| "loss": 0.2532, |
| "step": 1683 |
| }, |
| { |
| "epoch": 3.831626848691695, |
| "grad_norm": 0.2927691847070395, |
| "learning_rate": 6.251188504908146e-06, |
| "loss": 0.2378, |
| "step": 1684 |
| }, |
| { |
| "epoch": 3.8339021615472126, |
| "grad_norm": 0.3225685587581803, |
| "learning_rate": 6.228101582261532e-06, |
| "loss": 0.252, |
| "step": 1685 |
| }, |
| { |
| "epoch": 3.8361774744027306, |
| "grad_norm": 0.3120723897409664, |
| "learning_rate": 6.205049506122982e-06, |
| "loss": 0.2455, |
| "step": 1686 |
| }, |
| { |
| "epoch": 3.838452787258248, |
| "grad_norm": 0.305763622169105, |
| "learning_rate": 6.182032334820313e-06, |
| "loss": 0.2243, |
| "step": 1687 |
| }, |
| { |
| "epoch": 3.8407281001137656, |
| "grad_norm": 0.30591586026809525, |
| "learning_rate": 6.159050126592971e-06, |
| "loss": 0.2241, |
| "step": 1688 |
| }, |
| { |
| "epoch": 3.843003412969283, |
| "grad_norm": 0.3254019572609186, |
| "learning_rate": 6.1361029395919746e-06, |
| "loss": 0.2408, |
| "step": 1689 |
| }, |
| { |
| "epoch": 3.8452787258248007, |
| "grad_norm": 0.299444349239614, |
| "learning_rate": 6.113190831879698e-06, |
| "loss": 0.204, |
| "step": 1690 |
| }, |
| { |
| "epoch": 3.8475540386803186, |
| "grad_norm": 0.32388225987773905, |
| "learning_rate": 6.09031386142978e-06, |
| "loss": 0.2135, |
| "step": 1691 |
| }, |
| { |
| "epoch": 3.849829351535836, |
| "grad_norm": 0.29890541047871094, |
| "learning_rate": 6.067472086126946e-06, |
| "loss": 0.2041, |
| "step": 1692 |
| }, |
| { |
| "epoch": 3.8521046643913537, |
| "grad_norm": 0.37306649437408845, |
| "learning_rate": 6.044665563766863e-06, |
| "loss": 0.2873, |
| "step": 1693 |
| }, |
| { |
| "epoch": 3.8543799772468716, |
| "grad_norm": 0.31341717595458574, |
| "learning_rate": 6.021894352055999e-06, |
| "loss": 0.2161, |
| "step": 1694 |
| }, |
| { |
| "epoch": 3.856655290102389, |
| "grad_norm": 0.30672897543736083, |
| "learning_rate": 5.999158508611496e-06, |
| "loss": 0.215, |
| "step": 1695 |
| }, |
| { |
| "epoch": 3.8589306029579067, |
| "grad_norm": 0.3314305934662205, |
| "learning_rate": 5.976458090960968e-06, |
| "loss": 0.2289, |
| "step": 1696 |
| }, |
| { |
| "epoch": 3.861205915813424, |
| "grad_norm": 0.30488419979629056, |
| "learning_rate": 5.9537931565424355e-06, |
| "loss": 0.2384, |
| "step": 1697 |
| }, |
| { |
| "epoch": 3.8634812286689417, |
| "grad_norm": 0.33073897968252053, |
| "learning_rate": 5.931163762704095e-06, |
| "loss": 0.243, |
| "step": 1698 |
| }, |
| { |
| "epoch": 3.8657565415244597, |
| "grad_norm": 0.30130774200700605, |
| "learning_rate": 5.908569966704247e-06, |
| "loss": 0.234, |
| "step": 1699 |
| }, |
| { |
| "epoch": 3.868031854379977, |
| "grad_norm": 0.2781595694315222, |
| "learning_rate": 5.886011825711117e-06, |
| "loss": 0.2349, |
| "step": 1700 |
| }, |
| { |
| "epoch": 3.8703071672354947, |
| "grad_norm": 0.3306004490674625, |
| "learning_rate": 5.863489396802693e-06, |
| "loss": 0.2772, |
| "step": 1701 |
| }, |
| { |
| "epoch": 3.8725824800910127, |
| "grad_norm": 0.29953193615493356, |
| "learning_rate": 5.8410027369666215e-06, |
| "loss": 0.2272, |
| "step": 1702 |
| }, |
| { |
| "epoch": 3.87485779294653, |
| "grad_norm": 0.31428588424946236, |
| "learning_rate": 5.818551903100045e-06, |
| "loss": 0.2528, |
| "step": 1703 |
| }, |
| { |
| "epoch": 3.8771331058020477, |
| "grad_norm": 0.30490740383671394, |
| "learning_rate": 5.7961369520094365e-06, |
| "loss": 0.174, |
| "step": 1704 |
| }, |
| { |
| "epoch": 3.8794084186575652, |
| "grad_norm": 0.34149110657492043, |
| "learning_rate": 5.773757940410503e-06, |
| "loss": 0.2552, |
| "step": 1705 |
| }, |
| { |
| "epoch": 3.881683731513083, |
| "grad_norm": 0.31442759218870003, |
| "learning_rate": 5.7514149249279915e-06, |
| "loss": 0.2149, |
| "step": 1706 |
| }, |
| { |
| "epoch": 3.8839590443686007, |
| "grad_norm": 0.3054708293276751, |
| "learning_rate": 5.729107962095583e-06, |
| "loss": 0.2143, |
| "step": 1707 |
| }, |
| { |
| "epoch": 3.8862343572241183, |
| "grad_norm": 0.32096087445485844, |
| "learning_rate": 5.7068371083557425e-06, |
| "loss": 0.2439, |
| "step": 1708 |
| }, |
| { |
| "epoch": 3.888509670079636, |
| "grad_norm": 0.3315098330191521, |
| "learning_rate": 5.684602420059544e-06, |
| "loss": 0.2381, |
| "step": 1709 |
| }, |
| { |
| "epoch": 3.8907849829351537, |
| "grad_norm": 0.3298488700402563, |
| "learning_rate": 5.6624039534665775e-06, |
| "loss": 0.2254, |
| "step": 1710 |
| }, |
| { |
| "epoch": 3.8930602957906713, |
| "grad_norm": 0.3071846393117354, |
| "learning_rate": 5.640241764744778e-06, |
| "loss": 0.2169, |
| "step": 1711 |
| }, |
| { |
| "epoch": 3.8953356086461888, |
| "grad_norm": 0.32783291817800214, |
| "learning_rate": 5.618115909970277e-06, |
| "loss": 0.2764, |
| "step": 1712 |
| }, |
| { |
| "epoch": 3.8976109215017063, |
| "grad_norm": 0.3445664041400783, |
| "learning_rate": 5.5960264451272874e-06, |
| "loss": 0.2304, |
| "step": 1713 |
| }, |
| { |
| "epoch": 3.8998862343572243, |
| "grad_norm": 0.30215319207944386, |
| "learning_rate": 5.573973426107926e-06, |
| "loss": 0.1803, |
| "step": 1714 |
| }, |
| { |
| "epoch": 3.9021615472127418, |
| "grad_norm": 0.30959760486419285, |
| "learning_rate": 5.55195690871211e-06, |
| "loss": 0.2265, |
| "step": 1715 |
| }, |
| { |
| "epoch": 3.9044368600682593, |
| "grad_norm": 0.32219954567635933, |
| "learning_rate": 5.529976948647399e-06, |
| "loss": 0.2654, |
| "step": 1716 |
| }, |
| { |
| "epoch": 3.9067121729237773, |
| "grad_norm": 0.2952888972067496, |
| "learning_rate": 5.5080336015288305e-06, |
| "loss": 0.2169, |
| "step": 1717 |
| }, |
| { |
| "epoch": 3.908987485779295, |
| "grad_norm": 0.33608629315045474, |
| "learning_rate": 5.486126922878827e-06, |
| "loss": 0.2257, |
| "step": 1718 |
| }, |
| { |
| "epoch": 3.9112627986348123, |
| "grad_norm": 0.38313895035840695, |
| "learning_rate": 5.464256968127024e-06, |
| "loss": 0.2548, |
| "step": 1719 |
| }, |
| { |
| "epoch": 3.91353811149033, |
| "grad_norm": 0.29776975666195865, |
| "learning_rate": 5.442423792610118e-06, |
| "loss": 0.2036, |
| "step": 1720 |
| }, |
| { |
| "epoch": 3.9158134243458473, |
| "grad_norm": 6.2431277746955445, |
| "learning_rate": 5.4206274515717735e-06, |
| "loss": 0.3037, |
| "step": 1721 |
| }, |
| { |
| "epoch": 3.9180887372013653, |
| "grad_norm": 0.28750011822850136, |
| "learning_rate": 5.3988680001624246e-06, |
| "loss": 0.2043, |
| "step": 1722 |
| }, |
| { |
| "epoch": 3.920364050056883, |
| "grad_norm": 0.3534671305395048, |
| "learning_rate": 5.377145493439189e-06, |
| "loss": 0.2488, |
| "step": 1723 |
| }, |
| { |
| "epoch": 3.9226393629124003, |
| "grad_norm": 0.3387306762933544, |
| "learning_rate": 5.3554599863656964e-06, |
| "loss": 0.2459, |
| "step": 1724 |
| }, |
| { |
| "epoch": 3.9249146757679183, |
| "grad_norm": 0.3380266671759454, |
| "learning_rate": 5.333811533811945e-06, |
| "loss": 0.252, |
| "step": 1725 |
| }, |
| { |
| "epoch": 3.927189988623436, |
| "grad_norm": 0.3245287829380904, |
| "learning_rate": 5.312200190554197e-06, |
| "loss": 0.2136, |
| "step": 1726 |
| }, |
| { |
| "epoch": 3.9294653014789533, |
| "grad_norm": 0.30177056610765146, |
| "learning_rate": 5.290626011274811e-06, |
| "loss": 0.2157, |
| "step": 1727 |
| }, |
| { |
| "epoch": 3.931740614334471, |
| "grad_norm": 0.3184248640213574, |
| "learning_rate": 5.269089050562095e-06, |
| "loss": 0.2413, |
| "step": 1728 |
| }, |
| { |
| "epoch": 3.9340159271899884, |
| "grad_norm": 0.3449914811634588, |
| "learning_rate": 5.24758936291021e-06, |
| "loss": 0.2774, |
| "step": 1729 |
| }, |
| { |
| "epoch": 3.9362912400455063, |
| "grad_norm": 0.29415305944975656, |
| "learning_rate": 5.226127002718984e-06, |
| "loss": 0.2211, |
| "step": 1730 |
| }, |
| { |
| "epoch": 3.938566552901024, |
| "grad_norm": 0.3063318019988386, |
| "learning_rate": 5.204702024293809e-06, |
| "loss": 0.2348, |
| "step": 1731 |
| }, |
| { |
| "epoch": 3.9408418657565414, |
| "grad_norm": 0.31018938854217054, |
| "learning_rate": 5.1833144818454984e-06, |
| "loss": 0.2326, |
| "step": 1732 |
| }, |
| { |
| "epoch": 3.9431171786120593, |
| "grad_norm": 0.31877450093183923, |
| "learning_rate": 5.161964429490118e-06, |
| "loss": 0.2323, |
| "step": 1733 |
| }, |
| { |
| "epoch": 3.945392491467577, |
| "grad_norm": 0.3129394494579511, |
| "learning_rate": 5.140651921248896e-06, |
| "loss": 0.2156, |
| "step": 1734 |
| }, |
| { |
| "epoch": 3.9476678043230944, |
| "grad_norm": 0.31451145749768966, |
| "learning_rate": 5.119377011048066e-06, |
| "loss": 0.205, |
| "step": 1735 |
| }, |
| { |
| "epoch": 3.949943117178612, |
| "grad_norm": 0.31972587932927893, |
| "learning_rate": 5.098139752718705e-06, |
| "loss": 0.2147, |
| "step": 1736 |
| }, |
| { |
| "epoch": 3.9522184300341294, |
| "grad_norm": 0.3097361980800767, |
| "learning_rate": 5.07694019999665e-06, |
| "loss": 0.2493, |
| "step": 1737 |
| }, |
| { |
| "epoch": 3.9544937428896474, |
| "grad_norm": 0.28718295253995646, |
| "learning_rate": 5.055778406522302e-06, |
| "loss": 0.2076, |
| "step": 1738 |
| }, |
| { |
| "epoch": 3.956769055745165, |
| "grad_norm": 0.3304463564819114, |
| "learning_rate": 5.03465442584055e-06, |
| "loss": 0.2384, |
| "step": 1739 |
| }, |
| { |
| "epoch": 3.9590443686006824, |
| "grad_norm": 0.2894887585174249, |
| "learning_rate": 5.013568311400599e-06, |
| "loss": 0.205, |
| "step": 1740 |
| }, |
| { |
| "epoch": 3.9613196814562004, |
| "grad_norm": 0.355332008884822, |
| "learning_rate": 4.992520116555824e-06, |
| "loss": 0.2662, |
| "step": 1741 |
| }, |
| { |
| "epoch": 3.963594994311718, |
| "grad_norm": 0.317299732708024, |
| "learning_rate": 4.971509894563675e-06, |
| "loss": 0.214, |
| "step": 1742 |
| }, |
| { |
| "epoch": 3.9658703071672354, |
| "grad_norm": 0.3022323304304223, |
| "learning_rate": 4.9505376985855205e-06, |
| "loss": 0.2071, |
| "step": 1743 |
| }, |
| { |
| "epoch": 3.968145620022753, |
| "grad_norm": 0.3240147150371773, |
| "learning_rate": 4.929603581686493e-06, |
| "loss": 0.2562, |
| "step": 1744 |
| }, |
| { |
| "epoch": 3.970420932878271, |
| "grad_norm": 0.2920458914250409, |
| "learning_rate": 4.908707596835396e-06, |
| "loss": 0.2288, |
| "step": 1745 |
| }, |
| { |
| "epoch": 3.9726962457337884, |
| "grad_norm": 0.34721554596739446, |
| "learning_rate": 4.887849796904545e-06, |
| "loss": 0.2738, |
| "step": 1746 |
| }, |
| { |
| "epoch": 3.974971558589306, |
| "grad_norm": 0.2790951894936184, |
| "learning_rate": 4.867030234669623e-06, |
| "loss": 0.1989, |
| "step": 1747 |
| }, |
| { |
| "epoch": 3.977246871444824, |
| "grad_norm": 0.30847017475707605, |
| "learning_rate": 4.846248962809583e-06, |
| "loss": 0.223, |
| "step": 1748 |
| }, |
| { |
| "epoch": 3.9795221843003414, |
| "grad_norm": 0.31553898501131733, |
| "learning_rate": 4.825506033906473e-06, |
| "loss": 0.2499, |
| "step": 1749 |
| }, |
| { |
| "epoch": 3.981797497155859, |
| "grad_norm": 0.30621288235785427, |
| "learning_rate": 4.804801500445338e-06, |
| "loss": 0.2343, |
| "step": 1750 |
| }, |
| { |
| "epoch": 3.9840728100113765, |
| "grad_norm": 0.31119300827944296, |
| "learning_rate": 4.784135414814073e-06, |
| "loss": 0.2293, |
| "step": 1751 |
| }, |
| { |
| "epoch": 3.986348122866894, |
| "grad_norm": 0.315135314468061, |
| "learning_rate": 4.763507829303273e-06, |
| "loss": 0.2426, |
| "step": 1752 |
| }, |
| { |
| "epoch": 3.988623435722412, |
| "grad_norm": 0.30300641894384645, |
| "learning_rate": 4.742918796106133e-06, |
| "loss": 0.2327, |
| "step": 1753 |
| }, |
| { |
| "epoch": 3.9908987485779295, |
| "grad_norm": 0.28620975306710267, |
| "learning_rate": 4.7223683673183065e-06, |
| "loss": 0.221, |
| "step": 1754 |
| }, |
| { |
| "epoch": 3.993174061433447, |
| "grad_norm": 0.35135584656251523, |
| "learning_rate": 4.701856594937744e-06, |
| "loss": 0.2737, |
| "step": 1755 |
| }, |
| { |
| "epoch": 3.995449374288965, |
| "grad_norm": 0.30154724781040915, |
| "learning_rate": 4.68138353086461e-06, |
| "loss": 0.2218, |
| "step": 1756 |
| }, |
| { |
| "epoch": 3.9977246871444825, |
| "grad_norm": 0.3270433705829516, |
| "learning_rate": 4.6609492269011035e-06, |
| "loss": 0.2238, |
| "step": 1757 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.45117243369326687, |
| "learning_rate": 4.640553734751372e-06, |
| "loss": 0.3146, |
| "step": 1758 |
| }, |
| { |
| "epoch": 4.0022753128555175, |
| "grad_norm": 0.4769075032451865, |
| "learning_rate": 4.620197106021352e-06, |
| "loss": 0.2012, |
| "step": 1759 |
| }, |
| { |
| "epoch": 4.004550625711035, |
| "grad_norm": 0.3976079400523734, |
| "learning_rate": 4.5998793922186315e-06, |
| "loss": 0.1877, |
| "step": 1760 |
| }, |
| { |
| "epoch": 4.006825938566553, |
| "grad_norm": 0.3093737942856265, |
| "learning_rate": 4.579600644752351e-06, |
| "loss": 0.1623, |
| "step": 1761 |
| }, |
| { |
| "epoch": 4.009101251422071, |
| "grad_norm": 0.3078795723720559, |
| "learning_rate": 4.5593609149330595e-06, |
| "loss": 0.2004, |
| "step": 1762 |
| }, |
| { |
| "epoch": 4.0113765642775885, |
| "grad_norm": 0.3945750519655033, |
| "learning_rate": 4.539160253972554e-06, |
| "loss": 0.1868, |
| "step": 1763 |
| }, |
| { |
| "epoch": 4.013651877133106, |
| "grad_norm": 0.4965258610062153, |
| "learning_rate": 4.518998712983813e-06, |
| "loss": 0.2092, |
| "step": 1764 |
| }, |
| { |
| "epoch": 4.0159271899886235, |
| "grad_norm": 0.38386284945859933, |
| "learning_rate": 4.498876342980796e-06, |
| "loss": 0.168, |
| "step": 1765 |
| }, |
| { |
| "epoch": 4.018202502844141, |
| "grad_norm": 0.3478458205726723, |
| "learning_rate": 4.478793194878372e-06, |
| "loss": 0.1832, |
| "step": 1766 |
| }, |
| { |
| "epoch": 4.020477815699659, |
| "grad_norm": 0.31275256183291067, |
| "learning_rate": 4.458749319492173e-06, |
| "loss": 0.1595, |
| "step": 1767 |
| }, |
| { |
| "epoch": 4.022753128555176, |
| "grad_norm": 0.37246833572422183, |
| "learning_rate": 4.438744767538437e-06, |
| "loss": 0.174, |
| "step": 1768 |
| }, |
| { |
| "epoch": 4.025028441410694, |
| "grad_norm": 0.3836746699164272, |
| "learning_rate": 4.4187795896339215e-06, |
| "loss": 0.1718, |
| "step": 1769 |
| }, |
| { |
| "epoch": 4.027303754266212, |
| "grad_norm": 0.3799208669671832, |
| "learning_rate": 4.39885383629576e-06, |
| "loss": 0.1911, |
| "step": 1770 |
| }, |
| { |
| "epoch": 4.0295790671217295, |
| "grad_norm": 0.3362384313237527, |
| "learning_rate": 4.378967557941312e-06, |
| "loss": 0.1724, |
| "step": 1771 |
| }, |
| { |
| "epoch": 4.031854379977247, |
| "grad_norm": 0.2893434745348152, |
| "learning_rate": 4.359120804888079e-06, |
| "loss": 0.1408, |
| "step": 1772 |
| }, |
| { |
| "epoch": 4.034129692832765, |
| "grad_norm": 0.29531089760755574, |
| "learning_rate": 4.339313627353534e-06, |
| "loss": 0.1492, |
| "step": 1773 |
| }, |
| { |
| "epoch": 4.036405005688282, |
| "grad_norm": 0.35813511824881245, |
| "learning_rate": 4.319546075455021e-06, |
| "loss": 0.1926, |
| "step": 1774 |
| }, |
| { |
| "epoch": 4.0386803185438, |
| "grad_norm": 0.3569884122339448, |
| "learning_rate": 4.299818199209629e-06, |
| "loss": 0.1869, |
| "step": 1775 |
| }, |
| { |
| "epoch": 4.040955631399317, |
| "grad_norm": 0.33200820587055785, |
| "learning_rate": 4.280130048534037e-06, |
| "loss": 0.1765, |
| "step": 1776 |
| }, |
| { |
| "epoch": 4.043230944254835, |
| "grad_norm": 0.29622461605612205, |
| "learning_rate": 4.2604816732444275e-06, |
| "loss": 0.1566, |
| "step": 1777 |
| }, |
| { |
| "epoch": 4.045506257110353, |
| "grad_norm": 0.292514545715088, |
| "learning_rate": 4.240873123056337e-06, |
| "loss": 0.1563, |
| "step": 1778 |
| }, |
| { |
| "epoch": 4.047781569965871, |
| "grad_norm": 0.31986845280561066, |
| "learning_rate": 4.2213044475845225e-06, |
| "loss": 0.1746, |
| "step": 1779 |
| }, |
| { |
| "epoch": 4.050056882821388, |
| "grad_norm": 0.33983512793590304, |
| "learning_rate": 4.201775696342862e-06, |
| "loss": 0.1592, |
| "step": 1780 |
| }, |
| { |
| "epoch": 4.052332195676906, |
| "grad_norm": 0.31379639034080037, |
| "learning_rate": 4.182286918744198e-06, |
| "loss": 0.2042, |
| "step": 1781 |
| }, |
| { |
| "epoch": 4.054607508532423, |
| "grad_norm": 0.3023478945890041, |
| "learning_rate": 4.162838164100249e-06, |
| "loss": 0.1579, |
| "step": 1782 |
| }, |
| { |
| "epoch": 4.056882821387941, |
| "grad_norm": 0.2969213560175581, |
| "learning_rate": 4.143429481621457e-06, |
| "loss": 0.1743, |
| "step": 1783 |
| }, |
| { |
| "epoch": 4.059158134243458, |
| "grad_norm": 0.3119541374701204, |
| "learning_rate": 4.124060920416859e-06, |
| "loss": 0.1659, |
| "step": 1784 |
| }, |
| { |
| "epoch": 4.061433447098976, |
| "grad_norm": 0.31438726822031443, |
| "learning_rate": 4.104732529493991e-06, |
| "loss": 0.1768, |
| "step": 1785 |
| }, |
| { |
| "epoch": 4.063708759954494, |
| "grad_norm": 0.32453073231477475, |
| "learning_rate": 4.085444357758747e-06, |
| "loss": 0.1684, |
| "step": 1786 |
| }, |
| { |
| "epoch": 4.065984072810012, |
| "grad_norm": 0.3181510377112917, |
| "learning_rate": 4.066196454015241e-06, |
| "loss": 0.174, |
| "step": 1787 |
| }, |
| { |
| "epoch": 4.068259385665529, |
| "grad_norm": 0.285844082778199, |
| "learning_rate": 4.046988866965715e-06, |
| "loss": 0.1685, |
| "step": 1788 |
| }, |
| { |
| "epoch": 4.070534698521047, |
| "grad_norm": 0.32331948067270616, |
| "learning_rate": 4.0278216452103855e-06, |
| "loss": 0.1844, |
| "step": 1789 |
| }, |
| { |
| "epoch": 4.072810011376564, |
| "grad_norm": 0.2862103052814747, |
| "learning_rate": 4.008694837247345e-06, |
| "loss": 0.1526, |
| "step": 1790 |
| }, |
| { |
| "epoch": 4.075085324232082, |
| "grad_norm": 0.28875087074361716, |
| "learning_rate": 3.9896084914724295e-06, |
| "loss": 0.1742, |
| "step": 1791 |
| }, |
| { |
| "epoch": 4.077360637087599, |
| "grad_norm": 0.2702349719704281, |
| "learning_rate": 3.970562656179078e-06, |
| "loss": 0.1647, |
| "step": 1792 |
| }, |
| { |
| "epoch": 4.079635949943118, |
| "grad_norm": 0.2773168310451333, |
| "learning_rate": 3.951557379558248e-06, |
| "loss": 0.1684, |
| "step": 1793 |
| }, |
| { |
| "epoch": 4.081911262798635, |
| "grad_norm": 0.30690349747480467, |
| "learning_rate": 3.932592709698266e-06, |
| "loss": 0.1742, |
| "step": 1794 |
| }, |
| { |
| "epoch": 4.084186575654153, |
| "grad_norm": 0.3017798594775822, |
| "learning_rate": 3.913668694584705e-06, |
| "loss": 0.1889, |
| "step": 1795 |
| }, |
| { |
| "epoch": 4.08646188850967, |
| "grad_norm": 0.2956134096062524, |
| "learning_rate": 3.894785382100284e-06, |
| "loss": 0.2176, |
| "step": 1796 |
| }, |
| { |
| "epoch": 4.088737201365188, |
| "grad_norm": 0.3051750160939217, |
| "learning_rate": 3.875942820024716e-06, |
| "loss": 0.1815, |
| "step": 1797 |
| }, |
| { |
| "epoch": 4.091012514220705, |
| "grad_norm": 0.3130918040476627, |
| "learning_rate": 3.857141056034623e-06, |
| "loss": 0.1689, |
| "step": 1798 |
| }, |
| { |
| "epoch": 4.093287827076223, |
| "grad_norm": 0.2842894915597038, |
| "learning_rate": 3.838380137703395e-06, |
| "loss": 0.1655, |
| "step": 1799 |
| }, |
| { |
| "epoch": 4.09556313993174, |
| "grad_norm": 0.29715175234366936, |
| "learning_rate": 3.819660112501053e-06, |
| "loss": 0.1796, |
| "step": 1800 |
| }, |
| { |
| "epoch": 4.097838452787259, |
| "grad_norm": 0.29377940866244734, |
| "learning_rate": 3.8009810277941684e-06, |
| "loss": 0.1639, |
| "step": 1801 |
| }, |
| { |
| "epoch": 4.100113765642776, |
| "grad_norm": 0.29276927104650885, |
| "learning_rate": 3.7823429308457216e-06, |
| "loss": 0.1973, |
| "step": 1802 |
| }, |
| { |
| "epoch": 4.102389078498294, |
| "grad_norm": 0.2994435046602159, |
| "learning_rate": 3.763745868814963e-06, |
| "loss": 0.1804, |
| "step": 1803 |
| }, |
| { |
| "epoch": 4.104664391353811, |
| "grad_norm": 0.28482583960056823, |
| "learning_rate": 3.7451898887573414e-06, |
| "loss": 0.1523, |
| "step": 1804 |
| }, |
| { |
| "epoch": 4.106939704209329, |
| "grad_norm": 0.28838680832452307, |
| "learning_rate": 3.72667503762433e-06, |
| "loss": 0.1579, |
| "step": 1805 |
| }, |
| { |
| "epoch": 4.109215017064846, |
| "grad_norm": 0.270794150814169, |
| "learning_rate": 3.7082013622633574e-06, |
| "loss": 0.1802, |
| "step": 1806 |
| }, |
| { |
| "epoch": 4.111490329920364, |
| "grad_norm": 0.28536534956566173, |
| "learning_rate": 3.6897689094176614e-06, |
| "loss": 0.155, |
| "step": 1807 |
| }, |
| { |
| "epoch": 4.113765642775881, |
| "grad_norm": 0.2943625378053496, |
| "learning_rate": 3.671377725726164e-06, |
| "loss": 0.1482, |
| "step": 1808 |
| }, |
| { |
| "epoch": 4.1160409556314, |
| "grad_norm": 0.30861999050570416, |
| "learning_rate": 3.653027857723379e-06, |
| "loss": 0.1741, |
| "step": 1809 |
| }, |
| { |
| "epoch": 4.118316268486917, |
| "grad_norm": 0.2923682813079667, |
| "learning_rate": 3.6347193518392776e-06, |
| "loss": 0.1523, |
| "step": 1810 |
| }, |
| { |
| "epoch": 4.120591581342435, |
| "grad_norm": 0.2895713734766889, |
| "learning_rate": 3.6164522543991675e-06, |
| "loss": 0.1847, |
| "step": 1811 |
| }, |
| { |
| "epoch": 4.122866894197952, |
| "grad_norm": 0.2977731114241597, |
| "learning_rate": 3.5982266116235918e-06, |
| "loss": 0.1849, |
| "step": 1812 |
| }, |
| { |
| "epoch": 4.12514220705347, |
| "grad_norm": 0.2915909276442138, |
| "learning_rate": 3.5800424696281933e-06, |
| "loss": 0.1835, |
| "step": 1813 |
| }, |
| { |
| "epoch": 4.127417519908987, |
| "grad_norm": 0.2964235964857184, |
| "learning_rate": 3.5618998744236112e-06, |
| "loss": 0.1877, |
| "step": 1814 |
| }, |
| { |
| "epoch": 4.129692832764505, |
| "grad_norm": 0.2744018510234478, |
| "learning_rate": 3.543798871915367e-06, |
| "loss": 0.1762, |
| "step": 1815 |
| }, |
| { |
| "epoch": 4.131968145620022, |
| "grad_norm": 0.28501466540089593, |
| "learning_rate": 3.5257395079037293e-06, |
| "loss": 0.1727, |
| "step": 1816 |
| }, |
| { |
| "epoch": 4.134243458475541, |
| "grad_norm": 0.3124331135487368, |
| "learning_rate": 3.5077218280836143e-06, |
| "loss": 0.1898, |
| "step": 1817 |
| }, |
| { |
| "epoch": 4.136518771331058, |
| "grad_norm": 0.27779228301613235, |
| "learning_rate": 3.4897458780444815e-06, |
| "loss": 0.1827, |
| "step": 1818 |
| }, |
| { |
| "epoch": 4.138794084186576, |
| "grad_norm": 0.29143118659774375, |
| "learning_rate": 3.4718117032701736e-06, |
| "loss": 0.1855, |
| "step": 1819 |
| }, |
| { |
| "epoch": 4.141069397042093, |
| "grad_norm": 0.2955170890916771, |
| "learning_rate": 3.453919349138859e-06, |
| "loss": 0.1892, |
| "step": 1820 |
| }, |
| { |
| "epoch": 4.143344709897611, |
| "grad_norm": 0.2960059703618523, |
| "learning_rate": 3.4360688609228696e-06, |
| "loss": 0.1992, |
| "step": 1821 |
| }, |
| { |
| "epoch": 4.145620022753128, |
| "grad_norm": 0.3018565283140866, |
| "learning_rate": 3.418260283788617e-06, |
| "loss": 0.1778, |
| "step": 1822 |
| }, |
| { |
| "epoch": 4.147895335608646, |
| "grad_norm": 0.29023871017690334, |
| "learning_rate": 3.4004936627964667e-06, |
| "loss": 0.1688, |
| "step": 1823 |
| }, |
| { |
| "epoch": 4.150170648464163, |
| "grad_norm": 0.2761367828407954, |
| "learning_rate": 3.382769042900613e-06, |
| "loss": 0.1893, |
| "step": 1824 |
| }, |
| { |
| "epoch": 4.152445961319682, |
| "grad_norm": 0.30011109570647254, |
| "learning_rate": 3.365086468948988e-06, |
| "loss": 0.1524, |
| "step": 1825 |
| }, |
| { |
| "epoch": 4.154721274175199, |
| "grad_norm": 0.30881472569270824, |
| "learning_rate": 3.3474459856831352e-06, |
| "loss": 0.1697, |
| "step": 1826 |
| }, |
| { |
| "epoch": 4.156996587030717, |
| "grad_norm": 0.2948790323073478, |
| "learning_rate": 3.3298476377380907e-06, |
| "loss": 0.1788, |
| "step": 1827 |
| }, |
| { |
| "epoch": 4.159271899886234, |
| "grad_norm": 0.28948403698960046, |
| "learning_rate": 3.312291469642286e-06, |
| "loss": 0.1775, |
| "step": 1828 |
| }, |
| { |
| "epoch": 4.161547212741752, |
| "grad_norm": 0.44070767588943566, |
| "learning_rate": 3.294777525817414e-06, |
| "loss": 0.2049, |
| "step": 1829 |
| }, |
| { |
| "epoch": 4.163822525597269, |
| "grad_norm": 0.2852153227163277, |
| "learning_rate": 3.277305850578345e-06, |
| "loss": 0.19, |
| "step": 1830 |
| }, |
| { |
| "epoch": 4.166097838452787, |
| "grad_norm": 0.299312680649051, |
| "learning_rate": 3.2598764881329915e-06, |
| "loss": 0.1784, |
| "step": 1831 |
| }, |
| { |
| "epoch": 4.168373151308305, |
| "grad_norm": 0.2771732420759297, |
| "learning_rate": 3.242489482582194e-06, |
| "loss": 0.1529, |
| "step": 1832 |
| }, |
| { |
| "epoch": 4.170648464163823, |
| "grad_norm": 0.28136004468362263, |
| "learning_rate": 3.225144877919635e-06, |
| "loss": 0.1675, |
| "step": 1833 |
| }, |
| { |
| "epoch": 4.17292377701934, |
| "grad_norm": 0.26408170571926015, |
| "learning_rate": 3.2078427180317084e-06, |
| "loss": 0.1751, |
| "step": 1834 |
| }, |
| { |
| "epoch": 4.175199089874858, |
| "grad_norm": 0.2945328639456804, |
| "learning_rate": 3.1905830466973975e-06, |
| "loss": 0.1822, |
| "step": 1835 |
| }, |
| { |
| "epoch": 4.177474402730375, |
| "grad_norm": 0.2943639900150499, |
| "learning_rate": 3.173365907588195e-06, |
| "loss": 0.1674, |
| "step": 1836 |
| }, |
| { |
| "epoch": 4.179749715585893, |
| "grad_norm": 0.2995630964889057, |
| "learning_rate": 3.1561913442679694e-06, |
| "loss": 0.1787, |
| "step": 1837 |
| }, |
| { |
| "epoch": 4.18202502844141, |
| "grad_norm": 0.28199152495272156, |
| "learning_rate": 3.1390594001928654e-06, |
| "loss": 0.1761, |
| "step": 1838 |
| }, |
| { |
| "epoch": 4.184300341296928, |
| "grad_norm": 0.29468647972746453, |
| "learning_rate": 3.1219701187111817e-06, |
| "loss": 0.179, |
| "step": 1839 |
| }, |
| { |
| "epoch": 4.186575654152446, |
| "grad_norm": 0.29082483162051903, |
| "learning_rate": 3.1049235430632696e-06, |
| "loss": 0.1955, |
| "step": 1840 |
| }, |
| { |
| "epoch": 4.188850967007964, |
| "grad_norm": 0.29879641184857786, |
| "learning_rate": 3.0879197163814334e-06, |
| "loss": 0.1724, |
| "step": 1841 |
| }, |
| { |
| "epoch": 4.191126279863481, |
| "grad_norm": 0.31145266853383446, |
| "learning_rate": 3.0709586816898086e-06, |
| "loss": 0.2368, |
| "step": 1842 |
| }, |
| { |
| "epoch": 4.193401592718999, |
| "grad_norm": 0.2721191961469994, |
| "learning_rate": 3.054040481904246e-06, |
| "loss": 0.134, |
| "step": 1843 |
| }, |
| { |
| "epoch": 4.1956769055745164, |
| "grad_norm": 0.27510529396254574, |
| "learning_rate": 3.037165159832223e-06, |
| "loss": 0.1499, |
| "step": 1844 |
| }, |
| { |
| "epoch": 4.197952218430034, |
| "grad_norm": 0.2797481261232522, |
| "learning_rate": 3.0203327581727195e-06, |
| "loss": 0.1666, |
| "step": 1845 |
| }, |
| { |
| "epoch": 4.2002275312855515, |
| "grad_norm": 0.306155624592477, |
| "learning_rate": 3.0035433195161266e-06, |
| "loss": 0.17, |
| "step": 1846 |
| }, |
| { |
| "epoch": 4.202502844141069, |
| "grad_norm": 0.28488195762459484, |
| "learning_rate": 2.986796886344112e-06, |
| "loss": 0.1831, |
| "step": 1847 |
| }, |
| { |
| "epoch": 4.204778156996587, |
| "grad_norm": 0.28418092083664215, |
| "learning_rate": 2.9700935010295316e-06, |
| "loss": 0.1528, |
| "step": 1848 |
| }, |
| { |
| "epoch": 4.207053469852105, |
| "grad_norm": 0.2945372190049564, |
| "learning_rate": 2.953433205836327e-06, |
| "loss": 0.1585, |
| "step": 1849 |
| }, |
| { |
| "epoch": 4.2093287827076225, |
| "grad_norm": 0.27504913273286286, |
| "learning_rate": 2.9368160429194127e-06, |
| "loss": 0.1718, |
| "step": 1850 |
| }, |
| { |
| "epoch": 4.21160409556314, |
| "grad_norm": 0.2880017693010912, |
| "learning_rate": 2.920242054324547e-06, |
| "loss": 0.1598, |
| "step": 1851 |
| }, |
| { |
| "epoch": 4.2138794084186575, |
| "grad_norm": 0.2983460026372298, |
| "learning_rate": 2.9037112819882685e-06, |
| "loss": 0.1737, |
| "step": 1852 |
| }, |
| { |
| "epoch": 4.216154721274175, |
| "grad_norm": 0.2711852394125882, |
| "learning_rate": 2.887223767737759e-06, |
| "loss": 0.1713, |
| "step": 1853 |
| }, |
| { |
| "epoch": 4.2184300341296925, |
| "grad_norm": 0.28858227659794555, |
| "learning_rate": 2.8707795532907457e-06, |
| "loss": 0.1474, |
| "step": 1854 |
| }, |
| { |
| "epoch": 4.22070534698521, |
| "grad_norm": 0.27749228116646407, |
| "learning_rate": 2.8543786802553943e-06, |
| "loss": 0.1627, |
| "step": 1855 |
| }, |
| { |
| "epoch": 4.2229806598407285, |
| "grad_norm": 0.29530178216415215, |
| "learning_rate": 2.8380211901302e-06, |
| "loss": 0.2047, |
| "step": 1856 |
| }, |
| { |
| "epoch": 4.225255972696246, |
| "grad_norm": 0.3023235137542973, |
| "learning_rate": 2.8217071243039027e-06, |
| "loss": 0.2001, |
| "step": 1857 |
| }, |
| { |
| "epoch": 4.2275312855517635, |
| "grad_norm": 0.29111559436168455, |
| "learning_rate": 2.8054365240553627e-06, |
| "loss": 0.1691, |
| "step": 1858 |
| }, |
| { |
| "epoch": 4.229806598407281, |
| "grad_norm": 0.28642161220324286, |
| "learning_rate": 2.789209430553448e-06, |
| "loss": 0.1825, |
| "step": 1859 |
| }, |
| { |
| "epoch": 4.2320819112627985, |
| "grad_norm": 0.28236465353986595, |
| "learning_rate": 2.773025884856957e-06, |
| "loss": 0.187, |
| "step": 1860 |
| }, |
| { |
| "epoch": 4.234357224118316, |
| "grad_norm": 0.2898860096952971, |
| "learning_rate": 2.756885927914499e-06, |
| "loss": 0.1411, |
| "step": 1861 |
| }, |
| { |
| "epoch": 4.236632536973834, |
| "grad_norm": 0.2799644334188099, |
| "learning_rate": 2.740789600564391e-06, |
| "loss": 0.1544, |
| "step": 1862 |
| }, |
| { |
| "epoch": 4.238907849829351, |
| "grad_norm": 0.283940319165611, |
| "learning_rate": 2.724736943534554e-06, |
| "loss": 0.1562, |
| "step": 1863 |
| }, |
| { |
| "epoch": 4.2411831626848695, |
| "grad_norm": 0.29108290944445414, |
| "learning_rate": 2.7087279974424064e-06, |
| "loss": 0.1814, |
| "step": 1864 |
| }, |
| { |
| "epoch": 4.243458475540387, |
| "grad_norm": 0.2881703274709376, |
| "learning_rate": 2.692762802794775e-06, |
| "loss": 0.1733, |
| "step": 1865 |
| }, |
| { |
| "epoch": 4.2457337883959045, |
| "grad_norm": 0.2866066755967616, |
| "learning_rate": 2.6768413999877885e-06, |
| "loss": 0.1772, |
| "step": 1866 |
| }, |
| { |
| "epoch": 4.248009101251422, |
| "grad_norm": 0.2871068941071818, |
| "learning_rate": 2.660963829306753e-06, |
| "loss": 0.1868, |
| "step": 1867 |
| }, |
| { |
| "epoch": 4.25028441410694, |
| "grad_norm": 0.28265703450221186, |
| "learning_rate": 2.645130130926086e-06, |
| "loss": 0.1845, |
| "step": 1868 |
| }, |
| { |
| "epoch": 4.252559726962457, |
| "grad_norm": 0.28517829274735246, |
| "learning_rate": 2.629340344909186e-06, |
| "loss": 0.159, |
| "step": 1869 |
| }, |
| { |
| "epoch": 4.254835039817975, |
| "grad_norm": 0.28641183845888735, |
| "learning_rate": 2.6135945112083506e-06, |
| "loss": 0.1842, |
| "step": 1870 |
| }, |
| { |
| "epoch": 4.257110352673493, |
| "grad_norm": 0.2959083940515715, |
| "learning_rate": 2.5978926696646567e-06, |
| "loss": 0.1846, |
| "step": 1871 |
| }, |
| { |
| "epoch": 4.2593856655290105, |
| "grad_norm": 0.3125252382625625, |
| "learning_rate": 2.5822348600078684e-06, |
| "loss": 0.2053, |
| "step": 1872 |
| }, |
| { |
| "epoch": 4.261660978384528, |
| "grad_norm": 0.29353899348561113, |
| "learning_rate": 2.56662112185635e-06, |
| "loss": 0.1565, |
| "step": 1873 |
| }, |
| { |
| "epoch": 4.263936291240046, |
| "grad_norm": 0.30462610206364243, |
| "learning_rate": 2.5510514947169474e-06, |
| "loss": 0.1805, |
| "step": 1874 |
| }, |
| { |
| "epoch": 4.266211604095563, |
| "grad_norm": 0.27048278700046385, |
| "learning_rate": 2.535526017984884e-06, |
| "loss": 0.1596, |
| "step": 1875 |
| }, |
| { |
| "epoch": 4.268486916951081, |
| "grad_norm": 0.3074253140038664, |
| "learning_rate": 2.520044730943687e-06, |
| "loss": 0.1681, |
| "step": 1876 |
| }, |
| { |
| "epoch": 4.270762229806598, |
| "grad_norm": 0.2965521216090326, |
| "learning_rate": 2.5046076727650647e-06, |
| "loss": 0.1677, |
| "step": 1877 |
| }, |
| { |
| "epoch": 4.273037542662116, |
| "grad_norm": 0.28241542497027616, |
| "learning_rate": 2.4892148825088167e-06, |
| "loss": 0.1736, |
| "step": 1878 |
| }, |
| { |
| "epoch": 4.275312855517634, |
| "grad_norm": 0.2910565396763097, |
| "learning_rate": 2.473866399122733e-06, |
| "loss": 0.2, |
| "step": 1879 |
| }, |
| { |
| "epoch": 4.277588168373152, |
| "grad_norm": 0.26706170621906083, |
| "learning_rate": 2.458562261442483e-06, |
| "loss": 0.1633, |
| "step": 1880 |
| }, |
| { |
| "epoch": 4.279863481228669, |
| "grad_norm": 0.337322390050979, |
| "learning_rate": 2.4433025081915516e-06, |
| "loss": 0.1909, |
| "step": 1881 |
| }, |
| { |
| "epoch": 4.282138794084187, |
| "grad_norm": 0.29527568345775845, |
| "learning_rate": 2.428087177981109e-06, |
| "loss": 0.1621, |
| "step": 1882 |
| }, |
| { |
| "epoch": 4.284414106939704, |
| "grad_norm": 0.2772630547807417, |
| "learning_rate": 2.4129163093099183e-06, |
| "loss": 0.1722, |
| "step": 1883 |
| }, |
| { |
| "epoch": 4.286689419795222, |
| "grad_norm": 0.29035733460933594, |
| "learning_rate": 2.3977899405642502e-06, |
| "loss": 0.1331, |
| "step": 1884 |
| }, |
| { |
| "epoch": 4.288964732650739, |
| "grad_norm": 0.28924669219995647, |
| "learning_rate": 2.3827081100177797e-06, |
| "loss": 0.1552, |
| "step": 1885 |
| }, |
| { |
| "epoch": 4.291240045506257, |
| "grad_norm": 0.293749628581807, |
| "learning_rate": 2.36767085583149e-06, |
| "loss": 0.1877, |
| "step": 1886 |
| }, |
| { |
| "epoch": 4.293515358361775, |
| "grad_norm": 0.29530983346945033, |
| "learning_rate": 2.352678216053563e-06, |
| "loss": 0.171, |
| "step": 1887 |
| }, |
| { |
| "epoch": 4.295790671217293, |
| "grad_norm": 0.29070568416114295, |
| "learning_rate": 2.337730228619297e-06, |
| "loss": 0.2007, |
| "step": 1888 |
| }, |
| { |
| "epoch": 4.29806598407281, |
| "grad_norm": 0.28420095387571376, |
| "learning_rate": 2.322826931351028e-06, |
| "loss": 0.1502, |
| "step": 1889 |
| }, |
| { |
| "epoch": 4.300341296928328, |
| "grad_norm": 0.3047011945298094, |
| "learning_rate": 2.307968361957993e-06, |
| "loss": 0.1593, |
| "step": 1890 |
| }, |
| { |
| "epoch": 4.302616609783845, |
| "grad_norm": 0.2735842227975423, |
| "learning_rate": 2.293154558036257e-06, |
| "loss": 0.1705, |
| "step": 1891 |
| }, |
| { |
| "epoch": 4.304891922639363, |
| "grad_norm": 0.3006310279420523, |
| "learning_rate": 2.278385557068623e-06, |
| "loss": 0.1888, |
| "step": 1892 |
| }, |
| { |
| "epoch": 4.30716723549488, |
| "grad_norm": 0.2839083760791352, |
| "learning_rate": 2.2636613964245348e-06, |
| "loss": 0.1623, |
| "step": 1893 |
| }, |
| { |
| "epoch": 4.309442548350399, |
| "grad_norm": 0.2773286092711842, |
| "learning_rate": 2.2489821133599744e-06, |
| "loss": 0.1467, |
| "step": 1894 |
| }, |
| { |
| "epoch": 4.311717861205916, |
| "grad_norm": 0.2830527267043027, |
| "learning_rate": 2.2343477450173665e-06, |
| "loss": 0.1524, |
| "step": 1895 |
| }, |
| { |
| "epoch": 4.313993174061434, |
| "grad_norm": 0.29987047576137166, |
| "learning_rate": 2.2197583284254853e-06, |
| "loss": 0.1648, |
| "step": 1896 |
| }, |
| { |
| "epoch": 4.316268486916951, |
| "grad_norm": 0.29598096279832914, |
| "learning_rate": 2.205213900499392e-06, |
| "loss": 0.1902, |
| "step": 1897 |
| }, |
| { |
| "epoch": 4.318543799772469, |
| "grad_norm": 0.28814294396320594, |
| "learning_rate": 2.1907144980402852e-06, |
| "loss": 0.1644, |
| "step": 1898 |
| }, |
| { |
| "epoch": 4.320819112627986, |
| "grad_norm": 0.301351812170094, |
| "learning_rate": 2.1762601577354482e-06, |
| "loss": 0.189, |
| "step": 1899 |
| }, |
| { |
| "epoch": 4.323094425483504, |
| "grad_norm": 0.3018885038085118, |
| "learning_rate": 2.161850916158148e-06, |
| "loss": 0.1699, |
| "step": 1900 |
| }, |
| { |
| "epoch": 4.325369738339021, |
| "grad_norm": 0.2869854738978282, |
| "learning_rate": 2.147486809767538e-06, |
| "loss": 0.1773, |
| "step": 1901 |
| }, |
| { |
| "epoch": 4.327645051194539, |
| "grad_norm": 0.28405930271661783, |
| "learning_rate": 2.1331678749085706e-06, |
| "loss": 0.173, |
| "step": 1902 |
| }, |
| { |
| "epoch": 4.329920364050057, |
| "grad_norm": 0.28386739272398476, |
| "learning_rate": 2.118894147811896e-06, |
| "loss": 0.2033, |
| "step": 1903 |
| }, |
| { |
| "epoch": 4.332195676905575, |
| "grad_norm": 0.285225964317923, |
| "learning_rate": 2.1046656645937725e-06, |
| "loss": 0.1947, |
| "step": 1904 |
| }, |
| { |
| "epoch": 4.334470989761092, |
| "grad_norm": 0.2730586271912445, |
| "learning_rate": 2.0904824612560046e-06, |
| "loss": 0.1643, |
| "step": 1905 |
| }, |
| { |
| "epoch": 4.33674630261661, |
| "grad_norm": 0.28470372889885964, |
| "learning_rate": 2.076344573685798e-06, |
| "loss": 0.1842, |
| "step": 1906 |
| }, |
| { |
| "epoch": 4.339021615472127, |
| "grad_norm": 0.2896583206658402, |
| "learning_rate": 2.0622520376557074e-06, |
| "loss": 0.2125, |
| "step": 1907 |
| }, |
| { |
| "epoch": 4.341296928327645, |
| "grad_norm": 0.28492206440633683, |
| "learning_rate": 2.048204888823542e-06, |
| "loss": 0.1766, |
| "step": 1908 |
| }, |
| { |
| "epoch": 4.343572241183162, |
| "grad_norm": 0.2776716747062078, |
| "learning_rate": 2.0342031627322644e-06, |
| "loss": 0.1571, |
| "step": 1909 |
| }, |
| { |
| "epoch": 4.345847554038681, |
| "grad_norm": 0.30420878779082033, |
| "learning_rate": 2.020246894809912e-06, |
| "loss": 0.1809, |
| "step": 1910 |
| }, |
| { |
| "epoch": 4.348122866894198, |
| "grad_norm": 0.29233315451399383, |
| "learning_rate": 2.0063361203694897e-06, |
| "loss": 0.192, |
| "step": 1911 |
| }, |
| { |
| "epoch": 4.350398179749716, |
| "grad_norm": 0.29427195621230223, |
| "learning_rate": 1.992470874608894e-06, |
| "loss": 0.1832, |
| "step": 1912 |
| }, |
| { |
| "epoch": 4.352673492605233, |
| "grad_norm": 0.28092217612998305, |
| "learning_rate": 1.978651192610841e-06, |
| "loss": 0.1687, |
| "step": 1913 |
| }, |
| { |
| "epoch": 4.354948805460751, |
| "grad_norm": 0.3008121187898929, |
| "learning_rate": 1.964877109342733e-06, |
| "loss": 0.1919, |
| "step": 1914 |
| }, |
| { |
| "epoch": 4.357224118316268, |
| "grad_norm": 0.285257260726303, |
| "learning_rate": 1.9511486596566054e-06, |
| "loss": 0.1717, |
| "step": 1915 |
| }, |
| { |
| "epoch": 4.359499431171786, |
| "grad_norm": 0.28989930245491863, |
| "learning_rate": 1.9374658782890332e-06, |
| "loss": 0.1732, |
| "step": 1916 |
| }, |
| { |
| "epoch": 4.361774744027303, |
| "grad_norm": 0.2903557658703925, |
| "learning_rate": 1.923828799861034e-06, |
| "loss": 0.1571, |
| "step": 1917 |
| }, |
| { |
| "epoch": 4.364050056882822, |
| "grad_norm": 0.2800855565851557, |
| "learning_rate": 1.9102374588779883e-06, |
| "loss": 0.1478, |
| "step": 1918 |
| }, |
| { |
| "epoch": 4.366325369738339, |
| "grad_norm": 0.27784082913006314, |
| "learning_rate": 1.8966918897295472e-06, |
| "loss": 0.1556, |
| "step": 1919 |
| }, |
| { |
| "epoch": 4.368600682593857, |
| "grad_norm": 0.2765254308847868, |
| "learning_rate": 1.8831921266895348e-06, |
| "loss": 0.2131, |
| "step": 1920 |
| }, |
| { |
| "epoch": 4.370875995449374, |
| "grad_norm": 0.3047570532309943, |
| "learning_rate": 1.869738203915903e-06, |
| "loss": 0.1765, |
| "step": 1921 |
| }, |
| { |
| "epoch": 4.373151308304892, |
| "grad_norm": 0.28003907661446176, |
| "learning_rate": 1.8563301554505876e-06, |
| "loss": 0.1629, |
| "step": 1922 |
| }, |
| { |
| "epoch": 4.375426621160409, |
| "grad_norm": 0.2970382076063424, |
| "learning_rate": 1.8429680152194619e-06, |
| "loss": 0.1729, |
| "step": 1923 |
| }, |
| { |
| "epoch": 4.377701934015927, |
| "grad_norm": 0.2925407037838341, |
| "learning_rate": 1.8296518170322409e-06, |
| "loss": 0.1792, |
| "step": 1924 |
| }, |
| { |
| "epoch": 4.379977246871444, |
| "grad_norm": 0.3026444999079674, |
| "learning_rate": 1.8163815945823881e-06, |
| "loss": 0.1783, |
| "step": 1925 |
| }, |
| { |
| "epoch": 4.382252559726963, |
| "grad_norm": 0.2778803748916032, |
| "learning_rate": 1.803157381447047e-06, |
| "loss": 0.1694, |
| "step": 1926 |
| }, |
| { |
| "epoch": 4.38452787258248, |
| "grad_norm": 0.28888117266682606, |
| "learning_rate": 1.7899792110869364e-06, |
| "loss": 0.1285, |
| "step": 1927 |
| }, |
| { |
| "epoch": 4.386803185437998, |
| "grad_norm": 0.2885464253312428, |
| "learning_rate": 1.7768471168462652e-06, |
| "loss": 0.1823, |
| "step": 1928 |
| }, |
| { |
| "epoch": 4.389078498293515, |
| "grad_norm": 0.28847188145684455, |
| "learning_rate": 1.7637611319526881e-06, |
| "loss": 0.2026, |
| "step": 1929 |
| }, |
| { |
| "epoch": 4.391353811149033, |
| "grad_norm": 0.2793182449132348, |
| "learning_rate": 1.7507212895171632e-06, |
| "loss": 0.1746, |
| "step": 1930 |
| }, |
| { |
| "epoch": 4.39362912400455, |
| "grad_norm": 0.29292601622964687, |
| "learning_rate": 1.7377276225339e-06, |
| "loss": 0.1806, |
| "step": 1931 |
| }, |
| { |
| "epoch": 4.395904436860068, |
| "grad_norm": 0.28209452442347205, |
| "learning_rate": 1.7247801638802842e-06, |
| "loss": 0.1795, |
| "step": 1932 |
| }, |
| { |
| "epoch": 4.398179749715586, |
| "grad_norm": 0.28464106504951486, |
| "learning_rate": 1.711878946316774e-06, |
| "loss": 0.1968, |
| "step": 1933 |
| }, |
| { |
| "epoch": 4.400455062571104, |
| "grad_norm": 0.2772080474047025, |
| "learning_rate": 1.699024002486831e-06, |
| "loss": 0.1856, |
| "step": 1934 |
| }, |
| { |
| "epoch": 4.402730375426621, |
| "grad_norm": 0.265831728673992, |
| "learning_rate": 1.6862153649168211e-06, |
| "loss": 0.1552, |
| "step": 1935 |
| }, |
| { |
| "epoch": 4.405005688282139, |
| "grad_norm": 0.2787073981628462, |
| "learning_rate": 1.6734530660159532e-06, |
| "loss": 0.1398, |
| "step": 1936 |
| }, |
| { |
| "epoch": 4.407281001137656, |
| "grad_norm": 0.2841532952793566, |
| "learning_rate": 1.660737138076187e-06, |
| "loss": 0.1535, |
| "step": 1937 |
| }, |
| { |
| "epoch": 4.409556313993174, |
| "grad_norm": 0.30484118224343415, |
| "learning_rate": 1.6480676132721484e-06, |
| "loss": 0.1788, |
| "step": 1938 |
| }, |
| { |
| "epoch": 4.4118316268486915, |
| "grad_norm": 0.2780969954617563, |
| "learning_rate": 1.635444523661045e-06, |
| "loss": 0.1804, |
| "step": 1939 |
| }, |
| { |
| "epoch": 4.414106939704209, |
| "grad_norm": 0.2805039981184527, |
| "learning_rate": 1.6228679011826032e-06, |
| "loss": 0.1641, |
| "step": 1940 |
| }, |
| { |
| "epoch": 4.4163822525597265, |
| "grad_norm": 0.2955433059424568, |
| "learning_rate": 1.610337777658968e-06, |
| "loss": 0.1741, |
| "step": 1941 |
| }, |
| { |
| "epoch": 4.418657565415245, |
| "grad_norm": 0.29651803614171446, |
| "learning_rate": 1.5978541847946383e-06, |
| "loss": 0.1738, |
| "step": 1942 |
| }, |
| { |
| "epoch": 4.420932878270762, |
| "grad_norm": 0.2865596574026191, |
| "learning_rate": 1.585417154176363e-06, |
| "loss": 0.168, |
| "step": 1943 |
| }, |
| { |
| "epoch": 4.42320819112628, |
| "grad_norm": 0.2715688446862325, |
| "learning_rate": 1.5730267172730873e-06, |
| "loss": 0.1518, |
| "step": 1944 |
| }, |
| { |
| "epoch": 4.4254835039817975, |
| "grad_norm": 0.27806911710441523, |
| "learning_rate": 1.5606829054358686e-06, |
| "loss": 0.1952, |
| "step": 1945 |
| }, |
| { |
| "epoch": 4.427758816837315, |
| "grad_norm": 0.29787549728839613, |
| "learning_rate": 1.548385749897774e-06, |
| "loss": 0.1902, |
| "step": 1946 |
| }, |
| { |
| "epoch": 4.4300341296928325, |
| "grad_norm": 0.28023191525733876, |
| "learning_rate": 1.5361352817738228e-06, |
| "loss": 0.1722, |
| "step": 1947 |
| }, |
| { |
| "epoch": 4.43230944254835, |
| "grad_norm": 0.28967761875456094, |
| "learning_rate": 1.5239315320609116e-06, |
| "loss": 0.1619, |
| "step": 1948 |
| }, |
| { |
| "epoch": 4.434584755403868, |
| "grad_norm": 0.2721067778091864, |
| "learning_rate": 1.511774531637722e-06, |
| "loss": 0.172, |
| "step": 1949 |
| }, |
| { |
| "epoch": 4.436860068259386, |
| "grad_norm": 0.305432121933115, |
| "learning_rate": 1.499664311264648e-06, |
| "loss": 0.1689, |
| "step": 1950 |
| }, |
| { |
| "epoch": 4.4391353811149035, |
| "grad_norm": 0.26894904879069875, |
| "learning_rate": 1.4876009015837124e-06, |
| "loss": 0.2014, |
| "step": 1951 |
| }, |
| { |
| "epoch": 4.441410693970421, |
| "grad_norm": 0.27691927726903726, |
| "learning_rate": 1.475584333118505e-06, |
| "loss": 0.1716, |
| "step": 1952 |
| }, |
| { |
| "epoch": 4.4436860068259385, |
| "grad_norm": 0.27467787878431865, |
| "learning_rate": 1.4636146362740888e-06, |
| "loss": 0.1682, |
| "step": 1953 |
| }, |
| { |
| "epoch": 4.445961319681456, |
| "grad_norm": 0.2872238149705631, |
| "learning_rate": 1.4516918413369285e-06, |
| "loss": 0.1541, |
| "step": 1954 |
| }, |
| { |
| "epoch": 4.4482366325369735, |
| "grad_norm": 0.29846788798446056, |
| "learning_rate": 1.4398159784748144e-06, |
| "loss": 0.1537, |
| "step": 1955 |
| }, |
| { |
| "epoch": 4.450511945392491, |
| "grad_norm": 0.2908559333263553, |
| "learning_rate": 1.4279870777367899e-06, |
| "loss": 0.2251, |
| "step": 1956 |
| }, |
| { |
| "epoch": 4.4527872582480095, |
| "grad_norm": 0.29080353198439524, |
| "learning_rate": 1.416205169053071e-06, |
| "loss": 0.1673, |
| "step": 1957 |
| }, |
| { |
| "epoch": 4.455062571103527, |
| "grad_norm": 0.2783122256879772, |
| "learning_rate": 1.4044702822349731e-06, |
| "loss": 0.1829, |
| "step": 1958 |
| }, |
| { |
| "epoch": 4.4573378839590445, |
| "grad_norm": 0.29610239322941884, |
| "learning_rate": 1.3927824469748274e-06, |
| "loss": 0.181, |
| "step": 1959 |
| }, |
| { |
| "epoch": 4.459613196814562, |
| "grad_norm": 0.29321444810574243, |
| "learning_rate": 1.3811416928459177e-06, |
| "loss": 0.178, |
| "step": 1960 |
| }, |
| { |
| "epoch": 4.4618885096700796, |
| "grad_norm": 0.2792011932790621, |
| "learning_rate": 1.3695480493024027e-06, |
| "loss": 0.1658, |
| "step": 1961 |
| }, |
| { |
| "epoch": 4.464163822525597, |
| "grad_norm": 0.2808332313261231, |
| "learning_rate": 1.358001545679235e-06, |
| "loss": 0.1803, |
| "step": 1962 |
| }, |
| { |
| "epoch": 4.466439135381115, |
| "grad_norm": 0.3074530335022185, |
| "learning_rate": 1.3465022111920823e-06, |
| "loss": 0.1732, |
| "step": 1963 |
| }, |
| { |
| "epoch": 4.468714448236632, |
| "grad_norm": 0.28964736313004563, |
| "learning_rate": 1.3350500749372808e-06, |
| "loss": 0.141, |
| "step": 1964 |
| }, |
| { |
| "epoch": 4.4709897610921505, |
| "grad_norm": 0.2719958918001761, |
| "learning_rate": 1.3236451658917293e-06, |
| "loss": 0.1976, |
| "step": 1965 |
| }, |
| { |
| "epoch": 4.473265073947668, |
| "grad_norm": 0.2924179782646019, |
| "learning_rate": 1.3122875129128355e-06, |
| "loss": 0.1745, |
| "step": 1966 |
| }, |
| { |
| "epoch": 4.4755403868031856, |
| "grad_norm": 0.2996967323657901, |
| "learning_rate": 1.3009771447384313e-06, |
| "loss": 0.1666, |
| "step": 1967 |
| }, |
| { |
| "epoch": 4.477815699658703, |
| "grad_norm": 0.25889338362216696, |
| "learning_rate": 1.289714089986711e-06, |
| "loss": 0.1612, |
| "step": 1968 |
| }, |
| { |
| "epoch": 4.480091012514221, |
| "grad_norm": 0.27753924235718963, |
| "learning_rate": 1.2784983771561544e-06, |
| "loss": 0.186, |
| "step": 1969 |
| }, |
| { |
| "epoch": 4.482366325369738, |
| "grad_norm": 0.29480688552138207, |
| "learning_rate": 1.2673300346254447e-06, |
| "loss": 0.1824, |
| "step": 1970 |
| }, |
| { |
| "epoch": 4.484641638225256, |
| "grad_norm": 0.29040361553018085, |
| "learning_rate": 1.2562090906534153e-06, |
| "loss": 0.2132, |
| "step": 1971 |
| }, |
| { |
| "epoch": 4.486916951080774, |
| "grad_norm": 0.27954190765759973, |
| "learning_rate": 1.2451355733789616e-06, |
| "loss": 0.1869, |
| "step": 1972 |
| }, |
| { |
| "epoch": 4.489192263936292, |
| "grad_norm": 0.2805230943656468, |
| "learning_rate": 1.2341095108209866e-06, |
| "loss": 0.1753, |
| "step": 1973 |
| }, |
| { |
| "epoch": 4.491467576791809, |
| "grad_norm": 0.29244508925798035, |
| "learning_rate": 1.2231309308783157e-06, |
| "loss": 0.1506, |
| "step": 1974 |
| }, |
| { |
| "epoch": 4.493742889647327, |
| "grad_norm": 0.2787002830771308, |
| "learning_rate": 1.2121998613296259e-06, |
| "loss": 0.1879, |
| "step": 1975 |
| }, |
| { |
| "epoch": 4.496018202502844, |
| "grad_norm": 0.2941140207001762, |
| "learning_rate": 1.2013163298333842e-06, |
| "loss": 0.1797, |
| "step": 1976 |
| }, |
| { |
| "epoch": 4.498293515358362, |
| "grad_norm": 0.27528088765213415, |
| "learning_rate": 1.1904803639277773e-06, |
| "loss": 0.1939, |
| "step": 1977 |
| }, |
| { |
| "epoch": 4.500568828213879, |
| "grad_norm": 0.27949573204598344, |
| "learning_rate": 1.1796919910306359e-06, |
| "loss": 0.1935, |
| "step": 1978 |
| }, |
| { |
| "epoch": 4.502844141069397, |
| "grad_norm": 0.29184060626111324, |
| "learning_rate": 1.1689512384393597e-06, |
| "loss": 0.172, |
| "step": 1979 |
| }, |
| { |
| "epoch": 4.505119453924914, |
| "grad_norm": 0.28235052123789983, |
| "learning_rate": 1.1582581333308784e-06, |
| "loss": 0.171, |
| "step": 1980 |
| }, |
| { |
| "epoch": 4.507394766780433, |
| "grad_norm": 0.294521787307336, |
| "learning_rate": 1.1476127027615336e-06, |
| "loss": 0.1598, |
| "step": 1981 |
| }, |
| { |
| "epoch": 4.50967007963595, |
| "grad_norm": 0.2849206624146031, |
| "learning_rate": 1.1370149736670589e-06, |
| "loss": 0.1771, |
| "step": 1982 |
| }, |
| { |
| "epoch": 4.511945392491468, |
| "grad_norm": 0.2819395265085714, |
| "learning_rate": 1.12646497286248e-06, |
| "loss": 0.1443, |
| "step": 1983 |
| }, |
| { |
| "epoch": 4.514220705346985, |
| "grad_norm": 0.2839388508662871, |
| "learning_rate": 1.1159627270420615e-06, |
| "loss": 0.172, |
| "step": 1984 |
| }, |
| { |
| "epoch": 4.516496018202503, |
| "grad_norm": 0.28864243658579875, |
| "learning_rate": 1.1055082627792357e-06, |
| "loss": 0.1547, |
| "step": 1985 |
| }, |
| { |
| "epoch": 4.51877133105802, |
| "grad_norm": 0.30765219362613433, |
| "learning_rate": 1.0951016065265341e-06, |
| "loss": 0.1648, |
| "step": 1986 |
| }, |
| { |
| "epoch": 4.521046643913538, |
| "grad_norm": 0.26711896993626416, |
| "learning_rate": 1.0847427846155157e-06, |
| "loss": 0.1841, |
| "step": 1987 |
| }, |
| { |
| "epoch": 4.523321956769056, |
| "grad_norm": 0.27032220055464995, |
| "learning_rate": 1.0744318232567185e-06, |
| "loss": 0.1601, |
| "step": 1988 |
| }, |
| { |
| "epoch": 4.525597269624574, |
| "grad_norm": 0.2821744851476248, |
| "learning_rate": 1.0641687485395691e-06, |
| "loss": 0.1883, |
| "step": 1989 |
| }, |
| { |
| "epoch": 4.527872582480091, |
| "grad_norm": 0.7871524095136547, |
| "learning_rate": 1.0539535864323391e-06, |
| "loss": 0.1749, |
| "step": 1990 |
| }, |
| { |
| "epoch": 4.530147895335609, |
| "grad_norm": 0.29128322927257533, |
| "learning_rate": 1.0437863627820555e-06, |
| "loss": 0.1494, |
| "step": 1991 |
| }, |
| { |
| "epoch": 4.532423208191126, |
| "grad_norm": 0.2817846298238865, |
| "learning_rate": 1.0336671033144573e-06, |
| "loss": 0.1558, |
| "step": 1992 |
| }, |
| { |
| "epoch": 4.534698521046644, |
| "grad_norm": 0.2787263682512981, |
| "learning_rate": 1.0235958336339259e-06, |
| "loss": 0.1503, |
| "step": 1993 |
| }, |
| { |
| "epoch": 4.536973833902161, |
| "grad_norm": 0.30180138304813886, |
| "learning_rate": 1.0135725792234052e-06, |
| "loss": 0.1737, |
| "step": 1994 |
| }, |
| { |
| "epoch": 4.53924914675768, |
| "grad_norm": 0.3026589362256529, |
| "learning_rate": 1.0035973654443466e-06, |
| "loss": 0.1546, |
| "step": 1995 |
| }, |
| { |
| "epoch": 4.541524459613197, |
| "grad_norm": 0.2808793452470622, |
| "learning_rate": 9.936702175366642e-07, |
| "loss": 0.1769, |
| "step": 1996 |
| }, |
| { |
| "epoch": 4.543799772468715, |
| "grad_norm": 0.28062446297994786, |
| "learning_rate": 9.837911606186323e-07, |
| "loss": 0.1908, |
| "step": 1997 |
| }, |
| { |
| "epoch": 4.546075085324232, |
| "grad_norm": 0.283034769731874, |
| "learning_rate": 9.739602196868558e-07, |
| "loss": 0.1672, |
| "step": 1998 |
| }, |
| { |
| "epoch": 4.54835039817975, |
| "grad_norm": 0.26885302611911016, |
| "learning_rate": 9.641774196161836e-07, |
| "loss": 0.1747, |
| "step": 1999 |
| }, |
| { |
| "epoch": 4.550625711035267, |
| "grad_norm": 0.28787602733944306, |
| "learning_rate": 9.544427851596661e-07, |
| "loss": 0.163, |
| "step": 2000 |
| }, |
| { |
| "epoch": 4.552901023890785, |
| "grad_norm": 0.2763668454393897, |
| "learning_rate": 9.447563409484739e-07, |
| "loss": 0.2064, |
| "step": 2001 |
| }, |
| { |
| "epoch": 4.555176336746302, |
| "grad_norm": 0.2788164498517762, |
| "learning_rate": 9.351181114918506e-07, |
| "loss": 0.1724, |
| "step": 2002 |
| }, |
| { |
| "epoch": 4.55745164960182, |
| "grad_norm": 0.28335342939031766, |
| "learning_rate": 9.255281211770284e-07, |
| "loss": 0.1791, |
| "step": 2003 |
| }, |
| { |
| "epoch": 4.559726962457338, |
| "grad_norm": 0.2650324119047187, |
| "learning_rate": 9.15986394269206e-07, |
| "loss": 0.1311, |
| "step": 2004 |
| }, |
| { |
| "epoch": 4.562002275312856, |
| "grad_norm": 0.2765044538049791, |
| "learning_rate": 9.064929549114421e-07, |
| "loss": 0.1539, |
| "step": 2005 |
| }, |
| { |
| "epoch": 4.564277588168373, |
| "grad_norm": 0.2821192439754218, |
| "learning_rate": 8.970478271246307e-07, |
| "loss": 0.1905, |
| "step": 2006 |
| }, |
| { |
| "epoch": 4.566552901023891, |
| "grad_norm": 0.2902969512335743, |
| "learning_rate": 8.876510348074108e-07, |
| "loss": 0.1787, |
| "step": 2007 |
| }, |
| { |
| "epoch": 4.568828213879408, |
| "grad_norm": 0.288015139227802, |
| "learning_rate": 8.783026017361274e-07, |
| "loss": 0.1836, |
| "step": 2008 |
| }, |
| { |
| "epoch": 4.571103526734926, |
| "grad_norm": 0.29975893178963964, |
| "learning_rate": 8.690025515647682e-07, |
| "loss": 0.1718, |
| "step": 2009 |
| }, |
| { |
| "epoch": 4.573378839590443, |
| "grad_norm": 0.2762337937692315, |
| "learning_rate": 8.597509078248923e-07, |
| "loss": 0.1584, |
| "step": 2010 |
| }, |
| { |
| "epoch": 4.575654152445962, |
| "grad_norm": 0.27817524325145293, |
| "learning_rate": 8.505476939255741e-07, |
| "loss": 0.1739, |
| "step": 2011 |
| }, |
| { |
| "epoch": 4.577929465301479, |
| "grad_norm": 0.29507089156780547, |
| "learning_rate": 8.413929331533643e-07, |
| "loss": 0.1726, |
| "step": 2012 |
| }, |
| { |
| "epoch": 4.580204778156997, |
| "grad_norm": 0.279622887867422, |
| "learning_rate": 8.322866486721959e-07, |
| "loss": 0.1659, |
| "step": 2013 |
| }, |
| { |
| "epoch": 4.582480091012514, |
| "grad_norm": 0.2752351248049167, |
| "learning_rate": 8.232288635233598e-07, |
| "loss": 0.2172, |
| "step": 2014 |
| }, |
| { |
| "epoch": 4.584755403868032, |
| "grad_norm": 0.3000052844155821, |
| "learning_rate": 8.142196006254144e-07, |
| "loss": 0.1732, |
| "step": 2015 |
| }, |
| { |
| "epoch": 4.587030716723549, |
| "grad_norm": 0.2846970904267727, |
| "learning_rate": 8.052588827741603e-07, |
| "loss": 0.156, |
| "step": 2016 |
| }, |
| { |
| "epoch": 4.589306029579067, |
| "grad_norm": 0.28172746152505274, |
| "learning_rate": 7.963467326425567e-07, |
| "loss": 0.2044, |
| "step": 2017 |
| }, |
| { |
| "epoch": 4.591581342434584, |
| "grad_norm": 0.29195791070167354, |
| "learning_rate": 7.874831727806764e-07, |
| "loss": 0.1706, |
| "step": 2018 |
| }, |
| { |
| "epoch": 4.593856655290102, |
| "grad_norm": 0.2659143548975755, |
| "learning_rate": 7.786682256156464e-07, |
| "loss": 0.1806, |
| "step": 2019 |
| }, |
| { |
| "epoch": 4.59613196814562, |
| "grad_norm": 0.2650641816698844, |
| "learning_rate": 7.699019134515917e-07, |
| "loss": 0.1478, |
| "step": 2020 |
| }, |
| { |
| "epoch": 4.598407281001138, |
| "grad_norm": 0.2705433799082718, |
| "learning_rate": 7.611842584695739e-07, |
| "loss": 0.1766, |
| "step": 2021 |
| }, |
| { |
| "epoch": 4.600682593856655, |
| "grad_norm": 0.2933000010470503, |
| "learning_rate": 7.525152827275462e-07, |
| "loss": 0.1501, |
| "step": 2022 |
| }, |
| { |
| "epoch": 4.602957906712173, |
| "grad_norm": 0.27973283445352826, |
| "learning_rate": 7.438950081602824e-07, |
| "loss": 0.1658, |
| "step": 2023 |
| }, |
| { |
| "epoch": 4.60523321956769, |
| "grad_norm": 0.28510624819401426, |
| "learning_rate": 7.353234565793332e-07, |
| "loss": 0.1804, |
| "step": 2024 |
| }, |
| { |
| "epoch": 4.607508532423208, |
| "grad_norm": 0.28306854477557686, |
| "learning_rate": 7.268006496729762e-07, |
| "loss": 0.1963, |
| "step": 2025 |
| }, |
| { |
| "epoch": 4.609783845278725, |
| "grad_norm": 0.295611257023681, |
| "learning_rate": 7.183266090061324e-07, |
| "loss": 0.1549, |
| "step": 2026 |
| }, |
| { |
| "epoch": 4.612059158134244, |
| "grad_norm": 0.28424148773989993, |
| "learning_rate": 7.099013560203505e-07, |
| "loss": 0.1492, |
| "step": 2027 |
| }, |
| { |
| "epoch": 4.614334470989761, |
| "grad_norm": 0.28015465791620475, |
| "learning_rate": 7.015249120337242e-07, |
| "loss": 0.1923, |
| "step": 2028 |
| }, |
| { |
| "epoch": 4.616609783845279, |
| "grad_norm": 0.28384048033951154, |
| "learning_rate": 6.931972982408486e-07, |
| "loss": 0.1859, |
| "step": 2029 |
| }, |
| { |
| "epoch": 4.618885096700796, |
| "grad_norm": 0.2722259762819922, |
| "learning_rate": 6.849185357127686e-07, |
| "loss": 0.1628, |
| "step": 2030 |
| }, |
| { |
| "epoch": 4.621160409556314, |
| "grad_norm": 0.3079090242998302, |
| "learning_rate": 6.766886453969168e-07, |
| "loss": 0.1741, |
| "step": 2031 |
| }, |
| { |
| "epoch": 4.623435722411831, |
| "grad_norm": 0.27119843108967856, |
| "learning_rate": 6.685076481170715e-07, |
| "loss": 0.1691, |
| "step": 2032 |
| }, |
| { |
| "epoch": 4.625711035267349, |
| "grad_norm": 0.28656343897509196, |
| "learning_rate": 6.603755645733012e-07, |
| "loss": 0.1762, |
| "step": 2033 |
| }, |
| { |
| "epoch": 4.627986348122867, |
| "grad_norm": 0.28516364364971947, |
| "learning_rate": 6.522924153419019e-07, |
| "loss": 0.1544, |
| "step": 2034 |
| }, |
| { |
| "epoch": 4.630261660978385, |
| "grad_norm": 0.2678058152557827, |
| "learning_rate": 6.442582208753578e-07, |
| "loss": 0.1607, |
| "step": 2035 |
| }, |
| { |
| "epoch": 4.632536973833902, |
| "grad_norm": 0.2818467878057201, |
| "learning_rate": 6.362730015022855e-07, |
| "loss": 0.1781, |
| "step": 2036 |
| }, |
| { |
| "epoch": 4.63481228668942, |
| "grad_norm": 0.28705706508095613, |
| "learning_rate": 6.283367774273785e-07, |
| "loss": 0.1478, |
| "step": 2037 |
| }, |
| { |
| "epoch": 4.637087599544937, |
| "grad_norm": 0.2731517221285001, |
| "learning_rate": 6.204495687313627e-07, |
| "loss": 0.1695, |
| "step": 2038 |
| }, |
| { |
| "epoch": 4.639362912400455, |
| "grad_norm": 0.28683037296597896, |
| "learning_rate": 6.126113953709389e-07, |
| "loss": 0.1646, |
| "step": 2039 |
| }, |
| { |
| "epoch": 4.6416382252559725, |
| "grad_norm": 0.27722178521797003, |
| "learning_rate": 6.048222771787382e-07, |
| "loss": 0.1819, |
| "step": 2040 |
| }, |
| { |
| "epoch": 4.64391353811149, |
| "grad_norm": 0.2903974402660376, |
| "learning_rate": 5.970822338632709e-07, |
| "loss": 0.1662, |
| "step": 2041 |
| }, |
| { |
| "epoch": 4.6461888509670075, |
| "grad_norm": 0.2868561185405596, |
| "learning_rate": 5.893912850088668e-07, |
| "loss": 0.1593, |
| "step": 2042 |
| }, |
| { |
| "epoch": 4.648464163822526, |
| "grad_norm": 0.27903422660884797, |
| "learning_rate": 5.81749450075646e-07, |
| "loss": 0.1709, |
| "step": 2043 |
| }, |
| { |
| "epoch": 4.650739476678043, |
| "grad_norm": 0.2846468286225907, |
| "learning_rate": 5.741567483994503e-07, |
| "loss": 0.1894, |
| "step": 2044 |
| }, |
| { |
| "epoch": 4.653014789533561, |
| "grad_norm": 0.29921985053486083, |
| "learning_rate": 5.666131991917989e-07, |
| "loss": 0.183, |
| "step": 2045 |
| }, |
| { |
| "epoch": 4.6552901023890785, |
| "grad_norm": 0.2705631695784153, |
| "learning_rate": 5.591188215398524e-07, |
| "loss": 0.1637, |
| "step": 2046 |
| }, |
| { |
| "epoch": 4.657565415244596, |
| "grad_norm": 0.2804359500444224, |
| "learning_rate": 5.5167363440634e-07, |
| "loss": 0.1795, |
| "step": 2047 |
| }, |
| { |
| "epoch": 4.6598407281001135, |
| "grad_norm": 0.27327279446058655, |
| "learning_rate": 5.442776566295393e-07, |
| "loss": 0.1446, |
| "step": 2048 |
| }, |
| { |
| "epoch": 4.662116040955631, |
| "grad_norm": 0.2766472513272786, |
| "learning_rate": 5.369309069232098e-07, |
| "loss": 0.1671, |
| "step": 2049 |
| }, |
| { |
| "epoch": 4.664391353811149, |
| "grad_norm": 0.27251263166270223, |
| "learning_rate": 5.296334038765483e-07, |
| "loss": 0.164, |
| "step": 2050 |
| }, |
| { |
| "epoch": 4.666666666666667, |
| "grad_norm": 0.28798270256549147, |
| "learning_rate": 5.22385165954149e-07, |
| "loss": 0.172, |
| "step": 2051 |
| }, |
| { |
| "epoch": 4.6689419795221845, |
| "grad_norm": 0.29135865846989056, |
| "learning_rate": 5.151862114959572e-07, |
| "loss": 0.1911, |
| "step": 2052 |
| }, |
| { |
| "epoch": 4.671217292377702, |
| "grad_norm": 0.30326455275234954, |
| "learning_rate": 5.080365587172021e-07, |
| "loss": 0.1675, |
| "step": 2053 |
| }, |
| { |
| "epoch": 4.6734926052332195, |
| "grad_norm": 0.30492590779764683, |
| "learning_rate": 5.009362257083861e-07, |
| "loss": 0.1655, |
| "step": 2054 |
| }, |
| { |
| "epoch": 4.675767918088737, |
| "grad_norm": 0.27971636539782796, |
| "learning_rate": 4.938852304352026e-07, |
| "loss": 0.1789, |
| "step": 2055 |
| }, |
| { |
| "epoch": 4.678043230944255, |
| "grad_norm": 0.27888012850239485, |
| "learning_rate": 4.868835907385183e-07, |
| "loss": 0.1639, |
| "step": 2056 |
| }, |
| { |
| "epoch": 4.680318543799773, |
| "grad_norm": 0.2852215033386593, |
| "learning_rate": 4.799313243343195e-07, |
| "loss": 0.1571, |
| "step": 2057 |
| }, |
| { |
| "epoch": 4.6825938566552905, |
| "grad_norm": 0.2542474079594607, |
| "learning_rate": 4.7302844881365273e-07, |
| "loss": 0.1581, |
| "step": 2058 |
| }, |
| { |
| "epoch": 4.684869169510808, |
| "grad_norm": 0.29933681216203895, |
| "learning_rate": 4.66174981642602e-07, |
| "loss": 0.1774, |
| "step": 2059 |
| }, |
| { |
| "epoch": 4.6871444823663255, |
| "grad_norm": 0.29791808335229336, |
| "learning_rate": 4.593709401622359e-07, |
| "loss": 0.1626, |
| "step": 2060 |
| }, |
| { |
| "epoch": 4.689419795221843, |
| "grad_norm": 0.29723177433851855, |
| "learning_rate": 4.5261634158855873e-07, |
| "loss": 0.1765, |
| "step": 2061 |
| }, |
| { |
| "epoch": 4.691695108077361, |
| "grad_norm": 0.2890583967251704, |
| "learning_rate": 4.459112030124746e-07, |
| "loss": 0.1906, |
| "step": 2062 |
| }, |
| { |
| "epoch": 4.693970420932878, |
| "grad_norm": 0.28742325857083567, |
| "learning_rate": 4.392555413997346e-07, |
| "loss": 0.1664, |
| "step": 2063 |
| }, |
| { |
| "epoch": 4.696245733788396, |
| "grad_norm": 0.2792348986230206, |
| "learning_rate": 4.326493735909099e-07, |
| "loss": 0.1908, |
| "step": 2064 |
| }, |
| { |
| "epoch": 4.698521046643913, |
| "grad_norm": 0.2813521785281826, |
| "learning_rate": 4.2609271630133174e-07, |
| "loss": 0.1688, |
| "step": 2065 |
| }, |
| { |
| "epoch": 4.7007963594994315, |
| "grad_norm": 0.26469530740883623, |
| "learning_rate": 4.195855861210607e-07, |
| "loss": 0.1797, |
| "step": 2066 |
| }, |
| { |
| "epoch": 4.703071672354949, |
| "grad_norm": 0.264192160968152, |
| "learning_rate": 4.1312799951483964e-07, |
| "loss": 0.1848, |
| "step": 2067 |
| }, |
| { |
| "epoch": 4.705346985210467, |
| "grad_norm": 0.2813125672200287, |
| "learning_rate": 4.0671997282205165e-07, |
| "loss": 0.1631, |
| "step": 2068 |
| }, |
| { |
| "epoch": 4.707622298065984, |
| "grad_norm": 0.2790958711410099, |
| "learning_rate": 4.0036152225668257e-07, |
| "loss": 0.1688, |
| "step": 2069 |
| }, |
| { |
| "epoch": 4.709897610921502, |
| "grad_norm": 0.2765276490546693, |
| "learning_rate": 3.9405266390727836e-07, |
| "loss": 0.1647, |
| "step": 2070 |
| }, |
| { |
| "epoch": 4.712172923777019, |
| "grad_norm": 0.29796867137025485, |
| "learning_rate": 3.877934137368988e-07, |
| "loss": 0.1652, |
| "step": 2071 |
| }, |
| { |
| "epoch": 4.714448236632537, |
| "grad_norm": 0.2806553335956702, |
| "learning_rate": 3.8158378758308634e-07, |
| "loss": 0.1722, |
| "step": 2072 |
| }, |
| { |
| "epoch": 4.716723549488055, |
| "grad_norm": 0.2821445087667387, |
| "learning_rate": 3.754238011578237e-07, |
| "loss": 0.1754, |
| "step": 2073 |
| }, |
| { |
| "epoch": 4.718998862343573, |
| "grad_norm": 0.29472595215090663, |
| "learning_rate": 3.693134700474854e-07, |
| "loss": 0.183, |
| "step": 2074 |
| }, |
| { |
| "epoch": 4.72127417519909, |
| "grad_norm": 0.2840198880867471, |
| "learning_rate": 3.632528097128085e-07, |
| "loss": 0.1758, |
| "step": 2075 |
| }, |
| { |
| "epoch": 4.723549488054608, |
| "grad_norm": 0.2694843262307711, |
| "learning_rate": 3.5724183548885514e-07, |
| "loss": 0.2065, |
| "step": 2076 |
| }, |
| { |
| "epoch": 4.725824800910125, |
| "grad_norm": 0.28265532948248634, |
| "learning_rate": 3.512805625849569e-07, |
| "loss": 0.197, |
| "step": 2077 |
| }, |
| { |
| "epoch": 4.728100113765643, |
| "grad_norm": 0.29936116017536235, |
| "learning_rate": 3.4536900608469924e-07, |
| "loss": 0.1521, |
| "step": 2078 |
| }, |
| { |
| "epoch": 4.73037542662116, |
| "grad_norm": 0.2788020784215453, |
| "learning_rate": 3.395071809458661e-07, |
| "loss": 0.161, |
| "step": 2079 |
| }, |
| { |
| "epoch": 4.732650739476678, |
| "grad_norm": 0.2753131318262618, |
| "learning_rate": 3.336951020004087e-07, |
| "loss": 0.1875, |
| "step": 2080 |
| }, |
| { |
| "epoch": 4.734926052332195, |
| "grad_norm": 0.27164247082837706, |
| "learning_rate": 3.279327839544122e-07, |
| "loss": 0.1714, |
| "step": 2081 |
| }, |
| { |
| "epoch": 4.737201365187714, |
| "grad_norm": 0.2859412688523052, |
| "learning_rate": 3.2222024138804264e-07, |
| "loss": 0.1543, |
| "step": 2082 |
| }, |
| { |
| "epoch": 4.739476678043231, |
| "grad_norm": 0.2803506336195304, |
| "learning_rate": 3.165574887555334e-07, |
| "loss": 0.1461, |
| "step": 2083 |
| }, |
| { |
| "epoch": 4.741751990898749, |
| "grad_norm": 0.27359098725884834, |
| "learning_rate": 3.109445403851319e-07, |
| "loss": 0.1654, |
| "step": 2084 |
| }, |
| { |
| "epoch": 4.744027303754266, |
| "grad_norm": 0.2768874816031207, |
| "learning_rate": 3.05381410479062e-07, |
| "loss": 0.1632, |
| "step": 2085 |
| }, |
| { |
| "epoch": 4.746302616609784, |
| "grad_norm": 0.27909644994464433, |
| "learning_rate": 2.998681131134995e-07, |
| "loss": 0.1925, |
| "step": 2086 |
| }, |
| { |
| "epoch": 4.748577929465301, |
| "grad_norm": 0.27589370596456175, |
| "learning_rate": 2.9440466223853656e-07, |
| "loss": 0.153, |
| "step": 2087 |
| }, |
| { |
| "epoch": 4.750853242320819, |
| "grad_norm": 0.28659963483934087, |
| "learning_rate": 2.8899107167812634e-07, |
| "loss": 0.1794, |
| "step": 2088 |
| }, |
| { |
| "epoch": 4.753128555176337, |
| "grad_norm": 0.2802335924025782, |
| "learning_rate": 2.8362735513007387e-07, |
| "loss": 0.2002, |
| "step": 2089 |
| }, |
| { |
| "epoch": 4.755403868031855, |
| "grad_norm": 0.2837564569638717, |
| "learning_rate": 2.783135261659831e-07, |
| "loss": 0.1426, |
| "step": 2090 |
| }, |
| { |
| "epoch": 4.757679180887372, |
| "grad_norm": 0.2724450386394914, |
| "learning_rate": 2.730495982312342e-07, |
| "loss": 0.1363, |
| "step": 2091 |
| }, |
| { |
| "epoch": 4.75995449374289, |
| "grad_norm": 0.2736902453279952, |
| "learning_rate": 2.678355846449465e-07, |
| "loss": 0.161, |
| "step": 2092 |
| }, |
| { |
| "epoch": 4.762229806598407, |
| "grad_norm": 0.2821596651544852, |
| "learning_rate": 2.6267149859993347e-07, |
| "loss": 0.1815, |
| "step": 2093 |
| }, |
| { |
| "epoch": 4.764505119453925, |
| "grad_norm": 0.2764937025615303, |
| "learning_rate": 2.5755735316268514e-07, |
| "loss": 0.1806, |
| "step": 2094 |
| }, |
| { |
| "epoch": 4.766780432309442, |
| "grad_norm": 0.2810702288129479, |
| "learning_rate": 2.524931612733328e-07, |
| "loss": 0.1332, |
| "step": 2095 |
| }, |
| { |
| "epoch": 4.769055745164961, |
| "grad_norm": 0.29044871796679766, |
| "learning_rate": 2.47478935745602e-07, |
| "loss": 0.1612, |
| "step": 2096 |
| }, |
| { |
| "epoch": 4.771331058020478, |
| "grad_norm": 0.2683096871030952, |
| "learning_rate": 2.4251468926680175e-07, |
| "loss": 0.1661, |
| "step": 2097 |
| }, |
| { |
| "epoch": 4.773606370875996, |
| "grad_norm": 0.2945643148140594, |
| "learning_rate": 2.3760043439776892e-07, |
| "loss": 0.1813, |
| "step": 2098 |
| }, |
| { |
| "epoch": 4.775881683731513, |
| "grad_norm": 0.2893737837113469, |
| "learning_rate": 2.327361835728592e-07, |
| "loss": 0.1498, |
| "step": 2099 |
| }, |
| { |
| "epoch": 4.778156996587031, |
| "grad_norm": 0.286127100981063, |
| "learning_rate": 2.279219490998985e-07, |
| "loss": 0.2069, |
| "step": 2100 |
| }, |
| { |
| "epoch": 4.780432309442548, |
| "grad_norm": 0.2760250960457435, |
| "learning_rate": 2.231577431601606e-07, |
| "loss": 0.1459, |
| "step": 2101 |
| }, |
| { |
| "epoch": 4.782707622298066, |
| "grad_norm": 0.28125460188963713, |
| "learning_rate": 2.184435778083338e-07, |
| "loss": 0.1778, |
| "step": 2102 |
| }, |
| { |
| "epoch": 4.784982935153583, |
| "grad_norm": 0.26720575727894325, |
| "learning_rate": 2.1377946497249225e-07, |
| "loss": 0.1476, |
| "step": 2103 |
| }, |
| { |
| "epoch": 4.787258248009101, |
| "grad_norm": 0.28726572485784596, |
| "learning_rate": 2.091654164540602e-07, |
| "loss": 0.1678, |
| "step": 2104 |
| }, |
| { |
| "epoch": 4.789533560864619, |
| "grad_norm": 0.2960648414045368, |
| "learning_rate": 2.0460144392778768e-07, |
| "loss": 0.1665, |
| "step": 2105 |
| }, |
| { |
| "epoch": 4.791808873720137, |
| "grad_norm": 0.2851934467857292, |
| "learning_rate": 2.0008755894172172e-07, |
| "loss": 0.1666, |
| "step": 2106 |
| }, |
| { |
| "epoch": 4.794084186575654, |
| "grad_norm": 0.29867023568951107, |
| "learning_rate": 1.956237729171706e-07, |
| "loss": 0.1609, |
| "step": 2107 |
| }, |
| { |
| "epoch": 4.796359499431172, |
| "grad_norm": 0.27203307996084625, |
| "learning_rate": 1.912100971486841e-07, |
| "loss": 0.1728, |
| "step": 2108 |
| }, |
| { |
| "epoch": 4.798634812286689, |
| "grad_norm": 0.29093396081678047, |
| "learning_rate": 1.868465428040156e-07, |
| "loss": 0.1785, |
| "step": 2109 |
| }, |
| { |
| "epoch": 4.800910125142207, |
| "grad_norm": 0.2775746820409794, |
| "learning_rate": 1.8253312092409992e-07, |
| "loss": 0.1677, |
| "step": 2110 |
| }, |
| { |
| "epoch": 4.803185437997724, |
| "grad_norm": 0.27453488428691564, |
| "learning_rate": 1.782698424230267e-07, |
| "loss": 0.1854, |
| "step": 2111 |
| }, |
| { |
| "epoch": 4.805460750853243, |
| "grad_norm": 0.27937845048345333, |
| "learning_rate": 1.740567180880004e-07, |
| "loss": 0.1774, |
| "step": 2112 |
| }, |
| { |
| "epoch": 4.80773606370876, |
| "grad_norm": 0.2982003035523139, |
| "learning_rate": 1.6989375857933588e-07, |
| "loss": 0.1588, |
| "step": 2113 |
| }, |
| { |
| "epoch": 4.810011376564278, |
| "grad_norm": 0.27122001773024823, |
| "learning_rate": 1.6578097443040286e-07, |
| "loss": 0.1639, |
| "step": 2114 |
| }, |
| { |
| "epoch": 4.812286689419795, |
| "grad_norm": 0.2910168540094759, |
| "learning_rate": 1.6171837604762597e-07, |
| "loss": 0.1812, |
| "step": 2115 |
| }, |
| { |
| "epoch": 4.814562002275313, |
| "grad_norm": 0.2847303758330056, |
| "learning_rate": 1.577059737104447e-07, |
| "loss": 0.1667, |
| "step": 2116 |
| }, |
| { |
| "epoch": 4.81683731513083, |
| "grad_norm": 0.26816369266991275, |
| "learning_rate": 1.5374377757128245e-07, |
| "loss": 0.1707, |
| "step": 2117 |
| }, |
| { |
| "epoch": 4.819112627986348, |
| "grad_norm": 0.2848979545208066, |
| "learning_rate": 1.4983179765553523e-07, |
| "loss": 0.1886, |
| "step": 2118 |
| }, |
| { |
| "epoch": 4.821387940841865, |
| "grad_norm": 0.2619835505950323, |
| "learning_rate": 1.4597004386153635e-07, |
| "loss": 0.1701, |
| "step": 2119 |
| }, |
| { |
| "epoch": 4.823663253697383, |
| "grad_norm": 0.2808499371759947, |
| "learning_rate": 1.421585259605318e-07, |
| "loss": 0.1677, |
| "step": 2120 |
| }, |
| { |
| "epoch": 4.825938566552901, |
| "grad_norm": 0.2916709025066011, |
| "learning_rate": 1.3839725359666267e-07, |
| "loss": 0.1836, |
| "step": 2121 |
| }, |
| { |
| "epoch": 4.828213879408419, |
| "grad_norm": 0.2754848446765079, |
| "learning_rate": 1.3468623628692945e-07, |
| "loss": 0.1433, |
| "step": 2122 |
| }, |
| { |
| "epoch": 4.830489192263936, |
| "grad_norm": 0.30298090295810787, |
| "learning_rate": 1.3102548342118105e-07, |
| "loss": 0.1909, |
| "step": 2123 |
| }, |
| { |
| "epoch": 4.832764505119454, |
| "grad_norm": 0.28860241950399845, |
| "learning_rate": 1.274150042620792e-07, |
| "loss": 0.2022, |
| "step": 2124 |
| }, |
| { |
| "epoch": 4.835039817974971, |
| "grad_norm": 0.26800108458504196, |
| "learning_rate": 1.2385480794507853e-07, |
| "loss": 0.16, |
| "step": 2125 |
| }, |
| { |
| "epoch": 4.837315130830489, |
| "grad_norm": 0.2853323557437661, |
| "learning_rate": 1.20344903478411e-07, |
| "loss": 0.202, |
| "step": 2126 |
| }, |
| { |
| "epoch": 4.839590443686006, |
| "grad_norm": 0.2733948236798397, |
| "learning_rate": 1.1688529974305251e-07, |
| "loss": 0.1737, |
| "step": 2127 |
| }, |
| { |
| "epoch": 4.841865756541525, |
| "grad_norm": 0.28961179550636856, |
| "learning_rate": 1.1347600549270532e-07, |
| "loss": 0.1734, |
| "step": 2128 |
| }, |
| { |
| "epoch": 4.844141069397042, |
| "grad_norm": 0.28913141216509214, |
| "learning_rate": 1.1011702935377789e-07, |
| "loss": 0.1769, |
| "step": 2129 |
| }, |
| { |
| "epoch": 4.84641638225256, |
| "grad_norm": 0.27173575099170844, |
| "learning_rate": 1.0680837982535607e-07, |
| "loss": 0.1619, |
| "step": 2130 |
| }, |
| { |
| "epoch": 4.848691695108077, |
| "grad_norm": 0.2737763078214319, |
| "learning_rate": 1.0355006527919209e-07, |
| "loss": 0.1705, |
| "step": 2131 |
| }, |
| { |
| "epoch": 4.850967007963595, |
| "grad_norm": 0.26823155637529944, |
| "learning_rate": 1.0034209395967554e-07, |
| "loss": 0.1719, |
| "step": 2132 |
| }, |
| { |
| "epoch": 4.853242320819112, |
| "grad_norm": 0.27890526953507444, |
| "learning_rate": 9.718447398381348e-08, |
| "loss": 0.1477, |
| "step": 2133 |
| }, |
| { |
| "epoch": 4.85551763367463, |
| "grad_norm": 0.26588733805101394, |
| "learning_rate": 9.407721334120601e-08, |
| "loss": 0.1825, |
| "step": 2134 |
| }, |
| { |
| "epoch": 4.857792946530148, |
| "grad_norm": 0.29092629682676213, |
| "learning_rate": 9.102031989404403e-08, |
| "loss": 0.1523, |
| "step": 2135 |
| }, |
| { |
| "epoch": 4.860068259385666, |
| "grad_norm": 0.29047716670487517, |
| "learning_rate": 8.80138013770604e-08, |
| "loss": 0.1752, |
| "step": 2136 |
| }, |
| { |
| "epoch": 4.862343572241183, |
| "grad_norm": 0.26357179549459897, |
| "learning_rate": 8.505766539753879e-08, |
| "loss": 0.1711, |
| "step": 2137 |
| }, |
| { |
| "epoch": 4.864618885096701, |
| "grad_norm": 0.2767793820851537, |
| "learning_rate": 8.215191943527378e-08, |
| "loss": 0.1839, |
| "step": 2138 |
| }, |
| { |
| "epoch": 4.8668941979522184, |
| "grad_norm": 0.2752011773302518, |
| "learning_rate": 7.929657084256858e-08, |
| "loss": 0.1387, |
| "step": 2139 |
| }, |
| { |
| "epoch": 4.869169510807736, |
| "grad_norm": 0.29282854702207456, |
| "learning_rate": 7.649162684419731e-08, |
| "loss": 0.1856, |
| "step": 2140 |
| }, |
| { |
| "epoch": 4.8714448236632535, |
| "grad_norm": 0.29086067102522595, |
| "learning_rate": 7.373709453740497e-08, |
| "loss": 0.1738, |
| "step": 2141 |
| }, |
| { |
| "epoch": 4.873720136518771, |
| "grad_norm": 0.27826866182859905, |
| "learning_rate": 7.10329808918786e-08, |
| "loss": 0.1433, |
| "step": 2142 |
| }, |
| { |
| "epoch": 4.8759954493742885, |
| "grad_norm": 0.27848071371945615, |
| "learning_rate": 6.837929274973399e-08, |
| "loss": 0.2025, |
| "step": 2143 |
| }, |
| { |
| "epoch": 4.878270762229807, |
| "grad_norm": 0.28174616730635366, |
| "learning_rate": 6.57760368254956e-08, |
| "loss": 0.169, |
| "step": 2144 |
| }, |
| { |
| "epoch": 4.8805460750853245, |
| "grad_norm": 0.28216646768926074, |
| "learning_rate": 6.322321970608337e-08, |
| "loss": 0.1998, |
| "step": 2145 |
| }, |
| { |
| "epoch": 4.882821387940842, |
| "grad_norm": 0.2839738511495771, |
| "learning_rate": 6.072084785079257e-08, |
| "loss": 0.1881, |
| "step": 2146 |
| }, |
| { |
| "epoch": 4.8850967007963595, |
| "grad_norm": 0.2719170431364124, |
| "learning_rate": 5.826892759128066e-08, |
| "loss": 0.1637, |
| "step": 2147 |
| }, |
| { |
| "epoch": 4.887372013651877, |
| "grad_norm": 0.2900758080734818, |
| "learning_rate": 5.5867465131547174e-08, |
| "loss": 0.2036, |
| "step": 2148 |
| }, |
| { |
| "epoch": 4.8896473265073945, |
| "grad_norm": 0.27675763159719285, |
| "learning_rate": 5.35164665479182e-08, |
| "loss": 0.1753, |
| "step": 2149 |
| }, |
| { |
| "epoch": 4.891922639362912, |
| "grad_norm": 0.2682400680433344, |
| "learning_rate": 5.121593778903755e-08, |
| "loss": 0.1654, |
| "step": 2150 |
| }, |
| { |
| "epoch": 4.8941979522184305, |
| "grad_norm": 0.2834066342426588, |
| "learning_rate": 4.896588467584451e-08, |
| "loss": 0.1727, |
| "step": 2151 |
| }, |
| { |
| "epoch": 4.896473265073948, |
| "grad_norm": 0.28432645164270115, |
| "learning_rate": 4.676631290156497e-08, |
| "loss": 0.1694, |
| "step": 2152 |
| }, |
| { |
| "epoch": 4.8987485779294655, |
| "grad_norm": 0.2705477739865789, |
| "learning_rate": 4.4617228031687e-08, |
| "loss": 0.1916, |
| "step": 2153 |
| }, |
| { |
| "epoch": 4.901023890784983, |
| "grad_norm": 0.28281818023901295, |
| "learning_rate": 4.2518635503960845e-08, |
| "loss": 0.1919, |
| "step": 2154 |
| }, |
| { |
| "epoch": 4.9032992036405005, |
| "grad_norm": 0.275802639540135, |
| "learning_rate": 4.047054062837452e-08, |
| "loss": 0.1598, |
| "step": 2155 |
| }, |
| { |
| "epoch": 4.905574516496018, |
| "grad_norm": 0.2716919729625155, |
| "learning_rate": 3.8472948587144895e-08, |
| "loss": 0.1694, |
| "step": 2156 |
| }, |
| { |
| "epoch": 4.907849829351536, |
| "grad_norm": 0.28826008139527803, |
| "learning_rate": 3.652586443470219e-08, |
| "loss": 0.1645, |
| "step": 2157 |
| }, |
| { |
| "epoch": 4.910125142207053, |
| "grad_norm": 0.27948590042199084, |
| "learning_rate": 3.46292930976766e-08, |
| "loss": 0.1624, |
| "step": 2158 |
| }, |
| { |
| "epoch": 4.912400455062571, |
| "grad_norm": 0.26789167961996163, |
| "learning_rate": 3.2783239374893914e-08, |
| "loss": 0.1697, |
| "step": 2159 |
| }, |
| { |
| "epoch": 4.914675767918089, |
| "grad_norm": 0.2670318829401166, |
| "learning_rate": 3.0987707937351066e-08, |
| "loss": 0.1837, |
| "step": 2160 |
| }, |
| { |
| "epoch": 4.9169510807736065, |
| "grad_norm": 0.27546556570733666, |
| "learning_rate": 2.9242703328211662e-08, |
| "loss": 0.1556, |
| "step": 2161 |
| }, |
| { |
| "epoch": 4.919226393629124, |
| "grad_norm": 0.2725902589660314, |
| "learning_rate": 2.7548229962794938e-08, |
| "loss": 0.1507, |
| "step": 2162 |
| }, |
| { |
| "epoch": 4.921501706484642, |
| "grad_norm": 0.2747503934785236, |
| "learning_rate": 2.59042921285646e-08, |
| "loss": 0.1849, |
| "step": 2163 |
| }, |
| { |
| "epoch": 4.923777019340159, |
| "grad_norm": 0.29811239655450217, |
| "learning_rate": 2.4310893985113328e-08, |
| "loss": 0.1761, |
| "step": 2164 |
| }, |
| { |
| "epoch": 4.926052332195677, |
| "grad_norm": 0.29286564186734176, |
| "learning_rate": 2.2768039564151635e-08, |
| "loss": 0.1631, |
| "step": 2165 |
| }, |
| { |
| "epoch": 4.928327645051194, |
| "grad_norm": 0.2824169805110718, |
| "learning_rate": 2.1275732769505675e-08, |
| "loss": 0.1669, |
| "step": 2166 |
| }, |
| { |
| "epoch": 4.9306029579067125, |
| "grad_norm": 0.29684890488399457, |
| "learning_rate": 1.983397737710613e-08, |
| "loss": 0.1758, |
| "step": 2167 |
| }, |
| { |
| "epoch": 4.93287827076223, |
| "grad_norm": 0.2653707831593049, |
| "learning_rate": 1.8442777034970437e-08, |
| "loss": 0.1806, |
| "step": 2168 |
| }, |
| { |
| "epoch": 4.935153583617748, |
| "grad_norm": 0.27223470203609895, |
| "learning_rate": 1.7102135263198372e-08, |
| "loss": 0.2211, |
| "step": 2169 |
| }, |
| { |
| "epoch": 4.937428896473265, |
| "grad_norm": 0.2864468681497177, |
| "learning_rate": 1.5812055453963136e-08, |
| "loss": 0.166, |
| "step": 2170 |
| }, |
| { |
| "epoch": 4.939704209328783, |
| "grad_norm": 0.2797238676132464, |
| "learning_rate": 1.4572540871506947e-08, |
| "loss": 0.168, |
| "step": 2171 |
| }, |
| { |
| "epoch": 4.9419795221843, |
| "grad_norm": 0.2848478065778815, |
| "learning_rate": 1.3383594652125464e-08, |
| "loss": 0.2005, |
| "step": 2172 |
| }, |
| { |
| "epoch": 4.944254835039818, |
| "grad_norm": 0.2931191481258792, |
| "learning_rate": 1.2245219804163377e-08, |
| "loss": 0.2171, |
| "step": 2173 |
| }, |
| { |
| "epoch": 4.946530147895336, |
| "grad_norm": 0.2709247922397299, |
| "learning_rate": 1.1157419208005505e-08, |
| "loss": 0.2141, |
| "step": 2174 |
| }, |
| { |
| "epoch": 4.948805460750854, |
| "grad_norm": 0.269577290444941, |
| "learning_rate": 1.01201956160768e-08, |
| "loss": 0.1655, |
| "step": 2175 |
| }, |
| { |
| "epoch": 4.951080773606371, |
| "grad_norm": 0.28366020700511824, |
| "learning_rate": 9.13355165282015e-09, |
| "loss": 0.1655, |
| "step": 2176 |
| }, |
| { |
| "epoch": 4.953356086461889, |
| "grad_norm": 0.2752590720590919, |
| "learning_rate": 8.197489814705251e-09, |
| "loss": 0.1724, |
| "step": 2177 |
| }, |
| { |
| "epoch": 4.955631399317406, |
| "grad_norm": 0.2751081587825422, |
| "learning_rate": 7.31201247021085e-09, |
| "loss": 0.186, |
| "step": 2178 |
| }, |
| { |
| "epoch": 4.957906712172924, |
| "grad_norm": 0.29165567713398455, |
| "learning_rate": 6.477121859831404e-09, |
| "loss": 0.14, |
| "step": 2179 |
| }, |
| { |
| "epoch": 4.960182025028441, |
| "grad_norm": 0.26807798272654737, |
| "learning_rate": 5.692820096054874e-09, |
| "loss": 0.1915, |
| "step": 2180 |
| }, |
| { |
| "epoch": 4.962457337883959, |
| "grad_norm": 0.307986343441169, |
| "learning_rate": 4.959109163369391e-09, |
| "loss": 0.17, |
| "step": 2181 |
| }, |
| { |
| "epoch": 4.964732650739476, |
| "grad_norm": 0.28646812677908806, |
| "learning_rate": 4.275990918256589e-09, |
| "loss": 0.1604, |
| "step": 2182 |
| }, |
| { |
| "epoch": 4.967007963594995, |
| "grad_norm": 0.28434276408514697, |
| "learning_rate": 3.6434670891871694e-09, |
| "loss": 0.1653, |
| "step": 2183 |
| }, |
| { |
| "epoch": 4.969283276450512, |
| "grad_norm": 0.28547418520789625, |
| "learning_rate": 3.061539276609793e-09, |
| "loss": 0.1963, |
| "step": 2184 |
| }, |
| { |
| "epoch": 4.97155858930603, |
| "grad_norm": 0.2858738833930566, |
| "learning_rate": 2.530208952953306e-09, |
| "loss": 0.1919, |
| "step": 2185 |
| }, |
| { |
| "epoch": 4.973833902161547, |
| "grad_norm": 0.3116700983501146, |
| "learning_rate": 2.049477462622296e-09, |
| "loss": 0.154, |
| "step": 2186 |
| }, |
| { |
| "epoch": 4.976109215017065, |
| "grad_norm": 0.27812011451558866, |
| "learning_rate": 1.6193460219970924e-09, |
| "loss": 0.1861, |
| "step": 2187 |
| }, |
| { |
| "epoch": 4.978384527872582, |
| "grad_norm": 0.28680689327047326, |
| "learning_rate": 1.2398157194182248e-09, |
| "loss": 0.1636, |
| "step": 2188 |
| }, |
| { |
| "epoch": 4.9806598407281, |
| "grad_norm": 0.27175734346430375, |
| "learning_rate": 9.108875151975227e-10, |
| "loss": 0.1453, |
| "step": 2189 |
| }, |
| { |
| "epoch": 4.982935153583618, |
| "grad_norm": 0.28732427721080184, |
| "learning_rate": 6.325622416136767e-10, |
| "loss": 0.1984, |
| "step": 2190 |
| }, |
| { |
| "epoch": 4.985210466439136, |
| "grad_norm": 0.2795942358884219, |
| "learning_rate": 4.048406028966945e-10, |
| "loss": 0.1735, |
| "step": 2191 |
| }, |
| { |
| "epoch": 4.987485779294653, |
| "grad_norm": 0.2901912120404457, |
| "learning_rate": 2.2772317524566434e-10, |
| "loss": 0.1743, |
| "step": 2192 |
| }, |
| { |
| "epoch": 4.989761092150171, |
| "grad_norm": 0.2822809573112233, |
| "learning_rate": 1.0121040681321249e-10, |
| "loss": 0.1844, |
| "step": 2193 |
| }, |
| { |
| "epoch": 4.992036405005688, |
| "grad_norm": 0.29485501566563027, |
| "learning_rate": 2.530261770772313e-11, |
| "loss": 0.1665, |
| "step": 2194 |
| }, |
| { |
| "epoch": 4.994311717861206, |
| "grad_norm": 0.2749861414237793, |
| "learning_rate": 0.0, |
| "loss": 0.2018, |
| "step": 2195 |
| }, |
| { |
| "epoch": 4.994311717861206, |
| "step": 2195, |
| "total_flos": 3.168901699486089e+18, |
| "train_loss": 0.34217921912262816, |
| "train_runtime": 81219.8428, |
| "train_samples_per_second": 3.463, |
| "train_steps_per_second": 0.027 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 2195, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.168901699486089e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |