{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2986,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003348961821835231,
      "grad_norm": 18.186641693115234,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.0732,
      "step": 1
    },
    {
      "epoch": 0.0006697923643670462,
      "grad_norm": 15.181599617004395,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.0167,
      "step": 2
    },
    {
      "epoch": 0.0010046885465505692,
      "grad_norm": 14.661438941955566,
      "learning_rate": 6e-06,
      "loss": 2.0268,
      "step": 3
    },
    {
      "epoch": 0.0013395847287340924,
      "grad_norm": 9.589187622070312,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.0335,
      "step": 4
    },
    {
      "epoch": 0.0016744809109176155,
      "grad_norm": 5.236238479614258,
      "learning_rate": 1e-05,
      "loss": 1.8674,
      "step": 5
    },
    {
      "epoch": 0.0020093770931011385,
      "grad_norm": 3.8239383697509766,
      "learning_rate": 1.2e-05,
      "loss": 1.8922,
      "step": 6
    },
    {
      "epoch": 0.002344273275284662,
      "grad_norm": 2.503788709640503,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.8812,
      "step": 7
    },
    {
      "epoch": 0.0026791694574681848,
      "grad_norm": 1.736107349395752,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.8371,
      "step": 8
    },
    {
      "epoch": 0.003014065639651708,
      "grad_norm": 2.421985149383545,
      "learning_rate": 1.8e-05,
      "loss": 1.8273,
      "step": 9
    },
    {
      "epoch": 0.003348961821835231,
      "grad_norm": 2.248610496520996,
      "learning_rate": 2e-05,
      "loss": 1.7187,
      "step": 10
    },
    {
      "epoch": 0.003683858004018754,
      "grad_norm": 1.246288776397705,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 1.8992,
      "step": 11
    },
    {
      "epoch": 0.004018754186202277,
      "grad_norm": 1.4386337995529175,
      "learning_rate": 2.4e-05,
      "loss": 1.7567,
      "step": 12
    },
    {
      "epoch": 0.004353650368385801,
      "grad_norm": 0.9633839726448059,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 1.7409,
      "step": 13
    },
    {
      "epoch": 0.004688546550569324,
      "grad_norm": 0.8049834966659546,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.669,
      "step": 14
    },
    {
      "epoch": 0.005023442732752847,
      "grad_norm": 0.8306618928909302,
      "learning_rate": 3e-05,
      "loss": 1.7372,
      "step": 15
    },
    {
      "epoch": 0.0053583389149363695,
      "grad_norm": 0.9875802397727966,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.7664,
      "step": 16
    },
    {
      "epoch": 0.0056932350971198925,
      "grad_norm": 0.9656491875648499,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.7711,
      "step": 17
    },
    {
      "epoch": 0.006028131279303416,
      "grad_norm": 0.8717008829116821,
      "learning_rate": 3.6e-05,
      "loss": 1.6949,
      "step": 18
    },
    {
      "epoch": 0.006363027461486939,
      "grad_norm": 0.8283945918083191,
      "learning_rate": 3.8e-05,
      "loss": 1.7468,
      "step": 19
    },
    {
      "epoch": 0.006697923643670462,
      "grad_norm": 0.7493966221809387,
      "learning_rate": 4e-05,
      "loss": 1.6974,
      "step": 20
    },
    {
      "epoch": 0.007032819825853985,
      "grad_norm": 0.7758145928382874,
      "learning_rate": 4.2e-05,
      "loss": 1.7485,
      "step": 21
    },
    {
      "epoch": 0.007367716008037508,
      "grad_norm": 0.7642164826393127,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.745,
      "step": 22
    },
    {
      "epoch": 0.007702612190221032,
      "grad_norm": 0.706461489200592,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.6891,
      "step": 23
    },
    {
      "epoch": 0.008037508372404554,
      "grad_norm": 0.7163651585578918,
      "learning_rate": 4.8e-05,
      "loss": 1.7129,
      "step": 24
    },
    {
      "epoch": 0.008372404554588079,
      "grad_norm": 0.8007297515869141,
      "learning_rate": 5e-05,
      "loss": 1.6796,
      "step": 25
    },
    {
      "epoch": 0.008707300736771601,
      "grad_norm": 0.7303621768951416,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.5926,
      "step": 26
    },
    {
      "epoch": 0.009042196918955124,
      "grad_norm": 0.8409650325775146,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.7859,
      "step": 27
    },
    {
      "epoch": 0.009377093101138647,
      "grad_norm": 0.7095215320587158,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.654,
      "step": 28
    },
    {
      "epoch": 0.00971198928332217,
      "grad_norm": 0.7262499332427979,
      "learning_rate": 5.8e-05,
      "loss": 1.7337,
      "step": 29
    },
    {
      "epoch": 0.010046885465505693,
      "grad_norm": 0.7079520225524902,
      "learning_rate": 6e-05,
      "loss": 1.7591,
      "step": 30
    },
    {
      "epoch": 0.010381781647689216,
      "grad_norm": 0.6347719430923462,
      "learning_rate": 6.2e-05,
      "loss": 1.6914,
      "step": 31
    },
    {
      "epoch": 0.010716677829872739,
      "grad_norm": 0.6208151578903198,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.6545,
      "step": 32
    },
    {
      "epoch": 0.011051574012056262,
      "grad_norm": 0.5771459341049194,
      "learning_rate": 6.6e-05,
      "loss": 1.6261,
      "step": 33
    },
    {
      "epoch": 0.011386470194239785,
      "grad_norm": 0.6044191122055054,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.6579,
      "step": 34
    },
    {
      "epoch": 0.01172136637642331,
      "grad_norm": 0.6431082487106323,
      "learning_rate": 7e-05,
      "loss": 1.6837,
      "step": 35
    },
    {
      "epoch": 0.012056262558606833,
      "grad_norm": 0.6023712158203125,
      "learning_rate": 7.2e-05,
      "loss": 1.6224,
      "step": 36
    },
    {
      "epoch": 0.012391158740790355,
      "grad_norm": 0.6101360321044922,
      "learning_rate": 7.4e-05,
      "loss": 1.6735,
      "step": 37
    },
    {
      "epoch": 0.012726054922973878,
      "grad_norm": 0.5465442538261414,
      "learning_rate": 7.6e-05,
      "loss": 1.5562,
      "step": 38
    },
    {
      "epoch": 0.013060951105157401,
      "grad_norm": 0.6528838872909546,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.7578,
      "step": 39
    },
    {
      "epoch": 0.013395847287340924,
      "grad_norm": 0.5717447400093079,
      "learning_rate": 8e-05,
      "loss": 1.6505,
      "step": 40
    },
    {
      "epoch": 0.013730743469524447,
      "grad_norm": 0.6488905549049377,
      "learning_rate": 8.2e-05,
      "loss": 1.6638,
      "step": 41
    },
    {
      "epoch": 0.01406563965170797,
      "grad_norm": 0.558308482170105,
      "learning_rate": 8.4e-05,
      "loss": 1.6428,
      "step": 42
    },
    {
      "epoch": 0.014400535833891493,
      "grad_norm": 0.608172595500946,
      "learning_rate": 8.6e-05,
      "loss": 1.6953,
      "step": 43
    },
    {
      "epoch": 0.014735432016075016,
      "grad_norm": 0.5436323881149292,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.701,
      "step": 44
    },
    {
      "epoch": 0.01507032819825854,
      "grad_norm": 0.5608189105987549,
      "learning_rate": 9e-05,
      "loss": 1.6088,
      "step": 45
    },
    {
      "epoch": 0.015405224380442064,
      "grad_norm": 0.5891563296318054,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.6878,
      "step": 46
    },
    {
      "epoch": 0.015740120562625585,
      "grad_norm": 0.5707667469978333,
      "learning_rate": 9.4e-05,
      "loss": 1.6171,
      "step": 47
    },
    {
      "epoch": 0.016075016744809108,
      "grad_norm": 0.5519657731056213,
      "learning_rate": 9.6e-05,
      "loss": 1.6526,
      "step": 48
    },
    {
      "epoch": 0.01640991292699263,
      "grad_norm": 0.5898794531822205,
      "learning_rate": 9.8e-05,
      "loss": 1.7157,
      "step": 49
    },
    {
      "epoch": 0.016744809109176157,
      "grad_norm": 0.5518242716789246,
      "learning_rate": 0.0001,
      "loss": 1.6441,
      "step": 50
    },
    {
      "epoch": 0.01707970529135968,
      "grad_norm": 0.5902829766273499,
      "learning_rate": 0.0001,
      "loss": 1.6424,
      "step": 51
    },
    {
      "epoch": 0.017414601473543203,
      "grad_norm": 0.5596480965614319,
      "learning_rate": 0.0001,
      "loss": 1.7147,
      "step": 52
    },
    {
      "epoch": 0.017749497655726726,
      "grad_norm": 0.5877808332443237,
      "learning_rate": 0.0001,
      "loss": 1.6327,
      "step": 53
    },
    {
      "epoch": 0.01808439383791025,
      "grad_norm": 0.543509840965271,
      "learning_rate": 0.0001,
      "loss": 1.7011,
      "step": 54
    },
    {
      "epoch": 0.018419290020093772,
      "grad_norm": 0.5165537595748901,
      "learning_rate": 0.0001,
      "loss": 1.5977,
      "step": 55
    },
    {
      "epoch": 0.018754186202277295,
      "grad_norm": 0.5444625616073608,
      "learning_rate": 0.0001,
      "loss": 1.6508,
      "step": 56
    },
    {
      "epoch": 0.019089082384460818,
      "grad_norm": 0.5317073464393616,
      "learning_rate": 0.0001,
      "loss": 1.6175,
      "step": 57
    },
    {
      "epoch": 0.01942397856664434,
      "grad_norm": 0.570904016494751,
      "learning_rate": 0.0001,
      "loss": 1.7569,
      "step": 58
    },
    {
      "epoch": 0.019758874748827863,
      "grad_norm": 0.5219286680221558,
      "learning_rate": 0.0001,
      "loss": 1.6522,
      "step": 59
    },
    {
      "epoch": 0.020093770931011386,
      "grad_norm": 0.5607163310050964,
      "learning_rate": 0.0001,
      "loss": 1.6803,
      "step": 60
    },
    {
      "epoch": 0.02042866711319491,
      "grad_norm": 0.5346683859825134,
      "learning_rate": 0.0001,
      "loss": 1.6452,
      "step": 61
    },
    {
      "epoch": 0.020763563295378432,
      "grad_norm": 0.5436587333679199,
      "learning_rate": 0.0001,
      "loss": 1.6682,
      "step": 62
    },
    {
      "epoch": 0.021098459477561955,
      "grad_norm": 0.5531876683235168,
      "learning_rate": 0.0001,
      "loss": 1.6845,
      "step": 63
    },
    {
      "epoch": 0.021433355659745478,
      "grad_norm": 0.5084998607635498,
      "learning_rate": 0.0001,
      "loss": 1.682,
      "step": 64
    },
    {
      "epoch": 0.021768251841929,
      "grad_norm": 0.5299241542816162,
      "learning_rate": 0.0001,
      "loss": 1.5749,
      "step": 65
    },
    {
      "epoch": 0.022103148024112524,
      "grad_norm": 0.5054894089698792,
      "learning_rate": 0.0001,
      "loss": 1.6374,
      "step": 66
    },
    {
      "epoch": 0.022438044206296047,
      "grad_norm": 0.5474890470504761,
      "learning_rate": 0.0001,
      "loss": 1.7065,
      "step": 67
    },
    {
      "epoch": 0.02277294038847957,
      "grad_norm": 0.5523289442062378,
      "learning_rate": 0.0001,
      "loss": 1.8205,
      "step": 68
    },
    {
      "epoch": 0.023107836570663093,
      "grad_norm": 0.5250942707061768,
      "learning_rate": 0.0001,
      "loss": 1.5829,
      "step": 69
    },
    {
      "epoch": 0.02344273275284662,
      "grad_norm": 0.5237876772880554,
      "learning_rate": 0.0001,
      "loss": 1.65,
      "step": 70
    },
    {
      "epoch": 0.023777628935030142,
      "grad_norm": 0.514022707939148,
      "learning_rate": 0.0001,
      "loss": 1.5468,
      "step": 71
    },
    {
      "epoch": 0.024112525117213665,
      "grad_norm": 0.503179132938385,
      "learning_rate": 0.0001,
      "loss": 1.541,
      "step": 72
    },
    {
      "epoch": 0.024447421299397188,
      "grad_norm": 0.5128785371780396,
      "learning_rate": 0.0001,
      "loss": 1.6285,
      "step": 73
    },
    {
      "epoch": 0.02478231748158071,
      "grad_norm": 0.5173330903053284,
      "learning_rate": 0.0001,
      "loss": 1.6583,
      "step": 74
    },
    {
      "epoch": 0.025117213663764234,
      "grad_norm": 0.5024128556251526,
      "learning_rate": 0.0001,
      "loss": 1.608,
      "step": 75
    },
    {
      "epoch": 0.025452109845947757,
      "grad_norm": 0.5092707872390747,
      "learning_rate": 0.0001,
      "loss": 1.6552,
      "step": 76
    },
    {
      "epoch": 0.02578700602813128,
      "grad_norm": 0.4851345717906952,
      "learning_rate": 0.0001,
      "loss": 1.5511,
      "step": 77
    },
    {
      "epoch": 0.026121902210314803,
      "grad_norm": 0.5057558417320251,
      "learning_rate": 0.0001,
      "loss": 1.6468,
      "step": 78
    },
    {
      "epoch": 0.026456798392498326,
      "grad_norm": 0.5011716485023499,
      "learning_rate": 0.0001,
      "loss": 1.6291,
      "step": 79
    },
    {
      "epoch": 0.02679169457468185,
      "grad_norm": 0.5014387965202332,
      "learning_rate": 0.0001,
      "loss": 1.6706,
      "step": 80
    },
    {
      "epoch": 0.02712659075686537,
      "grad_norm": 0.5116551518440247,
      "learning_rate": 0.0001,
      "loss": 1.6637,
      "step": 81
    },
    {
      "epoch": 0.027461486939048894,
      "grad_norm": 0.4828563928604126,
      "learning_rate": 0.0001,
      "loss": 1.6061,
      "step": 82
    },
    {
      "epoch": 0.027796383121232417,
      "grad_norm": 0.4810364842414856,
      "learning_rate": 0.0001,
      "loss": 1.6528,
      "step": 83
    },
    {
      "epoch": 0.02813127930341594,
      "grad_norm": 0.49675026535987854,
      "learning_rate": 0.0001,
      "loss": 1.6463,
      "step": 84
    },
    {
      "epoch": 0.028466175485599463,
      "grad_norm": 0.4998936951160431,
      "learning_rate": 0.0001,
      "loss": 1.5671,
      "step": 85
    },
    {
      "epoch": 0.028801071667782986,
      "grad_norm": 0.5172111392021179,
      "learning_rate": 0.0001,
      "loss": 1.7286,
      "step": 86
    },
    {
      "epoch": 0.02913596784996651,
      "grad_norm": 0.48785221576690674,
      "learning_rate": 0.0001,
      "loss": 1.5728,
      "step": 87
    },
    {
      "epoch": 0.029470864032150032,
      "grad_norm": 0.4777396321296692,
      "learning_rate": 0.0001,
      "loss": 1.6219,
      "step": 88
    },
    {
      "epoch": 0.029805760214333555,
      "grad_norm": 0.5374659299850464,
      "learning_rate": 0.0001,
      "loss": 1.7086,
      "step": 89
    },
    {
      "epoch": 0.03014065639651708,
      "grad_norm": 0.4848657548427582,
      "learning_rate": 0.0001,
      "loss": 1.6158,
      "step": 90
    },
    {
      "epoch": 0.030475552578700604,
      "grad_norm": 0.5165850520133972,
      "learning_rate": 0.0001,
      "loss": 1.6394,
      "step": 91
    },
    {
      "epoch": 0.030810448760884127,
      "grad_norm": 0.501922607421875,
      "learning_rate": 0.0001,
      "loss": 1.6589,
      "step": 92
    },
    {
      "epoch": 0.03114534494306765,
      "grad_norm": 0.5083390474319458,
      "learning_rate": 0.0001,
      "loss": 1.6182,
      "step": 93
    },
    {
      "epoch": 0.03148024112525117,
      "grad_norm": 0.49391478300094604,
      "learning_rate": 0.0001,
      "loss": 1.6031,
      "step": 94
    },
    {
      "epoch": 0.031815137307434696,
      "grad_norm": 0.5007357597351074,
      "learning_rate": 0.0001,
      "loss": 1.7042,
      "step": 95
    },
    {
      "epoch": 0.032150033489618215,
      "grad_norm": 0.5300001502037048,
      "learning_rate": 0.0001,
      "loss": 1.6702,
      "step": 96
    },
    {
      "epoch": 0.03248492967180174,
      "grad_norm": 0.46860334277153015,
      "learning_rate": 0.0001,
      "loss": 1.5321,
      "step": 97
    },
    {
      "epoch": 0.03281982585398526,
      "grad_norm": 0.5419498682022095,
      "learning_rate": 0.0001,
      "loss": 1.6481,
      "step": 98
    },
    {
      "epoch": 0.03315472203616879,
      "grad_norm": 0.4763254225254059,
      "learning_rate": 0.0001,
      "loss": 1.621,
      "step": 99
    },
    {
      "epoch": 0.033489618218352314,
      "grad_norm": 0.4968387484550476,
      "learning_rate": 0.0001,
      "loss": 1.6008,
      "step": 100
    },
    {
      "epoch": 0.033824514400535834,
      "grad_norm": 0.48966512084007263,
      "learning_rate": 0.0001,
      "loss": 1.6324,
      "step": 101
    },
    {
      "epoch": 0.03415941058271936,
      "grad_norm": 0.49071934819221497,
      "learning_rate": 0.0001,
      "loss": 1.6715,
      "step": 102
    },
    {
      "epoch": 0.03449430676490288,
      "grad_norm": 0.5435018539428711,
      "learning_rate": 0.0001,
      "loss": 1.7803,
      "step": 103
    },
    {
      "epoch": 0.034829202947086406,
      "grad_norm": 0.5261979699134827,
      "learning_rate": 0.0001,
      "loss": 1.7211,
      "step": 104
    },
    {
      "epoch": 0.035164099129269925,
      "grad_norm": 0.5578387379646301,
      "learning_rate": 0.0001,
      "loss": 1.7508,
      "step": 105
    },
    {
      "epoch": 0.03549899531145345,
      "grad_norm": 0.5257611274719238,
      "learning_rate": 0.0001,
      "loss": 1.5434,
      "step": 106
    },
    {
      "epoch": 0.03583389149363697,
      "grad_norm": 0.5152483582496643,
      "learning_rate": 0.0001,
      "loss": 1.7124,
      "step": 107
    },
    {
      "epoch": 0.0361687876758205,
      "grad_norm": 0.4882405698299408,
      "learning_rate": 0.0001,
      "loss": 1.5853,
      "step": 108
    },
    {
      "epoch": 0.03650368385800402,
      "grad_norm": 0.5341641306877136,
      "learning_rate": 0.0001,
      "loss": 1.63,
      "step": 109
    },
    {
      "epoch": 0.036838580040187544,
      "grad_norm": 0.47165918350219727,
      "learning_rate": 0.0001,
      "loss": 1.6256,
      "step": 110
    },
    {
      "epoch": 0.03717347622237106,
      "grad_norm": 0.48669424653053284,
      "learning_rate": 0.0001,
      "loss": 1.6785,
      "step": 111
    },
    {
      "epoch": 0.03750837240455459,
      "grad_norm": 0.502474308013916,
      "learning_rate": 0.0001,
      "loss": 1.6176,
      "step": 112
    },
    {
      "epoch": 0.03784326858673811,
      "grad_norm": 0.46959713101387024,
      "learning_rate": 0.0001,
      "loss": 1.5865,
      "step": 113
    },
    {
      "epoch": 0.038178164768921635,
      "grad_norm": 0.5385250449180603,
      "learning_rate": 0.0001,
      "loss": 1.6965,
      "step": 114
    },
    {
      "epoch": 0.038513060951105155,
      "grad_norm": 0.5123361349105835,
      "learning_rate": 0.0001,
      "loss": 1.637,
      "step": 115
    },
    {
      "epoch": 0.03884795713328868,
      "grad_norm": 0.49659615755081177,
      "learning_rate": 0.0001,
      "loss": 1.6763,
      "step": 116
    },
    {
      "epoch": 0.0391828533154722,
      "grad_norm": 0.47849777340888977,
      "learning_rate": 0.0001,
      "loss": 1.5615,
      "step": 117
    },
    {
      "epoch": 0.03951774949765573,
      "grad_norm": 0.4773821234703064,
      "learning_rate": 0.0001,
      "loss": 1.6418,
      "step": 118
    },
    {
      "epoch": 0.039852645679839246,
      "grad_norm": 0.46657007932662964,
      "learning_rate": 0.0001,
      "loss": 1.5907,
      "step": 119
    },
    {
      "epoch": 0.04018754186202277,
      "grad_norm": 0.48586177825927734,
      "learning_rate": 0.0001,
      "loss": 1.6218,
      "step": 120
    },
    {
      "epoch": 0.0405224380442063,
      "grad_norm": 0.49452975392341614,
      "learning_rate": 0.0001,
      "loss": 1.6308,
      "step": 121
    },
    {
      "epoch": 0.04085733422638982,
      "grad_norm": 0.4575989246368408,
      "learning_rate": 0.0001,
      "loss": 1.5618,
      "step": 122
    },
    {
      "epoch": 0.041192230408573345,
      "grad_norm": 0.4881477355957031,
      "learning_rate": 0.0001,
      "loss": 1.6237,
      "step": 123
    },
    {
      "epoch": 0.041527126590756865,
      "grad_norm": 0.4715660512447357,
      "learning_rate": 0.0001,
      "loss": 1.6247,
      "step": 124
    },
    {
      "epoch": 0.04186202277294039,
      "grad_norm": 0.4727221429347992,
      "learning_rate": 0.0001,
      "loss": 1.5856,
      "step": 125
    },
    {
      "epoch": 0.04219691895512391,
      "grad_norm": 0.47755780816078186,
      "learning_rate": 0.0001,
      "loss": 1.5104,
      "step": 126
    },
    {
      "epoch": 0.04253181513730744,
      "grad_norm": 0.4786100387573242,
      "learning_rate": 0.0001,
      "loss": 1.536,
      "step": 127
    },
    {
      "epoch": 0.042866711319490956,
      "grad_norm": 0.5105636119842529,
      "learning_rate": 0.0001,
      "loss": 1.7084,
      "step": 128
    },
    {
      "epoch": 0.04320160750167448,
      "grad_norm": 0.5502948760986328,
      "learning_rate": 0.0001,
      "loss": 1.7168,
      "step": 129
    },
    {
      "epoch": 0.043536503683858,
      "grad_norm": 0.4909312129020691,
      "learning_rate": 0.0001,
      "loss": 1.7093,
      "step": 130
    },
    {
      "epoch": 0.04387139986604153,
      "grad_norm": 0.46086591482162476,
      "learning_rate": 0.0001,
      "loss": 1.5041,
      "step": 131
    },
    {
      "epoch": 0.04420629604822505,
      "grad_norm": 0.49003592133522034,
      "learning_rate": 0.0001,
      "loss": 1.6106,
      "step": 132
    },
    {
      "epoch": 0.044541192230408574,
      "grad_norm": 0.4714739918708801,
      "learning_rate": 0.0001,
      "loss": 1.6323,
      "step": 133
    },
    {
      "epoch": 0.044876088412592094,
      "grad_norm": 0.46458280086517334,
      "learning_rate": 0.0001,
      "loss": 1.5321,
      "step": 134
    },
    {
      "epoch": 0.04521098459477562,
      "grad_norm": 0.48216310143470764,
      "learning_rate": 0.0001,
      "loss": 1.668,
      "step": 135
    },
    {
      "epoch": 0.04554588077695914,
      "grad_norm": 0.49191856384277344,
      "learning_rate": 0.0001,
      "loss": 1.5979,
      "step": 136
    },
    {
      "epoch": 0.045880776959142666,
      "grad_norm": 0.48731929063796997,
      "learning_rate": 0.0001,
      "loss": 1.5877,
      "step": 137
    },
    {
      "epoch": 0.046215673141326186,
      "grad_norm": 0.4742424786090851,
      "learning_rate": 0.0001,
      "loss": 1.654,
      "step": 138
    },
    {
      "epoch": 0.04655056932350971,
      "grad_norm": 0.4645659327507019,
      "learning_rate": 0.0001,
      "loss": 1.5754,
      "step": 139
    },
    {
      "epoch": 0.04688546550569324,
      "grad_norm": 0.48509088158607483,
      "learning_rate": 0.0001,
      "loss": 1.5807,
      "step": 140
    },
    {
      "epoch": 0.04722036168787676,
      "grad_norm": 0.45820191502571106,
      "learning_rate": 0.0001,
      "loss": 1.6338,
      "step": 141
    },
    {
      "epoch": 0.047555257870060284,
      "grad_norm": 0.4655672609806061,
      "learning_rate": 0.0001,
      "loss": 1.6341,
      "step": 142
    },
    {
      "epoch": 0.047890154052243804,
      "grad_norm": 0.4950549006462097,
      "learning_rate": 0.0001,
      "loss": 1.6622,
      "step": 143
    },
    {
      "epoch": 0.04822505023442733,
      "grad_norm": 0.46763336658477783,
      "learning_rate": 0.0001,
      "loss": 1.5912,
      "step": 144
    },
    {
      "epoch": 0.04855994641661085,
      "grad_norm": 0.47102072834968567,
      "learning_rate": 0.0001,
      "loss": 1.5721,
      "step": 145
    },
    {
      "epoch": 0.048894842598794376,
      "grad_norm": 0.4916516840457916,
      "learning_rate": 0.0001,
      "loss": 1.6117,
      "step": 146
    },
    {
      "epoch": 0.049229738780977896,
      "grad_norm": 0.4632984399795532,
      "learning_rate": 0.0001,
      "loss": 1.6008,
      "step": 147
    },
    {
      "epoch": 0.04956463496316142,
      "grad_norm": 0.45667365193367004,
      "learning_rate": 0.0001,
      "loss": 1.5391,
      "step": 148
    },
    {
      "epoch": 0.04989953114534494,
      "grad_norm": 0.4753417670726776,
      "learning_rate": 0.0001,
      "loss": 1.6279,
      "step": 149
    },
    {
      "epoch": 0.05023442732752847,
      "grad_norm": 0.48078927397727966,
      "learning_rate": 0.0001,
      "loss": 1.6326,
      "step": 150
    },
    {
      "epoch": 0.05056932350971199,
      "grad_norm": 0.4711712896823883,
      "learning_rate": 0.0001,
      "loss": 1.643,
      "step": 151
    },
    {
      "epoch": 0.050904219691895514,
      "grad_norm": 0.5034206509590149,
      "learning_rate": 0.0001,
      "loss": 1.6122,
      "step": 152
    },
    {
      "epoch": 0.05123911587407903,
      "grad_norm": 0.46327561140060425,
      "learning_rate": 0.0001,
      "loss": 1.6089,
      "step": 153
    },
    {
      "epoch": 0.05157401205626256,
      "grad_norm": 0.4954410493373871,
      "learning_rate": 0.0001,
      "loss": 1.6108,
      "step": 154
    },
    {
      "epoch": 0.05190890823844608,
      "grad_norm": 0.5449566841125488,
      "learning_rate": 0.0001,
      "loss": 1.7258,
      "step": 155
    },
    {
      "epoch": 0.052243804420629605,
      "grad_norm": 0.4606022238731384,
      "learning_rate": 0.0001,
      "loss": 1.5401,
      "step": 156
    },
    {
      "epoch": 0.052578700602813125,
      "grad_norm": 0.5017722249031067,
      "learning_rate": 0.0001,
      "loss": 1.6968,
      "step": 157
    },
    {
      "epoch": 0.05291359678499665,
      "grad_norm": 0.4925632178783417,
      "learning_rate": 0.0001,
      "loss": 1.6701,
      "step": 158
    },
    {
      "epoch": 0.05324849296718017,
      "grad_norm": 0.4624950587749481,
      "learning_rate": 0.0001,
      "loss": 1.5978,
      "step": 159
    },
    {
      "epoch": 0.0535833891493637,
      "grad_norm": 0.5322679281234741,
      "learning_rate": 0.0001,
      "loss": 1.6278,
      "step": 160
    },
    {
      "epoch": 0.053918285331547224,
      "grad_norm": 0.4694281816482544,
      "learning_rate": 0.0001,
      "loss": 1.6588,
      "step": 161
    },
    {
      "epoch": 0.05425318151373074,
      "grad_norm": 0.5067780017852783,
      "learning_rate": 0.0001,
      "loss": 1.5661,
      "step": 162
    },
    {
      "epoch": 0.05458807769591427,
      "grad_norm": 0.4347548484802246,
      "learning_rate": 0.0001,
      "loss": 1.5589,
      "step": 163
    },
    {
      "epoch": 0.05492297387809779,
      "grad_norm": 0.5048144459724426,
      "learning_rate": 0.0001,
      "loss": 1.6149,
      "step": 164
    },
    {
      "epoch": 0.055257870060281315,
      "grad_norm": 0.48803550004959106,
      "learning_rate": 0.0001,
      "loss": 1.59,
      "step": 165
    },
    {
      "epoch": 0.055592766242464835,
      "grad_norm": 0.4865317940711975,
      "learning_rate": 0.0001,
      "loss": 1.5392,
      "step": 166
    },
    {
      "epoch": 0.05592766242464836,
      "grad_norm": 0.4582239091396332,
      "learning_rate": 0.0001,
      "loss": 1.6371,
      "step": 167
    },
    {
      "epoch": 0.05626255860683188,
      "grad_norm": 0.5068498253822327,
      "learning_rate": 0.0001,
      "loss": 1.665,
      "step": 168
    },
    {
      "epoch": 0.05659745478901541,
      "grad_norm": 0.4794592559337616,
      "learning_rate": 0.0001,
      "loss": 1.629,
      "step": 169
    },
    {
      "epoch": 0.056932350971198926,
      "grad_norm": 0.4870087802410126,
      "learning_rate": 0.0001,
      "loss": 1.5944,
      "step": 170
    },
    {
      "epoch": 0.05726724715338245,
      "grad_norm": 0.4849359095096588,
      "learning_rate": 0.0001,
      "loss": 1.6089,
      "step": 171
    },
    {
      "epoch": 0.05760214333556597,
      "grad_norm": 0.4833835959434509,
      "learning_rate": 0.0001,
      "loss": 1.5886,
      "step": 172
    },
    {
      "epoch": 0.0579370395177495,
      "grad_norm": 0.46753743290901184,
      "learning_rate": 0.0001,
      "loss": 1.6548,
      "step": 173
    },
    {
      "epoch": 0.05827193569993302,
      "grad_norm": 0.49789801239967346,
      "learning_rate": 0.0001,
      "loss": 1.6392,
      "step": 174
    },
    {
      "epoch": 0.058606831882116545,
      "grad_norm": 0.46485090255737305,
      "learning_rate": 0.0001,
      "loss": 1.6543,
      "step": 175
    },
    {
      "epoch": 0.058941728064300064,
      "grad_norm": 0.49583113193511963,
      "learning_rate": 0.0001,
      "loss": 1.6233,
      "step": 176
    },
    {
      "epoch": 0.05927662424648359,
      "grad_norm": 0.4523816704750061,
      "learning_rate": 0.0001,
      "loss": 1.5896,
      "step": 177
    },
    {
      "epoch": 0.05961152042866711,
      "grad_norm": 0.4893614649772644,
      "learning_rate": 0.0001,
      "loss": 1.6138,
      "step": 178
    },
    {
      "epoch": 0.059946416610850636,
      "grad_norm": 0.47194191813468933,
      "learning_rate": 0.0001,
      "loss": 1.6718,
      "step": 179
    },
    {
      "epoch": 0.06028131279303416,
      "grad_norm": 0.46762990951538086,
      "learning_rate": 0.0001,
      "loss": 1.6478,
      "step": 180
    },
    {
      "epoch": 0.06061620897521768,
      "grad_norm": 0.47963568568229675,
      "learning_rate": 0.0001,
      "loss": 1.7146,
      "step": 181
    },
    {
      "epoch": 0.06095110515740121,
      "grad_norm": 0.47497475147247314,
      "learning_rate": 0.0001,
      "loss": 1.598,
      "step": 182
    },
    {
      "epoch": 0.06128600133958473,
      "grad_norm": 0.45387428998947144,
      "learning_rate": 0.0001,
      "loss": 1.5978,
      "step": 183
    },
    {
      "epoch": 0.061620897521768254,
      "grad_norm": 0.45365142822265625,
      "learning_rate": 0.0001,
      "loss": 1.598,
      "step": 184
    },
    {
      "epoch": 0.061955793703951774,
      "grad_norm": 0.4785883128643036,
      "learning_rate": 0.0001,
      "loss": 1.674,
      "step": 185
    },
    {
      "epoch": 0.0622906898861353,
      "grad_norm": 0.49143221974372864,
      "learning_rate": 0.0001,
      "loss": 1.6021,
      "step": 186
    },
    {
      "epoch": 0.06262558606831882,
      "grad_norm": 0.49770817160606384,
      "learning_rate": 0.0001,
      "loss": 1.6205,
      "step": 187
    },
    {
      "epoch": 0.06296048225050234,
      "grad_norm": 0.4437146782875061,
      "learning_rate": 0.0001,
      "loss": 1.4854,
      "step": 188
    },
    {
      "epoch": 0.06329537843268587,
      "grad_norm": 0.46671444177627563,
      "learning_rate": 0.0001,
      "loss": 1.6339,
      "step": 189
    },
    {
      "epoch": 0.06363027461486939,
      "grad_norm": 0.45990461111068726,
      "learning_rate": 0.0001,
      "loss": 1.607,
      "step": 190
    },
    {
      "epoch": 0.06396517079705291,
      "grad_norm": 0.49323973059654236,
      "learning_rate": 0.0001,
      "loss": 1.6806,
      "step": 191
    },
    {
      "epoch": 0.06430006697923643,
      "grad_norm": 0.4419143497943878,
      "learning_rate": 0.0001,
      "loss": 1.569,
      "step": 192
    },
    {
      "epoch": 0.06463496316141996,
      "grad_norm": 0.47556862235069275,
      "learning_rate": 0.0001,
      "loss": 1.5985,
      "step": 193
    },
    {
      "epoch": 0.06496985934360348,
      "grad_norm": 0.462699294090271,
      "learning_rate": 0.0001,
      "loss": 1.6084,
      "step": 194
    },
    {
      "epoch": 0.065304755525787,
      "grad_norm": 0.46956902742385864,
      "learning_rate": 0.0001,
      "loss": 1.6413,
      "step": 195
    },
    {
      "epoch": 0.06563965170797052,
      "grad_norm": 0.4640052020549774,
      "learning_rate": 0.0001,
      "loss": 1.6613,
      "step": 196
    },
    {
      "epoch": 0.06597454789015406,
      "grad_norm": 0.46174609661102295,
      "learning_rate": 0.0001,
      "loss": 1.6004,
      "step": 197
    },
    {
      "epoch": 0.06630944407233758,
      "grad_norm": 0.4698941707611084,
      "learning_rate": 0.0001,
      "loss": 1.6381,
      "step": 198
    },
    {
      "epoch": 0.0666443402545211,
      "grad_norm": 0.4754740297794342,
      "learning_rate": 0.0001,
      "loss": 1.6448,
      "step": 199
    },
    {
      "epoch": 0.06697923643670463,
      "grad_norm": 0.4478375315666199,
      "learning_rate": 0.0001,
      "loss": 1.5242,
      "step": 200
    },
    {
      "epoch": 0.06731413261888815,
      "grad_norm": 0.48765239119529724,
      "learning_rate": 0.0001,
      "loss": 1.6898,
      "step": 201
    },
    {
      "epoch": 0.06764902880107167,
      "grad_norm": 0.4749129116535187,
      "learning_rate": 0.0001,
      "loss": 1.7313,
      "step": 202
    },
    {
      "epoch": 0.06798392498325519,
      "grad_norm": 0.46355462074279785,
      "learning_rate": 0.0001,
      "loss": 1.6475,
      "step": 203
    },
    {
      "epoch": 0.06831882116543872,
      "grad_norm": 0.46045640110969543,
      "learning_rate": 0.0001,
      "loss": 1.5821,
      "step": 204
    },
    {
      "epoch": 0.06865371734762224,
      "grad_norm": 0.46230217814445496,
      "learning_rate": 0.0001,
      "loss": 1.532,
      "step": 205
    },
    {
      "epoch": 0.06898861352980576,
      "grad_norm": 0.4512956142425537,
      "learning_rate": 0.0001,
      "loss": 1.5849,
      "step": 206
    },
    {
      "epoch": 0.06932350971198928,
      "grad_norm": 0.4672728180885315,
      "learning_rate": 0.0001,
      "loss": 1.6527,
      "step": 207
    },
    {
      "epoch": 0.06965840589417281,
      "grad_norm": 0.5102751851081848,
      "learning_rate": 0.0001,
      "loss": 1.6008,
      "step": 208
    },
    {
      "epoch": 0.06999330207635633,
      "grad_norm": 0.47706693410873413,
      "learning_rate": 0.0001,
      "loss": 1.5342,
      "step": 209
    },
    {
      "epoch": 0.07032819825853985,
      "grad_norm": 0.4652723968029022,
      "learning_rate": 0.0001,
      "loss": 1.6368,
      "step": 210
    },
    {
      "epoch": 0.07066309444072337,
      "grad_norm": 0.4909239113330841,
      "learning_rate": 0.0001,
      "loss": 1.7325,
      "step": 211
    },
    {
      "epoch": 0.0709979906229069,
      "grad_norm": 0.4498032331466675,
      "learning_rate": 0.0001,
      "loss": 1.6275,
      "step": 212
    },
    {
      "epoch": 0.07133288680509042,
      "grad_norm": 0.47739377617836,
      "learning_rate": 0.0001,
      "loss": 1.6555,
      "step": 213
    },
    {
      "epoch": 0.07166778298727394,
      "grad_norm": 0.4853709936141968,
      "learning_rate": 0.0001,
      "loss": 1.7459,
      "step": 214
    },
    {
      "epoch": 0.07200267916945746,
      "grad_norm": 0.46574491262435913,
      "learning_rate": 0.0001,
      "loss": 1.6285,
      "step": 215
    },
    {
      "epoch": 0.072337575351641,
      "grad_norm": 0.46146032214164734,
      "learning_rate": 0.0001,
      "loss": 1.5787,
      "step": 216
    },
    {
      "epoch": 0.07267247153382451,
      "grad_norm": 0.4617884159088135,
      "learning_rate": 0.0001,
      "loss": 1.5852,
      "step": 217
    },
    {
      "epoch": 0.07300736771600803,
      "grad_norm": 0.45636409521102905,
      "learning_rate": 0.0001,
      "loss": 1.6063,
      "step": 218
    },
    {
      "epoch": 0.07334226389819157,
      "grad_norm": 0.4762187898159027,
      "learning_rate": 0.0001,
      "loss": 1.6394,
      "step": 219
    },
    {
      "epoch": 0.07367716008037509,
      "grad_norm": 0.4896661937236786,
      "learning_rate": 0.0001,
      "loss": 1.6506,
      "step": 220
    },
    {
      "epoch": 0.0740120562625586,
      "grad_norm": 0.46752890944480896,
      "learning_rate": 0.0001,
      "loss": 1.5646,
      "step": 221
    },
    {
      "epoch": 0.07434695244474213,
      "grad_norm": 0.46463754773139954,
      "learning_rate": 0.0001,
      "loss": 1.4999,
      "step": 222
    },
    {
      "epoch": 0.07468184862692566,
      "grad_norm": 0.548875093460083,
      "learning_rate": 0.0001,
      "loss": 1.6708,
      "step": 223
    },
    {
      "epoch": 0.07501674480910918,
      "grad_norm": 0.47329181432724,
      "learning_rate": 0.0001,
      "loss": 1.6911,
      "step": 224
    },
    {
      "epoch": 0.0753516409912927,
      "grad_norm": 0.46423107385635376,
      "learning_rate": 0.0001,
      "loss": 1.5469,
      "step": 225
    },
    {
      "epoch": 0.07568653717347622,
      "grad_norm": 0.4901514947414398,
      "learning_rate": 0.0001,
      "loss": 1.6217,
      "step": 226
    },
    {
      "epoch": 0.07602143335565975,
      "grad_norm": 0.5000611543655396,
      "learning_rate": 0.0001,
      "loss": 1.6934,
      "step": 227
    },
    {
      "epoch": 0.07635632953784327,
      "grad_norm": 0.48425373435020447,
      "learning_rate": 0.0001,
      "loss": 1.6358,
      "step": 228
    },
    {
      "epoch": 0.07669122572002679,
      "grad_norm": 0.48660895228385925,
      "learning_rate": 0.0001,
      "loss": 1.6489,
      "step": 229
    },
    {
      "epoch": 0.07702612190221031,
      "grad_norm": 0.5343098640441895,
      "learning_rate": 0.0001,
      "loss": 1.7937,
      "step": 230
    },
    {
      "epoch": 0.07736101808439384,
      "grad_norm": 0.44870078563690186,
      "learning_rate": 0.0001,
      "loss": 1.4956,
      "step": 231
    },
    {
      "epoch": 0.07769591426657736,
      "grad_norm": 0.44342944025993347,
      "learning_rate": 0.0001,
      "loss": 1.4829,
      "step": 232
    },
    {
      "epoch": 0.07803081044876088,
      "grad_norm": 0.5041643381118774,
      "learning_rate": 0.0001,
      "loss": 1.6187,
      "step": 233
    },
    {
      "epoch": 0.0783657066309444,
      "grad_norm": 0.4803434908390045,
      "learning_rate": 0.0001,
      "loss": 1.5341,
      "step": 234
    },
    {
      "epoch": 0.07870060281312793,
      "grad_norm": 0.4685923457145691,
      "learning_rate": 0.0001,
      "loss": 1.612,
      "step": 235
    },
    {
      "epoch": 0.07903549899531145,
      "grad_norm": 0.5002148151397705,
      "learning_rate": 0.0001,
      "loss": 1.5885,
      "step": 236
    },
    {
      "epoch": 0.07937039517749497,
      "grad_norm": 0.44594261050224304,
      "learning_rate": 0.0001,
      "loss": 1.6518,
      "step": 237
    },
    {
      "epoch": 0.07970529135967849,
      "grad_norm": 0.4505089819431305,
      "learning_rate": 0.0001,
      "loss": 1.6333,
      "step": 238
    },
    {
      "epoch": 0.08004018754186203,
      "grad_norm": 0.47187793254852295,
      "learning_rate": 0.0001,
      "loss": 1.6145,
      "step": 239
    },
    {
      "epoch": 0.08037508372404555,
      "grad_norm": 0.43274542689323425,
      "learning_rate": 0.0001,
      "loss": 1.5611,
      "step": 240
    },
    {
      "epoch": 0.08070997990622907,
      "grad_norm": 0.4760628938674927,
      "learning_rate": 0.0001,
      "loss": 1.6176,
      "step": 241
    },
    {
      "epoch": 0.0810448760884126,
      "grad_norm": 0.4943337142467499,
      "learning_rate": 0.0001,
      "loss": 1.6503,
      "step": 242
    },
    {
      "epoch": 0.08137977227059612,
      "grad_norm": 0.45220503211021423,
      "learning_rate": 0.0001,
      "loss": 1.5666,
      "step": 243
    },
    {
      "epoch": 0.08171466845277964,
      "grad_norm": 0.4890030026435852,
      "learning_rate": 0.0001,
      "loss": 1.6096,
      "step": 244
    },
    {
      "epoch": 0.08204956463496316,
      "grad_norm": 0.426581472158432,
      "learning_rate": 0.0001,
      "loss": 1.5234,
      "step": 245
    },
    {
      "epoch": 0.08238446081714669,
      "grad_norm": 0.5227492451667786,
      "learning_rate": 0.0001,
      "loss": 1.7529,
      "step": 246
    },
    {
      "epoch": 0.08271935699933021,
      "grad_norm": 0.4621161222457886,
      "learning_rate": 0.0001,
      "loss": 1.6967,
      "step": 247
    },
    {
      "epoch": 0.08305425318151373,
      "grad_norm": 0.45487985014915466,
      "learning_rate": 0.0001,
      "loss": 1.5204,
      "step": 248
    },
    {
      "epoch": 0.08338914936369725,
      "grad_norm": 0.4499679505825043,
      "learning_rate": 0.0001,
      "loss": 1.6882,
      "step": 249
    },
    {
      "epoch": 0.08372404554588078,
      "grad_norm": 0.46019238233566284,
      "learning_rate": 0.0001,
      "loss": 1.5759,
      "step": 250
    },
    {
      "epoch": 0.0840589417280643,
      "grad_norm": 0.4673830270767212,
      "learning_rate": 0.0001,
      "loss": 1.6,
      "step": 251
    },
    {
      "epoch": 0.08439383791024782,
      "grad_norm": 0.4210320711135864,
      "learning_rate": 0.0001,
      "loss": 1.459,
      "step": 252
    },
    {
      "epoch": 0.08472873409243134,
      "grad_norm": 0.49863752722740173,
      "learning_rate": 0.0001,
      "loss": 1.7663,
      "step": 253
    },
    {
      "epoch": 0.08506363027461487,
      "grad_norm": 0.5102465748786926,
      "learning_rate": 0.0001,
      "loss": 1.6699,
      "step": 254
    },
    {
      "epoch": 0.0853985264567984,
      "grad_norm": 0.45504051446914673,
      "learning_rate": 0.0001,
      "loss": 1.5721,
      "step": 255
    },
    {
      "epoch": 0.08573342263898191,
      "grad_norm": 0.48080578446388245,
      "learning_rate": 0.0001,
      "loss": 1.5565,
      "step": 256
    },
    {
      "epoch": 0.08606831882116543,
      "grad_norm": 0.4657333791255951,
      "learning_rate": 0.0001,
      "loss": 1.6291,
      "step": 257
    },
    {
      "epoch": 0.08640321500334897,
      "grad_norm": 0.4602212905883789,
      "learning_rate": 0.0001,
      "loss": 1.5471,
      "step": 258
    },
    {
      "epoch": 0.08673811118553248,
      "grad_norm": 0.4672396183013916,
      "learning_rate": 0.0001,
      "loss": 1.6641,
      "step": 259
    },
    {
      "epoch": 0.087073007367716,
      "grad_norm": 0.47578176856040955,
      "learning_rate": 0.0001,
      "loss": 1.6204,
      "step": 260
    },
    {
      "epoch": 0.08740790354989954,
      "grad_norm": 0.45810386538505554,
      "learning_rate": 0.0001,
      "loss": 1.6366,
      "step": 261
    },
    {
      "epoch": 0.08774279973208306,
      "grad_norm": 0.4805028736591339,
      "learning_rate": 0.0001,
      "loss": 1.6073,
      "step": 262
    },
    {
      "epoch": 0.08807769591426658,
      "grad_norm": 0.495786190032959,
      "learning_rate": 0.0001,
      "loss": 1.6871,
      "step": 263
    },
    {
      "epoch": 0.0884125920964501,
      "grad_norm": 0.45714274048805237,
      "learning_rate": 0.0001,
      "loss": 1.6176,
      "step": 264
    },
    {
      "epoch": 0.08874748827863363,
      "grad_norm": 0.4549003839492798,
      "learning_rate": 0.0001,
      "loss": 1.576,
      "step": 265
    },
    {
      "epoch": 0.08908238446081715,
      "grad_norm": 0.4851682782173157,
      "learning_rate": 0.0001,
      "loss": 1.6564,
      "step": 266
    },
    {
      "epoch": 0.08941728064300067,
      "grad_norm": 0.4567672908306122,
      "learning_rate": 0.0001,
      "loss": 1.6241,
      "step": 267
    },
    {
      "epoch": 0.08975217682518419,
      "grad_norm": 0.45151346921920776,
      "learning_rate": 0.0001,
      "loss": 1.5876,
      "step": 268
    },
    {
      "epoch": 0.09008707300736772,
      "grad_norm": 0.4828041195869446,
      "learning_rate": 0.0001,
      "loss": 1.5486,
      "step": 269
    },
    {
      "epoch": 0.09042196918955124,
      "grad_norm": 0.4572749733924866,
      "learning_rate": 0.0001,
      "loss": 1.5367,
      "step": 270
    },
    {
      "epoch": 0.09075686537173476,
      "grad_norm": 0.49878939986228943,
      "learning_rate": 0.0001,
      "loss": 1.6074,
      "step": 271
    },
    {
      "epoch": 0.09109176155391828,
      "grad_norm": 0.4592888653278351,
      "learning_rate": 0.0001,
      "loss": 1.6461,
      "step": 272
    },
    {
      "epoch": 0.09142665773610181,
      "grad_norm": 0.47039178013801575,
      "learning_rate": 0.0001,
      "loss": 1.6303,
      "step": 273
    },
    {
      "epoch": 0.09176155391828533,
      "grad_norm": 0.4495210349559784,
      "learning_rate": 0.0001,
      "loss": 1.5465,
      "step": 274
    },
    {
      "epoch": 0.09209645010046885,
      "grad_norm": 0.4704029858112335,
      "learning_rate": 0.0001,
      "loss": 1.6109,
      "step": 275
    },
    {
      "epoch": 0.09243134628265237,
      "grad_norm": 0.44413095712661743,
      "learning_rate": 0.0001,
      "loss": 1.5308,
      "step": 276
    },
    {
      "epoch": 0.0927662424648359,
      "grad_norm": 0.4834573566913605,
      "learning_rate": 0.0001,
      "loss": 1.6865,
      "step": 277
    },
    {
      "epoch": 0.09310113864701942,
      "grad_norm": 0.47912439703941345,
      "learning_rate": 0.0001,
      "loss": 1.6228,
      "step": 278
    },
    {
      "epoch": 0.09343603482920294,
      "grad_norm": 0.47050291299819946,
      "learning_rate": 0.0001,
      "loss": 1.6517,
      "step": 279
    },
    {
      "epoch": 0.09377093101138648,
      "grad_norm": 0.4590076208114624,
      "learning_rate": 0.0001,
      "loss": 1.6135,
      "step": 280
    },
    {
      "epoch": 0.09410582719357,
      "grad_norm": 0.4425355792045593,
      "learning_rate": 0.0001,
      "loss": 1.5617,
      "step": 281
    },
    {
      "epoch": 0.09444072337575352,
      "grad_norm": 0.4535706043243408,
      "learning_rate": 0.0001,
      "loss": 1.6319,
      "step": 282
    },
    {
      "epoch": 0.09477561955793704,
      "grad_norm": 0.4804530441761017,
      "learning_rate": 0.0001,
      "loss": 1.623,
      "step": 283
    },
    {
      "epoch": 0.09511051574012057,
      "grad_norm": 0.4477209448814392,
      "learning_rate": 0.0001,
      "loss": 1.5297,
      "step": 284
    },
    {
      "epoch": 0.09544541192230409,
      "grad_norm": 0.4630716145038605,
      "learning_rate": 0.0001,
      "loss": 1.6254,
      "step": 285
    },
    {
      "epoch": 0.09578030810448761,
      "grad_norm": 0.4582269787788391,
      "learning_rate": 0.0001,
      "loss": 1.5493,
      "step": 286
    },
    {
      "epoch": 0.09611520428667113,
      "grad_norm": 0.460605263710022,
      "learning_rate": 0.0001,
      "loss": 1.5757,
      "step": 287
    },
    {
      "epoch": 0.09645010046885466,
      "grad_norm": 0.4452902376651764,
      "learning_rate": 0.0001,
      "loss": 1.5142,
      "step": 288
    },
    {
      "epoch": 0.09678499665103818,
      "grad_norm": 0.46841830015182495,
      "learning_rate": 0.0001,
      "loss": 1.6504,
      "step": 289
    },
    {
      "epoch": 0.0971198928332217,
      "grad_norm": 0.4708998203277588,
      "learning_rate": 0.0001,
      "loss": 1.6167,
      "step": 290
    },
    {
      "epoch": 0.09745478901540522,
      "grad_norm": 0.47076770663261414,
      "learning_rate": 0.0001,
      "loss": 1.5892,
      "step": 291
    },
    {
      "epoch": 0.09778968519758875,
      "grad_norm": 0.4641210436820984,
      "learning_rate": 0.0001,
      "loss": 1.6524,
      "step": 292
    },
    {
      "epoch": 0.09812458137977227,
      "grad_norm": 0.45254385471343994,
      "learning_rate": 0.0001,
      "loss": 1.5052,
      "step": 293
    },
    {
      "epoch": 0.09845947756195579,
      "grad_norm": 0.4456687569618225,
      "learning_rate": 0.0001,
      "loss": 1.5605,
      "step": 294
    },
    {
      "epoch": 0.09879437374413931,
      "grad_norm": 0.4839523136615753,
      "learning_rate": 0.0001,
      "loss": 1.7271,
      "step": 295
    },
    {
      "epoch": 0.09912926992632284,
      "grad_norm": 0.44768890738487244,
      "learning_rate": 0.0001,
      "loss": 1.543,
      "step": 296
    },
    {
      "epoch": 0.09946416610850636,
      "grad_norm": 0.4762071669101715,
      "learning_rate": 0.0001,
      "loss": 1.5863,
      "step": 297
    },
    {
      "epoch": 0.09979906229068988,
      "grad_norm": 0.44802209734916687,
      "learning_rate": 0.0001,
      "loss": 1.6379,
      "step": 298
    },
    {
      "epoch": 0.10013395847287342,
      "grad_norm": 0.4678582549095154,
      "learning_rate": 0.0001,
      "loss": 1.4977,
      "step": 299
    },
    {
      "epoch": 0.10046885465505694,
      "grad_norm": 0.44217634201049805,
      "learning_rate": 0.0001,
      "loss": 1.5518,
      "step": 300
    },
    {
      "epoch": 0.10080375083724046,
      "grad_norm": 0.4706065356731415,
      "learning_rate": 0.0001,
      "loss": 1.5462,
      "step": 301
    },
    {
      "epoch": 0.10113864701942397,
      "grad_norm": 0.4437488913536072,
      "learning_rate": 0.0001,
      "loss": 1.5496,
      "step": 302
    },
    {
      "epoch": 0.10147354320160751,
      "grad_norm": 0.5229858160018921,
      "learning_rate": 0.0001,
      "loss": 1.6437,
      "step": 303
    },
    {
      "epoch": 0.10180843938379103,
      "grad_norm": 0.44042670726776123,
      "learning_rate": 0.0001,
      "loss": 1.5646,
      "step": 304
    },
    {
      "epoch": 0.10214333556597455,
      "grad_norm": 0.4837404787540436,
      "learning_rate": 0.0001,
      "loss": 1.6794,
      "step": 305
    },
    {
      "epoch": 0.10247823174815807,
      "grad_norm": 0.4524439871311188,
      "learning_rate": 0.0001,
      "loss": 1.6054,
      "step": 306
    },
    {
      "epoch": 0.1028131279303416,
      "grad_norm": 0.47352391481399536,
      "learning_rate": 0.0001,
      "loss": 1.5684,
      "step": 307
    },
    {
      "epoch": 0.10314802411252512,
      "grad_norm": 0.48607951402664185,
      "learning_rate": 0.0001,
      "loss": 1.6231,
      "step": 308
    },
    {
      "epoch": 0.10348292029470864,
      "grad_norm": 0.49019041657447815,
      "learning_rate": 0.0001,
      "loss": 1.6256,
      "step": 309
    },
    {
      "epoch": 0.10381781647689216,
      "grad_norm": 0.4620254933834076,
      "learning_rate": 0.0001,
      "loss": 1.6367,
      "step": 310
    },
    {
      "epoch": 0.10415271265907569,
      "grad_norm": 0.429916113615036,
      "learning_rate": 0.0001,
      "loss": 1.5171,
      "step": 311
    },
    {
      "epoch": 0.10448760884125921,
      "grad_norm": 0.4744836688041687,
      "learning_rate": 0.0001,
      "loss": 1.624,
      "step": 312
    },
    {
      "epoch": 0.10482250502344273,
      "grad_norm": 0.43478068709373474,
      "learning_rate": 0.0001,
      "loss": 1.5532,
      "step": 313
    },
    {
      "epoch": 0.10515740120562625,
      "grad_norm": 0.5063771605491638,
      "learning_rate": 0.0001,
      "loss": 1.628,
      "step": 314
    },
    {
      "epoch": 0.10549229738780978,
      "grad_norm": 0.4402046799659729,
      "learning_rate": 0.0001,
      "loss": 1.5388,
      "step": 315
    },
    {
      "epoch": 0.1058271935699933,
      "grad_norm": 0.46563291549682617,
      "learning_rate": 0.0001,
      "loss": 1.6603,
      "step": 316
    },
    {
      "epoch": 0.10616208975217682,
      "grad_norm": 0.4675760269165039,
      "learning_rate": 0.0001,
      "loss": 1.6199,
      "step": 317
    },
    {
      "epoch": 0.10649698593436034,
      "grad_norm": 0.46061259508132935,
      "learning_rate": 0.0001,
      "loss": 1.6721,
      "step": 318
    },
    {
      "epoch": 0.10683188211654387,
      "grad_norm": 0.45733362436294556,
      "learning_rate": 0.0001,
      "loss": 1.6237,
      "step": 319
    },
    {
      "epoch": 0.1071667782987274,
      "grad_norm": 0.4503006935119629,
      "learning_rate": 0.0001,
      "loss": 1.5131,
      "step": 320
    },
    {
      "epoch": 0.10750167448091091,
      "grad_norm": 0.4447789788246155,
      "learning_rate": 0.0001,
      "loss": 1.4741,
      "step": 321
    },
    {
      "epoch": 0.10783657066309445,
      "grad_norm": 0.48432567715644836,
      "learning_rate": 0.0001,
      "loss": 1.6338,
      "step": 322
    },
    {
      "epoch": 0.10817146684527797,
      "grad_norm": 0.46932923793792725,
      "learning_rate": 0.0001,
      "loss": 1.6176,
      "step": 323
    },
    {
      "epoch": 0.10850636302746149,
      "grad_norm": 0.4636939764022827,
      "learning_rate": 0.0001,
      "loss": 1.5967,
      "step": 324
    },
    {
      "epoch": 0.108841259209645,
      "grad_norm": 0.47708818316459656,
      "learning_rate": 0.0001,
      "loss": 1.6065,
      "step": 325
    },
    {
      "epoch": 0.10917615539182854,
      "grad_norm": 0.43039318919181824,
      "learning_rate": 0.0001,
      "loss": 1.586,
      "step": 326
    },
    {
      "epoch": 0.10951105157401206,
      "grad_norm": 0.46356332302093506,
      "learning_rate": 0.0001,
      "loss": 1.5193,
      "step": 327
    },
    {
      "epoch": 0.10984594775619558,
      "grad_norm": 0.4567822515964508,
      "learning_rate": 0.0001,
      "loss": 1.607,
      "step": 328
    },
    {
      "epoch": 0.1101808439383791,
      "grad_norm": 0.4588714838027954,
      "learning_rate": 0.0001,
      "loss": 1.5343,
      "step": 329
    },
    {
      "epoch": 0.11051574012056263,
      "grad_norm": 0.48270532488822937,
      "learning_rate": 0.0001,
      "loss": 1.6282,
      "step": 330
    },
    {
      "epoch": 0.11085063630274615,
      "grad_norm": 0.45466941595077515,
      "learning_rate": 0.0001,
      "loss": 1.6338,
      "step": 331
    },
    {
      "epoch": 0.11118553248492967,
      "grad_norm": 0.4509162902832031,
      "learning_rate": 0.0001,
      "loss": 1.602,
      "step": 332
    },
    {
      "epoch": 0.11152042866711319,
      "grad_norm": 0.4739763140678406,
      "learning_rate": 0.0001,
      "loss": 1.6797,
      "step": 333
    },
    {
      "epoch": 0.11185532484929672,
      "grad_norm": 0.4735789895057678,
      "learning_rate": 0.0001,
      "loss": 1.6173,
      "step": 334
    },
    {
      "epoch": 0.11219022103148024,
      "grad_norm": 0.4974208176136017,
      "learning_rate": 0.0001,
      "loss": 1.6478,
      "step": 335
    },
    {
      "epoch": 0.11252511721366376,
      "grad_norm": 0.428434282541275,
      "learning_rate": 0.0001,
      "loss": 1.4542,
      "step": 336
    },
    {
      "epoch": 0.11286001339584728,
      "grad_norm": 0.4694981575012207,
      "learning_rate": 0.0001,
      "loss": 1.6672,
      "step": 337
    },
    {
      "epoch": 0.11319490957803081,
      "grad_norm": 0.4457933306694031,
      "learning_rate": 0.0001,
      "loss": 1.5994,
      "step": 338
    },
    {
      "epoch": 0.11352980576021433,
      "grad_norm": 0.4311225414276123,
      "learning_rate": 0.0001,
      "loss": 1.4973,
      "step": 339
    },
    {
      "epoch": 0.11386470194239785,
      "grad_norm": 0.4609581232070923,
      "learning_rate": 0.0001,
      "loss": 1.5811,
      "step": 340
    },
    {
      "epoch": 0.11419959812458139,
      "grad_norm": 0.4629519581794739,
      "learning_rate": 0.0001,
      "loss": 1.6941,
      "step": 341
    },
    {
      "epoch": 0.1145344943067649,
      "grad_norm": 0.4684126377105713,
      "learning_rate": 0.0001,
      "loss": 1.6668,
      "step": 342
    },
    {
      "epoch": 0.11486939048894843,
      "grad_norm": 0.4475191533565521,
      "learning_rate": 0.0001,
      "loss": 1.5619,
      "step": 343
    },
    {
      "epoch": 0.11520428667113194,
      "grad_norm": 0.45096057653427124,
      "learning_rate": 0.0001,
      "loss": 1.6197,
      "step": 344
    },
    {
      "epoch": 0.11553918285331548,
      "grad_norm": 0.44147443771362305,
      "learning_rate": 0.0001,
      "loss": 1.556,
      "step": 345
    },
    {
      "epoch": 0.115874079035499,
      "grad_norm": 0.4539279639720917,
      "learning_rate": 0.0001,
      "loss": 1.6454,
      "step": 346
    },
    {
      "epoch": 0.11620897521768252,
      "grad_norm": 0.44900423288345337,
      "learning_rate": 0.0001,
      "loss": 1.6007,
      "step": 347
    },
    {
      "epoch": 0.11654387139986604,
      "grad_norm": 0.4491700828075409,
      "learning_rate": 0.0001,
      "loss": 1.6662,
      "step": 348
    },
    {
      "epoch": 0.11687876758204957,
      "grad_norm": 0.4434390962123871,
      "learning_rate": 0.0001,
      "loss": 1.6097,
      "step": 349
    },
    {
      "epoch": 0.11721366376423309,
      "grad_norm": 0.47392427921295166,
      "learning_rate": 0.0001,
      "loss": 1.7676,
      "step": 350
    },
    {
      "epoch": 0.11754855994641661,
      "grad_norm": 0.4450167417526245,
      "learning_rate": 0.0001,
      "loss": 1.6478,
      "step": 351
    },
    {
      "epoch": 0.11788345612860013,
      "grad_norm": 0.44741788506507874,
      "learning_rate": 0.0001,
      "loss": 1.4809,
      "step": 352
    },
    {
      "epoch": 0.11821835231078366,
      "grad_norm": 0.4440721869468689,
      "learning_rate": 0.0001,
      "loss": 1.5977,
      "step": 353
    },
    {
      "epoch": 0.11855324849296718,
      "grad_norm": 0.4559003710746765,
      "learning_rate": 0.0001,
      "loss": 1.5861,
      "step": 354
    },
    {
      "epoch": 0.1188881446751507,
      "grad_norm": 0.4684537649154663,
      "learning_rate": 0.0001,
      "loss": 1.6041,
      "step": 355
    },
    {
      "epoch": 0.11922304085733422,
      "grad_norm": 0.44140082597732544,
      "learning_rate": 0.0001,
      "loss": 1.6321,
      "step": 356
    },
    {
      "epoch": 0.11955793703951775,
      "grad_norm": 0.4456671178340912,
      "learning_rate": 0.0001,
      "loss": 1.6585,
      "step": 357
    },
    {
      "epoch": 0.11989283322170127,
      "grad_norm": 0.4394270181655884,
      "learning_rate": 0.0001,
      "loss": 1.6098,
      "step": 358
    },
    {
      "epoch": 0.12022772940388479,
      "grad_norm": 0.46983978152275085,
      "learning_rate": 0.0001,
      "loss": 1.5997,
      "step": 359
    },
    {
      "epoch": 0.12056262558606833,
      "grad_norm": 0.4956510663032532,
      "learning_rate": 0.0001,
      "loss": 1.6731,
      "step": 360
    },
    {
      "epoch": 0.12089752176825184,
      "grad_norm": 0.47455278038978577,
      "learning_rate": 0.0001,
      "loss": 1.6163,
      "step": 361
    },
    {
      "epoch": 0.12123241795043536,
      "grad_norm": 0.4317326247692108,
      "learning_rate": 0.0001,
      "loss": 1.4407,
      "step": 362
    },
    {
      "epoch": 0.12156731413261888,
      "grad_norm": 0.4892672300338745,
      "learning_rate": 0.0001,
      "loss": 1.6077,
      "step": 363
    },
    {
      "epoch": 0.12190221031480242,
      "grad_norm": 0.44915369153022766,
      "learning_rate": 0.0001,
      "loss": 1.5941,
      "step": 364
    },
    {
      "epoch": 0.12223710649698594,
      "grad_norm": 0.45726680755615234,
      "learning_rate": 0.0001,
      "loss": 1.588,
      "step": 365
    },
    {
      "epoch": 0.12257200267916946,
      "grad_norm": 0.4513232409954071,
      "learning_rate": 0.0001,
      "loss": 1.6027,
      "step": 366
    },
    {
      "epoch": 0.12290689886135298,
      "grad_norm": 0.473565012216568,
      "learning_rate": 0.0001,
      "loss": 1.6643,
      "step": 367
    },
    {
      "epoch": 0.12324179504353651,
      "grad_norm": 0.48082560300827026,
      "learning_rate": 0.0001,
      "loss": 1.5707,
      "step": 368
    },
    {
      "epoch": 0.12357669122572003,
      "grad_norm": 0.44344571232795715,
      "learning_rate": 0.0001,
      "loss": 1.5762,
      "step": 369
    },
    {
      "epoch": 0.12391158740790355,
      "grad_norm": 0.4536781311035156,
      "learning_rate": 0.0001,
      "loss": 1.6305,
      "step": 370
    },
    {
      "epoch": 0.12424648359008707,
      "grad_norm": 0.4507012367248535,
      "learning_rate": 0.0001,
      "loss": 1.6478,
      "step": 371
    },
    {
      "epoch": 0.1245813797722706,
      "grad_norm": 0.4527163505554199,
      "learning_rate": 0.0001,
      "loss": 1.5439,
      "step": 372
    },
    {
      "epoch": 0.12491627595445412,
      "grad_norm": 0.45644611120224,
      "learning_rate": 0.0001,
      "loss": 1.6578,
      "step": 373
    },
    {
      "epoch": 0.12525117213663764,
      "grad_norm": 0.48362550139427185,
      "learning_rate": 0.0001,
      "loss": 1.6283,
      "step": 374
    },
    {
      "epoch": 0.12558606831882116,
      "grad_norm": 0.4587193429470062,
      "learning_rate": 0.0001,
      "loss": 1.604,
      "step": 375
    },
    {
      "epoch": 0.12592096450100468,
      "grad_norm": 0.5075493454933167,
      "learning_rate": 0.0001,
      "loss": 1.653,
      "step": 376
    },
    {
      "epoch": 0.1262558606831882,
      "grad_norm": 0.4548470675945282,
      "learning_rate": 0.0001,
      "loss": 1.6488,
      "step": 377
    },
    {
      "epoch": 0.12659075686537175,
      "grad_norm": 0.49109938740730286,
      "learning_rate": 0.0001,
      "loss": 1.5358,
      "step": 378
    },
    {
      "epoch": 0.12692565304755526,
      "grad_norm": 0.4333401322364807,
      "learning_rate": 0.0001,
      "loss": 1.5494,
      "step": 379
    },
    {
      "epoch": 0.12726054922973878,
      "grad_norm": 0.4399802088737488,
      "learning_rate": 0.0001,
      "loss": 1.4847,
      "step": 380
    },
    {
      "epoch": 0.1275954454119223,
      "grad_norm": 0.467268168926239,
      "learning_rate": 0.0001,
      "loss": 1.5548,
      "step": 381
    },
    {
      "epoch": 0.12793034159410582,
      "grad_norm": 0.4567405581474304,
      "learning_rate": 0.0001,
      "loss": 1.5626,
      "step": 382
    },
    {
      "epoch": 0.12826523777628934,
      "grad_norm": 0.4765430688858032,
      "learning_rate": 0.0001,
      "loss": 1.6036,
      "step": 383
    },
    {
      "epoch": 0.12860013395847286,
      "grad_norm": 0.46208590269088745,
      "learning_rate": 0.0001,
      "loss": 1.6427,
      "step": 384
    },
    {
      "epoch": 0.1289350301406564,
      "grad_norm": 0.47394025325775146,
      "learning_rate": 0.0001,
      "loss": 1.562,
      "step": 385
    },
    {
      "epoch": 0.12926992632283993,
      "grad_norm": 0.4401998221874237,
      "learning_rate": 0.0001,
      "loss": 1.6174,
      "step": 386
    },
    {
      "epoch": 0.12960482250502345,
      "grad_norm": 0.4623987078666687,
      "learning_rate": 0.0001,
      "loss": 1.5635,
      "step": 387
    },
    {
      "epoch": 0.12993971868720697,
      "grad_norm": 0.459249883890152,
      "learning_rate": 0.0001,
      "loss": 1.6743,
      "step": 388
    },
    {
      "epoch": 0.1302746148693905,
      "grad_norm": 0.4397091567516327,
      "learning_rate": 0.0001,
      "loss": 1.5132,
      "step": 389
    },
    {
      "epoch": 0.130609511051574,
      "grad_norm": 0.4568580687046051,
      "learning_rate": 0.0001,
      "loss": 1.5375,
      "step": 390
    },
    {
      "epoch": 0.13094440723375753,
      "grad_norm": 0.42559221386909485,
      "learning_rate": 0.0001,
      "loss": 1.5235,
      "step": 391
    },
    {
      "epoch": 0.13127930341594105,
      "grad_norm": 0.4833087921142578,
      "learning_rate": 0.0001,
      "loss": 1.6442,
      "step": 392
    },
    {
      "epoch": 0.1316141995981246,
      "grad_norm": 0.43460625410079956,
      "learning_rate": 0.0001,
      "loss": 1.5806,
      "step": 393
    },
    {
      "epoch": 0.1319490957803081,
      "grad_norm": 0.44238537549972534,
      "learning_rate": 0.0001,
      "loss": 1.5061,
      "step": 394
    },
    {
      "epoch": 0.13228399196249163,
      "grad_norm": 0.4920506775379181,
      "learning_rate": 0.0001,
      "loss": 1.7213,
      "step": 395
    },
    {
      "epoch": 0.13261888814467515,
      "grad_norm": 0.4509977102279663,
      "learning_rate": 0.0001,
      "loss": 1.611,
      "step": 396
    },
    {
      "epoch": 0.13295378432685867,
      "grad_norm": 0.47244614362716675,
      "learning_rate": 0.0001,
      "loss": 1.6666,
      "step": 397
    },
    {
      "epoch": 0.1332886805090422,
      "grad_norm": 0.4667363166809082,
      "learning_rate": 0.0001,
      "loss": 1.5529,
      "step": 398
    },
    {
      "epoch": 0.1336235766912257,
      "grad_norm": 0.4608834683895111,
      "learning_rate": 0.0001,
      "loss": 1.6098,
      "step": 399
    },
    {
      "epoch": 0.13395847287340926,
      "grad_norm": 0.431717187166214,
      "learning_rate": 0.0001,
      "loss": 1.5438,
      "step": 400
    },
    {
      "epoch": 0.13429336905559278,
      "grad_norm": 0.5081926584243774,
      "learning_rate": 0.0001,
      "loss": 1.7707,
      "step": 401
    },
    {
      "epoch": 0.1346282652377763,
      "grad_norm": 0.4610143005847931,
      "learning_rate": 0.0001,
      "loss": 1.6355,
      "step": 402
| }, |
| { |
| "epoch": 0.13496316141995982, |
| "grad_norm": 0.4362616539001465, |
| "learning_rate": 0.0001, |
| "loss": 1.5324, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.13529805760214333, |
| "grad_norm": 0.4362455904483795, |
| "learning_rate": 0.0001, |
| "loss": 1.6054, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.13563295378432685, |
| "grad_norm": 0.443097323179245, |
| "learning_rate": 0.0001, |
| "loss": 1.5388, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.13596784996651037, |
| "grad_norm": 0.4276910424232483, |
| "learning_rate": 0.0001, |
| "loss": 1.501, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.1363027461486939, |
| "grad_norm": 0.4446798861026764, |
| "learning_rate": 0.0001, |
| "loss": 1.5247, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.13663764233087744, |
| "grad_norm": 0.4959389567375183, |
| "learning_rate": 0.0001, |
| "loss": 1.6415, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.13697253851306096, |
| "grad_norm": 0.4616144001483917, |
| "learning_rate": 0.0001, |
| "loss": 1.6574, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.13730743469524448, |
| "grad_norm": 0.46740710735321045, |
| "learning_rate": 0.0001, |
| "loss": 1.5424, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.137642330877428, |
| "grad_norm": 0.43289265036582947, |
| "learning_rate": 0.0001, |
| "loss": 1.528, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.13797722705961152, |
| "grad_norm": 0.4461548924446106, |
| "learning_rate": 0.0001, |
| "loss": 1.5542, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.13831212324179504, |
| "grad_norm": 0.4627973139286041, |
| "learning_rate": 0.0001, |
| "loss": 1.6688, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.13864701942397856, |
| "grad_norm": 0.4514913856983185, |
| "learning_rate": 0.0001, |
| "loss": 1.5679, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.13898191560616208, |
| "grad_norm": 0.4539605975151062, |
| "learning_rate": 0.0001, |
| "loss": 1.5908, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.13931681178834562, |
| "grad_norm": 0.435198038816452, |
| "learning_rate": 0.0001, |
| "loss": 1.5868, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.13965170797052914, |
| "grad_norm": 0.47620803117752075, |
| "learning_rate": 0.0001, |
| "loss": 1.537, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.13998660415271266, |
| "grad_norm": 0.4514625668525696, |
| "learning_rate": 0.0001, |
| "loss": 1.6141, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.14032150033489618, |
| "grad_norm": 0.49498939514160156, |
| "learning_rate": 0.0001, |
| "loss": 1.5555, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.1406563965170797, |
| "grad_norm": 0.44335290789604187, |
| "learning_rate": 0.0001, |
| "loss": 1.5191, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.14099129269926322, |
| "grad_norm": 0.49824535846710205, |
| "learning_rate": 0.0001, |
| "loss": 1.6201, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.14132618888144674, |
| "grad_norm": 0.4429181218147278, |
| "learning_rate": 0.0001, |
| "loss": 1.6018, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.1416610850636303, |
| "grad_norm": 0.4757002294063568, |
| "learning_rate": 0.0001, |
| "loss": 1.5436, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.1419959812458138, |
| "grad_norm": 0.4324745237827301, |
| "learning_rate": 0.0001, |
| "loss": 1.5126, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.14233087742799733, |
| "grad_norm": 0.4601553678512573, |
| "learning_rate": 0.0001, |
| "loss": 1.6138, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.14266577361018085, |
| "grad_norm": 0.4711107611656189, |
| "learning_rate": 0.0001, |
| "loss": 1.5933, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.14300066979236437, |
| "grad_norm": 0.4817027747631073, |
| "learning_rate": 0.0001, |
| "loss": 1.5768, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.14333556597454788, |
| "grad_norm": 0.44300511479377747, |
| "learning_rate": 0.0001, |
| "loss": 1.553, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.1436704621567314, |
| "grad_norm": 0.4497964084148407, |
| "learning_rate": 0.0001, |
| "loss": 1.61, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.14400535833891492, |
| "grad_norm": 0.4525448977947235, |
| "learning_rate": 0.0001, |
| "loss": 1.5809, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.14434025452109847, |
| "grad_norm": 0.45168110728263855, |
| "learning_rate": 0.0001, |
| "loss": 1.6471, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.144675150703282, |
| "grad_norm": 0.4560868740081787, |
| "learning_rate": 0.0001, |
| "loss": 1.5933, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.1450100468854655, |
| "grad_norm": 0.4391075670719147, |
| "learning_rate": 0.0001, |
| "loss": 1.5445, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.14534494306764903, |
| "grad_norm": 0.44584378600120544, |
| "learning_rate": 0.0001, |
| "loss": 1.587, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.14567983924983255, |
| "grad_norm": 0.4544006288051605, |
| "learning_rate": 0.0001, |
| "loss": 1.5944, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.14601473543201607, |
| "grad_norm": 0.41910088062286377, |
| "learning_rate": 0.0001, |
| "loss": 1.5533, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.1463496316141996, |
| "grad_norm": 0.4487232565879822, |
| "learning_rate": 0.0001, |
| "loss": 1.5474, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.14668452779638314, |
| "grad_norm": 0.44522252678871155, |
| "learning_rate": 0.0001, |
| "loss": 1.5708, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.14701942397856665, |
| "grad_norm": 0.4506109058856964, |
| "learning_rate": 0.0001, |
| "loss": 1.6352, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.14735432016075017, |
| "grad_norm": 0.49480026960372925, |
| "learning_rate": 0.0001, |
| "loss": 1.5882, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.1476892163429337, |
| "grad_norm": 0.45891493558883667, |
| "learning_rate": 0.0001, |
| "loss": 1.5441, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.1480241125251172, |
| "grad_norm": 0.46245646476745605, |
| "learning_rate": 0.0001, |
| "loss": 1.5591, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.14835900870730073, |
| "grad_norm": 0.44219061732292175, |
| "learning_rate": 0.0001, |
| "loss": 1.5168, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.14869390488948425, |
| "grad_norm": 0.4645799398422241, |
| "learning_rate": 0.0001, |
| "loss": 1.6928, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.14902880107166777, |
| "grad_norm": 0.48340848088264465, |
| "learning_rate": 0.0001, |
| "loss": 1.6647, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.14936369725385132, |
| "grad_norm": 0.45820561051368713, |
| "learning_rate": 0.0001, |
| "loss": 1.6125, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.14969859343603484, |
| "grad_norm": 0.46245497465133667, |
| "learning_rate": 0.0001, |
| "loss": 1.5606, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.15003348961821836, |
| "grad_norm": 0.4639655351638794, |
| "learning_rate": 0.0001, |
| "loss": 1.62, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.15036838580040188, |
| "grad_norm": 0.43300825357437134, |
| "learning_rate": 0.0001, |
| "loss": 1.5333, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.1507032819825854, |
| "grad_norm": 0.44144105911254883, |
| "learning_rate": 0.0001, |
| "loss": 1.6079, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.15103817816476892, |
| "grad_norm": 0.45080748200416565, |
| "learning_rate": 0.0001, |
| "loss": 1.662, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.15137307434695244, |
| "grad_norm": 0.4435412287712097, |
| "learning_rate": 0.0001, |
| "loss": 1.5376, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.15170797052913595, |
| "grad_norm": 0.4712364077568054, |
| "learning_rate": 0.0001, |
| "loss": 1.5347, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.1520428667113195, |
| "grad_norm": 0.6480394601821899, |
| "learning_rate": 0.0001, |
| "loss": 1.536, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.15237776289350302, |
| "grad_norm": 0.4520147740840912, |
| "learning_rate": 0.0001, |
| "loss": 1.5105, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.15271265907568654, |
| "grad_norm": 0.47112977504730225, |
| "learning_rate": 0.0001, |
| "loss": 1.5844, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.15304755525787006, |
| "grad_norm": 0.42187485098838806, |
| "learning_rate": 0.0001, |
| "loss": 1.4871, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.15338245144005358, |
| "grad_norm": 0.466097891330719, |
| "learning_rate": 0.0001, |
| "loss": 1.656, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.1537173476222371, |
| "grad_norm": 0.48392733931541443, |
| "learning_rate": 0.0001, |
| "loss": 1.7219, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.15405224380442062, |
| "grad_norm": 0.45683392882347107, |
| "learning_rate": 0.0001, |
| "loss": 1.673, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.15438713998660417, |
| "grad_norm": 0.4448898434638977, |
| "learning_rate": 0.0001, |
| "loss": 1.5867, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.15472203616878769, |
| "grad_norm": 0.44210925698280334, |
| "learning_rate": 0.0001, |
| "loss": 1.5485, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.1550569323509712, |
| "grad_norm": 0.4557957649230957, |
| "learning_rate": 0.0001, |
| "loss": 1.6391, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.15539182853315472, |
| "grad_norm": 0.44259247183799744, |
| "learning_rate": 0.0001, |
| "loss": 1.6136, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.15572672471533824, |
| "grad_norm": 0.4970155954360962, |
| "learning_rate": 0.0001, |
| "loss": 1.5398, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.15606162089752176, |
| "grad_norm": 0.4889807403087616, |
| "learning_rate": 0.0001, |
| "loss": 1.6689, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.15639651707970528, |
| "grad_norm": 0.4557911157608032, |
| "learning_rate": 0.0001, |
| "loss": 1.5573, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.1567314132618888, |
| "grad_norm": 0.4632352292537689, |
| "learning_rate": 0.0001, |
| "loss": 1.5766, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.15706630944407235, |
| "grad_norm": 0.4403630793094635, |
| "learning_rate": 0.0001, |
| "loss": 1.6143, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.15740120562625587, |
| "grad_norm": 0.4789538085460663, |
| "learning_rate": 0.0001, |
| "loss": 1.6662, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.1577361018084394, |
| "grad_norm": 0.46481096744537354, |
| "learning_rate": 0.0001, |
| "loss": 1.629, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.1580709979906229, |
| "grad_norm": 0.47119757533073425, |
| "learning_rate": 0.0001, |
| "loss": 1.5943, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.15840589417280643, |
| "grad_norm": 0.4695896804332733, |
| "learning_rate": 0.0001, |
| "loss": 1.6441, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.15874079035498995, |
| "grad_norm": 0.46595636010169983, |
| "learning_rate": 0.0001, |
| "loss": 1.6329, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.15907568653717347, |
| "grad_norm": 0.43559157848358154, |
| "learning_rate": 0.0001, |
| "loss": 1.5618, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.15941058271935699, |
| "grad_norm": 0.4306503236293793, |
| "learning_rate": 0.0001, |
| "loss": 1.5752, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.15974547890154053, |
| "grad_norm": 0.4469747543334961, |
| "learning_rate": 0.0001, |
| "loss": 1.6192, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.16008037508372405, |
| "grad_norm": 0.43521738052368164, |
| "learning_rate": 0.0001, |
| "loss": 1.5409, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.16041527126590757, |
| "grad_norm": 0.44318079948425293, |
| "learning_rate": 0.0001, |
| "loss": 1.5524, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.1607501674480911, |
| "grad_norm": 0.42884916067123413, |
| "learning_rate": 0.0001, |
| "loss": 1.5466, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.1610850636302746, |
| "grad_norm": 0.44447648525238037, |
| "learning_rate": 0.0001, |
| "loss": 1.578, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.16141995981245813, |
| "grad_norm": 0.45701852440834045, |
| "learning_rate": 0.0001, |
| "loss": 1.6444, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.16175485599464165, |
| "grad_norm": 0.44687342643737793, |
| "learning_rate": 0.0001, |
| "loss": 1.6858, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.1620897521768252, |
| "grad_norm": 0.41851550340652466, |
| "learning_rate": 0.0001, |
| "loss": 1.4588, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.16242464835900872, |
| "grad_norm": 0.4491201937198639, |
| "learning_rate": 0.0001, |
| "loss": 1.5753, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.16275954454119224, |
| "grad_norm": 0.4423746168613434, |
| "learning_rate": 0.0001, |
| "loss": 1.5222, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.16309444072337576, |
| "grad_norm": 0.44861212372779846, |
| "learning_rate": 0.0001, |
| "loss": 1.6327, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.16342933690555927, |
| "grad_norm": 0.4326763451099396, |
| "learning_rate": 0.0001, |
| "loss": 1.5405, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.1637642330877428, |
| "grad_norm": 0.43661123514175415, |
| "learning_rate": 0.0001, |
| "loss": 1.52, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.1640991292699263, |
| "grad_norm": 0.4316999614238739, |
| "learning_rate": 0.0001, |
| "loss": 1.516, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.16443402545210983, |
| "grad_norm": 0.4735428988933563, |
| "learning_rate": 0.0001, |
| "loss": 1.6493, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.16476892163429338, |
| "grad_norm": 0.4458985924720764, |
| "learning_rate": 0.0001, |
| "loss": 1.5783, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.1651038178164769, |
| "grad_norm": 0.46412917971611023, |
| "learning_rate": 0.0001, |
| "loss": 1.6794, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.16543871399866042, |
| "grad_norm": 0.4410945773124695, |
| "learning_rate": 0.0001, |
| "loss": 1.6956, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.16577361018084394, |
| "grad_norm": 0.44671037793159485, |
| "learning_rate": 0.0001, |
| "loss": 1.6173, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.16610850636302746, |
| "grad_norm": 0.44258755445480347, |
| "learning_rate": 0.0001, |
| "loss": 1.6461, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.16644340254521098, |
| "grad_norm": 0.4786889851093292, |
| "learning_rate": 0.0001, |
| "loss": 1.4906, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.1667782987273945, |
| "grad_norm": 0.4585178792476654, |
| "learning_rate": 0.0001, |
| "loss": 1.6344, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.16711319490957804, |
| "grad_norm": 0.44208812713623047, |
| "learning_rate": 0.0001, |
| "loss": 1.5837, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.16744809109176156, |
| "grad_norm": 0.4394063651561737, |
| "learning_rate": 0.0001, |
| "loss": 1.4863, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.16778298727394508, |
| "grad_norm": 0.4426101744174957, |
| "learning_rate": 0.0001, |
| "loss": 1.5901, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.1681178834561286, |
| "grad_norm": 0.45466336607933044, |
| "learning_rate": 0.0001, |
| "loss": 1.5928, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.16845277963831212, |
| "grad_norm": 0.44412124156951904, |
| "learning_rate": 0.0001, |
| "loss": 1.6238, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.16878767582049564, |
| "grad_norm": 0.45599251985549927, |
| "learning_rate": 0.0001, |
| "loss": 1.6092, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.16912257200267916, |
| "grad_norm": 0.4290856719017029, |
| "learning_rate": 0.0001, |
| "loss": 1.5464, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.16945746818486268, |
| "grad_norm": 0.45510977506637573, |
| "learning_rate": 0.0001, |
| "loss": 1.5688, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.16979236436704623, |
| "grad_norm": 0.44688305258750916, |
| "learning_rate": 0.0001, |
| "loss": 1.5343, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.17012726054922975, |
| "grad_norm": 0.4349426031112671, |
| "learning_rate": 0.0001, |
| "loss": 1.6256, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.17046215673141327, |
| "grad_norm": 0.45415475964546204, |
| "learning_rate": 0.0001, |
| "loss": 1.643, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.1707970529135968, |
| "grad_norm": 0.4510926604270935, |
| "learning_rate": 0.0001, |
| "loss": 1.5533, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.1711319490957803, |
| "grad_norm": 0.43210574984550476, |
| "learning_rate": 0.0001, |
| "loss": 1.5273, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.17146684527796383, |
| "grad_norm": 0.49950575828552246, |
| "learning_rate": 0.0001, |
| "loss": 1.6401, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.17180174146014734, |
| "grad_norm": 0.4558428227901459, |
| "learning_rate": 0.0001, |
| "loss": 1.5263, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.17213663764233086, |
| "grad_norm": 0.46560290455818176, |
| "learning_rate": 0.0001, |
| "loss": 1.6968, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.1724715338245144, |
| "grad_norm": 0.4596928656101227, |
| "learning_rate": 0.0001, |
| "loss": 1.6222, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.17280643000669793, |
| "grad_norm": 0.43341562151908875, |
| "learning_rate": 0.0001, |
| "loss": 1.5597, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.17314132618888145, |
| "grad_norm": 0.4746050536632538, |
| "learning_rate": 0.0001, |
| "loss": 1.5563, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.17347622237106497, |
| "grad_norm": 0.44967103004455566, |
| "learning_rate": 0.0001, |
| "loss": 1.5534, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.1738111185532485, |
| "grad_norm": 0.43267977237701416, |
| "learning_rate": 0.0001, |
| "loss": 1.4994, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.174146014735432, |
| "grad_norm": 0.45056331157684326, |
| "learning_rate": 0.0001, |
| "loss": 1.5279, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.17448091091761553, |
| "grad_norm": 0.4300089478492737, |
| "learning_rate": 0.0001, |
| "loss": 1.5114, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.17481580709979908, |
| "grad_norm": 0.48487889766693115, |
| "learning_rate": 0.0001, |
| "loss": 1.5379, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.1751507032819826, |
| "grad_norm": 0.4652538299560547, |
| "learning_rate": 0.0001, |
| "loss": 1.5751, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.17548559946416611, |
| "grad_norm": 0.5223555564880371, |
| "learning_rate": 0.0001, |
| "loss": 1.6954, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.17582049564634963, |
| "grad_norm": 0.4837857186794281, |
| "learning_rate": 0.0001, |
| "loss": 1.6461, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.17615539182853315, |
| "grad_norm": 0.4681553542613983, |
| "learning_rate": 0.0001, |
| "loss": 1.5406, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.17649028801071667, |
| "grad_norm": 0.44726818799972534, |
| "learning_rate": 0.0001, |
| "loss": 1.5826, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.1768251841929002, |
| "grad_norm": 0.4904516637325287, |
| "learning_rate": 0.0001, |
| "loss": 1.6685, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.1771600803750837, |
| "grad_norm": 0.4621741771697998, |
| "learning_rate": 0.0001, |
| "loss": 1.6141, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.17749497655726726, |
| "grad_norm": 0.46151936054229736, |
| "learning_rate": 0.0001, |
| "loss": 1.497, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.17782987273945078, |
| "grad_norm": 0.4495275914669037, |
| "learning_rate": 0.0001, |
| "loss": 1.5489, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.1781647689216343, |
| "grad_norm": 0.44468608498573303, |
| "learning_rate": 0.0001, |
| "loss": 1.5458, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.17849966510381782, |
| "grad_norm": 0.4433906078338623, |
| "learning_rate": 0.0001, |
| "loss": 1.6035, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.17883456128600134, |
| "grad_norm": 0.4223592281341553, |
| "learning_rate": 0.0001, |
| "loss": 1.5338, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.17916945746818486, |
| "grad_norm": 0.46216294169425964, |
| "learning_rate": 0.0001, |
| "loss": 1.6656, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.17950435365036838, |
| "grad_norm": 0.46588945388793945, |
| "learning_rate": 0.0001, |
| "loss": 1.5895, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.1798392498325519, |
| "grad_norm": 0.44910311698913574, |
| "learning_rate": 0.0001, |
| "loss": 1.6307, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.18017414601473544, |
| "grad_norm": 0.4371938407421112, |
| "learning_rate": 0.0001, |
| "loss": 1.5129, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.18050904219691896, |
| "grad_norm": 0.4528622031211853, |
| "learning_rate": 0.0001, |
| "loss": 1.6958, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.18084393837910248, |
| "grad_norm": 0.46079522371292114, |
| "learning_rate": 0.0001, |
| "loss": 1.642, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.181178834561286, |
| "grad_norm": 0.4528695344924927, |
| "learning_rate": 0.0001, |
| "loss": 1.5441, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.18151373074346952, |
| "grad_norm": 0.44676560163497925, |
| "learning_rate": 0.0001, |
| "loss": 1.548, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.18184862692565304, |
| "grad_norm": 0.42788952589035034, |
| "learning_rate": 0.0001, |
| "loss": 1.5221, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.18218352310783656, |
| "grad_norm": 0.4401197135448456, |
| "learning_rate": 0.0001, |
| "loss": 1.5169, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.1825184192900201, |
| "grad_norm": 0.4610879719257355, |
| "learning_rate": 0.0001, |
| "loss": 1.6219, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.18285331547220363, |
| "grad_norm": 0.45374858379364014, |
| "learning_rate": 0.0001, |
| "loss": 1.5726, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.18318821165438715, |
| "grad_norm": 0.45622724294662476, |
| "learning_rate": 0.0001, |
| "loss": 1.6732, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.18352310783657066, |
| "grad_norm": 0.45034128427505493, |
| "learning_rate": 0.0001, |
| "loss": 1.6309, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.18385800401875418, |
| "grad_norm": 0.46395254135131836, |
| "learning_rate": 0.0001, |
| "loss": 1.5214, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.1841929002009377, |
| "grad_norm": 0.44303593039512634, |
| "learning_rate": 0.0001, |
| "loss": 1.546, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.18452779638312122, |
| "grad_norm": 0.4510901868343353, |
| "learning_rate": 0.0001, |
| "loss": 1.5942, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.18486269256530474, |
| "grad_norm": 0.44481050968170166, |
| "learning_rate": 0.0001, |
| "loss": 1.5295, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.1851975887474883, |
| "grad_norm": 0.46099281311035156, |
| "learning_rate": 0.0001, |
| "loss": 1.5907, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.1855324849296718, |
| "grad_norm": 0.4374411106109619, |
| "learning_rate": 0.0001, |
| "loss": 1.5278, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.18586738111185533, |
| "grad_norm": 0.4573977291584015, |
| "learning_rate": 0.0001, |
| "loss": 1.4811, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.18620227729403885, |
| "grad_norm": 0.4958246052265167, |
| "learning_rate": 0.0001, |
| "loss": 1.6173, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.18653717347622237, |
| "grad_norm": 0.44467490911483765, |
| "learning_rate": 0.0001, |
| "loss": 1.5038, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.1868720696584059, |
| "grad_norm": 0.45773619413375854, |
| "learning_rate": 0.0001, |
| "loss": 1.5535, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.1872069658405894, |
| "grad_norm": 0.47369417548179626, |
| "learning_rate": 0.0001, |
| "loss": 1.6407, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.18754186202277295, |
| "grad_norm": 0.4539751708507538, |
| "learning_rate": 0.0001, |
| "loss": 1.5265, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.18787675820495647, |
| "grad_norm": 0.45979294180870056, |
| "learning_rate": 0.0001, |
| "loss": 1.7121, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.18821165438714, |
| "grad_norm": 0.4300471544265747, |
| "learning_rate": 0.0001, |
| "loss": 1.5071, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.1885465505693235, |
| "grad_norm": 0.41900163888931274, |
| "learning_rate": 0.0001, |
| "loss": 1.5415, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.18888144675150703, |
| "grad_norm": 0.4393864870071411, |
| "learning_rate": 0.0001, |
| "loss": 1.5997, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.18921634293369055, |
| "grad_norm": 0.42667317390441895, |
| "learning_rate": 0.0001, |
| "loss": 1.5435, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.18955123911587407, |
| "grad_norm": 0.4409005045890808, |
| "learning_rate": 0.0001, |
| "loss": 1.6134, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.1898861352980576, |
| "grad_norm": 0.4696614444255829, |
| "learning_rate": 0.0001, |
| "loss": 1.6321, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.19022103148024114, |
| "grad_norm": 0.4674033522605896, |
| "learning_rate": 0.0001, |
| "loss": 1.5435, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.19055592766242466, |
| "grad_norm": 0.46117159724235535, |
| "learning_rate": 0.0001, |
| "loss": 1.5953, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.19089082384460818, |
| "grad_norm": 0.48608630895614624, |
| "learning_rate": 0.0001, |
| "loss": 1.5979, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.1912257200267917, |
| "grad_norm": 0.4258008897304535, |
| "learning_rate": 0.0001, |
| "loss": 1.5425, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.19156061620897522, |
| "grad_norm": 0.4251329302787781, |
| "learning_rate": 0.0001, |
| "loss": 1.4931, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.19189551239115873, |
| "grad_norm": 0.4929272532463074, |
| "learning_rate": 0.0001, |
| "loss": 1.7021, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.19223040857334225, |
| "grad_norm": 0.4487670958042145, |
| "learning_rate": 0.0001, |
| "loss": 1.6679, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.19256530475552577, |
| "grad_norm": 0.4457535147666931, |
| "learning_rate": 0.0001, |
| "loss": 1.6178, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.19290020093770932, |
| "grad_norm": 0.42327451705932617, |
| "learning_rate": 0.0001, |
| "loss": 1.5191, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.19323509711989284, |
| "grad_norm": 0.43987563252449036, |
| "learning_rate": 0.0001, |
| "loss": 1.6082, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.19356999330207636, |
| "grad_norm": 0.43453678488731384, |
| "learning_rate": 0.0001, |
| "loss": 1.561, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.19390488948425988, |
| "grad_norm": 0.4256730377674103, |
| "learning_rate": 0.0001, |
| "loss": 1.5515, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.1942397856664434, |
| "grad_norm": 0.4502320885658264, |
| "learning_rate": 0.0001, |
| "loss": 1.6113, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.19457468184862692, |
| "grad_norm": 0.444354772567749, |
| "learning_rate": 0.0001, |
| "loss": 1.5668, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.19490957803081044, |
| "grad_norm": 0.43178266286849976, |
| "learning_rate": 0.0001, |
| "loss": 1.5433, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.19524447421299398, |
| "grad_norm": 0.4282374382019043, |
| "learning_rate": 0.0001, |
| "loss": 1.5646, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.1955793703951775, |
| "grad_norm": 0.46302083134651184, |
| "learning_rate": 0.0001, |
| "loss": 1.547, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.19591426657736102, |
| "grad_norm": 0.4419693052768707, |
| "learning_rate": 0.0001, |
| "loss": 1.6002, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.19624916275954454, |
| "grad_norm": 0.4477298855781555, |
| "learning_rate": 0.0001, |
| "loss": 1.6908, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.19658405894172806, |
| "grad_norm": 0.44686245918273926, |
| "learning_rate": 0.0001, |
| "loss": 1.541, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.19691895512391158, |
| "grad_norm": 0.44238075613975525, |
| "learning_rate": 0.0001, |
| "loss": 1.6098, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.1972538513060951, |
| "grad_norm": 0.44104936718940735, |
| "learning_rate": 0.0001, |
| "loss": 1.5786, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.19758874748827862, |
| "grad_norm": 0.44523486495018005, |
| "learning_rate": 0.0001, |
| "loss": 1.5821, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.19792364367046217, |
| "grad_norm": 0.4347565770149231, |
| "learning_rate": 0.0001, |
| "loss": 1.531, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.1982585398526457, |
| "grad_norm": 0.4408382177352905, |
| "learning_rate": 0.0001, |
| "loss": 1.522, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.1985934360348292, |
| "grad_norm": 0.45242637395858765, |
| "learning_rate": 0.0001, |
| "loss": 1.5678, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.19892833221701273, |
| "grad_norm": 0.42536839842796326, |
| "learning_rate": 0.0001, |
| "loss": 1.389, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.19926322839919625, |
| "grad_norm": 0.4437471628189087, |
| "learning_rate": 0.0001, |
| "loss": 1.5923, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.19959812458137977, |
| "grad_norm": 0.46893227100372314, |
| "learning_rate": 0.0001, |
| "loss": 1.7063, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.19993302076356329, |
| "grad_norm": 0.46792319416999817, |
| "learning_rate": 0.0001, |
| "loss": 1.6279, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.20026791694574683, |
| "grad_norm": 0.43383094668388367, |
| "learning_rate": 0.0001, |
| "loss": 1.5666, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.20060281312793035, |
| "grad_norm": 0.4612673819065094, |
| "learning_rate": 0.0001, |
| "loss": 1.657, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.20093770931011387, |
| "grad_norm": 0.45714735984802246, |
| "learning_rate": 0.0001, |
| "loss": 1.5166, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.2012726054922974, |
| "grad_norm": 0.4980306625366211, |
| "learning_rate": 0.0001, |
| "loss": 1.6338, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.2016075016744809, |
| "grad_norm": 0.4374399483203888, |
| "learning_rate": 0.0001, |
| "loss": 1.4887, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.20194239785666443, |
| "grad_norm": 0.4663514792919159, |
| "learning_rate": 0.0001, |
| "loss": 1.583, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.20227729403884795, |
| "grad_norm": 0.4445118308067322, |
| "learning_rate": 0.0001, |
| "loss": 1.5925, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.20261219022103147, |
| "grad_norm": 0.47844797372817993, |
| "learning_rate": 0.0001, |
| "loss": 1.6224, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.20294708640321502, |
| "grad_norm": 0.43679067492485046, |
| "learning_rate": 0.0001, |
| "loss": 1.5509, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.20328198258539854, |
| "grad_norm": 0.4718396067619324, |
| "learning_rate": 0.0001, |
| "loss": 1.5242, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.20361687876758205, |
| "grad_norm": 0.43233776092529297, |
| "learning_rate": 0.0001, |
| "loss": 1.5069, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.20395177494976557, |
| "grad_norm": 0.45062628388404846, |
| "learning_rate": 0.0001, |
| "loss": 1.5507, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.2042866711319491, |
| "grad_norm": 0.43045535683631897, |
| "learning_rate": 0.0001, |
| "loss": 1.4764, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.2046215673141326, |
| "grad_norm": 0.4338122010231018, |
| "learning_rate": 0.0001, |
| "loss": 1.4395, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.20495646349631613, |
| "grad_norm": 0.4495809078216553, |
| "learning_rate": 0.0001, |
| "loss": 1.5663, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.20529135967849965, |
| "grad_norm": 0.4504551291465759, |
| "learning_rate": 0.0001, |
| "loss": 1.5791, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.2056262558606832, |
| "grad_norm": 0.4504256546497345, |
| "learning_rate": 0.0001, |
| "loss": 1.5453, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.20596115204286672, |
| "grad_norm": 0.44566693902015686, |
| "learning_rate": 0.0001, |
| "loss": 1.4733, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.20629604822505024, |
| "grad_norm": 0.45763394236564636, |
| "learning_rate": 0.0001, |
| "loss": 1.5266, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.20663094440723376, |
| "grad_norm": 0.4807845652103424, |
| "learning_rate": 0.0001, |
| "loss": 1.5362, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.20696584058941728, |
| "grad_norm": 0.44216951727867126, |
| "learning_rate": 0.0001, |
| "loss": 1.6271, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.2073007367716008, |
| "grad_norm": 0.4926336109638214, |
| "learning_rate": 0.0001, |
| "loss": 1.5567, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.20763563295378432, |
| "grad_norm": 0.4597328305244446, |
| "learning_rate": 0.0001, |
| "loss": 1.5826, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.20797052913596786, |
| "grad_norm": 0.5120271444320679, |
| "learning_rate": 0.0001, |
| "loss": 1.4821, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.20830542531815138, |
| "grad_norm": 0.44934895634651184, |
| "learning_rate": 0.0001, |
| "loss": 1.6378, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.2086403215003349, |
| "grad_norm": 0.5311879515647888, |
| "learning_rate": 0.0001, |
| "loss": 1.6859, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.20897521768251842, |
| "grad_norm": 0.4356757402420044, |
| "learning_rate": 0.0001, |
| "loss": 1.5312, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.20931011386470194, |
| "grad_norm": 0.4556974470615387, |
| "learning_rate": 0.0001, |
| "loss": 1.5898, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.20964501004688546, |
| "grad_norm": 0.45119380950927734, |
| "learning_rate": 0.0001, |
| "loss": 1.6319, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.20997990622906898, |
| "grad_norm": 0.44451865553855896, |
| "learning_rate": 0.0001, |
| "loss": 1.5044, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.2103148024112525, |
| "grad_norm": 0.4557413160800934, |
| "learning_rate": 0.0001, |
| "loss": 1.5661, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.21064969859343605, |
| "grad_norm": 0.45663654804229736, |
| "learning_rate": 0.0001, |
| "loss": 1.5894, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.21098459477561957, |
| "grad_norm": 0.47488924860954285, |
| "learning_rate": 0.0001, |
| "loss": 1.598, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.21131949095780309, |
| "grad_norm": 0.4461064338684082, |
| "learning_rate": 0.0001, |
| "loss": 1.4725, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.2116543871399866, |
| "grad_norm": 0.4668349027633667, |
| "learning_rate": 0.0001, |
| "loss": 1.4965, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.21198928332217012, |
| "grad_norm": 0.45846477150917053, |
| "learning_rate": 0.0001, |
| "loss": 1.5392, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.21232417950435364, |
| "grad_norm": 0.456134557723999, |
| "learning_rate": 0.0001, |
| "loss": 1.5775, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.21265907568653716, |
| "grad_norm": 0.4363207221031189, |
| "learning_rate": 0.0001, |
| "loss": 1.5577, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.21299397186872068, |
| "grad_norm": 0.4299388825893402, |
| "learning_rate": 0.0001, |
| "loss": 1.5528, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.21332886805090423, |
| "grad_norm": 0.4551451504230499, |
| "learning_rate": 0.0001, |
| "loss": 1.6204, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.21366376423308775, |
| "grad_norm": 0.459564745426178, |
| "learning_rate": 0.0001, |
| "loss": 1.5634, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.21399866041527127, |
| "grad_norm": 0.4449961483478546, |
| "learning_rate": 0.0001, |
| "loss": 1.6278, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.2143335565974548, |
| "grad_norm": 0.42216700315475464, |
| "learning_rate": 0.0001, |
| "loss": 1.5006, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.2146684527796383, |
| "grad_norm": 0.4538195729255676, |
| "learning_rate": 0.0001, |
| "loss": 1.5815, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.21500334896182183, |
| "grad_norm": 0.4515959918498993, |
| "learning_rate": 0.0001, |
| "loss": 1.5853, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.21533824514400535, |
| "grad_norm": 0.4696963429450989, |
| "learning_rate": 0.0001, |
| "loss": 1.6153, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.2156731413261889, |
| "grad_norm": 0.45178598165512085, |
| "learning_rate": 0.0001, |
| "loss": 1.6019, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.2160080375083724, |
| "grad_norm": 0.4643690884113312, |
| "learning_rate": 0.0001, |
| "loss": 1.608, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.21634293369055593, |
| "grad_norm": 0.4362979531288147, |
| "learning_rate": 0.0001, |
| "loss": 1.6127, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.21667782987273945, |
| "grad_norm": 0.4543468952178955, |
| "learning_rate": 0.0001, |
| "loss": 1.6253, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.21701272605492297, |
| "grad_norm": 0.44180887937545776, |
| "learning_rate": 0.0001, |
| "loss": 1.5105, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.2173476222371065, |
| "grad_norm": 0.45792412757873535, |
| "learning_rate": 0.0001, |
| "loss": 1.6662, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.21768251841929, |
| "grad_norm": 0.4656217694282532, |
| "learning_rate": 0.0001, |
| "loss": 1.7252, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.21801741460147353, |
| "grad_norm": 0.44460582733154297, |
| "learning_rate": 0.0001, |
| "loss": 1.5543, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.21835231078365708, |
| "grad_norm": 0.45652541518211365, |
| "learning_rate": 0.0001, |
| "loss": 1.5782, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.2186872069658406, |
| "grad_norm": 0.449512779712677, |
| "learning_rate": 0.0001, |
| "loss": 1.5156, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.21902210314802412, |
| "grad_norm": 0.4760141670703888, |
| "learning_rate": 0.0001, |
| "loss": 1.5886, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.21935699933020764, |
| "grad_norm": 0.45350316166877747, |
| "learning_rate": 0.0001, |
| "loss": 1.5902, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.21969189551239116, |
| "grad_norm": 0.4670565128326416, |
| "learning_rate": 0.0001, |
| "loss": 1.509, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.22002679169457467, |
| "grad_norm": 0.462492436170578, |
| "learning_rate": 0.0001, |
| "loss": 1.5283, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.2203616878767582, |
| "grad_norm": 0.4635310173034668, |
| "learning_rate": 0.0001, |
| "loss": 1.6162, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.22069658405894174, |
| "grad_norm": 0.48554205894470215, |
| "learning_rate": 0.0001, |
| "loss": 1.5574, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.22103148024112526, |
| "grad_norm": 0.4405190050601959, |
| "learning_rate": 0.0001, |
| "loss": 1.5643, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.22136637642330878, |
| "grad_norm": 0.506333589553833, |
| "learning_rate": 0.0001, |
| "loss": 1.5499, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.2217012726054923, |
| "grad_norm": 0.4472103714942932, |
| "learning_rate": 0.0001, |
| "loss": 1.4229, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.22203616878767582, |
| "grad_norm": 0.46667394042015076, |
| "learning_rate": 0.0001, |
| "loss": 1.5524, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.22237106496985934, |
| "grad_norm": 0.4534272253513336, |
| "learning_rate": 0.0001, |
| "loss": 1.6028, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.22270596115204286, |
| "grad_norm": 0.49894607067108154, |
| "learning_rate": 0.0001, |
| "loss": 1.6009, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.22304085733422638, |
| "grad_norm": 0.4377092123031616, |
| "learning_rate": 0.0001, |
| "loss": 1.5558, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.22337575351640993, |
| "grad_norm": 0.47780877351760864, |
| "learning_rate": 0.0001, |
| "loss": 1.5691, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.22371064969859344, |
| "grad_norm": 0.4473220705986023, |
| "learning_rate": 0.0001, |
| "loss": 1.63, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.22404554588077696, |
| "grad_norm": 0.44755011796951294, |
| "learning_rate": 0.0001, |
| "loss": 1.5047, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.22438044206296048, |
| "grad_norm": 0.4666520059108734, |
| "learning_rate": 0.0001, |
| "loss": 1.5957, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.224715338245144, |
| "grad_norm": 0.4471485912799835, |
| "learning_rate": 0.0001, |
| "loss": 1.5304, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.22505023442732752, |
| "grad_norm": 0.4512856900691986, |
| "learning_rate": 0.0001, |
| "loss": 1.6551, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.22538513060951104, |
| "grad_norm": 0.4558720290660858, |
| "learning_rate": 0.0001, |
| "loss": 1.5348, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.22572002679169456, |
| "grad_norm": 0.44984716176986694, |
| "learning_rate": 0.0001, |
| "loss": 1.5811, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.2260549229738781, |
| "grad_norm": 0.4666685163974762, |
| "learning_rate": 0.0001, |
| "loss": 1.6274, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.22638981915606163, |
| "grad_norm": 0.4678918719291687, |
| "learning_rate": 0.0001, |
| "loss": 1.6061, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.22672471533824515, |
| "grad_norm": 0.4496474266052246, |
| "learning_rate": 0.0001, |
| "loss": 1.4882, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.22705961152042867, |
| "grad_norm": 0.455914169549942, |
| "learning_rate": 0.0001, |
| "loss": 1.6567, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.2273945077026122, |
| "grad_norm": 0.43080762028694153, |
| "learning_rate": 0.0001, |
| "loss": 1.4999, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.2277294038847957, |
| "grad_norm": 0.44386598467826843, |
| "learning_rate": 0.0001, |
| "loss": 1.5976, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.22806430006697923, |
| "grad_norm": 0.44304677844047546, |
| "learning_rate": 0.0001, |
| "loss": 1.5395, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.22839919624916277, |
| "grad_norm": 0.44651317596435547, |
| "learning_rate": 0.0001, |
| "loss": 1.5642, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.2287340924313463, |
| "grad_norm": 0.4273903965950012, |
| "learning_rate": 0.0001, |
| "loss": 1.4957, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.2290689886135298, |
| "grad_norm": 0.45144903659820557, |
| "learning_rate": 0.0001, |
| "loss": 1.6001, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.22940388479571333, |
| "grad_norm": 0.4881639778614044, |
| "learning_rate": 0.0001, |
| "loss": 1.5736, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.22973878097789685, |
| "grad_norm": 0.4803936183452606, |
| "learning_rate": 0.0001, |
| "loss": 1.6289, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.23007367716008037, |
| "grad_norm": 0.44501784443855286, |
| "learning_rate": 0.0001, |
| "loss": 1.5637, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.2304085733422639, |
| "grad_norm": 0.44302091002464294, |
| "learning_rate": 0.0001, |
| "loss": 1.3864, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.2307434695244474, |
| "grad_norm": 0.4551304280757904, |
| "learning_rate": 0.0001, |
| "loss": 1.606, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.23107836570663096, |
| "grad_norm": 0.5015878677368164, |
| "learning_rate": 0.0001, |
| "loss": 1.645, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.23141326188881448, |
| "grad_norm": 0.45502328872680664, |
| "learning_rate": 0.0001, |
| "loss": 1.5172, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.231748158070998, |
| "grad_norm": 0.45328280329704285, |
| "learning_rate": 0.0001, |
| "loss": 1.5835, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.23208305425318151, |
| "grad_norm": 0.44391489028930664, |
| "learning_rate": 0.0001, |
| "loss": 1.5525, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.23241795043536503, |
| "grad_norm": 0.4328315258026123, |
| "learning_rate": 0.0001, |
| "loss": 1.4947, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.23275284661754855, |
| "grad_norm": 0.4365626871585846, |
| "learning_rate": 0.0001, |
| "loss": 1.5518, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.23308774279973207, |
| "grad_norm": 0.4440605640411377, |
| "learning_rate": 0.0001, |
| "loss": 1.5446, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.23342263898191562, |
| "grad_norm": 0.4284314811229706, |
| "learning_rate": 0.0001, |
| "loss": 1.4681, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.23375753516409914, |
| "grad_norm": 0.4485720992088318, |
| "learning_rate": 0.0001, |
| "loss": 1.5829, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.23409243134628266, |
| "grad_norm": 0.45565420389175415, |
| "learning_rate": 0.0001, |
| "loss": 1.6203, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.23442732752846618, |
| "grad_norm": 0.45044267177581787, |
| "learning_rate": 0.0001, |
| "loss": 1.5372, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.2347622237106497, |
| "grad_norm": 0.46250998973846436, |
| "learning_rate": 0.0001, |
| "loss": 1.5617, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.23509711989283322, |
| "grad_norm": 0.47578829526901245, |
| "learning_rate": 0.0001, |
| "loss": 1.6007, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.23543201607501674, |
| "grad_norm": 0.4471472501754761, |
| "learning_rate": 0.0001, |
| "loss": 1.4711, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.23576691225720026, |
| "grad_norm": 0.4580185115337372, |
| "learning_rate": 0.0001, |
| "loss": 1.5707, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.2361018084393838, |
| "grad_norm": 0.4444526135921478, |
| "learning_rate": 0.0001, |
| "loss": 1.5372, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.23643670462156732, |
| "grad_norm": 0.4543907046318054, |
| "learning_rate": 0.0001, |
| "loss": 1.6054, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.23677160080375084, |
| "grad_norm": 0.4625626802444458, |
| "learning_rate": 0.0001, |
| "loss": 1.6129, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.23710649698593436, |
| "grad_norm": 0.45688140392303467, |
| "learning_rate": 0.0001, |
| "loss": 1.6426, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.23744139316811788, |
| "grad_norm": 0.4441126883029938, |
| "learning_rate": 0.0001, |
| "loss": 1.5791, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.2377762893503014, |
| "grad_norm": 0.4567664861679077, |
| "learning_rate": 0.0001, |
| "loss": 1.4215, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.23811118553248492, |
| "grad_norm": 0.47155311703681946, |
| "learning_rate": 0.0001, |
| "loss": 1.6239, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.23844608171466844, |
| "grad_norm": 0.4321557581424713, |
| "learning_rate": 0.0001, |
| "loss": 1.493, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.238780977896852, |
| "grad_norm": 0.43683552742004395, |
| "learning_rate": 0.0001, |
| "loss": 1.5824, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.2391158740790355, |
| "grad_norm": 0.4407794773578644, |
| "learning_rate": 0.0001, |
| "loss": 1.5692, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.23945077026121903, |
| "grad_norm": 0.43725642561912537, |
| "learning_rate": 0.0001, |
| "loss": 1.5868, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.23978566644340255, |
| "grad_norm": 0.4484725296497345, |
| "learning_rate": 0.0001, |
| "loss": 1.6105, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.24012056262558606, |
| "grad_norm": 0.47937703132629395, |
| "learning_rate": 0.0001, |
| "loss": 1.5828, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.24045545880776958, |
| "grad_norm": 0.45690029859542847, |
| "learning_rate": 0.0001, |
| "loss": 1.542, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.2407903549899531, |
| "grad_norm": 0.4617394208908081, |
| "learning_rate": 0.0001, |
| "loss": 1.6432, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.24112525117213665, |
| "grad_norm": 0.46813878417015076, |
| "learning_rate": 0.0001, |
| "loss": 1.5294, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.24146014735432017, |
| "grad_norm": 0.44215312600135803, |
| "learning_rate": 0.0001, |
| "loss": 1.5601, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.2417950435365037, |
| "grad_norm": 0.44908544421195984, |
| "learning_rate": 0.0001, |
| "loss": 1.5717, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.2421299397186872, |
| "grad_norm": 0.46838417649269104, |
| "learning_rate": 0.0001, |
| "loss": 1.5559, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.24246483590087073, |
| "grad_norm": 0.4515591859817505, |
| "learning_rate": 0.0001, |
| "loss": 1.6158, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.24279973208305425, |
| "grad_norm": 0.4613933563232422, |
| "learning_rate": 0.0001, |
| "loss": 1.5708, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.24313462826523777, |
| "grad_norm": 0.4435577392578125, |
| "learning_rate": 0.0001, |
| "loss": 1.5237, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.2434695244474213, |
| "grad_norm": 0.43679118156433105, |
| "learning_rate": 0.0001, |
| "loss": 1.5505, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.24380442062960483, |
| "grad_norm": 0.42168736457824707, |
| "learning_rate": 0.0001, |
| "loss": 1.5065, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.24413931681178835, |
| "grad_norm": 0.46262645721435547, |
| "learning_rate": 0.0001, |
| "loss": 1.5761, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.24447421299397187, |
| "grad_norm": 0.4420361816883087, |
| "learning_rate": 0.0001, |
| "loss": 1.4914, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.2448091091761554, |
| "grad_norm": 0.4631895124912262, |
| "learning_rate": 0.0001, |
| "loss": 1.6061, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.2451440053583389, |
| "grad_norm": 0.4412582814693451, |
| "learning_rate": 0.0001, |
| "loss": 1.5394, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.24547890154052243, |
| "grad_norm": 0.461233913898468, |
| "learning_rate": 0.0001, |
| "loss": 1.6477, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.24581379772270595, |
| "grad_norm": 0.44835495948791504, |
| "learning_rate": 0.0001, |
| "loss": 1.5493, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.24614869390488947, |
| "grad_norm": 0.4717530310153961, |
| "learning_rate": 0.0001, |
| "loss": 1.5904, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.24648359008707302, |
| "grad_norm": 0.44697943329811096, |
| "learning_rate": 0.0001, |
| "loss": 1.5974, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.24681848626925654, |
| "grad_norm": 0.4591861665248871, |
| "learning_rate": 0.0001, |
| "loss": 1.5437, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.24715338245144006, |
| "grad_norm": 0.4309515357017517, |
| "learning_rate": 0.0001, |
| "loss": 1.5082, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.24748827863362358, |
| "grad_norm": 0.48085346817970276, |
| "learning_rate": 0.0001, |
| "loss": 1.5783, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.2478231748158071, |
| "grad_norm": 0.4605758786201477, |
| "learning_rate": 0.0001, |
| "loss": 1.6161, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.24815807099799062, |
| "grad_norm": 0.449176162481308, |
| "learning_rate": 0.0001, |
| "loss": 1.6114, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.24849296718017413, |
| "grad_norm": 0.4406338632106781, |
| "learning_rate": 0.0001, |
| "loss": 1.5702, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.24882786336235768, |
| "grad_norm": 0.444603830575943, |
| "learning_rate": 0.0001, |
| "loss": 1.6437, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.2491627595445412, |
| "grad_norm": 0.4413153827190399, |
| "learning_rate": 0.0001, |
| "loss": 1.5557, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.24949765572672472, |
| "grad_norm": 0.4562162756919861, |
| "learning_rate": 0.0001, |
| "loss": 1.7314, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.24983255190890824, |
| "grad_norm": 0.4683936536312103, |
| "learning_rate": 0.0001, |
| "loss": 1.5625, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.25016744809109176, |
| "grad_norm": 0.43468979001045227, |
| "learning_rate": 0.0001, |
| "loss": 1.5982, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.2505023442732753, |
| "grad_norm": 0.45922327041625977, |
| "learning_rate": 0.0001, |
| "loss": 1.5583, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.2508372404554588, |
| "grad_norm": 0.45437920093536377, |
| "learning_rate": 0.0001, |
| "loss": 1.5728, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.2511721366376423, |
| "grad_norm": 0.43223872780799866, |
| "learning_rate": 0.0001, |
| "loss": 1.5178, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.25150703281982584, |
| "grad_norm": 0.4360998868942261, |
| "learning_rate": 0.0001, |
| "loss": 1.5791, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.25184192900200936, |
| "grad_norm": 0.4442369043827057, |
| "learning_rate": 0.0001, |
| "loss": 1.5894, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.2521768251841929, |
| "grad_norm": 0.4531276226043701, |
| "learning_rate": 0.0001, |
| "loss": 1.5669, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.2525117213663764, |
| "grad_norm": 0.4386443495750427, |
| "learning_rate": 0.0001, |
| "loss": 1.4815, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.25284661754855997, |
| "grad_norm": 0.45330408215522766, |
| "learning_rate": 0.0001, |
| "loss": 1.6762, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.2531815137307435, |
| "grad_norm": 0.4780266582965851, |
| "learning_rate": 0.0001, |
| "loss": 1.5828, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.253516409912927, |
| "grad_norm": 0.45670267939567566, |
| "learning_rate": 0.0001, |
| "loss": 1.5561, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.25385130609511053, |
| "grad_norm": 0.4919813573360443, |
| "learning_rate": 0.0001, |
| "loss": 1.6987, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.25418620227729405, |
| "grad_norm": 0.45109012722969055, |
| "learning_rate": 0.0001, |
| "loss": 1.578, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.25452109845947757, |
| "grad_norm": 0.46791893243789673, |
| "learning_rate": 0.0001, |
| "loss": 1.5083, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.2548559946416611, |
| "grad_norm": 0.44861626625061035, |
| "learning_rate": 0.0001, |
| "loss": 1.5325, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.2551908908238446, |
| "grad_norm": 0.4484095275402069, |
| "learning_rate": 0.0001, |
| "loss": 1.5581, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.2555257870060281, |
| "grad_norm": 0.4307587146759033, |
| "learning_rate": 0.0001, |
| "loss": 1.4857, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.25586068318821165, |
| "grad_norm": 0.4829903542995453, |
| "learning_rate": 0.0001, |
| "loss": 1.5391, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.25619557937039517, |
| "grad_norm": 0.4534643292427063, |
| "learning_rate": 0.0001, |
| "loss": 1.6835, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.2565304755525787, |
| "grad_norm": 0.48684337735176086, |
| "learning_rate": 0.0001, |
| "loss": 1.6099, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.2568653717347622, |
| "grad_norm": 0.45247527956962585, |
| "learning_rate": 0.0001, |
| "loss": 1.4907, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.2572002679169457, |
| "grad_norm": 0.48305273056030273, |
| "learning_rate": 0.0001, |
| "loss": 1.5884, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.25753516409912924, |
| "grad_norm": 0.45419007539749146, |
| "learning_rate": 0.0001, |
| "loss": 1.5194, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.2578700602813128, |
| "grad_norm": 0.4592842161655426, |
| "learning_rate": 0.0001, |
| "loss": 1.5247, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.25820495646349634, |
| "grad_norm": 0.4464816451072693, |
| "learning_rate": 0.0001, |
| "loss": 1.5456, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.25853985264567986, |
| "grad_norm": 0.44528427720069885, |
| "learning_rate": 0.0001, |
| "loss": 1.4692, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.2588747488278634, |
| "grad_norm": 0.4575278162956238, |
| "learning_rate": 0.0001, |
| "loss": 1.5124, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.2592096450100469, |
| "grad_norm": 0.44354987144470215, |
| "learning_rate": 0.0001, |
| "loss": 1.5577, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.2595445411922304, |
| "grad_norm": 0.4407951831817627, |
| "learning_rate": 0.0001, |
| "loss": 1.637, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.25987943737441394, |
| "grad_norm": 0.4313081204891205, |
| "learning_rate": 0.0001, |
| "loss": 1.533, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.26021433355659745, |
| "grad_norm": 0.4363669157028198, |
| "learning_rate": 0.0001, |
| "loss": 1.6094, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.260549229738781, |
| "grad_norm": 0.44643253087997437, |
| "learning_rate": 0.0001, |
| "loss": 1.5489, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.2608841259209645, |
| "grad_norm": 0.460085928440094, |
| "learning_rate": 0.0001, |
| "loss": 1.5721, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.261219022103148, |
| "grad_norm": 0.44179096817970276, |
| "learning_rate": 0.0001, |
| "loss": 1.5302, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.26155391828533153, |
| "grad_norm": 0.44657012820243835, |
| "learning_rate": 0.0001, |
| "loss": 1.4765, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.26188881446751505, |
| "grad_norm": 0.458996444940567, |
| "learning_rate": 0.0001, |
| "loss": 1.6837, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.26222371064969857, |
| "grad_norm": 0.4423884451389313, |
| "learning_rate": 0.0001, |
| "loss": 1.5736, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.2625586068318821, |
| "grad_norm": 0.46296098828315735, |
| "learning_rate": 0.0001, |
| "loss": 1.6311, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.26289350301406567, |
| "grad_norm": 0.43383488059043884, |
| "learning_rate": 0.0001, |
| "loss": 1.6063, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.2632283991962492, |
| "grad_norm": 0.43596151471138, |
| "learning_rate": 0.0001, |
| "loss": 1.4607, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.2635632953784327, |
| "grad_norm": 0.4386815130710602, |
| "learning_rate": 0.0001, |
| "loss": 1.5185, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.2638981915606162, |
| "grad_norm": 0.47889411449432373, |
| "learning_rate": 0.0001, |
| "loss": 1.5469, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.26423308774279974, |
| "grad_norm": 0.4255765974521637, |
| "learning_rate": 0.0001, |
| "loss": 1.4188, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.26456798392498326, |
| "grad_norm": 0.5282460451126099, |
| "learning_rate": 0.0001, |
| "loss": 1.4845, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.2649028801071668, |
| "grad_norm": 0.4410010576248169, |
| "learning_rate": 0.0001, |
| "loss": 1.5932, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.2652377762893503, |
| "grad_norm": 0.4921286106109619, |
| "learning_rate": 0.0001, |
| "loss": 1.7258, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.2655726724715338, |
| "grad_norm": 0.46794795989990234, |
| "learning_rate": 0.0001, |
| "loss": 1.6228, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.26590756865371734, |
| "grad_norm": 0.4620145261287689, |
| "learning_rate": 0.0001, |
| "loss": 1.4615, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.26624246483590086, |
| "grad_norm": 0.4698029160499573, |
| "learning_rate": 0.0001, |
| "loss": 1.6091, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.2665773610180844, |
| "grad_norm": 0.4392451047897339, |
| "learning_rate": 0.0001, |
| "loss": 1.5311, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.2669122572002679, |
| "grad_norm": 0.5082414746284485, |
| "learning_rate": 0.0001, |
| "loss": 1.5202, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.2672471533824514, |
| "grad_norm": 0.4562722444534302, |
| "learning_rate": 0.0001, |
| "loss": 1.6005, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.26758204956463494, |
| "grad_norm": 0.473054438829422, |
| "learning_rate": 0.0001, |
| "loss": 1.6171, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.2679169457468185, |
| "grad_norm": 0.4447856843471527, |
| "learning_rate": 0.0001, |
| "loss": 1.5041, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.26825184192900203, |
| "grad_norm": 0.4536293148994446, |
| "learning_rate": 0.0001, |
| "loss": 1.5749, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.26858673811118555, |
| "grad_norm": 0.46840107440948486, |
| "learning_rate": 0.0001, |
| "loss": 1.5392, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.26892163429336907, |
| "grad_norm": 0.4613092839717865, |
| "learning_rate": 0.0001, |
| "loss": 1.6126, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.2692565304755526, |
| "grad_norm": 0.46580761671066284, |
| "learning_rate": 0.0001, |
| "loss": 1.5783, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.2695914266577361, |
| "grad_norm": 0.43385007977485657, |
| "learning_rate": 0.0001, |
| "loss": 1.5475, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.26992632283991963, |
| "grad_norm": 0.44625353813171387, |
| "learning_rate": 0.0001, |
| "loss": 1.5473, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.27026121902210315, |
| "grad_norm": 0.43871554732322693, |
| "learning_rate": 0.0001, |
| "loss": 1.5818, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.27059611520428667, |
| "grad_norm": 0.44773444533348083, |
| "learning_rate": 0.0001, |
| "loss": 1.6033, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.2709310113864702, |
| "grad_norm": 0.4612893760204315, |
| "learning_rate": 0.0001, |
| "loss": 1.597, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.2712659075686537, |
| "grad_norm": 0.44292980432510376, |
| "learning_rate": 0.0001, |
| "loss": 1.5572, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.2716008037508372, |
| "grad_norm": 0.44339171051979065, |
| "learning_rate": 0.0001, |
| "loss": 1.569, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.27193569993302075, |
| "grad_norm": 0.46169915795326233, |
| "learning_rate": 0.0001, |
| "loss": 1.6067, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.27227059611520427, |
| "grad_norm": 0.4542236924171448, |
| "learning_rate": 0.0001, |
| "loss": 1.5463, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.2726054922973878, |
| "grad_norm": 0.45044103264808655, |
| "learning_rate": 0.0001, |
| "loss": 1.515, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.2729403884795713, |
| "grad_norm": 0.4549798369407654, |
| "learning_rate": 0.0001, |
| "loss": 1.5613, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.2732752846617549, |
| "grad_norm": 0.4387785792350769, |
| "learning_rate": 0.0001, |
| "loss": 1.6267, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.2736101808439384, |
| "grad_norm": 0.44183585047721863, |
| "learning_rate": 0.0001, |
| "loss": 1.584, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.2739450770261219, |
| "grad_norm": 0.4401913285255432, |
| "learning_rate": 0.0001, |
| "loss": 1.4822, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.27427997320830544, |
| "grad_norm": 0.44586020708084106, |
| "learning_rate": 0.0001, |
| "loss": 1.5685, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.27461486939048896, |
| "grad_norm": 0.44778352975845337, |
| "learning_rate": 0.0001, |
| "loss": 1.6375, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.2749497655726725, |
| "grad_norm": 0.4404751658439636, |
| "learning_rate": 0.0001, |
| "loss": 1.5043, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.275284661754856, |
| "grad_norm": 0.4170089066028595, |
| "learning_rate": 0.0001, |
| "loss": 1.4737, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.2756195579370395, |
| "grad_norm": 0.43222057819366455, |
| "learning_rate": 0.0001, |
| "loss": 1.5244, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.27595445411922304, |
| "grad_norm": 0.43604615330696106, |
| "learning_rate": 0.0001, |
| "loss": 1.5028, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.27628935030140656, |
| "grad_norm": 0.46478959918022156, |
| "learning_rate": 0.0001, |
| "loss": 1.5599, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.2766242464835901, |
| "grad_norm": 0.4629174768924713, |
| "learning_rate": 0.0001, |
| "loss": 1.5553, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.2769591426657736, |
| "grad_norm": 0.4615054130554199, |
| "learning_rate": 0.0001, |
| "loss": 1.6052, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.2772940388479571, |
| "grad_norm": 0.4389117360115051, |
| "learning_rate": 0.0001, |
| "loss": 1.5926, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.27762893503014063, |
| "grad_norm": 0.4443889558315277, |
| "learning_rate": 0.0001, |
| "loss": 1.5341, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.27796383121232415, |
| "grad_norm": 0.43649813532829285, |
| "learning_rate": 0.0001, |
| "loss": 1.5323, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.27829872739450773, |
| "grad_norm": 0.4427342414855957, |
| "learning_rate": 0.0001, |
| "loss": 1.5615, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.27863362357669125, |
| "grad_norm": 0.44088149070739746, |
| "learning_rate": 0.0001, |
| "loss": 1.5227, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.27896851975887477, |
| "grad_norm": 0.478352427482605, |
| "learning_rate": 0.0001, |
| "loss": 1.6994, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.2793034159410583, |
| "grad_norm": 0.45829662680625916, |
| "learning_rate": 0.0001, |
| "loss": 1.6766, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.2796383121232418, |
| "grad_norm": 0.44967886805534363, |
| "learning_rate": 0.0001, |
| "loss": 1.4908, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.2799732083054253, |
| "grad_norm": 0.45656687021255493, |
| "learning_rate": 0.0001, |
| "loss": 1.5392, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.28030810448760884, |
| "grad_norm": 0.4756939113140106, |
| "learning_rate": 0.0001, |
| "loss": 1.5648, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.28064300066979236, |
| "grad_norm": 0.4533655047416687, |
| "learning_rate": 0.0001, |
| "loss": 1.5131, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.2809778968519759, |
| "grad_norm": 0.4716136157512665, |
| "learning_rate": 0.0001, |
| "loss": 1.6867, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.2813127930341594, |
| "grad_norm": 0.43292513489723206, |
| "learning_rate": 0.0001, |
| "loss": 1.5307, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.2816476892163429, |
| "grad_norm": 0.45670297741889954, |
| "learning_rate": 0.0001, |
| "loss": 1.5685, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.28198258539852644, |
| "grad_norm": 0.43542346358299255, |
| "learning_rate": 0.0001, |
| "loss": 1.532, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.28231748158070996, |
| "grad_norm": 0.44551143050193787, |
| "learning_rate": 0.0001, |
| "loss": 1.5195, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.2826523777628935, |
| "grad_norm": 0.4447404444217682, |
| "learning_rate": 0.0001, |
| "loss": 1.4835, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.282987273945077, |
| "grad_norm": 0.45286113023757935, |
| "learning_rate": 0.0001, |
| "loss": 1.6399, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.2833221701272606, |
| "grad_norm": 0.4515502154827118, |
| "learning_rate": 0.0001, |
| "loss": 1.5199, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.2836570663094441, |
| "grad_norm": 0.4420303404331207, |
| "learning_rate": 0.0001, |
| "loss": 1.4901, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.2839919624916276, |
| "grad_norm": 0.4807213544845581, |
| "learning_rate": 0.0001, |
| "loss": 1.6002, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.28432685867381113, |
| "grad_norm": 0.4393681585788727, |
| "learning_rate": 0.0001, |
| "loss": 1.595, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.28466175485599465, |
| "grad_norm": 0.4498080313205719, |
| "learning_rate": 0.0001, |
| "loss": 1.5361, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.2849966510381782, |
| "grad_norm": 0.4682947099208832, |
| "learning_rate": 0.0001, |
| "loss": 1.6209, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.2853315472203617, |
| "grad_norm": 0.43859928846359253, |
| "learning_rate": 0.0001, |
| "loss": 1.526, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.2856664434025452, |
| "grad_norm": 0.4339533746242523, |
| "learning_rate": 0.0001, |
| "loss": 1.5533, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.28600133958472873, |
| "grad_norm": 0.45164206624031067, |
| "learning_rate": 0.0001, |
| "loss": 1.6037, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.28633623576691225, |
| "grad_norm": 0.4741949737071991, |
| "learning_rate": 0.0001, |
| "loss": 1.594, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.28667113194909577, |
| "grad_norm": 0.46638429164886475, |
| "learning_rate": 0.0001, |
| "loss": 1.5615, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.2870060281312793, |
| "grad_norm": 0.46086496114730835, |
| "learning_rate": 0.0001, |
| "loss": 1.5066, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.2873409243134628, |
| "grad_norm": 0.4441535174846649, |
| "learning_rate": 0.0001, |
| "loss": 1.5619, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.28767582049564633, |
| "grad_norm": 0.44475072622299194, |
| "learning_rate": 0.0001, |
| "loss": 1.6023, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.28801071667782985, |
| "grad_norm": 0.45497584342956543, |
| "learning_rate": 0.0001, |
| "loss": 1.5075, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.2883456128600134, |
| "grad_norm": 0.4760246276855469, |
| "learning_rate": 0.0001, |
| "loss": 1.5075, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.28868050904219694, |
| "grad_norm": 0.46818268299102783, |
| "learning_rate": 0.0001, |
| "loss": 1.6088, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.28901540522438046, |
| "grad_norm": 0.45602530241012573, |
| "learning_rate": 0.0001, |
| "loss": 1.5326, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.289350301406564, |
| "grad_norm": 0.4454081654548645, |
| "learning_rate": 0.0001, |
| "loss": 1.5957, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.2896851975887475, |
| "grad_norm": 0.45742931962013245, |
| "learning_rate": 0.0001, |
| "loss": 1.5309, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.290020093770931, |
| "grad_norm": 0.45301052927970886, |
| "learning_rate": 0.0001, |
| "loss": 1.6096, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.29035498995311454, |
| "grad_norm": 0.45820847153663635, |
| "learning_rate": 0.0001, |
| "loss": 1.5232, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.29068988613529806, |
| "grad_norm": 0.47826266288757324, |
| "learning_rate": 0.0001, |
| "loss": 1.6533, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.2910247823174816, |
| "grad_norm": 0.43897736072540283, |
| "learning_rate": 0.0001, |
| "loss": 1.5567, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.2913596784996651, |
| "grad_norm": 0.43555381894111633, |
| "learning_rate": 0.0001, |
| "loss": 1.5425, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.2916945746818486, |
| "grad_norm": 0.4390638768672943, |
| "learning_rate": 0.0001, |
| "loss": 1.6117, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.29202947086403214, |
| "grad_norm": 0.4563729465007782, |
| "learning_rate": 0.0001, |
| "loss": 1.676, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.29236436704621566, |
| "grad_norm": 0.44380465149879456, |
| "learning_rate": 0.0001, |
| "loss": 1.4597, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.2926992632283992, |
| "grad_norm": 0.5023241639137268, |
| "learning_rate": 0.0001, |
| "loss": 1.5869, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.2930341594105827, |
| "grad_norm": 0.4469534158706665, |
| "learning_rate": 0.0001, |
| "loss": 1.4953, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.29336905559276627, |
| "grad_norm": 0.4469916820526123, |
| "learning_rate": 0.0001, |
| "loss": 1.5673, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.2937039517749498, |
| "grad_norm": 0.46404793858528137, |
| "learning_rate": 0.0001, |
| "loss": 1.5858, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.2940388479571333, |
| "grad_norm": 0.45173484086990356, |
| "learning_rate": 0.0001, |
| "loss": 1.6339, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.29437374413931683, |
| "grad_norm": 0.45506271719932556, |
| "learning_rate": 0.0001, |
| "loss": 1.6059, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.29470864032150035, |
| "grad_norm": 0.46061256527900696, |
| "learning_rate": 0.0001, |
| "loss": 1.5897, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.29504353650368387, |
| "grad_norm": 0.46344825625419617, |
| "learning_rate": 0.0001, |
| "loss": 1.6338, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.2953784326858674, |
| "grad_norm": 0.43154940009117126, |
| "learning_rate": 0.0001, |
| "loss": 1.4563, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.2957133288680509, |
| "grad_norm": 0.43573182821273804, |
| "learning_rate": 0.0001, |
| "loss": 1.4755, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.2960482250502344, |
| "grad_norm": 0.4424009919166565, |
| "learning_rate": 0.0001, |
| "loss": 1.6018, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.29638312123241795, |
| "grad_norm": 0.4376654326915741, |
| "learning_rate": 0.0001, |
| "loss": 1.5291, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.29671801741460146, |
| "grad_norm": 0.44850626587867737, |
| "learning_rate": 0.0001, |
| "loss": 1.5544, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.297052913596785, |
| "grad_norm": 0.44172677397727966, |
| "learning_rate": 0.0001, |
| "loss": 1.5138, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.2973878097789685, |
| "grad_norm": 0.4760349690914154, |
| "learning_rate": 0.0001, |
| "loss": 1.6774, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.297722705961152, |
| "grad_norm": 0.4951341152191162, |
| "learning_rate": 0.0001, |
| "loss": 1.5995, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.29805760214333554, |
| "grad_norm": 0.46811017394065857, |
| "learning_rate": 0.0001, |
| "loss": 1.6005, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.29839249832551906, |
| "grad_norm": 0.46500176191329956, |
| "learning_rate": 0.0001, |
| "loss": 1.61, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.29872739450770264, |
| "grad_norm": 0.4412824511528015, |
| "learning_rate": 0.0001, |
| "loss": 1.56, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.29906229068988616, |
| "grad_norm": 0.4398655891418457, |
| "learning_rate": 0.0001, |
| "loss": 1.5329, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.2993971868720697, |
| "grad_norm": 0.4386952817440033, |
| "learning_rate": 0.0001, |
| "loss": 1.5272, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.2997320830542532, |
| "grad_norm": 0.47550323605537415, |
| "learning_rate": 0.0001, |
| "loss": 1.5591, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.3000669792364367, |
| "grad_norm": 0.43544065952301025, |
| "learning_rate": 0.0001, |
| "loss": 1.5343, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.30040187541862023, |
| "grad_norm": 0.43268048763275146, |
| "learning_rate": 0.0001, |
| "loss": 1.4944, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.30073677160080375, |
| "grad_norm": 0.4282139837741852, |
| "learning_rate": 0.0001, |
| "loss": 1.4313, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.3010716677829873, |
| "grad_norm": 0.46300849318504333, |
| "learning_rate": 0.0001, |
| "loss": 1.6772, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.3014065639651708, |
| "grad_norm": 0.4673476815223694, |
| "learning_rate": 0.0001, |
| "loss": 1.6028, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.3017414601473543, |
| "grad_norm": 0.43645891547203064, |
| "learning_rate": 0.0001, |
| "loss": 1.5157, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.30207635632953783, |
| "grad_norm": 0.44053512811660767, |
| "learning_rate": 0.0001, |
| "loss": 1.5987, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.30241125251172135, |
| "grad_norm": 0.43275871872901917, |
| "learning_rate": 0.0001, |
| "loss": 1.4311, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.30274614869390487, |
| "grad_norm": 0.45569944381713867, |
| "learning_rate": 0.0001, |
| "loss": 1.6145, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.3030810448760884, |
| "grad_norm": 0.45299553871154785, |
| "learning_rate": 0.0001, |
| "loss": 1.5617, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.3034159410582719, |
| "grad_norm": 0.4409105181694031, |
| "learning_rate": 0.0001, |
| "loss": 1.5418, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.3037508372404555, |
| "grad_norm": 0.4408958852291107, |
| "learning_rate": 0.0001, |
| "loss": 1.4375, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.304085733422639, |
| "grad_norm": 0.4480845332145691, |
| "learning_rate": 0.0001, |
| "loss": 1.5399, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.3044206296048225, |
| "grad_norm": 0.4365403950214386, |
| "learning_rate": 0.0001, |
| "loss": 1.4934, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.30475552578700604, |
| "grad_norm": 0.45147716999053955, |
| "learning_rate": 0.0001, |
| "loss": 1.5526, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.30509042196918956, |
| "grad_norm": 0.45969611406326294, |
| "learning_rate": 0.0001, |
| "loss": 1.5609, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.3054253181513731, |
| "grad_norm": 0.4570698142051697, |
| "learning_rate": 0.0001, |
| "loss": 1.626, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.3057602143335566, |
| "grad_norm": 0.45453599095344543, |
| "learning_rate": 0.0001, |
| "loss": 1.6268, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.3060951105157401, |
| "grad_norm": 0.4364335536956787, |
| "learning_rate": 0.0001, |
| "loss": 1.5523, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.30643000669792364, |
| "grad_norm": 0.4353067874908447, |
| "learning_rate": 0.0001, |
| "loss": 1.5668, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.30676490288010716, |
| "grad_norm": 0.439081072807312, |
| "learning_rate": 0.0001, |
| "loss": 1.4408, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.3070997990622907, |
| "grad_norm": 0.4517746567726135, |
| "learning_rate": 0.0001, |
| "loss": 1.514, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.3074346952444742, |
| "grad_norm": 0.4414094090461731, |
| "learning_rate": 0.0001, |
| "loss": 1.4736, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.3077695914266577, |
| "grad_norm": 0.4406594932079315, |
| "learning_rate": 0.0001, |
| "loss": 1.576, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.30810448760884124, |
| "grad_norm": 0.46079543232917786, |
| "learning_rate": 0.0001, |
| "loss": 1.5106, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.30843938379102476, |
| "grad_norm": 0.4498567283153534, |
| "learning_rate": 0.0001, |
| "loss": 1.5298, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.30877427997320833, |
| "grad_norm": 0.45379623770713806, |
| "learning_rate": 0.0001, |
| "loss": 1.5265, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.30910917615539185, |
| "grad_norm": 0.4834693372249603, |
| "learning_rate": 0.0001, |
| "loss": 1.6934, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.30944407233757537, |
| "grad_norm": 0.46280190348625183, |
| "learning_rate": 0.0001, |
| "loss": 1.552, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.3097789685197589, |
| "grad_norm": 0.4772225320339203, |
| "learning_rate": 0.0001, |
| "loss": 1.5628, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.3101138647019424, |
| "grad_norm": 0.45620399713516235, |
| "learning_rate": 0.0001, |
| "loss": 1.5828, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.31044876088412593, |
| "grad_norm": 0.45849597454071045, |
| "learning_rate": 0.0001, |
| "loss": 1.4951, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.31078365706630945, |
| "grad_norm": 0.46801209449768066, |
| "learning_rate": 0.0001, |
| "loss": 1.5744, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.31111855324849297, |
| "grad_norm": 0.4555777311325073, |
| "learning_rate": 0.0001, |
| "loss": 1.4925, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.3114534494306765, |
| "grad_norm": 0.4439859688282013, |
| "learning_rate": 0.0001, |
| "loss": 1.5912, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.31178834561286, |
| "grad_norm": 0.4438050389289856, |
| "learning_rate": 0.0001, |
| "loss": 1.5269, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.3121232417950435, |
| "grad_norm": 0.4439409077167511, |
| "learning_rate": 0.0001, |
| "loss": 1.6237, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.31245813797722705, |
| "grad_norm": 0.43066197633743286, |
| "learning_rate": 0.0001, |
| "loss": 1.5733, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.31279303415941057, |
| "grad_norm": 0.42914170026779175, |
| "learning_rate": 0.0001, |
| "loss": 1.5052, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.3131279303415941, |
| "grad_norm": 0.4268810749053955, |
| "learning_rate": 0.0001, |
| "loss": 1.544, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.3134628265237776, |
| "grad_norm": 0.4368149936199188, |
| "learning_rate": 0.0001, |
| "loss": 1.622, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.3137977227059612, |
| "grad_norm": 0.441754013299942, |
| "learning_rate": 0.0001, |
| "loss": 1.4592, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.3141326188881447, |
| "grad_norm": 0.43533989787101746, |
| "learning_rate": 0.0001, |
| "loss": 1.5468, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.3144675150703282, |
| "grad_norm": 0.4400700032711029, |
| "learning_rate": 0.0001, |
| "loss": 1.5719, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.31480241125251174, |
| "grad_norm": 0.4473206400871277, |
| "learning_rate": 0.0001, |
| "loss": 1.5889, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.31513730743469526, |
| "grad_norm": 0.4529035687446594, |
| "learning_rate": 0.0001, |
| "loss": 1.5555, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.3154722036168788, |
| "grad_norm": 0.45281359553337097, |
| "learning_rate": 0.0001, |
| "loss": 1.6016, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.3158070997990623, |
| "grad_norm": 0.4597030282020569, |
| "learning_rate": 0.0001, |
| "loss": 1.5947, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.3161419959812458, |
| "grad_norm": 0.4365510642528534, |
| "learning_rate": 0.0001, |
| "loss": 1.4508, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.31647689216342934, |
| "grad_norm": 0.44290444254875183, |
| "learning_rate": 0.0001, |
| "loss": 1.5718, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.31681178834561285, |
| "grad_norm": 0.4928835332393646, |
| "learning_rate": 0.0001, |
| "loss": 1.5904, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.3171466845277964, |
| "grad_norm": 0.44557881355285645, |
| "learning_rate": 0.0001, |
| "loss": 1.5851, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.3174815807099799, |
| "grad_norm": 0.5136463046073914, |
| "learning_rate": 0.0001, |
| "loss": 1.5845, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.3178164768921634, |
| "grad_norm": 0.4590359032154083, |
| "learning_rate": 0.0001, |
| "loss": 1.5975, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.31815137307434693, |
| "grad_norm": 0.44247373938560486, |
| "learning_rate": 0.0001, |
| "loss": 1.458, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.31848626925653045, |
| "grad_norm": 0.43431544303894043, |
| "learning_rate": 0.0001, |
| "loss": 1.4779, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.31882116543871397, |
| "grad_norm": 0.4587078392505646, |
| "learning_rate": 0.0001, |
| "loss": 1.5128, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.31915606162089755, |
| "grad_norm": 0.44057053327560425, |
| "learning_rate": 0.0001, |
| "loss": 1.603, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.31949095780308107, |
| "grad_norm": 0.43187084794044495, |
| "learning_rate": 0.0001, |
| "loss": 1.5044, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.3198258539852646, |
| "grad_norm": 0.4155219793319702, |
| "learning_rate": 0.0001, |
| "loss": 1.471, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.3201607501674481, |
| "grad_norm": 0.47326022386550903, |
| "learning_rate": 0.0001, |
| "loss": 1.5794, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.3204956463496316, |
| "grad_norm": 0.45905107259750366, |
| "learning_rate": 0.0001, |
| "loss": 1.5353, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.32083054253181514, |
| "grad_norm": 0.44687706232070923, |
| "learning_rate": 0.0001, |
| "loss": 1.5249, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.32116543871399866, |
| "grad_norm": 0.46414482593536377, |
| "learning_rate": 0.0001, |
| "loss": 1.5599, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.3215003348961822, |
| "grad_norm": 0.44584473967552185, |
| "learning_rate": 0.0001, |
| "loss": 1.5236, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.3218352310783657, |
| "grad_norm": 0.49396583437919617, |
| "learning_rate": 0.0001, |
| "loss": 1.6187, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.3221701272605492, |
| "grad_norm": 0.45838162302970886, |
| "learning_rate": 0.0001, |
| "loss": 1.5558, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.32250502344273274, |
| "grad_norm": 0.4484620988368988, |
| "learning_rate": 0.0001, |
| "loss": 1.5179, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.32283991962491626, |
| "grad_norm": 0.45024028420448303, |
| "learning_rate": 0.0001, |
| "loss": 1.5206, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.3231748158070998, |
| "grad_norm": 0.43294331431388855, |
| "learning_rate": 0.0001, |
| "loss": 1.6291, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.3235097119892833, |
| "grad_norm": 0.4585472345352173, |
| "learning_rate": 0.0001, |
| "loss": 1.545, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.3238446081714668, |
| "grad_norm": 0.44397369027137756, |
| "learning_rate": 0.0001, |
| "loss": 1.5551, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.3241795043536504, |
| "grad_norm": 0.462065190076828, |
| "learning_rate": 0.0001, |
| "loss": 1.5943, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.3245144005358339, |
| "grad_norm": 0.4261050820350647, |
| "learning_rate": 0.0001, |
| "loss": 1.4429, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.32484929671801743, |
| "grad_norm": 0.44713786244392395, |
| "learning_rate": 0.0001, |
| "loss": 1.6157, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.32518419290020095, |
| "grad_norm": 0.46949875354766846, |
| "learning_rate": 0.0001, |
| "loss": 1.579, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.32551908908238447, |
| "grad_norm": 0.4351710379123688, |
| "learning_rate": 0.0001, |
| "loss": 1.4521, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.325853985264568, |
| "grad_norm": 0.5096984505653381, |
| "learning_rate": 0.0001, |
| "loss": 1.6964, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.3261888814467515, |
| "grad_norm": 0.44196903705596924, |
| "learning_rate": 0.0001, |
| "loss": 1.5528, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.32652377762893503, |
| "grad_norm": 0.4667568802833557, |
| "learning_rate": 0.0001, |
| "loss": 1.5643, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.32685867381111855, |
| "grad_norm": 0.46379634737968445, |
| "learning_rate": 0.0001, |
| "loss": 1.6277, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.32719356999330207, |
| "grad_norm": 0.451865553855896, |
| "learning_rate": 0.0001, |
| "loss": 1.6356, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.3275284661754856, |
| "grad_norm": 0.42902424931526184, |
| "learning_rate": 0.0001, |
| "loss": 1.5017, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.3278633623576691, |
| "grad_norm": 0.46481791138648987, |
| "learning_rate": 0.0001, |
| "loss": 1.5955, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.3281982585398526, |
| "grad_norm": 0.45368990302085876, |
| "learning_rate": 0.0001, |
| "loss": 1.6469, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.32853315472203615, |
| "grad_norm": 0.46452459692955017, |
| "learning_rate": 0.0001, |
| "loss": 1.5121, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.32886805090421967, |
| "grad_norm": 0.4299396276473999, |
| "learning_rate": 0.0001, |
| "loss": 1.4892, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.32920294708640324, |
| "grad_norm": 0.46773141622543335, |
| "learning_rate": 0.0001, |
| "loss": 1.6213, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.32953784326858676, |
| "grad_norm": 0.4707593023777008, |
| "learning_rate": 0.0001, |
| "loss": 1.672, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.3298727394507703, |
| "grad_norm": 0.46293628215789795, |
| "learning_rate": 0.0001, |
| "loss": 1.6151, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.3302076356329538, |
| "grad_norm": 0.4385524392127991, |
| "learning_rate": 0.0001, |
| "loss": 1.4511, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.3305425318151373, |
| "grad_norm": 0.43595242500305176, |
| "learning_rate": 0.0001, |
| "loss": 1.4555, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.33087742799732084, |
| "grad_norm": 0.4468819499015808, |
| "learning_rate": 0.0001, |
| "loss": 1.6023, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.33121232417950436, |
| "grad_norm": 0.45761582255363464, |
| "learning_rate": 0.0001, |
| "loss": 1.5441, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.3315472203616879, |
| "grad_norm": 0.4671020209789276, |
| "learning_rate": 0.0001, |
| "loss": 1.633, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.3318821165438714, |
| "grad_norm": 0.4528156518936157, |
| "learning_rate": 0.0001, |
| "loss": 1.4952, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.3322170127260549, |
| "grad_norm": 0.4482688009738922, |
| "learning_rate": 0.0001, |
| "loss": 1.537, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.33255190890823844, |
| "grad_norm": 0.43465572595596313, |
| "learning_rate": 0.0001, |
| "loss": 1.4921, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.33288680509042196, |
| "grad_norm": 0.4700695872306824, |
| "learning_rate": 0.0001, |
| "loss": 1.6168, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.3332217012726055, |
| "grad_norm": 0.4559037387371063, |
| "learning_rate": 0.0001, |
| "loss": 1.6104, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.333556597454789, |
| "grad_norm": 0.44127675890922546, |
| "learning_rate": 0.0001, |
| "loss": 1.5354, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.3338914936369725, |
| "grad_norm": 0.4611609876155853, |
| "learning_rate": 0.0001, |
| "loss": 1.6717, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.3342263898191561, |
| "grad_norm": 0.4240245223045349, |
| "learning_rate": 0.0001, |
| "loss": 1.4703, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.3345612860013396, |
| "grad_norm": 0.4530922472476959, |
| "learning_rate": 0.0001, |
| "loss": 1.5208, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.33489618218352313, |
| "grad_norm": 0.4416406452655792, |
| "learning_rate": 0.0001, |
| "loss": 1.5127, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.33523107836570665, |
| "grad_norm": 0.45270246267318726, |
| "learning_rate": 0.0001, |
| "loss": 1.5718, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.33556597454789017, |
| "grad_norm": 0.439411997795105, |
| "learning_rate": 0.0001, |
| "loss": 1.517, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.3359008707300737, |
| "grad_norm": 0.46239691972732544, |
| "learning_rate": 0.0001, |
| "loss": 1.6039, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.3362357669122572, |
| "grad_norm": 0.43868276476860046, |
| "learning_rate": 0.0001, |
| "loss": 1.6153, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.3365706630944407, |
| "grad_norm": 0.48331135511398315, |
| "learning_rate": 0.0001, |
| "loss": 1.5955, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.33690555927662424, |
| "grad_norm": 0.45195087790489197, |
| "learning_rate": 0.0001, |
| "loss": 1.6155, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.33724045545880776, |
| "grad_norm": 0.4425548315048218, |
| "learning_rate": 0.0001, |
| "loss": 1.4241, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.3375753516409913, |
| "grad_norm": 0.43767687678337097, |
| "learning_rate": 0.0001, |
| "loss": 1.5117, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.3379102478231748, |
| "grad_norm": 0.43844422698020935, |
| "learning_rate": 0.0001, |
| "loss": 1.5459, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.3382451440053583, |
| "grad_norm": 0.4390891492366791, |
| "learning_rate": 0.0001, |
| "loss": 1.5435, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.33858004018754184, |
| "grad_norm": 0.4437672793865204, |
| "learning_rate": 0.0001, |
| "loss": 1.4874, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.33891493636972536, |
| "grad_norm": 0.4345516264438629, |
| "learning_rate": 0.0001, |
| "loss": 1.4398, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.3392498325519089, |
| "grad_norm": 0.47101935744285583, |
| "learning_rate": 0.0001, |
| "loss": 1.7193, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.33958472873409246, |
| "grad_norm": 0.4873376190662384, |
| "learning_rate": 0.0001, |
| "loss": 1.6006, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.339919624916276, |
| "grad_norm": 0.4573456943035126, |
| "learning_rate": 0.0001, |
| "loss": 1.4895, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.3402545210984595, |
| "grad_norm": 0.4560162425041199, |
| "learning_rate": 0.0001, |
| "loss": 1.6158, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.340589417280643, |
| "grad_norm": 0.45291417837142944, |
| "learning_rate": 0.0001, |
| "loss": 1.5367, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.34092431346282653, |
| "grad_norm": 0.46238189935684204, |
| "learning_rate": 0.0001, |
| "loss": 1.4627, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.34125920964501005, |
| "grad_norm": 0.47080227732658386, |
| "learning_rate": 0.0001, |
| "loss": 1.5742, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.3415941058271936, |
| "grad_norm": 0.46811938285827637, |
| "learning_rate": 0.0001, |
| "loss": 1.6345, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.3419290020093771, |
| "grad_norm": 0.46588802337646484, |
| "learning_rate": 0.0001, |
| "loss": 1.5783, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.3422638981915606, |
| "grad_norm": 0.4479082524776459, |
| "learning_rate": 0.0001, |
| "loss": 1.6295, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.34259879437374413, |
| "grad_norm": 0.45164626836776733, |
| "learning_rate": 0.0001, |
| "loss": 1.5263, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.34293369055592765, |
| "grad_norm": 0.45851123332977295, |
| "learning_rate": 0.0001, |
| "loss": 1.5631, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.34326858673811117, |
| "grad_norm": 0.4423898756504059, |
| "learning_rate": 0.0001, |
| "loss": 1.6015, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.3436034829202947, |
| "grad_norm": 0.46684497594833374, |
| "learning_rate": 0.0001, |
| "loss": 1.5274, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.3439383791024782, |
| "grad_norm": 0.42938143014907837, |
| "learning_rate": 0.0001, |
| "loss": 1.4798, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.34427327528466173, |
| "grad_norm": 0.45635873079299927, |
| "learning_rate": 0.0001, |
| "loss": 1.6065, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.3446081714668453, |
| "grad_norm": 0.4600565731525421, |
| "learning_rate": 0.0001, |
| "loss": 1.523, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.3449430676490288, |
| "grad_norm": 0.4675713777542114, |
| "learning_rate": 0.0001, |
| "loss": 1.5928, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.34527796383121234, |
| "grad_norm": 0.45423001050949097, |
| "learning_rate": 0.0001, |
| "loss": 1.6305, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.34561286001339586, |
| "grad_norm": 0.44853097200393677, |
| "learning_rate": 0.0001, |
| "loss": 1.5195, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.3459477561955794, |
| "grad_norm": 0.4483386278152466, |
| "learning_rate": 0.0001, |
| "loss": 1.677, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.3462826523777629, |
| "grad_norm": 0.43875786662101746, |
| "learning_rate": 0.0001, |
| "loss": 1.5086, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.3466175485599464, |
| "grad_norm": 0.44167378544807434, |
| "learning_rate": 0.0001, |
| "loss": 1.6012, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.34695244474212994, |
| "grad_norm": 0.4552042782306671, |
| "learning_rate": 0.0001, |
| "loss": 1.6003, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.34728734092431346, |
| "grad_norm": 0.4757082462310791, |
| "learning_rate": 0.0001, |
| "loss": 1.6217, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.347622237106497, |
| "grad_norm": 0.4523075819015503, |
| "learning_rate": 0.0001, |
| "loss": 1.5876, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.3479571332886805, |
| "grad_norm": 0.4553813636302948, |
| "learning_rate": 0.0001, |
| "loss": 1.5238, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.348292029470864, |
| "grad_norm": 0.4430896043777466, |
| "learning_rate": 0.0001, |
| "loss": 1.5503, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.34862692565304754, |
| "grad_norm": 0.4415845274925232, |
| "learning_rate": 0.0001, |
| "loss": 1.5227, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.34896182183523106, |
| "grad_norm": 0.4520629942417145, |
| "learning_rate": 0.0001, |
| "loss": 1.6111, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.3492967180174146, |
| "grad_norm": 0.4249700605869293, |
| "learning_rate": 0.0001, |
| "loss": 1.496, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.34963161419959815, |
| "grad_norm": 0.4444321095943451, |
| "learning_rate": 0.0001, |
| "loss": 1.5424, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.34996651038178167, |
| "grad_norm": 0.41880741715431213, |
| "learning_rate": 0.0001, |
| "loss": 1.4905, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.3503014065639652, |
| "grad_norm": 0.48384132981300354, |
| "learning_rate": 0.0001, |
| "loss": 1.6709, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.3506363027461487, |
| "grad_norm": 0.4757600724697113, |
| "learning_rate": 0.0001, |
| "loss": 1.5308, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.35097119892833223, |
| "grad_norm": 0.4434678554534912, |
| "learning_rate": 0.0001, |
| "loss": 1.6136, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.35130609511051575, |
| "grad_norm": 0.43935686349868774, |
| "learning_rate": 0.0001, |
| "loss": 1.5569, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.35164099129269927, |
| "grad_norm": 0.43453824520111084, |
| "learning_rate": 0.0001, |
| "loss": 1.5371, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.3519758874748828, |
| "grad_norm": 0.43408510088920593, |
| "learning_rate": 0.0001, |
| "loss": 1.4975, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.3523107836570663, |
| "grad_norm": 0.437828004360199, |
| "learning_rate": 0.0001, |
| "loss": 1.4563, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.3526456798392498, |
| "grad_norm": 0.43769609928131104, |
| "learning_rate": 0.0001, |
| "loss": 1.5476, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.35298057602143335, |
| "grad_norm": 0.4404858350753784, |
| "learning_rate": 0.0001, |
| "loss": 1.5501, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.35331547220361686, |
| "grad_norm": 0.44895392656326294, |
| "learning_rate": 0.0001, |
| "loss": 1.543, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.3536503683858004, |
| "grad_norm": 0.4335348904132843, |
| "learning_rate": 0.0001, |
| "loss": 1.4981, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.3539852645679839, |
| "grad_norm": 0.43414801359176636, |
| "learning_rate": 0.0001, |
| "loss": 1.4583, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.3543201607501674, |
| "grad_norm": 0.44667840003967285, |
| "learning_rate": 0.0001, |
| "loss": 1.5666, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.354655056932351, |
| "grad_norm": 0.43654167652130127, |
| "learning_rate": 0.0001, |
| "loss": 1.4808, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.3549899531145345, |
| "grad_norm": 0.4739477336406708, |
| "learning_rate": 0.0001, |
| "loss": 1.5895, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.35532484929671804, |
| "grad_norm": 0.46855276823043823, |
| "learning_rate": 0.0001, |
| "loss": 1.6228, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.35565974547890156, |
| "grad_norm": 0.4664900004863739, |
| "learning_rate": 0.0001, |
| "loss": 1.523, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.3559946416610851, |
| "grad_norm": 0.4334528148174286, |
| "learning_rate": 0.0001, |
| "loss": 1.5027, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.3563295378432686, |
| "grad_norm": 0.4692411422729492, |
| "learning_rate": 0.0001, |
| "loss": 1.5308, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.3566644340254521, |
| "grad_norm": 0.4434930086135864, |
| "learning_rate": 0.0001, |
| "loss": 1.5424, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.35699933020763563, |
| "grad_norm": 0.43848127126693726, |
| "learning_rate": 0.0001, |
| "loss": 1.523, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.35733422638981915, |
| "grad_norm": 0.4805077612400055, |
| "learning_rate": 0.0001, |
| "loss": 1.5774, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.3576691225720027, |
| "grad_norm": 0.46720483899116516, |
| "learning_rate": 0.0001, |
| "loss": 1.5654, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.3580040187541862, |
| "grad_norm": 0.47015318274497986, |
| "learning_rate": 0.0001, |
| "loss": 1.6036, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.3583389149363697, |
| "grad_norm": 0.46188637614250183, |
| "learning_rate": 0.0001, |
| "loss": 1.5884, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.35867381111855323, |
| "grad_norm": 0.46755450963974, |
| "learning_rate": 0.0001, |
| "loss": 1.6366, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.35900870730073675, |
| "grad_norm": 0.44284823536872864, |
| "learning_rate": 0.0001, |
| "loss": 1.5063, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.35934360348292027, |
| "grad_norm": 0.452816903591156, |
| "learning_rate": 0.0001, |
| "loss": 1.5715, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.3596784996651038, |
| "grad_norm": 0.44831642508506775, |
| "learning_rate": 0.0001, |
| "loss": 1.5484, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.36001339584728737, |
| "grad_norm": 0.45156675577163696, |
| "learning_rate": 0.0001, |
| "loss": 1.5377, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.3603482920294709, |
| "grad_norm": 0.46498456597328186, |
| "learning_rate": 0.0001, |
| "loss": 1.5168, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.3606831882116544, |
| "grad_norm": 0.4552898705005646, |
| "learning_rate": 0.0001, |
| "loss": 1.5627, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.3610180843938379, |
| "grad_norm": 0.44832298159599304, |
| "learning_rate": 0.0001, |
| "loss": 1.5315, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.36135298057602144, |
| "grad_norm": 0.4634014070034027, |
| "learning_rate": 0.0001, |
| "loss": 1.6449, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.36168787675820496, |
| "grad_norm": 0.4542264938354492, |
| "learning_rate": 0.0001, |
| "loss": 1.5183, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.3620227729403885, |
| "grad_norm": 0.46849897503852844, |
| "learning_rate": 0.0001, |
| "loss": 1.4831, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.362357669122572, |
| "grad_norm": 0.43632426857948303, |
| "learning_rate": 0.0001, |
| "loss": 1.4944, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.3626925653047555, |
| "grad_norm": 0.4542003273963928, |
| "learning_rate": 0.0001, |
| "loss": 1.5185, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.36302746148693904, |
| "grad_norm": 0.45889243483543396, |
| "learning_rate": 0.0001, |
| "loss": 1.6152, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.36336235766912256, |
| "grad_norm": 0.4599902629852295, |
| "learning_rate": 0.0001, |
| "loss": 1.5389, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.3636972538513061, |
| "grad_norm": 0.4660051763057709, |
| "learning_rate": 0.0001, |
| "loss": 1.5831, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.3640321500334896, |
| "grad_norm": 0.4395544230937958, |
| "learning_rate": 0.0001, |
| "loss": 1.5524, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.3643670462156731, |
| "grad_norm": 0.4700053930282593, |
| "learning_rate": 0.0001, |
| "loss": 1.5226, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.36470194239785664, |
| "grad_norm": 0.4484608471393585, |
| "learning_rate": 0.0001, |
| "loss": 1.5259, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.3650368385800402, |
| "grad_norm": 0.4695565104484558, |
| "learning_rate": 0.0001, |
| "loss": 1.5009, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.36537173476222373, |
| "grad_norm": 0.447613388299942, |
| "learning_rate": 0.0001, |
| "loss": 1.5532, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.36570663094440725, |
| "grad_norm": 0.48899951577186584, |
| "learning_rate": 0.0001, |
| "loss": 1.6002, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.36604152712659077, |
| "grad_norm": 0.45602288842201233, |
| "learning_rate": 0.0001, |
| "loss": 1.6501, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.3663764233087743, |
| "grad_norm": 0.449912965297699, |
| "learning_rate": 0.0001, |
| "loss": 1.5524, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.3667113194909578, |
| "grad_norm": 0.4617339074611664, |
| "learning_rate": 0.0001, |
| "loss": 1.5734, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.36704621567314133, |
| "grad_norm": 0.4346519708633423, |
| "learning_rate": 0.0001, |
| "loss": 1.4779, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.36738111185532485, |
| "grad_norm": 0.4436924457550049, |
| "learning_rate": 0.0001, |
| "loss": 1.5651, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.36771600803750837, |
| "grad_norm": 0.4515039324760437, |
| "learning_rate": 0.0001, |
| "loss": 1.5545, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.3680509042196919, |
| "grad_norm": 0.44696515798568726, |
| "learning_rate": 0.0001, |
| "loss": 1.542, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.3683858004018754, |
| "grad_norm": 0.4515751600265503, |
| "learning_rate": 0.0001, |
| "loss": 1.5397, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.3687206965840589, |
| "grad_norm": 0.4478546977043152, |
| "learning_rate": 0.0001, |
| "loss": 1.5181, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.36905559276624245, |
| "grad_norm": 0.46708935499191284, |
| "learning_rate": 0.0001, |
| "loss": 1.4895, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.36939048894842597, |
| "grad_norm": 0.43863463401794434, |
| "learning_rate": 0.0001, |
| "loss": 1.5248, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.3697253851306095, |
| "grad_norm": 0.4555443227291107, |
| "learning_rate": 0.0001, |
| "loss": 1.5217, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.37006028131279306, |
| "grad_norm": 0.4301038086414337, |
| "learning_rate": 0.0001, |
| "loss": 1.4424, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.3703951774949766, |
| "grad_norm": 0.44846969842910767, |
| "learning_rate": 0.0001, |
| "loss": 1.523, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.3707300736771601, |
| "grad_norm": 0.45033925771713257, |
| "learning_rate": 0.0001, |
| "loss": 1.5273, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.3710649698593436, |
| "grad_norm": 0.4505354166030884, |
| "learning_rate": 0.0001, |
| "loss": 1.6396, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.37139986604152714, |
| "grad_norm": 0.4425196647644043, |
| "learning_rate": 0.0001, |
| "loss": 1.585, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.37173476222371066, |
| "grad_norm": 0.45227015018463135, |
| "learning_rate": 0.0001, |
| "loss": 1.6102, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.3720696584058942, |
| "grad_norm": 0.4454893469810486, |
| "learning_rate": 0.0001, |
| "loss": 1.6097, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.3724045545880777, |
| "grad_norm": 0.43742790818214417, |
| "learning_rate": 0.0001, |
| "loss": 1.5027, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.3727394507702612, |
| "grad_norm": 0.4249231517314911, |
| "learning_rate": 0.0001, |
| "loss": 1.5145, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.37307434695244474, |
| "grad_norm": 0.4511743485927582, |
| "learning_rate": 0.0001, |
| "loss": 1.625, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.37340924313462825, |
| "grad_norm": 0.4412231147289276, |
| "learning_rate": 0.0001, |
| "loss": 1.5336, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.3737441393168118, |
| "grad_norm": 0.4392704963684082, |
| "learning_rate": 0.0001, |
| "loss": 1.4292, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.3740790354989953, |
| "grad_norm": 0.43994712829589844, |
| "learning_rate": 0.0001, |
| "loss": 1.4992, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.3744139316811788, |
| "grad_norm": 0.44654780626296997, |
| "learning_rate": 0.0001, |
| "loss": 1.5576, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.37474882786336233, |
| "grad_norm": 0.4374602138996124, |
| "learning_rate": 0.0001, |
| "loss": 1.473, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.3750837240455459, |
| "grad_norm": 0.43292462825775146, |
| "learning_rate": 0.0001, |
| "loss": 1.5379, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.3754186202277294, |
| "grad_norm": 0.46613624691963196, |
| "learning_rate": 0.0001, |
| "loss": 1.5969, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.37575351640991295, |
| "grad_norm": 0.4482156038284302, |
| "learning_rate": 0.0001, |
| "loss": 1.6308, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.37608841259209647, |
| "grad_norm": 0.4478096067905426, |
| "learning_rate": 0.0001, |
| "loss": 1.5234, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.37642330877428, |
| "grad_norm": 0.4612812399864197, |
| "learning_rate": 0.0001, |
| "loss": 1.6014, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.3767582049564635, |
| "grad_norm": 0.45624470710754395, |
| "learning_rate": 0.0001, |
| "loss": 1.6727, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.377093101138647, |
| "grad_norm": 0.45116880536079407, |
| "learning_rate": 0.0001, |
| "loss": 1.5213, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.37742799732083054, |
| "grad_norm": 0.45031848549842834, |
| "learning_rate": 0.0001, |
| "loss": 1.5688, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.37776289350301406, |
| "grad_norm": 0.4345763921737671, |
| "learning_rate": 0.0001, |
| "loss": 1.5191, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.3780977896851976, |
| "grad_norm": 0.4490206837654114, |
| "learning_rate": 0.0001, |
| "loss": 1.5108, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.3784326858673811, |
| "grad_norm": 0.46614694595336914, |
| "learning_rate": 0.0001, |
| "loss": 1.5587, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.3787675820495646, |
| "grad_norm": 0.4247433543205261, |
| "learning_rate": 0.0001, |
| "loss": 1.4486, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.37910247823174814, |
| "grad_norm": 0.45231062173843384, |
| "learning_rate": 0.0001, |
| "loss": 1.5752, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.37943737441393166, |
| "grad_norm": 0.4566837251186371, |
| "learning_rate": 0.0001, |
| "loss": 1.6133, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.3797722705961152, |
| "grad_norm": 0.457768052816391, |
| "learning_rate": 0.0001, |
| "loss": 1.5689, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.38010716677829876, |
| "grad_norm": 0.4402942955493927, |
| "learning_rate": 0.0001, |
| "loss": 1.6279, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.3804420629604823, |
| "grad_norm": 0.4514990448951721, |
| "learning_rate": 0.0001, |
| "loss": 1.5486, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.3807769591426658, |
| "grad_norm": 0.45938149094581604, |
| "learning_rate": 0.0001, |
| "loss": 1.4448, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.3811118553248493, |
| "grad_norm": 0.4579198360443115, |
| "learning_rate": 0.0001, |
| "loss": 1.5811, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.38144675150703283, |
| "grad_norm": 0.43243205547332764, |
| "learning_rate": 0.0001, |
| "loss": 1.5189, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.38178164768921635, |
| "grad_norm": 0.45426496863365173, |
| "learning_rate": 0.0001, |
| "loss": 1.5627, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.38211654387139987, |
| "grad_norm": 0.4345931112766266, |
| "learning_rate": 0.0001, |
| "loss": 1.4466, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.3824514400535834, |
| "grad_norm": 0.44259166717529297, |
| "learning_rate": 0.0001, |
| "loss": 1.5348, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.3827863362357669, |
| "grad_norm": 0.4565470516681671, |
| "learning_rate": 0.0001, |
| "loss": 1.5584, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.38312123241795043, |
| "grad_norm": 0.44665977358818054, |
| "learning_rate": 0.0001, |
| "loss": 1.589, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.38345612860013395, |
| "grad_norm": 0.44836845993995667, |
| "learning_rate": 0.0001, |
| "loss": 1.5347, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.38379102478231747, |
| "grad_norm": 0.44463881850242615, |
| "learning_rate": 0.0001, |
| "loss": 1.476, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.384125920964501, |
| "grad_norm": 0.457633376121521, |
| "learning_rate": 0.0001, |
| "loss": 1.559, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.3844608171466845, |
| "grad_norm": 0.4234335720539093, |
| "learning_rate": 0.0001, |
| "loss": 1.4449, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.384795713328868, |
| "grad_norm": 0.4380183815956116, |
| "learning_rate": 0.0001, |
| "loss": 1.4661, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.38513060951105155, |
| "grad_norm": 0.4707789421081543, |
| "learning_rate": 0.0001, |
| "loss": 1.6386, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.3854655056932351, |
| "grad_norm": 0.4421239197254181, |
| "learning_rate": 0.0001, |
| "loss": 1.4635, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.38580040187541864, |
| "grad_norm": 0.47190219163894653, |
| "learning_rate": 0.0001, |
| "loss": 1.4755, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.38613529805760216, |
| "grad_norm": 0.4488900899887085, |
| "learning_rate": 0.0001, |
| "loss": 1.5538, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.3864701942397857, |
| "grad_norm": 0.46024009585380554, |
| "learning_rate": 0.0001, |
| "loss": 1.4989, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.3868050904219692, |
| "grad_norm": 0.45907169580459595, |
| "learning_rate": 0.0001, |
| "loss": 1.6402, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.3871399866041527, |
| "grad_norm": 0.4615232050418854, |
| "learning_rate": 0.0001, |
| "loss": 1.6111, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.38747488278633624, |
| "grad_norm": 0.4274507164955139, |
| "learning_rate": 0.0001, |
| "loss": 1.4771, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.38780977896851976, |
| "grad_norm": 0.46233120560646057, |
| "learning_rate": 0.0001, |
| "loss": 1.511, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.3881446751507033, |
| "grad_norm": 0.46118366718292236, |
| "learning_rate": 0.0001, |
| "loss": 1.5729, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.3884795713328868, |
| "grad_norm": 0.4777587056159973, |
| "learning_rate": 0.0001, |
| "loss": 1.656, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.3888144675150703, |
| "grad_norm": 0.4458993971347809, |
| "learning_rate": 0.0001, |
| "loss": 1.5991, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.38914936369725384, |
| "grad_norm": 0.4479345679283142, |
| "learning_rate": 0.0001, |
| "loss": 1.5968, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.38948425987943736, |
| "grad_norm": 0.45050573348999023, |
| "learning_rate": 0.0001, |
| "loss": 1.5441, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.3898191560616209, |
| "grad_norm": 0.45146119594573975, |
| "learning_rate": 0.0001, |
| "loss": 1.5685, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.3901540522438044, |
| "grad_norm": 0.46487465500831604, |
| "learning_rate": 0.0001, |
| "loss": 1.6563, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.39048894842598797, |
| "grad_norm": 0.45339757204055786, |
| "learning_rate": 0.0001, |
| "loss": 1.5304, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.3908238446081715, |
| "grad_norm": 0.4561135172843933, |
| "learning_rate": 0.0001, |
| "loss": 1.5212, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.391158740790355, |
| "grad_norm": 0.471395343542099, |
| "learning_rate": 0.0001, |
| "loss": 1.6656, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.39149363697253853, |
| "grad_norm": 0.4500054121017456, |
| "learning_rate": 0.0001, |
| "loss": 1.5474, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.39182853315472205, |
| "grad_norm": 0.4495108425617218, |
| "learning_rate": 0.0001, |
| "loss": 1.4906, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.39216342933690557, |
| "grad_norm": 0.4497644007205963, |
| "learning_rate": 0.0001, |
| "loss": 1.6199, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.3924983255190891, |
| "grad_norm": 0.47592660784721375, |
| "learning_rate": 0.0001, |
| "loss": 1.5653, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.3928332217012726, |
| "grad_norm": 0.4495856761932373, |
| "learning_rate": 0.0001, |
| "loss": 1.5728, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.3931681178834561, |
| "grad_norm": 0.4661826193332672, |
| "learning_rate": 0.0001, |
| "loss": 1.5641, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.39350301406563964, |
| "grad_norm": 0.4343692660331726, |
| "learning_rate": 0.0001, |
| "loss": 1.5582, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.39383791024782316, |
| "grad_norm": 0.48545411229133606, |
| "learning_rate": 0.0001, |
| "loss": 1.5754, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.3941728064300067, |
| "grad_norm": 0.44631296396255493, |
| "learning_rate": 0.0001, |
| "loss": 1.6401, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.3945077026121902, |
| "grad_norm": 0.46297982335090637, |
| "learning_rate": 0.0001, |
| "loss": 1.4894, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.3948425987943737, |
| "grad_norm": 0.435683935880661, |
| "learning_rate": 0.0001, |
| "loss": 1.5418, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.39517749497655724, |
| "grad_norm": 0.47171199321746826, |
| "learning_rate": 0.0001, |
| "loss": 1.5518, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.3955123911587408, |
| "grad_norm": 0.44476407766342163, |
| "learning_rate": 0.0001, |
| "loss": 1.5165, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.39584728734092434, |
| "grad_norm": 0.45694172382354736, |
| "learning_rate": 0.0001, |
| "loss": 1.6215, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.39618218352310786, |
| "grad_norm": 0.4570925533771515, |
| "learning_rate": 0.0001, |
| "loss": 1.543, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.3965170797052914, |
| "grad_norm": 0.4366303086280823, |
| "learning_rate": 0.0001, |
| "loss": 1.521, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.3968519758874749, |
| "grad_norm": 0.44927310943603516, |
| "learning_rate": 0.0001, |
| "loss": 1.5209, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.3971868720696584, |
| "grad_norm": 0.4475070536136627, |
| "learning_rate": 0.0001, |
| "loss": 1.53, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.39752176825184193, |
| "grad_norm": 0.4301213026046753, |
| "learning_rate": 0.0001, |
| "loss": 1.5712, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.39785666443402545, |
| "grad_norm": 0.4296334385871887, |
| "learning_rate": 0.0001, |
| "loss": 1.5227, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.398191560616209, |
| "grad_norm": 0.4464989900588989, |
| "learning_rate": 0.0001, |
| "loss": 1.553, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.3985264567983925, |
| "grad_norm": 0.4370761513710022, |
| "learning_rate": 0.0001, |
| "loss": 1.5375, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.398861352980576, |
| "grad_norm": 0.43171000480651855, |
| "learning_rate": 0.0001, |
| "loss": 1.4762, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.39919624916275953, |
| "grad_norm": 0.457441508769989, |
| "learning_rate": 0.0001, |
| "loss": 1.5028, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.39953114534494305, |
| "grad_norm": 0.4684367775917053, |
| "learning_rate": 0.0001, |
| "loss": 1.6496, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.39986604152712657, |
| "grad_norm": 0.4578244686126709, |
| "learning_rate": 0.0001, |
| "loss": 1.573, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.4002009377093101, |
| "grad_norm": 0.46412548422813416, |
| "learning_rate": 0.0001, |
| "loss": 1.4879, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.40053583389149366, |
| "grad_norm": 0.4387846291065216, |
| "learning_rate": 0.0001, |
| "loss": 1.5545, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.4008707300736772, |
| "grad_norm": 0.44225457310676575, |
| "learning_rate": 0.0001, |
| "loss": 1.5465, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.4012056262558607, |
| "grad_norm": 0.44463130831718445, |
| "learning_rate": 0.0001, |
| "loss": 1.5255, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.4015405224380442, |
| "grad_norm": 0.4457281231880188, |
| "learning_rate": 0.0001, |
| "loss": 1.5522, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.40187541862022774, |
| "grad_norm": 0.4476866126060486, |
| "learning_rate": 0.0001, |
| "loss": 1.4994, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.40221031480241126, |
| "grad_norm": 0.44953233003616333, |
| "learning_rate": 0.0001, |
| "loss": 1.5804, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.4025452109845948, |
| "grad_norm": 0.46008914709091187, |
| "learning_rate": 0.0001, |
| "loss": 1.679, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.4028801071667783, |
| "grad_norm": 0.4389602243900299, |
| "learning_rate": 0.0001, |
| "loss": 1.4782, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.4032150033489618, |
| "grad_norm": 0.43227729201316833, |
| "learning_rate": 0.0001, |
| "loss": 1.469, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.40354989953114534, |
| "grad_norm": 0.4598828852176666, |
| "learning_rate": 0.0001, |
| "loss": 1.6371, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.40388479571332886, |
| "grad_norm": 0.4646576941013336, |
| "learning_rate": 0.0001, |
| "loss": 1.597, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.4042196918955124, |
| "grad_norm": 0.444198876619339, |
| "learning_rate": 0.0001, |
| "loss": 1.5285, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.4045545880776959, |
| "grad_norm": 0.468888521194458, |
| "learning_rate": 0.0001, |
| "loss": 1.5654, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.4048894842598794, |
| "grad_norm": 0.4509347677230835, |
| "learning_rate": 0.0001, |
| "loss": 1.5588, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.40522438044206294, |
| "grad_norm": 0.463221937417984, |
| "learning_rate": 0.0001, |
| "loss": 1.6426, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.40555927662424646, |
| "grad_norm": 0.44848695397377014, |
| "learning_rate": 0.0001, |
| "loss": 1.5946, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.40589417280643003, |
| "grad_norm": 0.45708489418029785, |
| "learning_rate": 0.0001, |
| "loss": 1.6386, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.40622906898861355, |
| "grad_norm": 0.4351506233215332, |
| "learning_rate": 0.0001, |
| "loss": 1.4849, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.40656396517079707, |
| "grad_norm": 0.43319088220596313, |
| "learning_rate": 0.0001, |
| "loss": 1.6073, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.4068988613529806, |
| "grad_norm": 0.4341161549091339, |
| "learning_rate": 0.0001, |
| "loss": 1.5351, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.4072337575351641, |
| "grad_norm": 0.4327983558177948, |
| "learning_rate": 0.0001, |
| "loss": 1.5076, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.40756865371734763, |
| "grad_norm": 0.4561294615268707, |
| "learning_rate": 0.0001, |
| "loss": 1.5733, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.40790354989953115, |
| "grad_norm": 0.4407605230808258, |
| "learning_rate": 0.0001, |
| "loss": 1.5913, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.40823844608171467, |
| "grad_norm": 0.4305534362792969, |
| "learning_rate": 0.0001, |
| "loss": 1.5868, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.4085733422638982, |
| "grad_norm": 0.4355212152004242, |
| "learning_rate": 0.0001, |
| "loss": 1.5317, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.4089082384460817, |
| "grad_norm": 0.4315885007381439, |
| "learning_rate": 0.0001, |
| "loss": 1.5013, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.4092431346282652, |
| "grad_norm": 0.4414379596710205, |
| "learning_rate": 0.0001, |
| "loss": 1.4476, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.40957803081044875, |
| "grad_norm": 0.45077255368232727, |
| "learning_rate": 0.0001, |
| "loss": 1.4619, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.40991292699263226, |
| "grad_norm": 0.5059629678726196, |
| "learning_rate": 0.0001, |
| "loss": 1.5868, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.4102478231748158, |
| "grad_norm": 0.4635983109474182, |
| "learning_rate": 0.0001, |
| "loss": 1.6556, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.4105827193569993, |
| "grad_norm": 0.4328989088535309, |
| "learning_rate": 0.0001, |
| "loss": 1.4919, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.4109176155391829, |
| "grad_norm": 0.4631066918373108, |
| "learning_rate": 0.0001, |
| "loss": 1.3765, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.4112525117213664, |
| "grad_norm": 0.4278043508529663, |
| "learning_rate": 0.0001, |
| "loss": 1.4469, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.4115874079035499, |
| "grad_norm": 0.5048599243164062, |
| "learning_rate": 0.0001, |
| "loss": 1.5859, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.41192230408573344, |
| "grad_norm": 0.4466727674007416, |
| "learning_rate": 0.0001, |
| "loss": 1.5656, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.41225720026791696, |
| "grad_norm": 0.48620685935020447, |
| "learning_rate": 0.0001, |
| "loss": 1.5889, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.4125920964501005, |
| "grad_norm": 0.47321709990501404, |
| "learning_rate": 0.0001, |
| "loss": 1.6017, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.412926992632284, |
| "grad_norm": 0.48233163356781006, |
| "learning_rate": 0.0001, |
| "loss": 1.5773, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.4132618888144675, |
| "grad_norm": 0.4501434564590454, |
| "learning_rate": 0.0001, |
| "loss": 1.5323, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.41359678499665103, |
| "grad_norm": 0.45282772183418274, |
| "learning_rate": 0.0001, |
| "loss": 1.5687, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.41393168117883455, |
| "grad_norm": 0.4586126506328583, |
| "learning_rate": 0.0001, |
| "loss": 1.5614, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.4142665773610181, |
| "grad_norm": 0.4504382312297821, |
| "learning_rate": 0.0001, |
| "loss": 1.5353, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.4146014735432016, |
| "grad_norm": 0.46755629777908325, |
| "learning_rate": 0.0001, |
| "loss": 1.5325, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.4149363697253851, |
| "grad_norm": 0.45872950553894043, |
| "learning_rate": 0.0001, |
| "loss": 1.5826, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.41527126590756863, |
| "grad_norm": 0.47714489698410034, |
| "learning_rate": 0.0001, |
| "loss": 1.574, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.41560616208975215, |
| "grad_norm": 0.449758380651474, |
| "learning_rate": 0.0001, |
| "loss": 1.5127, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.4159410582719357, |
| "grad_norm": 0.4618908166885376, |
| "learning_rate": 0.0001, |
| "loss": 1.7078, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.41627595445411925, |
| "grad_norm": 0.43264150619506836, |
| "learning_rate": 0.0001, |
| "loss": 1.5085, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.41661085063630277, |
| "grad_norm": 0.4599592387676239, |
| "learning_rate": 0.0001, |
| "loss": 1.553, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.4169457468184863, |
| "grad_norm": 0.43902966380119324, |
| "learning_rate": 0.0001, |
| "loss": 1.5751, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.4172806430006698, |
| "grad_norm": 0.4508312940597534, |
| "learning_rate": 0.0001, |
| "loss": 1.5752, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.4176155391828533, |
| "grad_norm": 0.4534173309803009, |
| "learning_rate": 0.0001, |
| "loss": 1.5037, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.41795043536503684, |
| "grad_norm": 0.46478843688964844, |
| "learning_rate": 0.0001, |
| "loss": 1.6001, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.41828533154722036, |
| "grad_norm": 0.47987523674964905, |
| "learning_rate": 0.0001, |
| "loss": 1.564, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.4186202277294039, |
| "grad_norm": 0.46117353439331055, |
| "learning_rate": 0.0001, |
| "loss": 1.5695, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.4189551239115874, |
| "grad_norm": 0.46787649393081665, |
| "learning_rate": 0.0001, |
| "loss": 1.4925, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.4192900200937709, |
| "grad_norm": 0.4669802784919739, |
| "learning_rate": 0.0001, |
| "loss": 1.5902, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.41962491627595444, |
| "grad_norm": 0.4593794047832489, |
| "learning_rate": 0.0001, |
| "loss": 1.5026, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.41995981245813796, |
| "grad_norm": 0.4430108964443207, |
| "learning_rate": 0.0001, |
| "loss": 1.6007, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.4202947086403215, |
| "grad_norm": 0.48188650608062744, |
| "learning_rate": 0.0001, |
| "loss": 1.6276, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.420629604822505, |
| "grad_norm": 0.44512104988098145, |
| "learning_rate": 0.0001, |
| "loss": 1.476, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.4209645010046886, |
| "grad_norm": 0.46867334842681885, |
| "learning_rate": 0.0001, |
| "loss": 1.5168, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.4212993971868721, |
| "grad_norm": 0.4648856222629547, |
| "learning_rate": 0.0001, |
| "loss": 1.6594, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.4216342933690556, |
| "grad_norm": 0.47854331135749817, |
| "learning_rate": 0.0001, |
| "loss": 1.567, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.42196918955123913, |
| "grad_norm": 0.48470669984817505, |
| "learning_rate": 0.0001, |
| "loss": 1.6363, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.42230408573342265, |
| "grad_norm": 0.44366371631622314, |
| "learning_rate": 0.0001, |
| "loss": 1.5321, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.42263898191560617, |
| "grad_norm": 0.43455782532691956, |
| "learning_rate": 0.0001, |
| "loss": 1.4234, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.4229738780977897, |
| "grad_norm": 0.44860178232192993, |
| "learning_rate": 0.0001, |
| "loss": 1.4949, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.4233087742799732, |
| "grad_norm": 0.47595492005348206, |
| "learning_rate": 0.0001, |
| "loss": 1.6015, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.42364367046215673, |
| "grad_norm": 0.4563322961330414, |
| "learning_rate": 0.0001, |
| "loss": 1.5518, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.42397856664434025, |
| "grad_norm": 0.45181918144226074, |
| "learning_rate": 0.0001, |
| "loss": 1.6027, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.42431346282652377, |
| "grad_norm": 0.43826577067375183, |
| "learning_rate": 0.0001, |
| "loss": 1.5372, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.4246483590087073, |
| "grad_norm": 0.46303653717041016, |
| "learning_rate": 0.0001, |
| "loss": 1.6133, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.4249832551908908, |
| "grad_norm": 0.4417208731174469, |
| "learning_rate": 0.0001, |
| "loss": 1.6145, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.4253181513730743, |
| "grad_norm": 0.4449644684791565, |
| "learning_rate": 0.0001, |
| "loss": 1.531, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.42565304755525785, |
| "grad_norm": 0.4542076289653778, |
| "learning_rate": 0.0001, |
| "loss": 1.5358, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.42598794373744137, |
| "grad_norm": 0.4694463610649109, |
| "learning_rate": 0.0001, |
| "loss": 1.6805, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.42632283991962494, |
| "grad_norm": 0.4595602750778198, |
| "learning_rate": 0.0001, |
| "loss": 1.5962, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.42665773610180846, |
| "grad_norm": 0.4681914746761322, |
| "learning_rate": 0.0001, |
| "loss": 1.591, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.426992632283992, |
| "grad_norm": 0.4733419120311737, |
| "learning_rate": 0.0001, |
| "loss": 1.4529, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.4273275284661755, |
| "grad_norm": 0.43556496500968933, |
| "learning_rate": 0.0001, |
| "loss": 1.5598, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.427662424648359, |
| "grad_norm": 0.4413219094276428, |
| "learning_rate": 0.0001, |
| "loss": 1.4457, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.42799732083054254, |
| "grad_norm": 0.4566291272640228, |
| "learning_rate": 0.0001, |
| "loss": 1.5364, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.42833221701272606, |
| "grad_norm": 0.43846362829208374, |
| "learning_rate": 0.0001, |
| "loss": 1.5502, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.4286671131949096, |
| "grad_norm": 0.44239136576652527, |
| "learning_rate": 0.0001, |
| "loss": 1.5821, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.4290020093770931, |
| "grad_norm": 0.41309061646461487, |
| "learning_rate": 0.0001, |
| "loss": 1.3608, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.4293369055592766, |
| "grad_norm": 0.4513871371746063, |
| "learning_rate": 0.0001, |
| "loss": 1.6097, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.42967180174146014, |
| "grad_norm": 0.44489651918411255, |
| "learning_rate": 0.0001, |
| "loss": 1.5062, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.43000669792364365, |
| "grad_norm": 0.4357435405254364, |
| "learning_rate": 0.0001, |
| "loss": 1.4767, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.4303415941058272, |
| "grad_norm": 0.44091832637786865, |
| "learning_rate": 0.0001, |
| "loss": 1.5169, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.4306764902880107, |
| "grad_norm": 0.4526561498641968, |
| "learning_rate": 0.0001, |
| "loss": 1.5086, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.4310113864701942, |
| "grad_norm": 0.43664780259132385, |
| "learning_rate": 0.0001, |
| "loss": 1.5234, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.4313462826523778, |
| "grad_norm": 0.4524122178554535, |
| "learning_rate": 0.0001, |
| "loss": 1.5574, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.4316811788345613, |
| "grad_norm": 0.4236125946044922, |
| "learning_rate": 0.0001, |
| "loss": 1.5119, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.4320160750167448, |
| "grad_norm": 0.44944438338279724, |
| "learning_rate": 0.0001, |
| "loss": 1.6364, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.43235097119892835, |
| "grad_norm": 0.44755467772483826, |
| "learning_rate": 0.0001, |
| "loss": 1.5394, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.43268586738111187, |
| "grad_norm": 0.4241560697555542, |
| "learning_rate": 0.0001, |
| "loss": 1.4207, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.4330207635632954, |
| "grad_norm": 0.45893594622612, |
| "learning_rate": 0.0001, |
| "loss": 1.581, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.4333556597454789, |
| "grad_norm": 0.4530966877937317, |
| "learning_rate": 0.0001, |
| "loss": 1.6222, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.4336905559276624, |
| "grad_norm": 0.4758310616016388, |
| "learning_rate": 0.0001, |
| "loss": 1.651, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.43402545210984594, |
| "grad_norm": 0.442427396774292, |
| "learning_rate": 0.0001, |
| "loss": 1.6638, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.43436034829202946, |
| "grad_norm": 0.4493705928325653, |
| "learning_rate": 0.0001, |
| "loss": 1.4638, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.434695244474213, |
| "grad_norm": 0.44565629959106445, |
| "learning_rate": 0.0001, |
| "loss": 1.5027, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.4350301406563965, |
| "grad_norm": 0.44541066884994507, |
| "learning_rate": 0.0001, |
| "loss": 1.6266, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.43536503683858, |
| "grad_norm": 0.4457719624042511, |
| "learning_rate": 0.0001, |
| "loss": 1.5075, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.43569993302076354, |
| "grad_norm": 0.4656354486942291, |
| "learning_rate": 0.0001, |
| "loss": 1.6376, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.43603482920294706, |
| "grad_norm": 0.44514432549476624, |
| "learning_rate": 0.0001, |
| "loss": 1.5267, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.43636972538513064, |
| "grad_norm": 0.43724554777145386, |
| "learning_rate": 0.0001, |
| "loss": 1.5427, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.43670462156731416, |
| "grad_norm": 0.44671183824539185, |
| "learning_rate": 0.0001, |
| "loss": 1.5544, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.4370395177494977, |
| "grad_norm": 0.4274207353591919, |
| "learning_rate": 0.0001, |
| "loss": 1.4558, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.4373744139316812, |
| "grad_norm": 0.45309945940971375, |
| "learning_rate": 0.0001, |
| "loss": 1.5629, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.4377093101138647, |
| "grad_norm": 0.5383068323135376, |
| "learning_rate": 0.0001, |
| "loss": 1.5105, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.43804420629604823, |
| "grad_norm": 0.4550372064113617, |
| "learning_rate": 0.0001, |
| "loss": 1.587, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.43837910247823175, |
| "grad_norm": 0.4549542963504791, |
| "learning_rate": 0.0001, |
| "loss": 1.5023, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.43871399866041527, |
| "grad_norm": 0.49144187569618225, |
| "learning_rate": 0.0001, |
| "loss": 1.5724, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.4390488948425988, |
| "grad_norm": 0.4485637843608856, |
| "learning_rate": 0.0001, |
| "loss": 1.59, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.4393837910247823, |
| "grad_norm": 0.47622618079185486, |
| "learning_rate": 0.0001, |
| "loss": 1.6332, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.43971868720696583, |
| "grad_norm": 0.4740828573703766, |
| "learning_rate": 0.0001, |
| "loss": 1.5848, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.44005358338914935, |
| "grad_norm": 0.4865962862968445, |
| "learning_rate": 0.0001, |
| "loss": 1.6034, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.44038847957133287, |
| "grad_norm": 0.4601993262767792, |
| "learning_rate": 0.0001, |
| "loss": 1.5108, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.4407233757535164, |
| "grad_norm": 0.4529930055141449, |
| "learning_rate": 0.0001, |
| "loss": 1.5559, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.4410582719356999, |
| "grad_norm": 0.45661240816116333, |
| "learning_rate": 0.0001, |
| "loss": 1.5116, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.4413931681178835, |
| "grad_norm": 0.46067968010902405, |
| "learning_rate": 0.0001, |
| "loss": 1.5718, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.441728064300067, |
| "grad_norm": 0.4406924545764923, |
| "learning_rate": 0.0001, |
| "loss": 1.5766, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.4420629604822505, |
| "grad_norm": 0.41819578409194946, |
| "learning_rate": 0.0001, |
| "loss": 1.5329, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.44239785666443404, |
| "grad_norm": 0.4444178342819214, |
| "learning_rate": 0.0001, |
| "loss": 1.501, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.44273275284661756, |
| "grad_norm": 0.47189411520957947, |
| "learning_rate": 0.0001, |
| "loss": 1.5189, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.4430676490288011, |
| "grad_norm": 0.44008877873420715, |
| "learning_rate": 0.0001, |
| "loss": 1.5558, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.4434025452109846, |
| "grad_norm": 0.44241729378700256, |
| "learning_rate": 0.0001, |
| "loss": 1.4663, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.4437374413931681, |
| "grad_norm": 0.4437888264656067, |
| "learning_rate": 0.0001, |
| "loss": 1.5171, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.44407233757535164, |
| "grad_norm": 0.4380750358104706, |
| "learning_rate": 0.0001, |
| "loss": 1.4767, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.44440723375753516, |
| "grad_norm": 0.44850581884384155, |
| "learning_rate": 0.0001, |
| "loss": 1.6045, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.4447421299397187, |
| "grad_norm": 0.47014400362968445, |
| "learning_rate": 0.0001, |
| "loss": 1.5321, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.4450770261219022, |
| "grad_norm": 0.4461109936237335, |
| "learning_rate": 0.0001, |
| "loss": 1.5916, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.4454119223040857, |
| "grad_norm": 0.4577700197696686, |
| "learning_rate": 0.0001, |
| "loss": 1.5366, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.44574681848626924, |
| "grad_norm": 0.47515833377838135, |
| "learning_rate": 0.0001, |
| "loss": 1.6021, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.44608171466845276, |
| "grad_norm": 0.4587557911872864, |
| "learning_rate": 0.0001, |
| "loss": 1.5462, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.4464166108506363, |
| "grad_norm": 0.45300590991973877, |
| "learning_rate": 0.0001, |
| "loss": 1.4961, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.44675150703281985, |
| "grad_norm": 0.42595532536506653, |
| "learning_rate": 0.0001, |
| "loss": 1.4913, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.44708640321500337, |
| "grad_norm": 0.4388653635978699, |
| "learning_rate": 0.0001, |
| "loss": 1.4831, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.4474212993971869, |
| "grad_norm": 0.4430214464664459, |
| "learning_rate": 0.0001, |
| "loss": 1.5763, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.4477561955793704, |
| "grad_norm": 0.4435592591762543, |
| "learning_rate": 0.0001, |
| "loss": 1.5364, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.44809109176155393, |
| "grad_norm": 0.4518854320049286, |
| "learning_rate": 0.0001, |
| "loss": 1.5821, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.44842598794373745, |
| "grad_norm": 0.4529475271701813, |
| "learning_rate": 0.0001, |
| "loss": 1.5938, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.44876088412592097, |
| "grad_norm": 0.4599657654762268, |
| "learning_rate": 0.0001, |
| "loss": 1.5055, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.4490957803081045, |
| "grad_norm": 0.46153104305267334, |
| "learning_rate": 0.0001, |
| "loss": 1.5245, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.449430676490288, |
| "grad_norm": 0.4551970064640045, |
| "learning_rate": 0.0001, |
| "loss": 1.5615, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.4497655726724715, |
| "grad_norm": 0.4437796473503113, |
| "learning_rate": 0.0001, |
| "loss": 1.5732, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.45010046885465504, |
| "grad_norm": 0.4721779227256775, |
| "learning_rate": 0.0001, |
| "loss": 1.63, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.45043536503683856, |
| "grad_norm": 0.45753568410873413, |
| "learning_rate": 0.0001, |
| "loss": 1.5609, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.4507702612190221, |
| "grad_norm": 0.4599592983722687, |
| "learning_rate": 0.0001, |
| "loss": 1.5068, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.4511051574012056, |
| "grad_norm": 0.45269471406936646, |
| "learning_rate": 0.0001, |
| "loss": 1.5497, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.4514400535833891, |
| "grad_norm": 0.44746312499046326, |
| "learning_rate": 0.0001, |
| "loss": 1.4903, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.4517749497655727, |
| "grad_norm": 0.4894854724407196, |
| "learning_rate": 0.0001, |
| "loss": 1.635, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.4521098459477562, |
| "grad_norm": 0.4542970359325409, |
| "learning_rate": 0.0001, |
| "loss": 1.5604, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.45244474212993974, |
| "grad_norm": 0.4588306248188019, |
| "learning_rate": 0.0001, |
| "loss": 1.5299, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.45277963831212326, |
| "grad_norm": 0.44788745045661926, |
| "learning_rate": 0.0001, |
| "loss": 1.5458, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.4531145344943068, |
| "grad_norm": 0.45654165744781494, |
| "learning_rate": 0.0001, |
| "loss": 1.555, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.4534494306764903, |
| "grad_norm": 0.44824379682540894, |
| "learning_rate": 0.0001, |
| "loss": 1.5576, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.4537843268586738, |
| "grad_norm": 0.446819543838501, |
| "learning_rate": 0.0001, |
| "loss": 1.4559, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.45411922304085733, |
| "grad_norm": 0.4417940378189087, |
| "learning_rate": 0.0001, |
| "loss": 1.3853, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.45445411922304085, |
| "grad_norm": 0.43769514560699463, |
| "learning_rate": 0.0001, |
| "loss": 1.5094, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.4547890154052244, |
| "grad_norm": 0.4573988914489746, |
| "learning_rate": 0.0001, |
| "loss": 1.5601, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.4551239115874079, |
| "grad_norm": 0.4574551582336426, |
| "learning_rate": 0.0001, |
| "loss": 1.6729, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.4554588077695914, |
| "grad_norm": 0.4588263928890228, |
| "learning_rate": 0.0001, |
| "loss": 1.5916, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.45579370395177493, |
| "grad_norm": 0.4549427032470703, |
| "learning_rate": 0.0001, |
| "loss": 1.5437, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.45612860013395845, |
| "grad_norm": 0.4670909643173218, |
| "learning_rate": 0.0001, |
| "loss": 1.5125, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.45646349631614197, |
| "grad_norm": 0.45247015357017517, |
| "learning_rate": 0.0001, |
| "loss": 1.5788, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.45679839249832555, |
| "grad_norm": 0.4637114703655243, |
| "learning_rate": 0.0001, |
| "loss": 1.6297, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.45713328868050906, |
| "grad_norm": 0.4805709719657898, |
| "learning_rate": 0.0001, |
| "loss": 1.6032, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.4574681848626926, |
| "grad_norm": 0.44155606627464294, |
| "learning_rate": 0.0001, |
| "loss": 1.486, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.4578030810448761, |
| "grad_norm": 0.46776220202445984, |
| "learning_rate": 0.0001, |
| "loss": 1.5869, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.4581379772270596, |
| "grad_norm": 0.47851744294166565, |
| "learning_rate": 0.0001, |
| "loss": 1.5728, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.45847287340924314, |
| "grad_norm": 0.4378662407398224, |
| "learning_rate": 0.0001, |
| "loss": 1.5544, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.45880776959142666, |
| "grad_norm": 0.4366649389266968, |
| "learning_rate": 0.0001, |
| "loss": 1.455, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.4591426657736102, |
| "grad_norm": 0.4515934884548187, |
| "learning_rate": 0.0001, |
| "loss": 1.545, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.4594775619557937, |
| "grad_norm": 0.4429531395435333, |
| "learning_rate": 0.0001, |
| "loss": 1.4451, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.4598124581379772, |
| "grad_norm": 0.4400520920753479, |
| "learning_rate": 0.0001, |
| "loss": 1.4532, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.46014735432016074, |
| "grad_norm": 0.4494495987892151, |
| "learning_rate": 0.0001, |
| "loss": 1.4642, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.46048225050234426, |
| "grad_norm": 0.447348415851593, |
| "learning_rate": 0.0001, |
| "loss": 1.5466, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.4608171466845278, |
| "grad_norm": 0.4689560532569885, |
| "learning_rate": 0.0001, |
| "loss": 1.62, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.4611520428667113, |
| "grad_norm": 0.4379405379295349, |
| "learning_rate": 0.0001, |
| "loss": 1.4164, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.4614869390488948, |
| "grad_norm": 0.4504340589046478, |
| "learning_rate": 0.0001, |
| "loss": 1.5672, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.4618218352310784, |
| "grad_norm": 0.466804176568985, |
| "learning_rate": 0.0001, |
| "loss": 1.4973, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.4621567314132619, |
| "grad_norm": 0.45532482862472534, |
| "learning_rate": 0.0001, |
| "loss": 1.5122, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.46249162759544543, |
| "grad_norm": 0.4531165361404419, |
| "learning_rate": 0.0001, |
| "loss": 1.5097, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.46282652377762895, |
| "grad_norm": 0.44292378425598145, |
| "learning_rate": 0.0001, |
| "loss": 1.5498, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.46316141995981247, |
| "grad_norm": 0.4559877812862396, |
| "learning_rate": 0.0001, |
| "loss": 1.5847, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.463496316141996, |
| "grad_norm": 0.4499688744544983, |
| "learning_rate": 0.0001, |
| "loss": 1.5498, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.4638312123241795, |
| "grad_norm": 0.46169179677963257, |
| "learning_rate": 0.0001, |
| "loss": 1.5752, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.46416610850636303, |
| "grad_norm": 0.4544617235660553, |
| "learning_rate": 0.0001, |
| "loss": 1.5089, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.46450100468854655, |
| "grad_norm": 0.4471500515937805, |
| "learning_rate": 0.0001, |
| "loss": 1.5842, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.46483590087073007, |
| "grad_norm": 0.490632027387619, |
| "learning_rate": 0.0001, |
| "loss": 1.6259, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.4651707970529136, |
| "grad_norm": 0.46217668056488037, |
| "learning_rate": 0.0001, |
| "loss": 1.5832, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.4655056932350971, |
| "grad_norm": 0.45851930975914, |
| "learning_rate": 0.0001, |
| "loss": 1.4738, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.4658405894172806, |
| "grad_norm": 0.43563809990882874, |
| "learning_rate": 0.0001, |
| "loss": 1.4767, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.46617548559946415, |
| "grad_norm": 0.4546947181224823, |
| "learning_rate": 0.0001, |
| "loss": 1.5412, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.46651038178164767, |
| "grad_norm": 0.456242173910141, |
| "learning_rate": 0.0001, |
| "loss": 1.5476, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.46684527796383124, |
| "grad_norm": 0.4377875328063965, |
| "learning_rate": 0.0001, |
| "loss": 1.4826, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.46718017414601476, |
| "grad_norm": 0.4969845414161682, |
| "learning_rate": 0.0001, |
| "loss": 1.6719, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.4675150703281983, |
| "grad_norm": 0.4470406770706177, |
| "learning_rate": 0.0001, |
| "loss": 1.5464, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.4678499665103818, |
| "grad_norm": 0.47641536593437195, |
| "learning_rate": 0.0001, |
| "loss": 1.6137, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.4681848626925653, |
| "grad_norm": 0.44054463505744934, |
| "learning_rate": 0.0001, |
| "loss": 1.4965, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.46851975887474884, |
| "grad_norm": 0.4915468096733093, |
| "learning_rate": 0.0001, |
| "loss": 1.5505, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.46885465505693236, |
| "grad_norm": 0.43659910559654236, |
| "learning_rate": 0.0001, |
| "loss": 1.415, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.4691895512391159, |
| "grad_norm": 0.47145307064056396, |
| "learning_rate": 0.0001, |
| "loss": 1.5463, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.4695244474212994, |
| "grad_norm": 0.470012366771698, |
| "learning_rate": 0.0001, |
| "loss": 1.632, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.4698593436034829, |
| "grad_norm": 0.4900912642478943, |
| "learning_rate": 0.0001, |
| "loss": 1.5168, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.47019423978566643, |
| "grad_norm": 0.5177119374275208, |
| "learning_rate": 0.0001, |
| "loss": 1.5512, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.47052913596784995, |
| "grad_norm": 0.5017977952957153, |
| "learning_rate": 0.0001, |
| "loss": 1.4807, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.4708640321500335, |
| "grad_norm": 0.4529106616973877, |
| "learning_rate": 0.0001, |
| "loss": 1.5158, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.471198928332217, |
| "grad_norm": 0.4319479763507843, |
| "learning_rate": 0.0001, |
| "loss": 1.5249, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.4715338245144005, |
| "grad_norm": 0.4647986590862274, |
| "learning_rate": 0.0001, |
| "loss": 1.4653, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.47186872069658403, |
| "grad_norm": 0.458469420671463, |
| "learning_rate": 0.0001, |
| "loss": 1.4954, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.4722036168787676, |
| "grad_norm": 0.50237637758255, |
| "learning_rate": 0.0001, |
| "loss": 1.6159, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.4725385130609511, |
| "grad_norm": 0.46038538217544556, |
| "learning_rate": 0.0001, |
| "loss": 1.5783, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.47287340924313465, |
| "grad_norm": 0.46736159920692444, |
| "learning_rate": 0.0001, |
| "loss": 1.6369, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.47320830542531817, |
| "grad_norm": 0.47504475712776184, |
| "learning_rate": 0.0001, |
| "loss": 1.5477, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.4735432016075017, |
| "grad_norm": 0.4639604687690735, |
| "learning_rate": 0.0001, |
| "loss": 1.4814, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.4738780977896852, |
| "grad_norm": 0.4924888014793396, |
| "learning_rate": 0.0001, |
| "loss": 1.7246, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.4742129939718687, |
| "grad_norm": 0.4379710257053375, |
| "learning_rate": 0.0001, |
| "loss": 1.4826, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.47454789015405224, |
| "grad_norm": 0.4734916687011719, |
| "learning_rate": 0.0001, |
| "loss": 1.5407, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.47488278633623576, |
| "grad_norm": 0.4327939748764038, |
| "learning_rate": 0.0001, |
| "loss": 1.5339, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.4752176825184193, |
| "grad_norm": 0.4371609091758728, |
| "learning_rate": 0.0001, |
| "loss": 1.5058, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.4755525787006028, |
| "grad_norm": 0.43390345573425293, |
| "learning_rate": 0.0001, |
| "loss": 1.5108, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.4758874748827863, |
| "grad_norm": 0.44707202911376953, |
| "learning_rate": 0.0001, |
| "loss": 1.5266, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.47622237106496984, |
| "grad_norm": 0.4597282111644745, |
| "learning_rate": 0.0001, |
| "loss": 1.5125, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.47655726724715336, |
| "grad_norm": 0.495181143283844, |
| "learning_rate": 0.0001, |
| "loss": 1.5363, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.4768921634293369, |
| "grad_norm": 0.447334349155426, |
| "learning_rate": 0.0001, |
| "loss": 1.5074, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.47722705961152045, |
| "grad_norm": 0.450639545917511, |
| "learning_rate": 0.0001, |
| "loss": 1.5462, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.477561955793704, |
| "grad_norm": 0.4501533806324005, |
| "learning_rate": 0.0001, |
| "loss": 1.4815, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.4778968519758875, |
| "grad_norm": 0.4699714779853821, |
| "learning_rate": 0.0001, |
| "loss": 1.5042, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.478231748158071, |
| "grad_norm": 0.4734054505825043, |
| "learning_rate": 0.0001, |
| "loss": 1.6161, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.47856664434025453, |
| "grad_norm": 0.46738970279693604, |
| "learning_rate": 0.0001, |
| "loss": 1.5816, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.47890154052243805, |
| "grad_norm": 0.43620428442955017, |
| "learning_rate": 0.0001, |
| "loss": 1.5605, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.47923643670462157, |
| "grad_norm": 0.48260974884033203, |
| "learning_rate": 0.0001, |
| "loss": 1.6668, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.4795713328868051, |
| "grad_norm": 0.4639464318752289, |
| "learning_rate": 0.0001, |
| "loss": 1.5394, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.4799062290689886, |
| "grad_norm": 0.456023633480072, |
| "learning_rate": 0.0001, |
| "loss": 1.5617, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.48024112525117213, |
| "grad_norm": 0.4547133147716522, |
| "learning_rate": 0.0001, |
| "loss": 1.5456, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.48057602143335565, |
| "grad_norm": 0.4591653048992157, |
| "learning_rate": 0.0001, |
| "loss": 1.641, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.48091091761553917, |
| "grad_norm": 0.4386260211467743, |
| "learning_rate": 0.0001, |
| "loss": 1.391, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.4812458137977227, |
| "grad_norm": 0.48630258440971375, |
| "learning_rate": 0.0001, |
| "loss": 1.5811, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.4815807099799062, |
| "grad_norm": 0.46854186058044434, |
| "learning_rate": 0.0001, |
| "loss": 1.6036, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.4819156061620897, |
| "grad_norm": 0.450717955827713, |
| "learning_rate": 0.0001, |
| "loss": 1.4546, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.4822505023442733, |
| "grad_norm": 0.454939067363739, |
| "learning_rate": 0.0001, |
| "loss": 1.5829, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.4825853985264568, |
| "grad_norm": 0.45046836137771606, |
| "learning_rate": 0.0001, |
| "loss": 1.4901, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.48292029470864034, |
| "grad_norm": 0.4793626666069031, |
| "learning_rate": 0.0001, |
| "loss": 1.5315, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.48325519089082386, |
| "grad_norm": 0.4342884421348572, |
| "learning_rate": 0.0001, |
| "loss": 1.4576, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.4835900870730074, |
| "grad_norm": 0.47625261545181274, |
| "learning_rate": 0.0001, |
| "loss": 1.4951, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.4839249832551909, |
| "grad_norm": 0.44386520981788635, |
| "learning_rate": 0.0001, |
| "loss": 1.5058, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.4842598794373744, |
| "grad_norm": 0.4521249532699585, |
| "learning_rate": 0.0001, |
| "loss": 1.5013, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.48459477561955794, |
| "grad_norm": 0.46549564599990845, |
| "learning_rate": 0.0001, |
| "loss": 1.499, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.48492967180174146, |
| "grad_norm": 0.4664047956466675, |
| "learning_rate": 0.0001, |
| "loss": 1.4905, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.485264567983925, |
| "grad_norm": 0.45398756861686707, |
| "learning_rate": 0.0001, |
| "loss": 1.5065, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.4855994641661085, |
| "grad_norm": 0.46122342348098755, |
| "learning_rate": 0.0001, |
| "loss": 1.4866, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.485934360348292, |
| "grad_norm": 0.43852072954177856, |
| "learning_rate": 0.0001, |
| "loss": 1.4706, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.48626925653047554, |
| "grad_norm": 0.4522353410720825, |
| "learning_rate": 0.0001, |
| "loss": 1.5468, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.48660415271265905, |
| "grad_norm": 0.455821692943573, |
| "learning_rate": 0.0001, |
| "loss": 1.61, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.4869390488948426, |
| "grad_norm": 0.4561960697174072, |
| "learning_rate": 0.0001, |
| "loss": 1.5701, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.48727394507702615, |
| "grad_norm": 0.45001938939094543, |
| "learning_rate": 0.0001, |
| "loss": 1.4908, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.48760884125920967, |
| "grad_norm": 0.4559260904788971, |
| "learning_rate": 0.0001, |
| "loss": 1.578, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.4879437374413932, |
| "grad_norm": 0.4333287477493286, |
| "learning_rate": 0.0001, |
| "loss": 1.4492, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.4882786336235767, |
| "grad_norm": 0.4796982407569885, |
| "learning_rate": 0.0001, |
| "loss": 1.6042, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.4886135298057602, |
| "grad_norm": 0.43639075756073, |
| "learning_rate": 0.0001, |
| "loss": 1.4885, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.48894842598794375, |
| "grad_norm": 0.4537317454814911, |
| "learning_rate": 0.0001, |
| "loss": 1.5781, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.48928332217012727, |
| "grad_norm": 0.4368795156478882, |
| "learning_rate": 0.0001, |
| "loss": 1.4805, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.4896182183523108, |
| "grad_norm": 0.43467339873313904, |
| "learning_rate": 0.0001, |
| "loss": 1.4288, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.4899531145344943, |
| "grad_norm": 0.49252694845199585, |
| "learning_rate": 0.0001, |
| "loss": 1.5542, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.4902880107166778, |
| "grad_norm": 0.4549628496170044, |
| "learning_rate": 0.0001, |
| "loss": 1.5405, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.49062290689886134, |
| "grad_norm": 0.4875522255897522, |
| "learning_rate": 0.0001, |
| "loss": 1.6199, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.49095780308104486, |
| "grad_norm": 0.4471842348575592, |
| "learning_rate": 0.0001, |
| "loss": 1.5369, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.4912926992632284, |
| "grad_norm": 0.45362889766693115, |
| "learning_rate": 0.0001, |
| "loss": 1.4544, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.4916275954454119, |
| "grad_norm": 0.4525300860404968, |
| "learning_rate": 0.0001, |
| "loss": 1.4715, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.4919624916275954, |
| "grad_norm": 0.46428170800209045, |
| "learning_rate": 0.0001, |
| "loss": 1.5975, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.49229738780977894, |
| "grad_norm": 0.4399421811103821, |
| "learning_rate": 0.0001, |
| "loss": 1.4805, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.4926322839919625, |
| "grad_norm": 0.4486546516418457, |
| "learning_rate": 0.0001, |
| "loss": 1.4835, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.49296718017414604, |
| "grad_norm": 0.4494679868221283, |
| "learning_rate": 0.0001, |
| "loss": 1.507, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.49330207635632956, |
| "grad_norm": 0.49521908164024353, |
| "learning_rate": 0.0001, |
| "loss": 1.6279, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.4936369725385131, |
| "grad_norm": 0.4433446526527405, |
| "learning_rate": 0.0001, |
| "loss": 1.5223, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.4939718687206966, |
| "grad_norm": 0.47312355041503906, |
| "learning_rate": 0.0001, |
| "loss": 1.5241, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.4943067649028801, |
| "grad_norm": 0.42530277371406555, |
| "learning_rate": 0.0001, |
| "loss": 1.4212, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.49464166108506363, |
| "grad_norm": 0.4690539240837097, |
| "learning_rate": 0.0001, |
| "loss": 1.4736, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.49497655726724715, |
| "grad_norm": 0.44447723031044006, |
| "learning_rate": 0.0001, |
| "loss": 1.5079, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.49531145344943067, |
| "grad_norm": 0.4463139772415161, |
| "learning_rate": 0.0001, |
| "loss": 1.4826, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.4956463496316142, |
| "grad_norm": 0.45554107427597046, |
| "learning_rate": 0.0001, |
| "loss": 1.5286, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.4959812458137977, |
| "grad_norm": 0.45664557814598083, |
| "learning_rate": 0.0001, |
| "loss": 1.5602, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.49631614199598123, |
| "grad_norm": 0.46041151881217957, |
| "learning_rate": 0.0001, |
| "loss": 1.5732, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.49665103817816475, |
| "grad_norm": 0.46366506814956665, |
| "learning_rate": 0.0001, |
| "loss": 1.497, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.49698593436034827, |
| "grad_norm": 0.46002379059791565, |
| "learning_rate": 0.0001, |
| "loss": 1.5219, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.4973208305425318, |
| "grad_norm": 0.4685385525226593, |
| "learning_rate": 0.0001, |
| "loss": 1.5126, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.49765572672471536, |
| "grad_norm": 0.4530273973941803, |
| "learning_rate": 0.0001, |
| "loss": 1.5677, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.4979906229068989, |
| "grad_norm": 0.450147807598114, |
| "learning_rate": 0.0001, |
| "loss": 1.5109, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.4983255190890824, |
| "grad_norm": 0.4494372010231018, |
| "learning_rate": 0.0001, |
| "loss": 1.5489, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.4986604152712659, |
| "grad_norm": 0.43944913148880005, |
| "learning_rate": 0.0001, |
| "loss": 1.5326, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.49899531145344944, |
| "grad_norm": 0.4616487920284271, |
| "learning_rate": 0.0001, |
| "loss": 1.4999, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.49933020763563296, |
| "grad_norm": 0.4538317918777466, |
| "learning_rate": 0.0001, |
| "loss": 1.4909, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.4996651038178165, |
| "grad_norm": 0.45227381587028503, |
| "learning_rate": 0.0001, |
| "loss": 1.4275, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.44475820660591125, |
| "learning_rate": 0.0001, |
| "loss": 1.4533, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.5003348961821835, |
| "grad_norm": 0.46907156705856323, |
| "learning_rate": 0.0001, |
| "loss": 1.5041, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.500669792364367, |
| "grad_norm": 0.4697141647338867, |
| "learning_rate": 0.0001, |
| "loss": 1.5366, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.5010046885465506, |
| "grad_norm": 0.4554111361503601, |
| "learning_rate": 0.0001, |
| "loss": 1.5005, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.5013395847287341, |
| "grad_norm": 0.44460198283195496, |
| "learning_rate": 0.0001, |
| "loss": 1.5039, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.5016744809109176, |
| "grad_norm": 0.46140316128730774, |
| "learning_rate": 0.0001, |
| "loss": 1.5757, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.5020093770931011, |
| "grad_norm": 0.4617026746273041, |
| "learning_rate": 0.0001, |
| "loss": 1.5548, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.5023442732752846, |
| "grad_norm": 0.4561994671821594, |
| "learning_rate": 0.0001, |
| "loss": 1.4872, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.5026791694574682, |
| "grad_norm": 0.45759817957878113, |
| "learning_rate": 0.0001, |
| "loss": 1.5807, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.5030140656396517, |
| "grad_norm": 0.45219355821609497, |
| "learning_rate": 0.0001, |
| "loss": 1.4733, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.5033489618218352, |
| "grad_norm": 0.4509015679359436, |
| "learning_rate": 0.0001, |
| "loss": 1.5254, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.5036838580040187, |
| "grad_norm": 0.4564638137817383, |
| "learning_rate": 0.0001, |
| "loss": 1.5299, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.5040187541862022, |
| "grad_norm": 0.45715585350990295, |
| "learning_rate": 0.0001, |
| "loss": 1.5694, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.5043536503683858, |
| "grad_norm": 0.45385318994522095, |
| "learning_rate": 0.0001, |
| "loss": 1.4992, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.5046885465505693, |
| "grad_norm": 0.44898638129234314, |
| "learning_rate": 0.0001, |
| "loss": 1.4665, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.5050234427327528, |
| "grad_norm": 0.46023446321487427, |
| "learning_rate": 0.0001, |
| "loss": 1.4715, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.5053583389149364, |
| "grad_norm": 0.44878068566322327, |
| "learning_rate": 0.0001, |
| "loss": 1.492, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.5056932350971199, |
| "grad_norm": 0.46955686807632446, |
| "learning_rate": 0.0001, |
| "loss": 1.5671, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.5060281312793035, |
| "grad_norm": 0.46982455253601074, |
| "learning_rate": 0.0001, |
| "loss": 1.4894, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.506363027461487, |
| "grad_norm": 0.45919641852378845, |
| "learning_rate": 0.0001, |
| "loss": 1.5646, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.5066979236436705, |
| "grad_norm": 0.47328880429267883, |
| "learning_rate": 0.0001, |
| "loss": 1.6072, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.507032819825854, |
| "grad_norm": 0.4526023268699646, |
| "learning_rate": 0.0001, |
| "loss": 1.501, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.5073677160080375, |
| "grad_norm": 0.46821823716163635, |
| "learning_rate": 0.0001, |
| "loss": 1.5246, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.5077026121902211, |
| "grad_norm": 0.45301198959350586, |
| "learning_rate": 0.0001, |
| "loss": 1.5364, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.5080375083724046, |
| "grad_norm": 0.46978890895843506, |
| "learning_rate": 0.0001, |
| "loss": 1.5137, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.5083724045545881, |
| "grad_norm": 0.4836040735244751, |
| "learning_rate": 0.0001, |
| "loss": 1.5262, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.5087073007367716, |
| "grad_norm": 0.4945816099643707, |
| "learning_rate": 0.0001, |
| "loss": 1.5042, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.5090421969189551, |
| "grad_norm": 0.46678590774536133, |
| "learning_rate": 0.0001, |
| "loss": 1.4932, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.5093770931011387, |
| "grad_norm": 0.4569990038871765, |
| "learning_rate": 0.0001, |
| "loss": 1.5965, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.5097119892833222, |
| "grad_norm": 0.4823521375656128, |
| "learning_rate": 0.0001, |
| "loss": 1.5114, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.5100468854655057, |
| "grad_norm": 0.44416478276252747, |
| "learning_rate": 0.0001, |
| "loss": 1.4804, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.5103817816476892, |
| "grad_norm": 0.46118879318237305, |
| "learning_rate": 0.0001, |
| "loss": 1.5135, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.5107166778298727, |
| "grad_norm": 0.4893559515476227, |
| "learning_rate": 0.0001, |
| "loss": 1.6105, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.5110515740120563, |
| "grad_norm": 0.4696842133998871, |
| "learning_rate": 0.0001, |
| "loss": 1.5501, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.5113864701942398, |
| "grad_norm": 0.5141232013702393, |
| "learning_rate": 0.0001, |
| "loss": 1.6102, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.5117213663764233, |
| "grad_norm": 0.47689032554626465, |
| "learning_rate": 0.0001, |
| "loss": 1.6108, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.5120562625586068, |
| "grad_norm": 0.47305381298065186, |
| "learning_rate": 0.0001, |
| "loss": 1.6228, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.5123911587407903, |
| "grad_norm": 0.4634510576725006, |
| "learning_rate": 0.0001, |
| "loss": 1.5599, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.5127260549229739, |
| "grad_norm": 0.466594398021698, |
| "learning_rate": 0.0001, |
| "loss": 1.5349, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.5130609511051574, |
| "grad_norm": 0.47930100560188293, |
| "learning_rate": 0.0001, |
| "loss": 1.5236, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.5133958472873409, |
| "grad_norm": 0.44495946168899536, |
| "learning_rate": 0.0001, |
| "loss": 1.5767, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.5137307434695244, |
| "grad_norm": 0.4560399651527405, |
| "learning_rate": 0.0001, |
| "loss": 1.512, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.5140656396517079, |
| "grad_norm": 0.47690311074256897, |
| "learning_rate": 0.0001, |
| "loss": 1.5891, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.5144005358338914, |
| "grad_norm": 0.4491158425807953, |
| "learning_rate": 0.0001, |
| "loss": 1.5635, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.514735432016075, |
| "grad_norm": 0.4559384882450104, |
| "learning_rate": 0.0001, |
| "loss": 1.6068, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.5150703281982585, |
| "grad_norm": 0.4365287721157074, |
| "learning_rate": 0.0001, |
| "loss": 1.4712, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.5154052243804421, |
| "grad_norm": 0.48256635665893555, |
| "learning_rate": 0.0001, |
| "loss": 1.6654, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.5157401205626256, |
| "grad_norm": 0.4454759955406189, |
| "learning_rate": 0.0001, |
| "loss": 1.5364, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.5160750167448092, |
| "grad_norm": 0.46772095561027527, |
| "learning_rate": 0.0001, |
| "loss": 1.442, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.5164099129269927, |
| "grad_norm": 0.4505743682384491, |
| "learning_rate": 0.0001, |
| "loss": 1.452, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.5167448091091762, |
| "grad_norm": 0.516400158405304, |
| "learning_rate": 0.0001, |
| "loss": 1.5716, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.5170797052913597, |
| "grad_norm": 0.48185044527053833, |
| "learning_rate": 0.0001, |
| "loss": 1.6428, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.5174146014735432, |
| "grad_norm": 0.5100380778312683, |
| "learning_rate": 0.0001, |
| "loss": 1.6228, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.5177494976557268, |
| "grad_norm": 0.4634498655796051, |
| "learning_rate": 0.0001, |
| "loss": 1.5288, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.5180843938379103, |
| "grad_norm": 0.466386616230011, |
| "learning_rate": 0.0001, |
| "loss": 1.6542, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.5184192900200938, |
| "grad_norm": 0.483235627412796, |
| "learning_rate": 0.0001, |
| "loss": 1.6404, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.5187541862022773, |
| "grad_norm": 0.4495801627635956, |
| "learning_rate": 0.0001, |
| "loss": 1.4987, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.5190890823844608, |
| "grad_norm": 0.4899047017097473, |
| "learning_rate": 0.0001, |
| "loss": 1.5573, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.5194239785666444, |
| "grad_norm": 0.43485280871391296, |
| "learning_rate": 0.0001, |
| "loss": 1.5082, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.5197588747488279, |
| "grad_norm": 0.47759371995925903, |
| "learning_rate": 0.0001, |
| "loss": 1.5424, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.5200937709310114, |
| "grad_norm": 0.44978150725364685, |
| "learning_rate": 0.0001, |
| "loss": 1.5328, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.5204286671131949, |
| "grad_norm": 0.46513211727142334, |
| "learning_rate": 0.0001, |
| "loss": 1.6004, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.5207635632953784, |
| "grad_norm": 0.45478302240371704, |
| "learning_rate": 0.0001, |
| "loss": 1.5036, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.521098459477562, |
| "grad_norm": 0.46547824144363403, |
| "learning_rate": 0.0001, |
| "loss": 1.5354, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.5214333556597455, |
| "grad_norm": 0.44873952865600586, |
| "learning_rate": 0.0001, |
| "loss": 1.5017, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.521768251841929, |
| "grad_norm": 0.430286705493927, |
| "learning_rate": 0.0001, |
| "loss": 1.4757, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.5221031480241125, |
| "grad_norm": 0.44566696882247925, |
| "learning_rate": 0.0001, |
| "loss": 1.5695, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.522438044206296, |
| "grad_norm": 0.4625225067138672, |
| "learning_rate": 0.0001, |
| "loss": 1.6006, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.5227729403884795, |
| "grad_norm": 0.45360034704208374, |
| "learning_rate": 0.0001, |
| "loss": 1.5741, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.5231078365706631, |
| "grad_norm": 0.4490211009979248, |
| "learning_rate": 0.0001, |
| "loss": 1.5536, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.5234427327528466, |
| "grad_norm": 0.4657514691352844, |
| "learning_rate": 0.0001, |
| "loss": 1.4871, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.5237776289350301, |
| "grad_norm": 0.44376838207244873, |
| "learning_rate": 0.0001, |
| "loss": 1.5381, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.5241125251172136, |
| "grad_norm": 0.47033995389938354, |
| "learning_rate": 0.0001, |
| "loss": 1.619, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.5244474212993971, |
| "grad_norm": 0.4549630582332611, |
| "learning_rate": 0.0001, |
| "loss": 1.5554, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.5247823174815807, |
| "grad_norm": 0.446274071931839, |
| "learning_rate": 0.0001, |
| "loss": 1.4818, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.5251172136637642, |
| "grad_norm": 0.45129215717315674, |
| "learning_rate": 0.0001, |
| "loss": 1.5083, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.5254521098459477, |
| "grad_norm": 0.4612513780593872, |
| "learning_rate": 0.0001, |
| "loss": 1.5301, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.5257870060281313, |
| "grad_norm": 0.4741304814815521, |
| "learning_rate": 0.0001, |
| "loss": 1.5815, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.5261219022103149, |
| "grad_norm": 0.45285868644714355, |
| "learning_rate": 0.0001, |
| "loss": 1.5875, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.5264567983924984, |
| "grad_norm": 0.4500559866428375, |
| "learning_rate": 0.0001, |
| "loss": 1.5457, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.5267916945746819, |
| "grad_norm": 0.45833030343055725, |
| "learning_rate": 0.0001, |
| "loss": 1.6693, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.5271265907568654, |
| "grad_norm": 0.4745745062828064, |
| "learning_rate": 0.0001, |
| "loss": 1.5703, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.5274614869390489, |
| "grad_norm": 0.44691261649131775, |
| "learning_rate": 0.0001, |
| "loss": 1.5275, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.5277963831212324, |
| "grad_norm": 0.473972350358963, |
| "learning_rate": 0.0001, |
| "loss": 1.5378, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.528131279303416, |
| "grad_norm": 0.4648163318634033, |
| "learning_rate": 0.0001, |
| "loss": 1.5336, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.5284661754855995, |
| "grad_norm": 0.4657084047794342, |
| "learning_rate": 0.0001, |
| "loss": 1.5296, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.528801071667783, |
| "grad_norm": 0.43905413150787354, |
| "learning_rate": 0.0001, |
| "loss": 1.46, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.5291359678499665, |
| "grad_norm": 0.4439171850681305, |
| "learning_rate": 0.0001, |
| "loss": 1.4725, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.52947086403215, |
| "grad_norm": 0.47205954790115356, |
| "learning_rate": 0.0001, |
| "loss": 1.6402, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.5298057602143336, |
| "grad_norm": 0.4546120762825012, |
| "learning_rate": 0.0001, |
| "loss": 1.5361, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.5301406563965171, |
| "grad_norm": 0.44714152812957764, |
| "learning_rate": 0.0001, |
| "loss": 1.5889, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.5304755525787006, |
| "grad_norm": 0.4529459476470947, |
| "learning_rate": 0.0001, |
| "loss": 1.5373, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.5308104487608841, |
| "grad_norm": 0.45140767097473145, |
| "learning_rate": 0.0001, |
| "loss": 1.5747, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.5311453449430676, |
| "grad_norm": 0.4440329968929291, |
| "learning_rate": 0.0001, |
| "loss": 1.5368, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.5314802411252512, |
| "grad_norm": 0.45267820358276367, |
| "learning_rate": 0.0001, |
| "loss": 1.5345, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.5318151373074347, |
| "grad_norm": 0.45014524459838867, |
| "learning_rate": 0.0001, |
| "loss": 1.5113, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.5321500334896182, |
| "grad_norm": 0.46034717559814453, |
| "learning_rate": 0.0001, |
| "loss": 1.5748, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.5324849296718017, |
| "grad_norm": 0.4627227187156677, |
| "learning_rate": 0.0001, |
| "loss": 1.5218, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.5328198258539852, |
| "grad_norm": 0.45107871294021606, |
| "learning_rate": 0.0001, |
| "loss": 1.5449, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.5331547220361688, |
| "grad_norm": 0.45086443424224854, |
| "learning_rate": 0.0001, |
| "loss": 1.5803, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.5334896182183523, |
| "grad_norm": 0.4722006916999817, |
| "learning_rate": 0.0001, |
| "loss": 1.5947, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.5338245144005358, |
| "grad_norm": 0.44285401701927185, |
| "learning_rate": 0.0001, |
| "loss": 1.4987, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.5341594105827193, |
| "grad_norm": 0.4450331926345825, |
| "learning_rate": 0.0001, |
| "loss": 1.4929, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.5344943067649028, |
| "grad_norm": 0.4595484733581543, |
| "learning_rate": 0.0001, |
| "loss": 1.6151, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.5348292029470864, |
| "grad_norm": 0.44541820883750916, |
| "learning_rate": 0.0001, |
| "loss": 1.5107, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.5351640991292699, |
| "grad_norm": 0.4498431980609894, |
| "learning_rate": 0.0001, |
| "loss": 1.4339, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.5354989953114534, |
| "grad_norm": 0.47547462582588196, |
| "learning_rate": 0.0001, |
| "loss": 1.5636, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.535833891493637, |
| "grad_norm": 0.4520166516304016, |
| "learning_rate": 0.0001, |
| "loss": 1.515, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.5361687876758205, |
| "grad_norm": 0.47776710987091064, |
| "learning_rate": 0.0001, |
| "loss": 1.5845, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.5365036838580041, |
| "grad_norm": 0.4761412441730499, |
| "learning_rate": 0.0001, |
| "loss": 1.5393, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.5368385800401876, |
| "grad_norm": 0.45610764622688293, |
| "learning_rate": 0.0001, |
| "loss": 1.4664, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.5371734762223711, |
| "grad_norm": 0.4365233778953552, |
| "learning_rate": 0.0001, |
| "loss": 1.5039, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.5375083724045546, |
| "grad_norm": 0.4999910891056061, |
| "learning_rate": 0.0001, |
| "loss": 1.6004, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.5378432685867381, |
| "grad_norm": 0.44889217615127563, |
| "learning_rate": 0.0001, |
| "loss": 1.4564, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.5381781647689217, |
| "grad_norm": 0.44685789942741394, |
| "learning_rate": 0.0001, |
| "loss": 1.5223, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.5385130609511052, |
| "grad_norm": 0.4566046893596649, |
| "learning_rate": 0.0001, |
| "loss": 1.5094, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.5388479571332887, |
| "grad_norm": 0.4515156149864197, |
| "learning_rate": 0.0001, |
| "loss": 1.4626, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.5391828533154722, |
| "grad_norm": 0.4659847021102905, |
| "learning_rate": 0.0001, |
| "loss": 1.4892, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.5395177494976557, |
| "grad_norm": 0.46315014362335205, |
| "learning_rate": 0.0001, |
| "loss": 1.6071, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.5398526456798393, |
| "grad_norm": 0.4561232924461365, |
| "learning_rate": 0.0001, |
| "loss": 1.4692, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.5401875418620228, |
| "grad_norm": 0.4539635181427002, |
| "learning_rate": 0.0001, |
| "loss": 1.5656, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.5405224380442063, |
| "grad_norm": 0.48085281252861023, |
| "learning_rate": 0.0001, |
| "loss": 1.6298, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.5408573342263898, |
| "grad_norm": 0.44261398911476135, |
| "learning_rate": 0.0001, |
| "loss": 1.4697, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.5411922304085733, |
| "grad_norm": 0.46685075759887695, |
| "learning_rate": 0.0001, |
| "loss": 1.5277, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.5415271265907569, |
| "grad_norm": 0.46934112906455994, |
| "learning_rate": 0.0001, |
| "loss": 1.5227, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.5418620227729404, |
| "grad_norm": 0.4731907844543457, |
| "learning_rate": 0.0001, |
| "loss": 1.6273, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.5421969189551239, |
| "grad_norm": 0.4367448687553406, |
| "learning_rate": 0.0001, |
| "loss": 1.5548, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.5425318151373074, |
| "grad_norm": 0.47099941968917847, |
| "learning_rate": 0.0001, |
| "loss": 1.617, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.5428667113194909, |
| "grad_norm": 0.46735990047454834, |
| "learning_rate": 0.0001, |
| "loss": 1.5671, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.5432016075016745, |
| "grad_norm": 0.45181360840797424, |
| "learning_rate": 0.0001, |
| "loss": 1.5791, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.543536503683858, |
| "grad_norm": 0.464851975440979, |
| "learning_rate": 0.0001, |
| "loss": 1.4774, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.5438713998660415, |
| "grad_norm": 0.4605390429496765, |
| "learning_rate": 0.0001, |
| "loss": 1.5034, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.544206296048225, |
| "grad_norm": 0.46630990505218506, |
| "learning_rate": 0.0001, |
| "loss": 1.5039, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.5445411922304085, |
| "grad_norm": 0.4575473964214325, |
| "learning_rate": 0.0001, |
| "loss": 1.4877, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.544876088412592, |
| "grad_norm": 0.46546870470046997, |
| "learning_rate": 0.0001, |
| "loss": 1.5551, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.5452109845947756, |
| "grad_norm": 0.464974969625473, |
| "learning_rate": 0.0001, |
| "loss": 1.6319, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.5455458807769591, |
| "grad_norm": 0.4432606101036072, |
| "learning_rate": 0.0001, |
| "loss": 1.552, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.5458807769591426, |
| "grad_norm": 0.43254828453063965, |
| "learning_rate": 0.0001, |
| "loss": 1.4958, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.5462156731413262, |
| "grad_norm": 0.43968817591667175, |
| "learning_rate": 0.0001, |
| "loss": 1.5392, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.5465505693235098, |
| "grad_norm": 0.4545215964317322, |
| "learning_rate": 0.0001, |
| "loss": 1.4829, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.5468854655056933, |
| "grad_norm": 0.4330245852470398, |
| "learning_rate": 0.0001, |
| "loss": 1.4775, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.5472203616878768, |
| "grad_norm": 0.45169565081596375, |
| "learning_rate": 0.0001, |
| "loss": 1.5474, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.5475552578700603, |
| "grad_norm": 0.472061425447464, |
| "learning_rate": 0.0001, |
| "loss": 1.4483, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.5478901540522438, |
| "grad_norm": 0.4540085792541504, |
| "learning_rate": 0.0001, |
| "loss": 1.5259, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.5482250502344274, |
| "grad_norm": 0.4446408152580261, |
| "learning_rate": 0.0001, |
| "loss": 1.4931, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.5485599464166109, |
| "grad_norm": 0.4567773640155792, |
| "learning_rate": 0.0001, |
| "loss": 1.5267, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.5488948425987944, |
| "grad_norm": 0.447899729013443, |
| "learning_rate": 0.0001, |
| "loss": 1.5575, |
| "step": 1639 |
| }, |
| { |
| "epoch": 0.5492297387809779, |
| "grad_norm": 0.432354599237442, |
| "learning_rate": 0.0001, |
| "loss": 1.4557, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.5495646349631614, |
| "grad_norm": 0.4684544503688812, |
| "learning_rate": 0.0001, |
| "loss": 1.5363, |
| "step": 1641 |
| }, |
| { |
| "epoch": 0.549899531145345, |
| "grad_norm": 0.4516255855560303, |
| "learning_rate": 0.0001, |
| "loss": 1.6221, |
| "step": 1642 |
| }, |
| { |
| "epoch": 0.5502344273275285, |
| "grad_norm": 0.4939975142478943, |
| "learning_rate": 0.0001, |
| "loss": 1.5669, |
| "step": 1643 |
| }, |
| { |
| "epoch": 0.550569323509712, |
| "grad_norm": 0.4381793141365051, |
| "learning_rate": 0.0001, |
| "loss": 1.4645, |
| "step": 1644 |
| }, |
| { |
| "epoch": 0.5509042196918955, |
| "grad_norm": 0.4454663097858429, |
| "learning_rate": 0.0001, |
| "loss": 1.5221, |
| "step": 1645 |
| }, |
| { |
| "epoch": 0.551239115874079, |
| "grad_norm": 0.46727123856544495, |
| "learning_rate": 0.0001, |
| "loss": 1.5181, |
| "step": 1646 |
| }, |
| { |
| "epoch": 0.5515740120562626, |
| "grad_norm": 0.4579801857471466, |
| "learning_rate": 0.0001, |
| "loss": 1.5446, |
| "step": 1647 |
| }, |
| { |
| "epoch": 0.5519089082384461, |
| "grad_norm": 0.4584062099456787, |
| "learning_rate": 0.0001, |
| "loss": 1.5231, |
| "step": 1648 |
| }, |
| { |
| "epoch": 0.5522438044206296, |
| "grad_norm": 0.4621238112449646, |
| "learning_rate": 0.0001, |
| "loss": 1.486, |
| "step": 1649 |
| }, |
| { |
| "epoch": 0.5525787006028131, |
| "grad_norm": 0.4448164403438568, |
| "learning_rate": 0.0001, |
| "loss": 1.5596, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.5529135967849966, |
| "grad_norm": 0.4601452946662903, |
| "learning_rate": 0.0001, |
| "loss": 1.5746, |
| "step": 1651 |
| }, |
| { |
| "epoch": 0.5532484929671801, |
| "grad_norm": 0.49113860726356506, |
| "learning_rate": 0.0001, |
| "loss": 1.5183, |
| "step": 1652 |
| }, |
| { |
| "epoch": 0.5535833891493637, |
| "grad_norm": 0.46025919914245605, |
| "learning_rate": 0.0001, |
| "loss": 1.4989, |
| "step": 1653 |
| }, |
| { |
| "epoch": 0.5539182853315472, |
| "grad_norm": 0.46592333912849426, |
| "learning_rate": 0.0001, |
| "loss": 1.5384, |
| "step": 1654 |
| }, |
| { |
| "epoch": 0.5542531815137307, |
| "grad_norm": 0.44121989607810974, |
| "learning_rate": 0.0001, |
| "loss": 1.5274, |
| "step": 1655 |
| }, |
| { |
| "epoch": 0.5545880776959142, |
| "grad_norm": 0.454671710729599, |
| "learning_rate": 0.0001, |
| "loss": 1.5095, |
| "step": 1656 |
| }, |
| { |
| "epoch": 0.5549229738780977, |
| "grad_norm": 0.46223875880241394, |
| "learning_rate": 0.0001, |
| "loss": 1.4922, |
| "step": 1657 |
| }, |
| { |
| "epoch": 0.5552578700602813, |
| "grad_norm": 0.4409008324146271, |
| "learning_rate": 0.0001, |
| "loss": 1.5563, |
| "step": 1658 |
| }, |
| { |
| "epoch": 0.5555927662424648, |
| "grad_norm": 0.4422995150089264, |
| "learning_rate": 0.0001, |
| "loss": 1.4886, |
| "step": 1659 |
| }, |
| { |
| "epoch": 0.5559276624246483, |
| "grad_norm": 0.4750559329986572, |
| "learning_rate": 0.0001, |
| "loss": 1.5231, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.5562625586068319, |
| "grad_norm": 0.46889910101890564, |
| "learning_rate": 0.0001, |
| "loss": 1.5685, |
| "step": 1661 |
| }, |
| { |
| "epoch": 0.5565974547890155, |
| "grad_norm": 0.46044906973838806, |
| "learning_rate": 0.0001, |
| "loss": 1.5453, |
| "step": 1662 |
| }, |
| { |
| "epoch": 0.556932350971199, |
| "grad_norm": 0.4508223533630371, |
| "learning_rate": 0.0001, |
| "loss": 1.4371, |
| "step": 1663 |
| }, |
| { |
| "epoch": 0.5572672471533825, |
| "grad_norm": 0.4626295864582062, |
| "learning_rate": 0.0001, |
| "loss": 1.4826, |
| "step": 1664 |
| }, |
| { |
| "epoch": 0.557602143335566, |
| "grad_norm": 0.46173542737960815, |
| "learning_rate": 0.0001, |
| "loss": 1.5468, |
| "step": 1665 |
| }, |
| { |
| "epoch": 0.5579370395177495, |
| "grad_norm": 0.45359376072883606, |
| "learning_rate": 0.0001, |
| "loss": 1.5793, |
| "step": 1666 |
| }, |
| { |
| "epoch": 0.558271935699933, |
| "grad_norm": 0.4820610582828522, |
| "learning_rate": 0.0001, |
| "loss": 1.5675, |
| "step": 1667 |
| }, |
| { |
| "epoch": 0.5586068318821166, |
| "grad_norm": 0.44977930188179016, |
| "learning_rate": 0.0001, |
| "loss": 1.452, |
| "step": 1668 |
| }, |
| { |
| "epoch": 0.5589417280643001, |
| "grad_norm": 0.4835660755634308, |
| "learning_rate": 0.0001, |
| "loss": 1.583, |
| "step": 1669 |
| }, |
| { |
| "epoch": 0.5592766242464836, |
| "grad_norm": 0.4514458179473877, |
| "learning_rate": 0.0001, |
| "loss": 1.5142, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.5596115204286671, |
| "grad_norm": 0.48170676827430725, |
| "learning_rate": 0.0001, |
| "loss": 1.6334, |
| "step": 1671 |
| }, |
| { |
| "epoch": 0.5599464166108507, |
| "grad_norm": 0.44558587670326233, |
| "learning_rate": 0.0001, |
| "loss": 1.5629, |
| "step": 1672 |
| }, |
| { |
| "epoch": 0.5602813127930342, |
| "grad_norm": 0.476109117269516, |
| "learning_rate": 0.0001, |
| "loss": 1.5928, |
| "step": 1673 |
| }, |
| { |
| "epoch": 0.5606162089752177, |
| "grad_norm": 0.4857975244522095, |
| "learning_rate": 0.0001, |
| "loss": 1.5988, |
| "step": 1674 |
| }, |
| { |
| "epoch": 0.5609511051574012, |
| "grad_norm": 0.49178189039230347, |
| "learning_rate": 0.0001, |
| "loss": 1.6398, |
| "step": 1675 |
| }, |
| { |
| "epoch": 0.5612860013395847, |
| "grad_norm": 0.45551440119743347, |
| "learning_rate": 0.0001, |
| "loss": 1.473, |
| "step": 1676 |
| }, |
| { |
| "epoch": 0.5616208975217682, |
| "grad_norm": 0.4543136954307556, |
| "learning_rate": 0.0001, |
| "loss": 1.4731, |
| "step": 1677 |
| }, |
| { |
| "epoch": 0.5619557937039518, |
| "grad_norm": 0.44175422191619873, |
| "learning_rate": 0.0001, |
| "loss": 1.5275, |
| "step": 1678 |
| }, |
| { |
| "epoch": 0.5622906898861353, |
| "grad_norm": 0.45219117403030396, |
| "learning_rate": 0.0001, |
| "loss": 1.4795, |
| "step": 1679 |
| }, |
| { |
| "epoch": 0.5626255860683188, |
| "grad_norm": 0.47150030732154846, |
| "learning_rate": 0.0001, |
| "loss": 1.5716, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.5629604822505023, |
| "grad_norm": 0.47483593225479126, |
| "learning_rate": 0.0001, |
| "loss": 1.6408, |
| "step": 1681 |
| }, |
| { |
| "epoch": 0.5632953784326858, |
| "grad_norm": 0.44375649094581604, |
| "learning_rate": 0.0001, |
| "loss": 1.4819, |
| "step": 1682 |
| }, |
| { |
| "epoch": 0.5636302746148694, |
| "grad_norm": 0.4467259347438812, |
| "learning_rate": 0.0001, |
| "loss": 1.446, |
| "step": 1683 |
| }, |
| { |
| "epoch": 0.5639651707970529, |
| "grad_norm": 0.4565868079662323, |
| "learning_rate": 0.0001, |
| "loss": 1.4954, |
| "step": 1684 |
| }, |
| { |
| "epoch": 0.5643000669792364, |
| "grad_norm": 0.4438111484050751, |
| "learning_rate": 0.0001, |
| "loss": 1.4772, |
| "step": 1685 |
| }, |
| { |
| "epoch": 0.5646349631614199, |
| "grad_norm": 0.48216649889945984, |
| "learning_rate": 0.0001, |
| "loss": 1.5377, |
| "step": 1686 |
| }, |
| { |
| "epoch": 0.5649698593436034, |
| "grad_norm": 0.44630300998687744, |
| "learning_rate": 0.0001, |
| "loss": 1.4529, |
| "step": 1687 |
| }, |
| { |
| "epoch": 0.565304755525787, |
| "grad_norm": 0.44514498114585876, |
| "learning_rate": 0.0001, |
| "loss": 1.466, |
| "step": 1688 |
| }, |
| { |
| "epoch": 0.5656396517079705, |
| "grad_norm": 0.44218650460243225, |
| "learning_rate": 0.0001, |
| "loss": 1.5396, |
| "step": 1689 |
| }, |
| { |
| "epoch": 0.565974547890154, |
| "grad_norm": 0.45768415927886963, |
| "learning_rate": 0.0001, |
| "loss": 1.5937, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.5663094440723375, |
| "grad_norm": 0.4722628891468048, |
| "learning_rate": 0.0001, |
| "loss": 1.577, |
| "step": 1691 |
| }, |
| { |
| "epoch": 0.5666443402545212, |
| "grad_norm": 0.45134085416793823, |
| "learning_rate": 0.0001, |
| "loss": 1.5307, |
| "step": 1692 |
| }, |
| { |
| "epoch": 0.5669792364367047, |
| "grad_norm": 0.48546385765075684, |
| "learning_rate": 0.0001, |
| "loss": 1.5803, |
| "step": 1693 |
| }, |
| { |
| "epoch": 0.5673141326188882, |
| "grad_norm": 0.4477085769176483, |
| "learning_rate": 0.0001, |
| "loss": 1.5038, |
| "step": 1694 |
| }, |
| { |
| "epoch": 0.5676490288010717, |
| "grad_norm": 0.45485320687294006, |
| "learning_rate": 0.0001, |
| "loss": 1.5179, |
| "step": 1695 |
| }, |
| { |
| "epoch": 0.5679839249832552, |
| "grad_norm": 0.4585912227630615, |
| "learning_rate": 0.0001, |
| "loss": 1.4936, |
| "step": 1696 |
| }, |
| { |
| "epoch": 0.5683188211654387, |
| "grad_norm": 0.453314870595932, |
| "learning_rate": 0.0001, |
| "loss": 1.5132, |
| "step": 1697 |
| }, |
| { |
| "epoch": 0.5686537173476223, |
| "grad_norm": 0.4477619528770447, |
| "learning_rate": 0.0001, |
| "loss": 1.4687, |
| "step": 1698 |
| }, |
| { |
| "epoch": 0.5689886135298058, |
| "grad_norm": 0.4389646351337433, |
| "learning_rate": 0.0001, |
| "loss": 1.5626, |
| "step": 1699 |
| }, |
| { |
| "epoch": 0.5693235097119893, |
| "grad_norm": 0.45276832580566406, |
| "learning_rate": 0.0001, |
| "loss": 1.4914, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.5696584058941728, |
| "grad_norm": 0.46535176038742065, |
| "learning_rate": 0.0001, |
| "loss": 1.5462, |
| "step": 1701 |
| }, |
| { |
| "epoch": 0.5699933020763563, |
| "grad_norm": 0.4585205018520355, |
| "learning_rate": 0.0001, |
| "loss": 1.5687, |
| "step": 1702 |
| }, |
| { |
| "epoch": 0.5703281982585399, |
| "grad_norm": 0.4479345381259918, |
| "learning_rate": 0.0001, |
| "loss": 1.49, |
| "step": 1703 |
| }, |
| { |
| "epoch": 0.5706630944407234, |
| "grad_norm": 0.4654242694377899, |
| "learning_rate": 0.0001, |
| "loss": 1.5405, |
| "step": 1704 |
| }, |
| { |
| "epoch": 0.5709979906229069, |
| "grad_norm": 0.45105135440826416, |
| "learning_rate": 0.0001, |
| "loss": 1.4867, |
| "step": 1705 |
| }, |
| { |
| "epoch": 0.5713328868050904, |
| "grad_norm": 0.47442853450775146, |
| "learning_rate": 0.0001, |
| "loss": 1.5458, |
| "step": 1706 |
| }, |
| { |
| "epoch": 0.5716677829872739, |
| "grad_norm": 0.4526834785938263, |
| "learning_rate": 0.0001, |
| "loss": 1.5531, |
| "step": 1707 |
| }, |
| { |
| "epoch": 0.5720026791694575, |
| "grad_norm": 0.464555561542511, |
| "learning_rate": 0.0001, |
| "loss": 1.5832, |
| "step": 1708 |
| }, |
| { |
| "epoch": 0.572337575351641, |
| "grad_norm": 0.461290180683136, |
| "learning_rate": 0.0001, |
| "loss": 1.5108, |
| "step": 1709 |
| }, |
| { |
| "epoch": 0.5726724715338245, |
| "grad_norm": 0.4585516154766083, |
| "learning_rate": 0.0001, |
| "loss": 1.4864, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.573007367716008, |
| "grad_norm": 0.4530777335166931, |
| "learning_rate": 0.0001, |
| "loss": 1.4983, |
| "step": 1711 |
| }, |
| { |
| "epoch": 0.5733422638981915, |
| "grad_norm": 0.4393008053302765, |
| "learning_rate": 0.0001, |
| "loss": 1.471, |
| "step": 1712 |
| }, |
| { |
| "epoch": 0.5736771600803751, |
| "grad_norm": 0.456870436668396, |
| "learning_rate": 0.0001, |
| "loss": 1.4866, |
| "step": 1713 |
| }, |
| { |
| "epoch": 0.5740120562625586, |
| "grad_norm": 0.4635424017906189, |
| "learning_rate": 0.0001, |
| "loss": 1.4987, |
| "step": 1714 |
| }, |
| { |
| "epoch": 0.5743469524447421, |
| "grad_norm": 0.4464293122291565, |
| "learning_rate": 0.0001, |
| "loss": 1.5718, |
| "step": 1715 |
| }, |
| { |
| "epoch": 0.5746818486269256, |
| "grad_norm": 0.45753639936447144, |
| "learning_rate": 0.0001, |
| "loss": 1.5706, |
| "step": 1716 |
| }, |
| { |
| "epoch": 0.5750167448091091, |
| "grad_norm": 0.44542786478996277, |
| "learning_rate": 0.0001, |
| "loss": 1.585, |
| "step": 1717 |
| }, |
| { |
| "epoch": 0.5753516409912927, |
| "grad_norm": 0.4802192151546478, |
| "learning_rate": 0.0001, |
| "loss": 1.6014, |
| "step": 1718 |
| }, |
| { |
| "epoch": 0.5756865371734762, |
| "grad_norm": 0.4398585259914398, |
| "learning_rate": 0.0001, |
| "loss": 1.4779, |
| "step": 1719 |
| }, |
| { |
| "epoch": 0.5760214333556597, |
| "grad_norm": 0.4635072350502014, |
| "learning_rate": 0.0001, |
| "loss": 1.5682, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.5763563295378432, |
| "grad_norm": 0.45373332500457764, |
| "learning_rate": 0.0001, |
| "loss": 1.5157, |
| "step": 1721 |
| }, |
| { |
| "epoch": 0.5766912257200268, |
| "grad_norm": 0.46268123388290405, |
| "learning_rate": 0.0001, |
| "loss": 1.6094, |
| "step": 1722 |
| }, |
| { |
| "epoch": 0.5770261219022104, |
| "grad_norm": 0.4583353102207184, |
| "learning_rate": 0.0001, |
| "loss": 1.5139, |
| "step": 1723 |
| }, |
| { |
| "epoch": 0.5773610180843939, |
| "grad_norm": 0.4540995955467224, |
| "learning_rate": 0.0001, |
| "loss": 1.5147, |
| "step": 1724 |
| }, |
| { |
| "epoch": 0.5776959142665774, |
| "grad_norm": 0.44238102436065674, |
| "learning_rate": 0.0001, |
| "loss": 1.5002, |
| "step": 1725 |
| }, |
| { |
| "epoch": 0.5780308104487609, |
| "grad_norm": 0.4580625891685486, |
| "learning_rate": 0.0001, |
| "loss": 1.5356, |
| "step": 1726 |
| }, |
| { |
| "epoch": 0.5783657066309444, |
| "grad_norm": 0.4413588047027588, |
| "learning_rate": 0.0001, |
| "loss": 1.5364, |
| "step": 1727 |
| }, |
| { |
| "epoch": 0.578700602813128, |
| "grad_norm": 0.4560210704803467, |
| "learning_rate": 0.0001, |
| "loss": 1.5458, |
| "step": 1728 |
| }, |
| { |
| "epoch": 0.5790354989953115, |
| "grad_norm": 0.4587970972061157, |
| "learning_rate": 0.0001, |
| "loss": 1.5131, |
| "step": 1729 |
| }, |
| { |
| "epoch": 0.579370395177495, |
| "grad_norm": 0.4495079219341278, |
| "learning_rate": 0.0001, |
| "loss": 1.4704, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.5797052913596785, |
| "grad_norm": 0.4411208927631378, |
| "learning_rate": 0.0001, |
| "loss": 1.4685, |
| "step": 1731 |
| }, |
| { |
| "epoch": 0.580040187541862, |
| "grad_norm": 0.4805608093738556, |
| "learning_rate": 0.0001, |
| "loss": 1.5575, |
| "step": 1732 |
| }, |
| { |
| "epoch": 0.5803750837240456, |
| "grad_norm": 0.4375392198562622, |
| "learning_rate": 0.0001, |
| "loss": 1.4592, |
| "step": 1733 |
| }, |
| { |
| "epoch": 0.5807099799062291, |
| "grad_norm": 0.46086931228637695, |
| "learning_rate": 0.0001, |
| "loss": 1.5548, |
| "step": 1734 |
| }, |
| { |
| "epoch": 0.5810448760884126, |
| "grad_norm": 0.4644346535205841, |
| "learning_rate": 0.0001, |
| "loss": 1.499, |
| "step": 1735 |
| }, |
| { |
| "epoch": 0.5813797722705961, |
| "grad_norm": 0.46922892332077026, |
| "learning_rate": 0.0001, |
| "loss": 1.4893, |
| "step": 1736 |
| }, |
| { |
| "epoch": 0.5817146684527796, |
| "grad_norm": 0.496118426322937, |
| "learning_rate": 0.0001, |
| "loss": 1.5636, |
| "step": 1737 |
| }, |
| { |
| "epoch": 0.5820495646349632, |
| "grad_norm": 0.4836883842945099, |
| "learning_rate": 0.0001, |
| "loss": 1.5376, |
| "step": 1738 |
| }, |
| { |
| "epoch": 0.5823844608171467, |
| "grad_norm": 0.4585513472557068, |
| "learning_rate": 0.0001, |
| "loss": 1.4839, |
| "step": 1739 |
| }, |
| { |
| "epoch": 0.5827193569993302, |
| "grad_norm": 0.44239330291748047, |
| "learning_rate": 0.0001, |
| "loss": 1.4958, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.5830542531815137, |
| "grad_norm": 0.4673005938529968, |
| "learning_rate": 0.0001, |
| "loss": 1.564, |
| "step": 1741 |
| }, |
| { |
| "epoch": 0.5833891493636972, |
| "grad_norm": 0.47267746925354004, |
| "learning_rate": 0.0001, |
| "loss": 1.5387, |
| "step": 1742 |
| }, |
| { |
| "epoch": 0.5837240455458808, |
| "grad_norm": 0.4444943964481354, |
| "learning_rate": 0.0001, |
| "loss": 1.5379, |
| "step": 1743 |
| }, |
| { |
| "epoch": 0.5840589417280643, |
| "grad_norm": 0.46939796209335327, |
| "learning_rate": 0.0001, |
| "loss": 1.56, |
| "step": 1744 |
| }, |
| { |
| "epoch": 0.5843938379102478, |
| "grad_norm": 0.4565601646900177, |
| "learning_rate": 0.0001, |
| "loss": 1.6041, |
| "step": 1745 |
| }, |
| { |
| "epoch": 0.5847287340924313, |
| "grad_norm": 0.4543992280960083, |
| "learning_rate": 0.0001, |
| "loss": 1.5446, |
| "step": 1746 |
| }, |
| { |
| "epoch": 0.5850636302746148, |
| "grad_norm": 0.4492827355861664, |
| "learning_rate": 0.0001, |
| "loss": 1.4757, |
| "step": 1747 |
| }, |
| { |
| "epoch": 0.5853985264567984, |
| "grad_norm": 0.4561556279659271, |
| "learning_rate": 0.0001, |
| "loss": 1.5098, |
| "step": 1748 |
| }, |
| { |
| "epoch": 0.5857334226389819, |
| "grad_norm": 0.45513200759887695, |
| "learning_rate": 0.0001, |
| "loss": 1.5315, |
| "step": 1749 |
| }, |
| { |
| "epoch": 0.5860683188211654, |
| "grad_norm": 0.4617581367492676, |
| "learning_rate": 0.0001, |
| "loss": 1.4389, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.5864032150033489, |
| "grad_norm": 0.4772202968597412, |
| "learning_rate": 0.0001, |
| "loss": 1.4551, |
| "step": 1751 |
| }, |
| { |
| "epoch": 0.5867381111855325, |
| "grad_norm": 0.4441646933555603, |
| "learning_rate": 0.0001, |
| "loss": 1.4341, |
| "step": 1752 |
| }, |
| { |
| "epoch": 0.5870730073677161, |
| "grad_norm": 0.4554411470890045, |
| "learning_rate": 0.0001, |
| "loss": 1.5049, |
| "step": 1753 |
| }, |
| { |
| "epoch": 0.5874079035498996, |
| "grad_norm": 0.44894352555274963, |
| "learning_rate": 0.0001, |
| "loss": 1.4413, |
| "step": 1754 |
| }, |
| { |
| "epoch": 0.5877427997320831, |
| "grad_norm": 0.4450191855430603, |
| "learning_rate": 0.0001, |
| "loss": 1.4164, |
| "step": 1755 |
| }, |
| { |
| "epoch": 0.5880776959142666, |
| "grad_norm": 0.4631238281726837, |
| "learning_rate": 0.0001, |
| "loss": 1.5182, |
| "step": 1756 |
| }, |
| { |
| "epoch": 0.5884125920964501, |
| "grad_norm": 0.46651196479797363, |
| "learning_rate": 0.0001, |
| "loss": 1.636, |
| "step": 1757 |
| }, |
| { |
| "epoch": 0.5887474882786337, |
| "grad_norm": 0.43748751282691956, |
| "learning_rate": 0.0001, |
| "loss": 1.4548, |
| "step": 1758 |
| }, |
| { |
| "epoch": 0.5890823844608172, |
| "grad_norm": 0.4362008273601532, |
| "learning_rate": 0.0001, |
| "loss": 1.4005, |
| "step": 1759 |
| }, |
| { |
| "epoch": 0.5894172806430007, |
| "grad_norm": 0.4466918408870697, |
| "learning_rate": 0.0001, |
| "loss": 1.4687, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.5897521768251842, |
| "grad_norm": 0.46769535541534424, |
| "learning_rate": 0.0001, |
| "loss": 1.6474, |
| "step": 1761 |
| }, |
| { |
| "epoch": 0.5900870730073677, |
| "grad_norm": 0.44609808921813965, |
| "learning_rate": 0.0001, |
| "loss": 1.5352, |
| "step": 1762 |
| }, |
| { |
| "epoch": 0.5904219691895513, |
| "grad_norm": 0.47041457891464233, |
| "learning_rate": 0.0001, |
| "loss": 1.7184, |
| "step": 1763 |
| }, |
| { |
| "epoch": 0.5907568653717348, |
| "grad_norm": 0.4481906592845917, |
| "learning_rate": 0.0001, |
| "loss": 1.5418, |
| "step": 1764 |
| }, |
| { |
| "epoch": 0.5910917615539183, |
| "grad_norm": 0.45170557498931885, |
| "learning_rate": 0.0001, |
| "loss": 1.5844, |
| "step": 1765 |
| }, |
| { |
| "epoch": 0.5914266577361018, |
| "grad_norm": 0.47625941038131714, |
| "learning_rate": 0.0001, |
| "loss": 1.548, |
| "step": 1766 |
| }, |
| { |
| "epoch": 0.5917615539182853, |
| "grad_norm": 0.4283701181411743, |
| "learning_rate": 0.0001, |
| "loss": 1.5318, |
| "step": 1767 |
| }, |
| { |
| "epoch": 0.5920964501004689, |
| "grad_norm": 0.4596593677997589, |
| "learning_rate": 0.0001, |
| "loss": 1.5102, |
| "step": 1768 |
| }, |
| { |
| "epoch": 0.5924313462826524, |
| "grad_norm": 0.45534801483154297, |
| "learning_rate": 0.0001, |
| "loss": 1.5354, |
| "step": 1769 |
| }, |
| { |
| "epoch": 0.5927662424648359, |
| "grad_norm": 0.4619196355342865, |
| "learning_rate": 0.0001, |
| "loss": 1.5106, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.5931011386470194, |
| "grad_norm": 0.47275838255882263, |
| "learning_rate": 0.0001, |
| "loss": 1.5252, |
| "step": 1771 |
| }, |
| { |
| "epoch": 0.5934360348292029, |
| "grad_norm": 0.4409637451171875, |
| "learning_rate": 0.0001, |
| "loss": 1.4677, |
| "step": 1772 |
| }, |
| { |
| "epoch": 0.5937709310113864, |
| "grad_norm": 0.4811408519744873, |
| "learning_rate": 0.0001, |
| "loss": 1.6534, |
| "step": 1773 |
| }, |
| { |
| "epoch": 0.59410582719357, |
| "grad_norm": 0.4599914252758026, |
| "learning_rate": 0.0001, |
| "loss": 1.5346, |
| "step": 1774 |
| }, |
| { |
| "epoch": 0.5944407233757535, |
| "grad_norm": 0.461795449256897, |
| "learning_rate": 0.0001, |
| "loss": 1.6085, |
| "step": 1775 |
| }, |
| { |
| "epoch": 0.594775619557937, |
| "grad_norm": 0.48418354988098145, |
| "learning_rate": 0.0001, |
| "loss": 1.6454, |
| "step": 1776 |
| }, |
| { |
| "epoch": 0.5951105157401205, |
| "grad_norm": 0.4726675748825073, |
| "learning_rate": 0.0001, |
| "loss": 1.6204, |
| "step": 1777 |
| }, |
| { |
| "epoch": 0.595445411922304, |
| "grad_norm": 0.4598071873188019, |
| "learning_rate": 0.0001, |
| "loss": 1.5689, |
| "step": 1778 |
| }, |
| { |
| "epoch": 0.5957803081044876, |
| "grad_norm": 0.4411622881889343, |
| "learning_rate": 0.0001, |
| "loss": 1.459, |
| "step": 1779 |
| }, |
| { |
| "epoch": 0.5961152042866711, |
| "grad_norm": 0.4531992971897125, |
| "learning_rate": 0.0001, |
| "loss": 1.539, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.5964501004688546, |
| "grad_norm": 0.4620833694934845, |
| "learning_rate": 0.0001, |
| "loss": 1.5204, |
| "step": 1781 |
| }, |
| { |
| "epoch": 0.5967849966510381, |
| "grad_norm": 0.44788408279418945, |
| "learning_rate": 0.0001, |
| "loss": 1.4709, |
| "step": 1782 |
| }, |
| { |
| "epoch": 0.5971198928332218, |
| "grad_norm": 0.4573745131492615, |
| "learning_rate": 0.0001, |
| "loss": 1.6093, |
| "step": 1783 |
| }, |
| { |
| "epoch": 0.5974547890154053, |
| "grad_norm": 0.45187342166900635, |
| "learning_rate": 0.0001, |
| "loss": 1.4622, |
| "step": 1784 |
| }, |
| { |
| "epoch": 0.5977896851975888, |
| "grad_norm": 0.4585152268409729, |
| "learning_rate": 0.0001, |
| "loss": 1.533, |
| "step": 1785 |
| }, |
| { |
| "epoch": 0.5981245813797723, |
| "grad_norm": 0.45203301310539246, |
| "learning_rate": 0.0001, |
| "loss": 1.4723, |
| "step": 1786 |
| }, |
| { |
| "epoch": 0.5984594775619558, |
| "grad_norm": 0.48862025141716003, |
| "learning_rate": 0.0001, |
| "loss": 1.5242, |
| "step": 1787 |
| }, |
| { |
| "epoch": 0.5987943737441394, |
| "grad_norm": 0.4569394290447235, |
| "learning_rate": 0.0001, |
| "loss": 1.496, |
| "step": 1788 |
| }, |
| { |
| "epoch": 0.5991292699263229, |
| "grad_norm": 0.45889320969581604, |
| "learning_rate": 0.0001, |
| "loss": 1.5281, |
| "step": 1789 |
| }, |
| { |
| "epoch": 0.5994641661085064, |
| "grad_norm": 0.45484670996665955, |
| "learning_rate": 0.0001, |
| "loss": 1.521, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.5997990622906899, |
| "grad_norm": 0.4456666111946106, |
| "learning_rate": 0.0001, |
| "loss": 1.4718, |
| "step": 1791 |
| }, |
| { |
| "epoch": 0.6001339584728734, |
| "grad_norm": 0.45055705308914185, |
| "learning_rate": 0.0001, |
| "loss": 1.4933, |
| "step": 1792 |
| }, |
| { |
| "epoch": 0.600468854655057, |
| "grad_norm": 0.4577826261520386, |
| "learning_rate": 0.0001, |
| "loss": 1.5425, |
| "step": 1793 |
| }, |
| { |
| "epoch": 0.6008037508372405, |
| "grad_norm": 0.4573463201522827, |
| "learning_rate": 0.0001, |
| "loss": 1.5396, |
| "step": 1794 |
| }, |
| { |
| "epoch": 0.601138647019424, |
| "grad_norm": 0.4672832489013672, |
| "learning_rate": 0.0001, |
| "loss": 1.6364, |
| "step": 1795 |
| }, |
| { |
| "epoch": 0.6014735432016075, |
| "grad_norm": 0.46467325091362, |
| "learning_rate": 0.0001, |
| "loss": 1.5913, |
| "step": 1796 |
| }, |
| { |
| "epoch": 0.601808439383791, |
| "grad_norm": 0.4452815651893616, |
| "learning_rate": 0.0001, |
| "loss": 1.4723, |
| "step": 1797 |
| }, |
| { |
| "epoch": 0.6021433355659745, |
| "grad_norm": 0.46799108386039734, |
| "learning_rate": 0.0001, |
| "loss": 1.421, |
| "step": 1798 |
| }, |
| { |
| "epoch": 0.6024782317481581, |
| "grad_norm": 0.48382145166397095, |
| "learning_rate": 0.0001, |
| "loss": 1.4459, |
| "step": 1799 |
| }, |
| { |
| "epoch": 0.6028131279303416, |
| "grad_norm": 0.46710142493247986, |
| "learning_rate": 0.0001, |
| "loss": 1.4924, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.6031480241125251, |
| "grad_norm": 0.48632174730300903, |
| "learning_rate": 0.0001, |
| "loss": 1.6276, |
| "step": 1801 |
| }, |
| { |
| "epoch": 0.6034829202947086, |
| "grad_norm": 0.4617525339126587, |
| "learning_rate": 0.0001, |
| "loss": 1.4878, |
| "step": 1802 |
| }, |
| { |
| "epoch": 0.6038178164768921, |
| "grad_norm": 0.4950006306171417, |
| "learning_rate": 0.0001, |
| "loss": 1.5309, |
| "step": 1803 |
| }, |
| { |
| "epoch": 0.6041527126590757, |
| "grad_norm": 0.4577718675136566, |
| "learning_rate": 0.0001, |
| "loss": 1.4796, |
| "step": 1804 |
| }, |
| { |
| "epoch": 0.6044876088412592, |
| "grad_norm": 0.47661927342414856, |
| "learning_rate": 0.0001, |
| "loss": 1.4687, |
| "step": 1805 |
| }, |
| { |
| "epoch": 0.6048225050234427, |
| "grad_norm": 0.44819697737693787, |
| "learning_rate": 0.0001, |
| "loss": 1.4981, |
| "step": 1806 |
| }, |
| { |
| "epoch": 0.6051574012056262, |
| "grad_norm": 0.4891256093978882, |
| "learning_rate": 0.0001, |
| "loss": 1.5688, |
| "step": 1807 |
| }, |
| { |
| "epoch": 0.6054922973878097, |
| "grad_norm": 0.452263742685318, |
| "learning_rate": 0.0001, |
| "loss": 1.5526, |
| "step": 1808 |
| }, |
| { |
| "epoch": 0.6058271935699933, |
| "grad_norm": 0.4358367621898651, |
| "learning_rate": 0.0001, |
| "loss": 1.5021, |
| "step": 1809 |
| }, |
| { |
| "epoch": 0.6061620897521768, |
| "grad_norm": 0.4441038966178894, |
| "learning_rate": 0.0001, |
| "loss": 1.5329, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.6064969859343603, |
| "grad_norm": 0.4507153034210205, |
| "learning_rate": 0.0001, |
| "loss": 1.4325, |
| "step": 1811 |
| }, |
| { |
| "epoch": 0.6068318821165438, |
| "grad_norm": 0.45769181847572327, |
| "learning_rate": 0.0001, |
| "loss": 1.5259, |
| "step": 1812 |
| }, |
| { |
| "epoch": 0.6071667782987274, |
| "grad_norm": 0.4539431631565094, |
| "learning_rate": 0.0001, |
| "loss": 1.5086, |
| "step": 1813 |
| }, |
| { |
| "epoch": 0.607501674480911, |
| "grad_norm": 0.4860522449016571, |
| "learning_rate": 0.0001, |
| "loss": 1.5724, |
| "step": 1814 |
| }, |
| { |
| "epoch": 0.6078365706630945, |
| "grad_norm": 0.43865716457366943, |
| "learning_rate": 0.0001, |
| "loss": 1.5729, |
| "step": 1815 |
| }, |
| { |
| "epoch": 0.608171466845278, |
| "grad_norm": 0.43174469470977783, |
| "learning_rate": 0.0001, |
| "loss": 1.4406, |
| "step": 1816 |
| }, |
| { |
| "epoch": 0.6085063630274615, |
| "grad_norm": 0.44520848989486694, |
| "learning_rate": 0.0001, |
| "loss": 1.4226, |
| "step": 1817 |
| }, |
| { |
| "epoch": 0.608841259209645, |
| "grad_norm": 0.46049627661705017, |
| "learning_rate": 0.0001, |
| "loss": 1.5524, |
| "step": 1818 |
| }, |
| { |
| "epoch": 0.6091761553918286, |
| "grad_norm": 0.4523634910583496, |
| "learning_rate": 0.0001, |
| "loss": 1.5514, |
| "step": 1819 |
| }, |
| { |
| "epoch": 0.6095110515740121, |
| "grad_norm": 0.44883349537849426, |
| "learning_rate": 0.0001, |
| "loss": 1.515, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.6098459477561956, |
| "grad_norm": 0.4615044891834259, |
| "learning_rate": 0.0001, |
| "loss": 1.5721, |
| "step": 1821 |
| }, |
| { |
| "epoch": 0.6101808439383791, |
| "grad_norm": 0.4608284831047058, |
| "learning_rate": 0.0001, |
| "loss": 1.4685, |
| "step": 1822 |
| }, |
| { |
| "epoch": 0.6105157401205626, |
| "grad_norm": 0.45228102803230286, |
| "learning_rate": 0.0001, |
| "loss": 1.5517, |
| "step": 1823 |
| }, |
| { |
| "epoch": 0.6108506363027462, |
| "grad_norm": 0.4694874882698059, |
| "learning_rate": 0.0001, |
| "loss": 1.6284, |
| "step": 1824 |
| }, |
| { |
| "epoch": 0.6111855324849297, |
| "grad_norm": 0.4595600962638855, |
| "learning_rate": 0.0001, |
| "loss": 1.5509, |
| "step": 1825 |
| }, |
| { |
| "epoch": 0.6115204286671132, |
| "grad_norm": 0.4391246736049652, |
| "learning_rate": 0.0001, |
| "loss": 1.4557, |
| "step": 1826 |
| }, |
| { |
| "epoch": 0.6118553248492967, |
| "grad_norm": 0.4490840435028076, |
| "learning_rate": 0.0001, |
| "loss": 1.5605, |
| "step": 1827 |
| }, |
| { |
| "epoch": 0.6121902210314802, |
| "grad_norm": 0.434456467628479, |
| "learning_rate": 0.0001, |
| "loss": 1.519, |
| "step": 1828 |
| }, |
| { |
| "epoch": 0.6125251172136638, |
| "grad_norm": 0.4547174274921417, |
| "learning_rate": 0.0001, |
| "loss": 1.3821, |
| "step": 1829 |
| }, |
| { |
| "epoch": 0.6128600133958473, |
| "grad_norm": 0.46523991227149963, |
| "learning_rate": 0.0001, |
| "loss": 1.5759, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.6131949095780308, |
| "grad_norm": 0.4709138870239258, |
| "learning_rate": 0.0001, |
| "loss": 1.5246, |
| "step": 1831 |
| }, |
| { |
| "epoch": 0.6135298057602143, |
| "grad_norm": 0.45439472794532776, |
| "learning_rate": 0.0001, |
| "loss": 1.5276, |
| "step": 1832 |
| }, |
| { |
| "epoch": 0.6138647019423978, |
| "grad_norm": 0.4664701521396637, |
| "learning_rate": 0.0001, |
| "loss": 1.5766, |
| "step": 1833 |
| }, |
| { |
| "epoch": 0.6141995981245814, |
| "grad_norm": 0.44643253087997437, |
| "learning_rate": 0.0001, |
| "loss": 1.4825, |
| "step": 1834 |
| }, |
| { |
| "epoch": 0.6145344943067649, |
| "grad_norm": 0.45350247621536255, |
| "learning_rate": 0.0001, |
| "loss": 1.5316, |
| "step": 1835 |
| }, |
| { |
| "epoch": 0.6148693904889484, |
| "grad_norm": 0.5003080368041992, |
| "learning_rate": 0.0001, |
| "loss": 1.5022, |
| "step": 1836 |
| }, |
| { |
| "epoch": 0.6152042866711319, |
| "grad_norm": 0.4404720366001129, |
| "learning_rate": 0.0001, |
| "loss": 1.5069, |
| "step": 1837 |
| }, |
| { |
| "epoch": 0.6155391828533154, |
| "grad_norm": 0.4526624083518982, |
| "learning_rate": 0.0001, |
| "loss": 1.5446, |
| "step": 1838 |
| }, |
| { |
| "epoch": 0.615874079035499, |
| "grad_norm": 0.4543852210044861, |
| "learning_rate": 0.0001, |
| "loss": 1.4988, |
| "step": 1839 |
| }, |
| { |
| "epoch": 0.6162089752176825, |
| "grad_norm": 0.4725828766822815, |
| "learning_rate": 0.0001, |
| "loss": 1.5402, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.616543871399866, |
| "grad_norm": 0.4534866511821747, |
| "learning_rate": 0.0001, |
| "loss": 1.6049, |
| "step": 1841 |
| }, |
| { |
| "epoch": 0.6168787675820495, |
| "grad_norm": 0.46109819412231445, |
| "learning_rate": 0.0001, |
| "loss": 1.467, |
| "step": 1842 |
| }, |
| { |
| "epoch": 0.617213663764233, |
| "grad_norm": 0.4313134253025055, |
| "learning_rate": 0.0001, |
| "loss": 1.473, |
| "step": 1843 |
| }, |
| { |
| "epoch": 0.6175485599464167, |
| "grad_norm": 0.4366667866706848, |
| "learning_rate": 0.0001, |
| "loss": 1.4491, |
| "step": 1844 |
| }, |
| { |
| "epoch": 0.6178834561286002, |
| "grad_norm": 0.44986891746520996, |
| "learning_rate": 0.0001, |
| "loss": 1.5261, |
| "step": 1845 |
| }, |
| { |
| "epoch": 0.6182183523107837, |
| "grad_norm": 0.4566178619861603, |
| "learning_rate": 0.0001, |
| "loss": 1.4325, |
| "step": 1846 |
| }, |
| { |
| "epoch": 0.6185532484929672, |
| "grad_norm": 0.452364444732666, |
| "learning_rate": 0.0001, |
| "loss": 1.4646, |
| "step": 1847 |
| }, |
| { |
| "epoch": 0.6188881446751507, |
| "grad_norm": 0.5066172480583191, |
| "learning_rate": 0.0001, |
| "loss": 1.5345, |
| "step": 1848 |
| }, |
| { |
| "epoch": 0.6192230408573343, |
| "grad_norm": 0.48632514476776123, |
| "learning_rate": 0.0001, |
| "loss": 1.5451, |
| "step": 1849 |
| }, |
| { |
| "epoch": 0.6195579370395178, |
| "grad_norm": 0.49846601486206055, |
| "learning_rate": 0.0001, |
| "loss": 1.6758, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.6198928332217013, |
| "grad_norm": 0.4605197012424469, |
| "learning_rate": 0.0001, |
| "loss": 1.4672, |
| "step": 1851 |
| }, |
| { |
| "epoch": 0.6202277294038848, |
| "grad_norm": 0.5055616497993469, |
| "learning_rate": 0.0001, |
| "loss": 1.5908, |
| "step": 1852 |
| }, |
| { |
| "epoch": 0.6205626255860683, |
| "grad_norm": 0.43290427327156067, |
| "learning_rate": 0.0001, |
| "loss": 1.4314, |
| "step": 1853 |
| }, |
| { |
| "epoch": 0.6208975217682519, |
| "grad_norm": 0.4735543131828308, |
| "learning_rate": 0.0001, |
| "loss": 1.5928, |
| "step": 1854 |
| }, |
| { |
| "epoch": 0.6212324179504354, |
| "grad_norm": 0.45503896474838257, |
| "learning_rate": 0.0001, |
| "loss": 1.4838, |
| "step": 1855 |
| }, |
| { |
| "epoch": 0.6215673141326189, |
| "grad_norm": 0.47979220747947693, |
| "learning_rate": 0.0001, |
| "loss": 1.5964, |
| "step": 1856 |
| }, |
| { |
| "epoch": 0.6219022103148024, |
| "grad_norm": 0.4965604245662689, |
| "learning_rate": 0.0001, |
| "loss": 1.6738, |
| "step": 1857 |
| }, |
| { |
| "epoch": 0.6222371064969859, |
| "grad_norm": 0.46204668283462524, |
| "learning_rate": 0.0001, |
| "loss": 1.5834, |
| "step": 1858 |
| }, |
| { |
| "epoch": 0.6225720026791695, |
| "grad_norm": 0.4727908968925476, |
| "learning_rate": 0.0001, |
| "loss": 1.5032, |
| "step": 1859 |
| }, |
| { |
| "epoch": 0.622906898861353, |
| "grad_norm": 0.4483385980129242, |
| "learning_rate": 0.0001, |
| "loss": 1.5106, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.6232417950435365, |
| "grad_norm": 0.4823533594608307, |
| "learning_rate": 0.0001, |
| "loss": 1.5182, |
| "step": 1861 |
| }, |
| { |
| "epoch": 0.62357669122572, |
| "grad_norm": 0.444034606218338, |
| "learning_rate": 0.0001, |
| "loss": 1.4752, |
| "step": 1862 |
| }, |
| { |
| "epoch": 0.6239115874079035, |
| "grad_norm": 0.4599262773990631, |
| "learning_rate": 0.0001, |
| "loss": 1.6058, |
| "step": 1863 |
| }, |
| { |
| "epoch": 0.624246483590087, |
| "grad_norm": 0.46915575861930847, |
| "learning_rate": 0.0001, |
| "loss": 1.5703, |
| "step": 1864 |
| }, |
| { |
| "epoch": 0.6245813797722706, |
| "grad_norm": 0.4495287835597992, |
| "learning_rate": 0.0001, |
| "loss": 1.5697, |
| "step": 1865 |
| }, |
| { |
| "epoch": 0.6249162759544541, |
| "grad_norm": 0.4550332725048065, |
| "learning_rate": 0.0001, |
| "loss": 1.593, |
| "step": 1866 |
| }, |
| { |
| "epoch": 0.6252511721366376, |
| "grad_norm": 0.45989227294921875, |
| "learning_rate": 0.0001, |
| "loss": 1.5264, |
| "step": 1867 |
| }, |
| { |
| "epoch": 0.6255860683188211, |
| "grad_norm": 0.4636523723602295, |
| "learning_rate": 0.0001, |
| "loss": 1.5075, |
| "step": 1868 |
| }, |
| { |
| "epoch": 0.6259209645010047, |
| "grad_norm": 0.47894105315208435, |
| "learning_rate": 0.0001, |
| "loss": 1.5339, |
| "step": 1869 |
| }, |
| { |
| "epoch": 0.6262558606831882, |
| "grad_norm": 0.48035570979118347, |
| "learning_rate": 0.0001, |
| "loss": 1.6396, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.6265907568653717, |
| "grad_norm": 0.4636676013469696, |
| "learning_rate": 0.0001, |
| "loss": 1.5379, |
| "step": 1871 |
| }, |
| { |
| "epoch": 0.6269256530475552, |
| "grad_norm": 0.45790281891822815, |
| "learning_rate": 0.0001, |
| "loss": 1.5592, |
| "step": 1872 |
| }, |
| { |
| "epoch": 0.6272605492297387, |
| "grad_norm": 0.4582197964191437, |
| "learning_rate": 0.0001, |
| "loss": 1.4942, |
| "step": 1873 |
| }, |
| { |
| "epoch": 0.6275954454119224, |
| "grad_norm": 0.46599966287612915, |
| "learning_rate": 0.0001, |
| "loss": 1.4827, |
| "step": 1874 |
| }, |
| { |
| "epoch": 0.6279303415941059, |
| "grad_norm": 0.46609312295913696, |
| "learning_rate": 0.0001, |
| "loss": 1.4927, |
| "step": 1875 |
| }, |
| { |
| "epoch": 0.6282652377762894, |
| "grad_norm": 0.4750870168209076, |
| "learning_rate": 0.0001, |
| "loss": 1.5363, |
| "step": 1876 |
| }, |
| { |
| "epoch": 0.6286001339584729, |
| "grad_norm": 0.450873464345932, |
| "learning_rate": 0.0001, |
| "loss": 1.4469, |
| "step": 1877 |
| }, |
| { |
| "epoch": 0.6289350301406564, |
| "grad_norm": 0.4762636423110962, |
| "learning_rate": 0.0001, |
| "loss": 1.5899, |
| "step": 1878 |
| }, |
| { |
| "epoch": 0.62926992632284, |
| "grad_norm": 0.4646572172641754, |
| "learning_rate": 0.0001, |
| "loss": 1.5064, |
| "step": 1879 |
| }, |
| { |
| "epoch": 0.6296048225050235, |
| "grad_norm": 0.45741555094718933, |
| "learning_rate": 0.0001, |
| "loss": 1.4961, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.629939718687207, |
| "grad_norm": 0.466714471578598, |
| "learning_rate": 0.0001, |
| "loss": 1.544, |
| "step": 1881 |
| }, |
| { |
| "epoch": 0.6302746148693905, |
| "grad_norm": 0.4649389684200287, |
| "learning_rate": 0.0001, |
| "loss": 1.5034, |
| "step": 1882 |
| }, |
| { |
| "epoch": 0.630609511051574, |
| "grad_norm": 0.4543673098087311, |
| "learning_rate": 0.0001, |
| "loss": 1.5047, |
| "step": 1883 |
| }, |
| { |
| "epoch": 0.6309444072337576, |
| "grad_norm": 0.4476313889026642, |
| "learning_rate": 0.0001, |
| "loss": 1.5352, |
| "step": 1884 |
| }, |
| { |
| "epoch": 0.6312793034159411, |
| "grad_norm": 0.45701703429222107, |
| "learning_rate": 0.0001, |
| "loss": 1.4728, |
| "step": 1885 |
| }, |
| { |
| "epoch": 0.6316141995981246, |
| "grad_norm": 0.47941985726356506, |
| "learning_rate": 0.0001, |
| "loss": 1.55, |
| "step": 1886 |
| }, |
| { |
| "epoch": 0.6319490957803081, |
| "grad_norm": 0.4696800112724304, |
| "learning_rate": 0.0001, |
| "loss": 1.5414, |
| "step": 1887 |
| }, |
| { |
| "epoch": 0.6322839919624916, |
| "grad_norm": 0.46519196033477783, |
| "learning_rate": 0.0001, |
| "loss": 1.535, |
| "step": 1888 |
| }, |
| { |
| "epoch": 0.6326188881446752, |
| "grad_norm": 0.48627668619155884, |
| "learning_rate": 0.0001, |
| "loss": 1.5944, |
| "step": 1889 |
| }, |
| { |
| "epoch": 0.6329537843268587, |
| "grad_norm": 0.48074227571487427, |
| "learning_rate": 0.0001, |
| "loss": 1.6139, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.6332886805090422, |
| "grad_norm": 0.46736252307891846, |
| "learning_rate": 0.0001, |
| "loss": 1.4678, |
| "step": 1891 |
| }, |
| { |
| "epoch": 0.6336235766912257, |
| "grad_norm": 0.4569910168647766, |
| "learning_rate": 0.0001, |
| "loss": 1.4618, |
| "step": 1892 |
| }, |
| { |
| "epoch": 0.6339584728734092, |
| "grad_norm": 0.47562307119369507, |
| "learning_rate": 0.0001, |
| "loss": 1.5823, |
| "step": 1893 |
| }, |
| { |
| "epoch": 0.6342933690555927, |
| "grad_norm": 0.466452956199646, |
| "learning_rate": 0.0001, |
| "loss": 1.5113, |
| "step": 1894 |
| }, |
| { |
| "epoch": 0.6346282652377763, |
| "grad_norm": 0.46311044692993164, |
| "learning_rate": 0.0001, |
| "loss": 1.5501, |
| "step": 1895 |
| }, |
| { |
| "epoch": 0.6349631614199598, |
| "grad_norm": 0.4702926278114319, |
| "learning_rate": 0.0001, |
| "loss": 1.5268, |
| "step": 1896 |
| }, |
| { |
| "epoch": 0.6352980576021433, |
| "grad_norm": 0.45443615317344666, |
| "learning_rate": 0.0001, |
| "loss": 1.4828, |
| "step": 1897 |
| }, |
| { |
| "epoch": 0.6356329537843268, |
| "grad_norm": 0.4715770184993744, |
| "learning_rate": 0.0001, |
| "loss": 1.4838, |
| "step": 1898 |
| }, |
| { |
| "epoch": 0.6359678499665103, |
| "grad_norm": 0.4504530429840088, |
| "learning_rate": 0.0001, |
| "loss": 1.5517, |
| "step": 1899 |
| }, |
| { |
| "epoch": 0.6363027461486939, |
| "grad_norm": 0.44113561511039734, |
| "learning_rate": 0.0001, |
| "loss": 1.5354, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.6366376423308774, |
| "grad_norm": 0.4682255685329437, |
| "learning_rate": 0.0001, |
| "loss": 1.5009, |
| "step": 1901 |
| }, |
| { |
| "epoch": 0.6369725385130609, |
| "grad_norm": 0.4606624245643616, |
| "learning_rate": 0.0001, |
| "loss": 1.5275, |
| "step": 1902 |
| }, |
| { |
| "epoch": 0.6373074346952444, |
| "grad_norm": 0.4960009455680847, |
| "learning_rate": 0.0001, |
| "loss": 1.6387, |
| "step": 1903 |
| }, |
| { |
| "epoch": 0.6376423308774279, |
| "grad_norm": 0.4498940706253052, |
| "learning_rate": 0.0001, |
| "loss": 1.3875, |
| "step": 1904 |
| }, |
| { |
| "epoch": 0.6379772270596116, |
| "grad_norm": 0.4744248688220978, |
| "learning_rate": 0.0001, |
| "loss": 1.5715, |
| "step": 1905 |
| }, |
| { |
| "epoch": 0.6383121232417951, |
| "grad_norm": 0.4565998911857605, |
| "learning_rate": 0.0001, |
| "loss": 1.6134, |
| "step": 1906 |
| }, |
| { |
| "epoch": 0.6386470194239786, |
| "grad_norm": 0.4789954423904419, |
| "learning_rate": 0.0001, |
| "loss": 1.5371, |
| "step": 1907 |
| }, |
| { |
| "epoch": 0.6389819156061621, |
| "grad_norm": 0.4668721556663513, |
| "learning_rate": 0.0001, |
| "loss": 1.6295, |
| "step": 1908 |
| }, |
| { |
| "epoch": 0.6393168117883457, |
| "grad_norm": 0.4449285566806793, |
| "learning_rate": 0.0001, |
| "loss": 1.4532, |
| "step": 1909 |
| }, |
| { |
| "epoch": 0.6396517079705292, |
| "grad_norm": 0.46385762095451355, |
| "learning_rate": 0.0001, |
| "loss": 1.5756, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.6399866041527127, |
| "grad_norm": 0.4670940339565277, |
| "learning_rate": 0.0001, |
| "loss": 1.5893, |
| "step": 1911 |
| }, |
| { |
| "epoch": 0.6403215003348962, |
| "grad_norm": 0.4583105742931366, |
| "learning_rate": 0.0001, |
| "loss": 1.4533, |
| "step": 1912 |
| }, |
| { |
| "epoch": 0.6406563965170797, |
| "grad_norm": 0.43470409512519836, |
| "learning_rate": 0.0001, |
| "loss": 1.417, |
| "step": 1913 |
| }, |
| { |
| "epoch": 0.6409912926992632, |
| "grad_norm": 0.4683259427547455, |
| "learning_rate": 0.0001, |
| "loss": 1.5425, |
| "step": 1914 |
| }, |
| { |
| "epoch": 0.6413261888814468, |
| "grad_norm": 0.43709784746170044, |
| "learning_rate": 0.0001, |
| "loss": 1.498, |
| "step": 1915 |
| }, |
| { |
| "epoch": 0.6416610850636303, |
| "grad_norm": 0.4760400950908661, |
| "learning_rate": 0.0001, |
| "loss": 1.5995, |
| "step": 1916 |
| }, |
| { |
| "epoch": 0.6419959812458138, |
| "grad_norm": 0.47921547293663025, |
| "learning_rate": 0.0001, |
| "loss": 1.5829, |
| "step": 1917 |
| }, |
| { |
| "epoch": 0.6423308774279973, |
| "grad_norm": 0.4762752056121826, |
| "learning_rate": 0.0001, |
| "loss": 1.6675, |
| "step": 1918 |
| }, |
| { |
| "epoch": 0.6426657736101808, |
| "grad_norm": 0.46576380729675293, |
| "learning_rate": 0.0001, |
| "loss": 1.5385, |
| "step": 1919 |
| }, |
| { |
| "epoch": 0.6430006697923644, |
| "grad_norm": 0.4593994915485382, |
| "learning_rate": 0.0001, |
| "loss": 1.4759, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.6433355659745479, |
| "grad_norm": 0.45551782846450806, |
| "learning_rate": 0.0001, |
| "loss": 1.4443, |
| "step": 1921 |
| }, |
| { |
| "epoch": 0.6436704621567314, |
| "grad_norm": 0.45817625522613525, |
| "learning_rate": 0.0001, |
| "loss": 1.4591, |
| "step": 1922 |
| }, |
| { |
| "epoch": 0.6440053583389149, |
| "grad_norm": 0.4861002564430237, |
| "learning_rate": 0.0001, |
| "loss": 1.5437, |
| "step": 1923 |
| }, |
| { |
| "epoch": 0.6443402545210984, |
| "grad_norm": 0.4582726061344147, |
| "learning_rate": 0.0001, |
| "loss": 1.4162, |
| "step": 1924 |
| }, |
| { |
| "epoch": 0.644675150703282, |
| "grad_norm": 0.45267635583877563, |
| "learning_rate": 0.0001, |
| "loss": 1.4411, |
| "step": 1925 |
| }, |
| { |
| "epoch": 0.6450100468854655, |
| "grad_norm": 0.4521283805370331, |
| "learning_rate": 0.0001, |
| "loss": 1.5681, |
| "step": 1926 |
| }, |
| { |
| "epoch": 0.645344943067649, |
| "grad_norm": 0.4693661332130432, |
| "learning_rate": 0.0001, |
| "loss": 1.5153, |
| "step": 1927 |
| }, |
| { |
| "epoch": 0.6456798392498325, |
| "grad_norm": 0.4437626302242279, |
| "learning_rate": 0.0001, |
| "loss": 1.3758, |
| "step": 1928 |
| }, |
| { |
| "epoch": 0.646014735432016, |
| "grad_norm": 0.4534982740879059, |
| "learning_rate": 0.0001, |
| "loss": 1.5104, |
| "step": 1929 |
| }, |
| { |
| "epoch": 0.6463496316141996, |
| "grad_norm": 0.4561862051486969, |
| "learning_rate": 0.0001, |
| "loss": 1.5212, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.6466845277963831, |
| "grad_norm": 0.46854645013809204, |
| "learning_rate": 0.0001, |
| "loss": 1.495, |
| "step": 1931 |
| }, |
| { |
| "epoch": 0.6470194239785666, |
| "grad_norm": 0.4689598083496094, |
| "learning_rate": 0.0001, |
| "loss": 1.6433, |
| "step": 1932 |
| }, |
| { |
| "epoch": 0.6473543201607501, |
| "grad_norm": 0.474661260843277, |
| "learning_rate": 0.0001, |
| "loss": 1.4658, |
| "step": 1933 |
| }, |
| { |
| "epoch": 0.6476892163429336, |
| "grad_norm": 0.48324620723724365, |
| "learning_rate": 0.0001, |
| "loss": 1.5707, |
| "step": 1934 |
| }, |
| { |
| "epoch": 0.6480241125251173, |
| "grad_norm": 0.4637855291366577, |
| "learning_rate": 0.0001, |
| "loss": 1.5954, |
| "step": 1935 |
| }, |
| { |
| "epoch": 0.6483590087073008, |
| "grad_norm": 0.4889835715293884, |
| "learning_rate": 0.0001, |
| "loss": 1.4605, |
| "step": 1936 |
| }, |
| { |
| "epoch": 0.6486939048894843, |
| "grad_norm": 0.4517784118652344, |
| "learning_rate": 0.0001, |
| "loss": 1.4816, |
| "step": 1937 |
| }, |
| { |
| "epoch": 0.6490288010716678, |
| "grad_norm": 0.47071799635887146, |
| "learning_rate": 0.0001, |
| "loss": 1.5544, |
| "step": 1938 |
| }, |
| { |
| "epoch": 0.6493636972538513, |
| "grad_norm": 0.45107269287109375, |
| "learning_rate": 0.0001, |
| "loss": 1.5698, |
| "step": 1939 |
| }, |
| { |
| "epoch": 0.6496985934360349, |
| "grad_norm": 0.47279268503189087, |
| "learning_rate": 0.0001, |
| "loss": 1.6169, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.6500334896182184, |
| "grad_norm": 0.4569358825683594, |
| "learning_rate": 0.0001, |
| "loss": 1.5735, |
| "step": 1941 |
| }, |
| { |
| "epoch": 0.6503683858004019, |
| "grad_norm": 0.49081119894981384, |
| "learning_rate": 0.0001, |
| "loss": 1.5683, |
| "step": 1942 |
| }, |
| { |
| "epoch": 0.6507032819825854, |
| "grad_norm": 0.45732182264328003, |
| "learning_rate": 0.0001, |
| "loss": 1.5146, |
| "step": 1943 |
| }, |
| { |
| "epoch": 0.6510381781647689, |
| "grad_norm": 0.48255446553230286, |
| "learning_rate": 0.0001, |
| "loss": 1.5982, |
| "step": 1944 |
| }, |
| { |
| "epoch": 0.6513730743469525, |
| "grad_norm": 0.4548420310020447, |
| "learning_rate": 0.0001, |
| "loss": 1.5714, |
| "step": 1945 |
| }, |
| { |
| "epoch": 0.651707970529136, |
| "grad_norm": 0.46165576577186584, |
| "learning_rate": 0.0001, |
| "loss": 1.6128, |
| "step": 1946 |
| }, |
| { |
| "epoch": 0.6520428667113195, |
| "grad_norm": 0.4421144425868988, |
| "learning_rate": 0.0001, |
| "loss": 1.4837, |
| "step": 1947 |
| }, |
| { |
| "epoch": 0.652377762893503, |
| "grad_norm": 0.4581396281719208, |
| "learning_rate": 0.0001, |
| "loss": 1.4513, |
| "step": 1948 |
| }, |
| { |
| "epoch": 0.6527126590756865, |
| "grad_norm": 0.45781993865966797, |
| "learning_rate": 0.0001, |
| "loss": 1.4742, |
| "step": 1949 |
| }, |
| { |
| "epoch": 0.6530475552578701, |
| "grad_norm": 0.4650152325630188, |
| "learning_rate": 0.0001, |
| "loss": 1.5343, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.6533824514400536, |
| "grad_norm": 0.47808071970939636, |
| "learning_rate": 0.0001, |
| "loss": 1.5066, |
| "step": 1951 |
| }, |
| { |
| "epoch": 0.6537173476222371, |
| "grad_norm": 0.4686528146266937, |
| "learning_rate": 0.0001, |
| "loss": 1.5331, |
| "step": 1952 |
| }, |
| { |
| "epoch": 0.6540522438044206, |
| "grad_norm": 0.497200071811676, |
| "learning_rate": 0.0001, |
| "loss": 1.591, |
| "step": 1953 |
| }, |
| { |
| "epoch": 0.6543871399866041, |
| "grad_norm": 0.4465658664703369, |
| "learning_rate": 0.0001, |
| "loss": 1.5553, |
| "step": 1954 |
| }, |
| { |
| "epoch": 0.6547220361687877, |
| "grad_norm": 0.44191426038742065, |
| "learning_rate": 0.0001, |
| "loss": 1.4675, |
| "step": 1955 |
| }, |
| { |
| "epoch": 0.6550569323509712, |
| "grad_norm": 0.4407011568546295, |
| "learning_rate": 0.0001, |
| "loss": 1.4161, |
| "step": 1956 |
| }, |
| { |
| "epoch": 0.6553918285331547, |
| "grad_norm": 0.4675586521625519, |
| "learning_rate": 0.0001, |
| "loss": 1.476, |
| "step": 1957 |
| }, |
| { |
| "epoch": 0.6557267247153382, |
| "grad_norm": 0.46932560205459595, |
| "learning_rate": 0.0001, |
| "loss": 1.5244, |
| "step": 1958 |
| }, |
| { |
| "epoch": 0.6560616208975217, |
| "grad_norm": 0.4337617754936218, |
| "learning_rate": 0.0001, |
| "loss": 1.5247, |
| "step": 1959 |
| }, |
| { |
| "epoch": 0.6563965170797053, |
| "grad_norm": 0.48785457015037537, |
| "learning_rate": 0.0001, |
| "loss": 1.5425, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.6567314132618888, |
| "grad_norm": 0.4481433033943176, |
| "learning_rate": 0.0001, |
| "loss": 1.4779, |
| "step": 1961 |
| }, |
| { |
| "epoch": 0.6570663094440723, |
| "grad_norm": 0.4887458384037018, |
| "learning_rate": 0.0001, |
| "loss": 1.4865, |
| "step": 1962 |
| }, |
| { |
| "epoch": 0.6574012056262558, |
| "grad_norm": 0.4612896740436554, |
| "learning_rate": 0.0001, |
| "loss": 1.5068, |
| "step": 1963 |
| }, |
| { |
| "epoch": 0.6577361018084393, |
| "grad_norm": 0.4455910623073578, |
| "learning_rate": 0.0001, |
| "loss": 1.4341, |
| "step": 1964 |
| }, |
| { |
| "epoch": 0.6580709979906229, |
| "grad_norm": 0.4743984043598175, |
| "learning_rate": 0.0001, |
| "loss": 1.5445, |
| "step": 1965 |
| }, |
| { |
| "epoch": 0.6584058941728065, |
| "grad_norm": 0.46120843291282654, |
| "learning_rate": 0.0001, |
| "loss": 1.4902, |
| "step": 1966 |
| }, |
| { |
| "epoch": 0.65874079035499, |
| "grad_norm": 0.4710855782032013, |
| "learning_rate": 0.0001, |
| "loss": 1.5625, |
| "step": 1967 |
| }, |
| { |
| "epoch": 0.6590756865371735, |
| "grad_norm": 0.44773563742637634, |
| "learning_rate": 0.0001, |
| "loss": 1.5546, |
| "step": 1968 |
| }, |
| { |
| "epoch": 0.659410582719357, |
| "grad_norm": 0.4674702286720276, |
| "learning_rate": 0.0001, |
| "loss": 1.5137, |
| "step": 1969 |
| }, |
| { |
| "epoch": 0.6597454789015406, |
| "grad_norm": 0.4413098096847534, |
| "learning_rate": 0.0001, |
| "loss": 1.4792, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.6600803750837241, |
| "grad_norm": 0.4673274755477905, |
| "learning_rate": 0.0001, |
| "loss": 1.5341, |
| "step": 1971 |
| }, |
| { |
| "epoch": 0.6604152712659076, |
| "grad_norm": 0.46262720227241516, |
| "learning_rate": 0.0001, |
| "loss": 1.4812, |
| "step": 1972 |
| }, |
| { |
| "epoch": 0.6607501674480911, |
| "grad_norm": 0.4727884829044342, |
| "learning_rate": 0.0001, |
| "loss": 1.6146, |
| "step": 1973 |
| }, |
| { |
| "epoch": 0.6610850636302746, |
| "grad_norm": 0.4771195650100708, |
| "learning_rate": 0.0001, |
| "loss": 1.5431, |
| "step": 1974 |
| }, |
| { |
| "epoch": 0.6614199598124582, |
| "grad_norm": 0.48989588022232056, |
| "learning_rate": 0.0001, |
| "loss": 1.5296, |
| "step": 1975 |
| }, |
| { |
| "epoch": 0.6617548559946417, |
| "grad_norm": 0.4625195264816284, |
| "learning_rate": 0.0001, |
| "loss": 1.4064, |
| "step": 1976 |
| }, |
| { |
| "epoch": 0.6620897521768252, |
| "grad_norm": 0.48750412464141846, |
| "learning_rate": 0.0001, |
| "loss": 1.4338, |
| "step": 1977 |
| }, |
| { |
| "epoch": 0.6624246483590087, |
| "grad_norm": 0.47945865988731384, |
| "learning_rate": 0.0001, |
| "loss": 1.5737, |
| "step": 1978 |
| }, |
| { |
| "epoch": 0.6627595445411922, |
| "grad_norm": 0.4766711890697479, |
| "learning_rate": 0.0001, |
| "loss": 1.5785, |
| "step": 1979 |
| }, |
| { |
| "epoch": 0.6630944407233758, |
| "grad_norm": 0.46893224120140076, |
| "learning_rate": 0.0001, |
| "loss": 1.5609, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.6634293369055593, |
| "grad_norm": 0.4679139256477356, |
| "learning_rate": 0.0001, |
| "loss": 1.5144, |
| "step": 1981 |
| }, |
| { |
| "epoch": 0.6637642330877428, |
| "grad_norm": 0.451107919216156, |
| "learning_rate": 0.0001, |
| "loss": 1.5252, |
| "step": 1982 |
| }, |
| { |
| "epoch": 0.6640991292699263, |
| "grad_norm": 0.4496656358242035, |
| "learning_rate": 0.0001, |
| "loss": 1.3826, |
| "step": 1983 |
| }, |
| { |
| "epoch": 0.6644340254521098, |
| "grad_norm": 0.46544507145881653, |
| "learning_rate": 0.0001, |
| "loss": 1.5085, |
| "step": 1984 |
| }, |
| { |
| "epoch": 0.6647689216342934, |
| "grad_norm": 0.4583112597465515, |
| "learning_rate": 0.0001, |
| "loss": 1.5213, |
| "step": 1985 |
| }, |
| { |
| "epoch": 0.6651038178164769, |
| "grad_norm": 0.4679916799068451, |
| "learning_rate": 0.0001, |
| "loss": 1.5077, |
| "step": 1986 |
| }, |
| { |
| "epoch": 0.6654387139986604, |
| "grad_norm": 0.4852266311645508, |
| "learning_rate": 0.0001, |
| "loss": 1.5449, |
| "step": 1987 |
| }, |
| { |
| "epoch": 0.6657736101808439, |
| "grad_norm": 0.46231263875961304, |
| "learning_rate": 0.0001, |
| "loss": 1.5846, |
| "step": 1988 |
| }, |
| { |
| "epoch": 0.6661085063630274, |
| "grad_norm": 0.4823322892189026, |
| "learning_rate": 0.0001, |
| "loss": 1.5246, |
| "step": 1989 |
| }, |
| { |
| "epoch": 0.666443402545211, |
| "grad_norm": 0.45604240894317627, |
| "learning_rate": 0.0001, |
| "loss": 1.4723, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.6667782987273945, |
| "grad_norm": 0.47065314650535583, |
| "learning_rate": 0.0001, |
| "loss": 1.4831, |
| "step": 1991 |
| }, |
| { |
| "epoch": 0.667113194909578, |
| "grad_norm": 0.46221935749053955, |
| "learning_rate": 0.0001, |
| "loss": 1.4796, |
| "step": 1992 |
| }, |
| { |
| "epoch": 0.6674480910917615, |
| "grad_norm": 0.475827157497406, |
| "learning_rate": 0.0001, |
| "loss": 1.5351, |
| "step": 1993 |
| }, |
| { |
| "epoch": 0.667782987273945, |
| "grad_norm": 0.45660120248794556, |
| "learning_rate": 0.0001, |
| "loss": 1.5737, |
| "step": 1994 |
| }, |
| { |
| "epoch": 0.6681178834561285, |
| "grad_norm": 0.4607517123222351, |
| "learning_rate": 0.0001, |
| "loss": 1.5358, |
| "step": 1995 |
| }, |
| { |
| "epoch": 0.6684527796383122, |
| "grad_norm": 0.4408629536628723, |
| "learning_rate": 0.0001, |
| "loss": 1.5249, |
| "step": 1996 |
| }, |
| { |
| "epoch": 0.6687876758204957, |
| "grad_norm": 0.44046109914779663, |
| "learning_rate": 0.0001, |
| "loss": 1.4979, |
| "step": 1997 |
| }, |
| { |
| "epoch": 0.6691225720026792, |
| "grad_norm": 0.45813706517219543, |
| "learning_rate": 0.0001, |
| "loss": 1.4541, |
| "step": 1998 |
| }, |
| { |
| "epoch": 0.6694574681848627, |
| "grad_norm": 0.4600253105163574, |
| "learning_rate": 0.0001, |
| "loss": 1.4795, |
| "step": 1999 |
| }, |
| { |
| "epoch": 0.6697923643670463, |
| "grad_norm": 0.4730280935764313, |
| "learning_rate": 0.0001, |
| "loss": 1.5925, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.6701272605492298, |
| "grad_norm": 0.4675295054912567, |
| "learning_rate": 0.0001, |
| "loss": 1.5452, |
| "step": 2001 |
| }, |
| { |
| "epoch": 0.6704621567314133, |
| "grad_norm": 0.4651346206665039, |
| "learning_rate": 0.0001, |
| "loss": 1.5475, |
| "step": 2002 |
| }, |
| { |
| "epoch": 0.6707970529135968, |
| "grad_norm": 0.4588277041912079, |
| "learning_rate": 0.0001, |
| "loss": 1.5906, |
| "step": 2003 |
| }, |
| { |
| "epoch": 0.6711319490957803, |
| "grad_norm": 0.4812864363193512, |
| "learning_rate": 0.0001, |
| "loss": 1.5754, |
| "step": 2004 |
| }, |
| { |
| "epoch": 0.6714668452779639, |
| "grad_norm": 0.45574623346328735, |
| "learning_rate": 0.0001, |
| "loss": 1.4828, |
| "step": 2005 |
| }, |
| { |
| "epoch": 0.6718017414601474, |
| "grad_norm": 0.46802258491516113, |
| "learning_rate": 0.0001, |
| "loss": 1.5108, |
| "step": 2006 |
| }, |
| { |
| "epoch": 0.6721366376423309, |
| "grad_norm": 0.47356149554252625, |
| "learning_rate": 0.0001, |
| "loss": 1.577, |
| "step": 2007 |
| }, |
| { |
| "epoch": 0.6724715338245144, |
| "grad_norm": 0.46529653668403625, |
| "learning_rate": 0.0001, |
| "loss": 1.478, |
| "step": 2008 |
| }, |
| { |
| "epoch": 0.6728064300066979, |
| "grad_norm": 0.4784877896308899, |
| "learning_rate": 0.0001, |
| "loss": 1.5498, |
| "step": 2009 |
| }, |
| { |
| "epoch": 0.6731413261888815, |
| "grad_norm": 0.47873613238334656, |
| "learning_rate": 0.0001, |
| "loss": 1.5988, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.673476222371065, |
| "grad_norm": 0.4695528447628021, |
| "learning_rate": 0.0001, |
| "loss": 1.5282, |
| "step": 2011 |
| }, |
| { |
| "epoch": 0.6738111185532485, |
| "grad_norm": 0.44687068462371826, |
| "learning_rate": 0.0001, |
| "loss": 1.5051, |
| "step": 2012 |
| }, |
| { |
| "epoch": 0.674146014735432, |
| "grad_norm": 0.4465655982494354, |
| "learning_rate": 0.0001, |
| "loss": 1.5711, |
| "step": 2013 |
| }, |
| { |
| "epoch": 0.6744809109176155, |
| "grad_norm": 0.5006896257400513, |
| "learning_rate": 0.0001, |
| "loss": 1.5201, |
| "step": 2014 |
| }, |
| { |
| "epoch": 0.674815807099799, |
| "grad_norm": 0.4736153185367584, |
| "learning_rate": 0.0001, |
| "loss": 1.5697, |
| "step": 2015 |
| }, |
| { |
| "epoch": 0.6751507032819826, |
| "grad_norm": 0.4669327735900879, |
| "learning_rate": 0.0001, |
| "loss": 1.5168, |
| "step": 2016 |
| }, |
| { |
| "epoch": 0.6754855994641661, |
| "grad_norm": 0.44387710094451904, |
| "learning_rate": 0.0001, |
| "loss": 1.4377, |
| "step": 2017 |
| }, |
| { |
| "epoch": 0.6758204956463496, |
| "grad_norm": 0.4410373568534851, |
| "learning_rate": 0.0001, |
| "loss": 1.4047, |
| "step": 2018 |
| }, |
| { |
| "epoch": 0.6761553918285331, |
| "grad_norm": 0.45553281903266907, |
| "learning_rate": 0.0001, |
| "loss": 1.5127, |
| "step": 2019 |
| }, |
| { |
| "epoch": 0.6764902880107166, |
| "grad_norm": 0.4672766327857971, |
| "learning_rate": 0.0001, |
| "loss": 1.5614, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.6768251841929002, |
| "grad_norm": 0.46312415599823, |
| "learning_rate": 0.0001, |
| "loss": 1.4514, |
| "step": 2021 |
| }, |
| { |
| "epoch": 0.6771600803750837, |
| "grad_norm": 0.4592866599559784, |
| "learning_rate": 0.0001, |
| "loss": 1.47, |
| "step": 2022 |
| }, |
| { |
| "epoch": 0.6774949765572672, |
| "grad_norm": 0.46353161334991455, |
| "learning_rate": 0.0001, |
| "loss": 1.5574, |
| "step": 2023 |
| }, |
| { |
| "epoch": 0.6778298727394507, |
| "grad_norm": 0.45665234327316284, |
| "learning_rate": 0.0001, |
| "loss": 1.5377, |
| "step": 2024 |
| }, |
| { |
| "epoch": 0.6781647689216342, |
| "grad_norm": 0.47426679730415344, |
| "learning_rate": 0.0001, |
| "loss": 1.5885, |
| "step": 2025 |
| }, |
| { |
| "epoch": 0.6784996651038178, |
| "grad_norm": 0.464711457490921, |
| "learning_rate": 0.0001, |
| "loss": 1.5344, |
| "step": 2026 |
| }, |
| { |
| "epoch": 0.6788345612860014, |
| "grad_norm": 0.4656915068626404, |
| "learning_rate": 0.0001, |
| "loss": 1.5718, |
| "step": 2027 |
| }, |
| { |
| "epoch": 0.6791694574681849, |
| "grad_norm": 0.48867180943489075, |
| "learning_rate": 0.0001, |
| "loss": 1.5578, |
| "step": 2028 |
| }, |
| { |
| "epoch": 0.6795043536503684, |
| "grad_norm": 0.43709492683410645, |
| "learning_rate": 0.0001, |
| "loss": 1.4615, |
| "step": 2029 |
| }, |
| { |
| "epoch": 0.679839249832552, |
| "grad_norm": 0.4825074374675751, |
| "learning_rate": 0.0001, |
| "loss": 1.5181, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.6801741460147355, |
| "grad_norm": 0.45096608996391296, |
| "learning_rate": 0.0001, |
| "loss": 1.6186, |
| "step": 2031 |
| }, |
| { |
| "epoch": 0.680509042196919, |
| "grad_norm": 0.449068158864975, |
| "learning_rate": 0.0001, |
| "loss": 1.4584, |
| "step": 2032 |
| }, |
| { |
| "epoch": 0.6808439383791025, |
| "grad_norm": 0.46091821789741516, |
| "learning_rate": 0.0001, |
| "loss": 1.5739, |
| "step": 2033 |
| }, |
| { |
| "epoch": 0.681178834561286, |
| "grad_norm": 0.4668620824813843, |
| "learning_rate": 0.0001, |
| "loss": 1.6237, |
| "step": 2034 |
| }, |
| { |
| "epoch": 0.6815137307434695, |
| "grad_norm": 0.4587905704975128, |
| "learning_rate": 0.0001, |
| "loss": 1.5535, |
| "step": 2035 |
| }, |
| { |
| "epoch": 0.6818486269256531, |
| "grad_norm": 0.4671448767185211, |
| "learning_rate": 0.0001, |
| "loss": 1.5589, |
| "step": 2036 |
| }, |
| { |
| "epoch": 0.6821835231078366, |
| "grad_norm": 0.45884931087493896, |
| "learning_rate": 0.0001, |
| "loss": 1.4523, |
| "step": 2037 |
| }, |
| { |
| "epoch": 0.6825184192900201, |
| "grad_norm": 0.46959635615348816, |
| "learning_rate": 0.0001, |
| "loss": 1.5474, |
| "step": 2038 |
| }, |
| { |
| "epoch": 0.6828533154722036, |
| "grad_norm": 0.45907384157180786, |
| "learning_rate": 0.0001, |
| "loss": 1.4985, |
| "step": 2039 |
| }, |
| { |
| "epoch": 0.6831882116543871, |
| "grad_norm": 0.4683312177658081, |
| "learning_rate": 0.0001, |
| "loss": 1.5353, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.6835231078365707, |
| "grad_norm": 0.4766319990158081, |
| "learning_rate": 0.0001, |
| "loss": 1.4801, |
| "step": 2041 |
| }, |
| { |
| "epoch": 0.6838580040187542, |
| "grad_norm": 0.4715334475040436, |
| "learning_rate": 0.0001, |
| "loss": 1.4394, |
| "step": 2042 |
| }, |
| { |
| "epoch": 0.6841929002009377, |
| "grad_norm": 0.4581170678138733, |
| "learning_rate": 0.0001, |
| "loss": 1.5153, |
| "step": 2043 |
| }, |
| { |
| "epoch": 0.6845277963831212, |
| "grad_norm": 0.469703733921051, |
| "learning_rate": 0.0001, |
| "loss": 1.5181, |
| "step": 2044 |
| }, |
| { |
| "epoch": 0.6848626925653047, |
| "grad_norm": 0.45137694478034973, |
| "learning_rate": 0.0001, |
| "loss": 1.4877, |
| "step": 2045 |
| }, |
| { |
| "epoch": 0.6851975887474883, |
| "grad_norm": 0.4811345636844635, |
| "learning_rate": 0.0001, |
| "loss": 1.6566, |
| "step": 2046 |
| }, |
| { |
| "epoch": 0.6855324849296718, |
| "grad_norm": 0.4660329222679138, |
| "learning_rate": 0.0001, |
| "loss": 1.5094, |
| "step": 2047 |
| }, |
| { |
| "epoch": 0.6858673811118553, |
| "grad_norm": 0.45194128155708313, |
| "learning_rate": 0.0001, |
| "loss": 1.5157, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.6862022772940388, |
| "grad_norm": 0.45705869793891907, |
| "learning_rate": 0.0001, |
| "loss": 1.5073, |
| "step": 2049 |
| }, |
| { |
| "epoch": 0.6865371734762223, |
| "grad_norm": 0.4675770103931427, |
| "learning_rate": 0.0001, |
| "loss": 1.5067, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.6868720696584059, |
| "grad_norm": 0.45620042085647583, |
| "learning_rate": 0.0001, |
| "loss": 1.5607, |
| "step": 2051 |
| }, |
| { |
| "epoch": 0.6872069658405894, |
| "grad_norm": 0.5035691261291504, |
| "learning_rate": 0.0001, |
| "loss": 1.5534, |
| "step": 2052 |
| }, |
| { |
| "epoch": 0.6875418620227729, |
| "grad_norm": 0.4658897817134857, |
| "learning_rate": 0.0001, |
| "loss": 1.5647, |
| "step": 2053 |
| }, |
| { |
| "epoch": 0.6878767582049564, |
| "grad_norm": 0.4606130123138428, |
| "learning_rate": 0.0001, |
| "loss": 1.5304, |
| "step": 2054 |
| }, |
| { |
| "epoch": 0.6882116543871399, |
| "grad_norm": 0.4701388478279114, |
| "learning_rate": 0.0001, |
| "loss": 1.506, |
| "step": 2055 |
| }, |
| { |
| "epoch": 0.6885465505693235, |
| "grad_norm": 0.4724370837211609, |
| "learning_rate": 0.0001, |
| "loss": 1.5547, |
| "step": 2056 |
| }, |
| { |
| "epoch": 0.6888814467515071, |
| "grad_norm": 0.44684672355651855, |
| "learning_rate": 0.0001, |
| "loss": 1.4687, |
| "step": 2057 |
| }, |
| { |
| "epoch": 0.6892163429336906, |
| "grad_norm": 0.5058313608169556, |
| "learning_rate": 0.0001, |
| "loss": 1.5251, |
| "step": 2058 |
| }, |
| { |
| "epoch": 0.6895512391158741, |
| "grad_norm": 0.4328201711177826, |
| "learning_rate": 0.0001, |
| "loss": 1.4719, |
| "step": 2059 |
| }, |
| { |
| "epoch": 0.6898861352980576, |
| "grad_norm": 0.4703109860420227, |
| "learning_rate": 0.0001, |
| "loss": 1.5043, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.6902210314802412, |
| "grad_norm": 0.46181461215019226, |
| "learning_rate": 0.0001, |
| "loss": 1.4884, |
| "step": 2061 |
| }, |
| { |
| "epoch": 0.6905559276624247, |
| "grad_norm": 0.4495771527290344, |
| "learning_rate": 0.0001, |
| "loss": 1.4585, |
| "step": 2062 |
| }, |
| { |
| "epoch": 0.6908908238446082, |
| "grad_norm": 0.5008009672164917, |
| "learning_rate": 0.0001, |
| "loss": 1.5687, |
| "step": 2063 |
| }, |
| { |
| "epoch": 0.6912257200267917, |
| "grad_norm": 0.4631938636302948, |
| "learning_rate": 0.0001, |
| "loss": 1.5049, |
| "step": 2064 |
| }, |
| { |
| "epoch": 0.6915606162089752, |
| "grad_norm": 0.5008888244628906, |
| "learning_rate": 0.0001, |
| "loss": 1.5, |
| "step": 2065 |
| }, |
| { |
| "epoch": 0.6918955123911588, |
| "grad_norm": 0.4508407413959503, |
| "learning_rate": 0.0001, |
| "loss": 1.4246, |
| "step": 2066 |
| }, |
| { |
| "epoch": 0.6922304085733423, |
| "grad_norm": 0.48627397418022156, |
| "learning_rate": 0.0001, |
| "loss": 1.4769, |
| "step": 2067 |
| }, |
| { |
| "epoch": 0.6925653047555258, |
| "grad_norm": 0.49048787355422974, |
| "learning_rate": 0.0001, |
| "loss": 1.5519, |
| "step": 2068 |
| }, |
| { |
| "epoch": 0.6929002009377093, |
| "grad_norm": 0.4490218758583069, |
| "learning_rate": 0.0001, |
| "loss": 1.4597, |
| "step": 2069 |
| }, |
| { |
| "epoch": 0.6932350971198928, |
| "grad_norm": 0.4886975884437561, |
| "learning_rate": 0.0001, |
| "loss": 1.5541, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.6935699933020764, |
| "grad_norm": 0.4652385115623474, |
| "learning_rate": 0.0001, |
| "loss": 1.5426, |
| "step": 2071 |
| }, |
| { |
| "epoch": 0.6939048894842599, |
| "grad_norm": 0.48018738627433777, |
| "learning_rate": 0.0001, |
| "loss": 1.556, |
| "step": 2072 |
| }, |
| { |
| "epoch": 0.6942397856664434, |
| "grad_norm": 0.45230361819267273, |
| "learning_rate": 0.0001, |
| "loss": 1.5265, |
| "step": 2073 |
| }, |
| { |
| "epoch": 0.6945746818486269, |
| "grad_norm": 0.5099328756332397, |
| "learning_rate": 0.0001, |
| "loss": 1.5578, |
| "step": 2074 |
| }, |
| { |
| "epoch": 0.6949095780308104, |
| "grad_norm": 0.45329004526138306, |
| "learning_rate": 0.0001, |
| "loss": 1.4306, |
| "step": 2075 |
| }, |
| { |
| "epoch": 0.695244474212994, |
| "grad_norm": 0.4592442810535431, |
| "learning_rate": 0.0001, |
| "loss": 1.5326, |
| "step": 2076 |
| }, |
| { |
| "epoch": 0.6955793703951775, |
| "grad_norm": 0.46230995655059814, |
| "learning_rate": 0.0001, |
| "loss": 1.603, |
| "step": 2077 |
| }, |
| { |
| "epoch": 0.695914266577361, |
| "grad_norm": 0.47789496183395386, |
| "learning_rate": 0.0001, |
| "loss": 1.5768, |
| "step": 2078 |
| }, |
| { |
| "epoch": 0.6962491627595445, |
| "grad_norm": 0.46773889660835266, |
| "learning_rate": 0.0001, |
| "loss": 1.5329, |
| "step": 2079 |
| }, |
| { |
| "epoch": 0.696584058941728, |
| "grad_norm": 0.46148133277893066, |
| "learning_rate": 0.0001, |
| "loss": 1.5334, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.6969189551239116, |
| "grad_norm": 0.46110233664512634, |
| "learning_rate": 0.0001, |
| "loss": 1.5567, |
| "step": 2081 |
| }, |
| { |
| "epoch": 0.6972538513060951, |
| "grad_norm": 0.4659191370010376, |
| "learning_rate": 0.0001, |
| "loss": 1.5402, |
| "step": 2082 |
| }, |
| { |
| "epoch": 0.6975887474882786, |
| "grad_norm": 0.45607033371925354, |
| "learning_rate": 0.0001, |
| "loss": 1.4262, |
| "step": 2083 |
| }, |
| { |
| "epoch": 0.6979236436704621, |
| "grad_norm": 0.4767032563686371, |
| "learning_rate": 0.0001, |
| "loss": 1.5247, |
| "step": 2084 |
| }, |
| { |
| "epoch": 0.6982585398526456, |
| "grad_norm": 0.4534342288970947, |
| "learning_rate": 0.0001, |
| "loss": 1.4731, |
| "step": 2085 |
| }, |
| { |
| "epoch": 0.6985934360348292, |
| "grad_norm": 0.46111854910850525, |
| "learning_rate": 0.0001, |
| "loss": 1.614, |
| "step": 2086 |
| }, |
| { |
| "epoch": 0.6989283322170127, |
| "grad_norm": 0.4589134156703949, |
| "learning_rate": 0.0001, |
| "loss": 1.5565, |
| "step": 2087 |
| }, |
| { |
| "epoch": 0.6992632283991963, |
| "grad_norm": 0.4603373110294342, |
| "learning_rate": 0.0001, |
| "loss": 1.4874, |
| "step": 2088 |
| }, |
| { |
| "epoch": 0.6995981245813798, |
| "grad_norm": 0.466277539730072, |
| "learning_rate": 0.0001, |
| "loss": 1.5705, |
| "step": 2089 |
| }, |
| { |
| "epoch": 0.6999330207635633, |
| "grad_norm": 0.4790922701358795, |
| "learning_rate": 0.0001, |
| "loss": 1.4459, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.7002679169457469, |
| "grad_norm": 0.47170254588127136, |
| "learning_rate": 0.0001, |
| "loss": 1.5706, |
| "step": 2091 |
| }, |
| { |
| "epoch": 0.7006028131279304, |
| "grad_norm": 0.4699598252773285, |
| "learning_rate": 0.0001, |
| "loss": 1.5987, |
| "step": 2092 |
| }, |
| { |
| "epoch": 0.7009377093101139, |
| "grad_norm": 0.46038177609443665, |
| "learning_rate": 0.0001, |
| "loss": 1.5629, |
| "step": 2093 |
| }, |
| { |
| "epoch": 0.7012726054922974, |
| "grad_norm": 0.4709053635597229, |
| "learning_rate": 0.0001, |
| "loss": 1.5994, |
| "step": 2094 |
| }, |
| { |
| "epoch": 0.7016075016744809, |
| "grad_norm": 0.4605868458747864, |
| "learning_rate": 0.0001, |
| "loss": 1.5267, |
| "step": 2095 |
| }, |
| { |
| "epoch": 0.7019423978566645, |
| "grad_norm": 0.47352883219718933, |
| "learning_rate": 0.0001, |
| "loss": 1.5299, |
| "step": 2096 |
| }, |
| { |
| "epoch": 0.702277294038848, |
| "grad_norm": 0.4487178921699524, |
| "learning_rate": 0.0001, |
| "loss": 1.4862, |
| "step": 2097 |
| }, |
| { |
| "epoch": 0.7026121902210315, |
| "grad_norm": 0.4435498118400574, |
| "learning_rate": 0.0001, |
| "loss": 1.434, |
| "step": 2098 |
| }, |
| { |
| "epoch": 0.702947086403215, |
| "grad_norm": 0.4770961403846741, |
| "learning_rate": 0.0001, |
| "loss": 1.5706, |
| "step": 2099 |
| }, |
| { |
| "epoch": 0.7032819825853985, |
| "grad_norm": 0.5055303573608398, |
| "learning_rate": 0.0001, |
| "loss": 1.6134, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.703616878767582, |
| "grad_norm": 0.46639499068260193, |
| "learning_rate": 0.0001, |
| "loss": 1.5706, |
| "step": 2101 |
| }, |
| { |
| "epoch": 0.7039517749497656, |
| "grad_norm": 0.47837772965431213, |
| "learning_rate": 0.0001, |
| "loss": 1.5261, |
| "step": 2102 |
| }, |
| { |
| "epoch": 0.7042866711319491, |
| "grad_norm": 0.4420396089553833, |
| "learning_rate": 0.0001, |
| "loss": 1.554, |
| "step": 2103 |
| }, |
| { |
| "epoch": 0.7046215673141326, |
| "grad_norm": 0.4579044282436371, |
| "learning_rate": 0.0001, |
| "loss": 1.496, |
| "step": 2104 |
| }, |
| { |
| "epoch": 0.7049564634963161, |
| "grad_norm": 0.4740173816680908, |
| "learning_rate": 0.0001, |
| "loss": 1.6011, |
| "step": 2105 |
| }, |
| { |
| "epoch": 0.7052913596784997, |
| "grad_norm": 0.46156203746795654, |
| "learning_rate": 0.0001, |
| "loss": 1.4938, |
| "step": 2106 |
| }, |
| { |
| "epoch": 0.7056262558606832, |
| "grad_norm": 0.47833749651908875, |
| "learning_rate": 0.0001, |
| "loss": 1.4794, |
| "step": 2107 |
| }, |
| { |
| "epoch": 0.7059611520428667, |
| "grad_norm": 0.45706647634506226, |
| "learning_rate": 0.0001, |
| "loss": 1.4822, |
| "step": 2108 |
| }, |
| { |
| "epoch": 0.7062960482250502, |
| "grad_norm": 0.4594823122024536, |
| "learning_rate": 0.0001, |
| "loss": 1.4496, |
| "step": 2109 |
| }, |
| { |
| "epoch": 0.7066309444072337, |
| "grad_norm": 0.4742507040500641, |
| "learning_rate": 0.0001, |
| "loss": 1.4566, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.7069658405894172, |
| "grad_norm": 0.46597233414649963, |
| "learning_rate": 0.0001, |
| "loss": 1.54, |
| "step": 2111 |
| }, |
| { |
| "epoch": 0.7073007367716008, |
| "grad_norm": 0.4715932011604309, |
| "learning_rate": 0.0001, |
| "loss": 1.5444, |
| "step": 2112 |
| }, |
| { |
| "epoch": 0.7076356329537843, |
| "grad_norm": 0.45990297198295593, |
| "learning_rate": 0.0001, |
| "loss": 1.4125, |
| "step": 2113 |
| }, |
| { |
| "epoch": 0.7079705291359678, |
| "grad_norm": 0.4889219105243683, |
| "learning_rate": 0.0001, |
| "loss": 1.5741, |
| "step": 2114 |
| }, |
| { |
| "epoch": 0.7083054253181513, |
| "grad_norm": 0.46439915895462036, |
| "learning_rate": 0.0001, |
| "loss": 1.5618, |
| "step": 2115 |
| }, |
| { |
| "epoch": 0.7086403215003348, |
| "grad_norm": 0.4761582612991333, |
| "learning_rate": 0.0001, |
| "loss": 1.5833, |
| "step": 2116 |
| }, |
| { |
| "epoch": 0.7089752176825184, |
| "grad_norm": 0.46475279331207275, |
| "learning_rate": 0.0001, |
| "loss": 1.5725, |
| "step": 2117 |
| }, |
| { |
| "epoch": 0.709310113864702, |
| "grad_norm": 0.48302775621414185, |
| "learning_rate": 0.0001, |
| "loss": 1.5543, |
| "step": 2118 |
| }, |
| { |
| "epoch": 0.7096450100468855, |
| "grad_norm": 0.45327621698379517, |
| "learning_rate": 0.0001, |
| "loss": 1.5391, |
| "step": 2119 |
| }, |
| { |
| "epoch": 0.709979906229069, |
| "grad_norm": 0.4502553939819336, |
| "learning_rate": 0.0001, |
| "loss": 1.4403, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.7103148024112526, |
| "grad_norm": 0.47124046087265015, |
| "learning_rate": 0.0001, |
| "loss": 1.5433, |
| "step": 2121 |
| }, |
| { |
| "epoch": 0.7106496985934361, |
| "grad_norm": 0.4871177077293396, |
| "learning_rate": 0.0001, |
| "loss": 1.5333, |
| "step": 2122 |
| }, |
| { |
| "epoch": 0.7109845947756196, |
| "grad_norm": 0.45931074023246765, |
| "learning_rate": 0.0001, |
| "loss": 1.4978, |
| "step": 2123 |
| }, |
| { |
| "epoch": 0.7113194909578031, |
| "grad_norm": 0.46881625056266785, |
| "learning_rate": 0.0001, |
| "loss": 1.518, |
| "step": 2124 |
| }, |
| { |
| "epoch": 0.7116543871399866, |
| "grad_norm": 0.47189566493034363, |
| "learning_rate": 0.0001, |
| "loss": 1.5414, |
| "step": 2125 |
| }, |
| { |
| "epoch": 0.7119892833221702, |
| "grad_norm": 0.49166449904441833, |
| "learning_rate": 0.0001, |
| "loss": 1.5914, |
| "step": 2126 |
| }, |
| { |
| "epoch": 0.7123241795043537, |
| "grad_norm": 0.4626203775405884, |
| "learning_rate": 0.0001, |
| "loss": 1.5008, |
| "step": 2127 |
| }, |
| { |
| "epoch": 0.7126590756865372, |
| "grad_norm": 0.5054082274436951, |
| "learning_rate": 0.0001, |
| "loss": 1.5388, |
| "step": 2128 |
| }, |
| { |
| "epoch": 0.7129939718687207, |
| "grad_norm": 0.46761465072631836, |
| "learning_rate": 0.0001, |
| "loss": 1.6087, |
| "step": 2129 |
| }, |
| { |
| "epoch": 0.7133288680509042, |
| "grad_norm": 0.46342405676841736, |
| "learning_rate": 0.0001, |
| "loss": 1.5171, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.7136637642330877, |
| "grad_norm": 0.43117156624794006, |
| "learning_rate": 0.0001, |
| "loss": 1.5104, |
| "step": 2131 |
| }, |
| { |
| "epoch": 0.7139986604152713, |
| "grad_norm": 0.4568924307823181, |
| "learning_rate": 0.0001, |
| "loss": 1.4607, |
| "step": 2132 |
| }, |
| { |
| "epoch": 0.7143335565974548, |
| "grad_norm": 0.47409340739250183, |
| "learning_rate": 0.0001, |
| "loss": 1.5482, |
| "step": 2133 |
| }, |
| { |
| "epoch": 0.7146684527796383, |
| "grad_norm": 0.44349172711372375, |
| "learning_rate": 0.0001, |
| "loss": 1.5406, |
| "step": 2134 |
| }, |
| { |
| "epoch": 0.7150033489618218, |
| "grad_norm": 0.4581291973590851, |
| "learning_rate": 0.0001, |
| "loss": 1.5583, |
| "step": 2135 |
| }, |
| { |
| "epoch": 0.7153382451440053, |
| "grad_norm": 0.46366629004478455, |
| "learning_rate": 0.0001, |
| "loss": 1.4944, |
| "step": 2136 |
| }, |
| { |
| "epoch": 0.7156731413261889, |
| "grad_norm": 0.4668786823749542, |
| "learning_rate": 0.0001, |
| "loss": 1.4843, |
| "step": 2137 |
| }, |
| { |
| "epoch": 0.7160080375083724, |
| "grad_norm": 0.473763644695282, |
| "learning_rate": 0.0001, |
| "loss": 1.5267, |
| "step": 2138 |
| }, |
| { |
| "epoch": 0.7163429336905559, |
| "grad_norm": 0.4776982069015503, |
| "learning_rate": 0.0001, |
| "loss": 1.4722, |
| "step": 2139 |
| }, |
| { |
| "epoch": 0.7166778298727394, |
| "grad_norm": 0.4626188576221466, |
| "learning_rate": 0.0001, |
| "loss": 1.5093, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.7170127260549229, |
| "grad_norm": 0.4587743580341339, |
| "learning_rate": 0.0001, |
| "loss": 1.4735, |
| "step": 2141 |
| }, |
| { |
| "epoch": 0.7173476222371065, |
| "grad_norm": 0.48433372378349304, |
| "learning_rate": 0.0001, |
| "loss": 1.5957, |
| "step": 2142 |
| }, |
| { |
| "epoch": 0.71768251841929, |
| "grad_norm": 0.46150970458984375, |
| "learning_rate": 0.0001, |
| "loss": 1.5504, |
| "step": 2143 |
| }, |
| { |
| "epoch": 0.7180174146014735, |
| "grad_norm": 0.46284884214401245, |
| "learning_rate": 0.0001, |
| "loss": 1.4929, |
| "step": 2144 |
| }, |
| { |
| "epoch": 0.718352310783657, |
| "grad_norm": 0.46241495013237, |
| "learning_rate": 0.0001, |
| "loss": 1.5442, |
| "step": 2145 |
| }, |
| { |
| "epoch": 0.7186872069658405, |
| "grad_norm": 0.46660250425338745, |
| "learning_rate": 0.0001, |
| "loss": 1.593, |
| "step": 2146 |
| }, |
| { |
| "epoch": 0.7190221031480241, |
| "grad_norm": 0.45663195848464966, |
| "learning_rate": 0.0001, |
| "loss": 1.5503, |
| "step": 2147 |
| }, |
| { |
| "epoch": 0.7193569993302076, |
| "grad_norm": 0.43789803981781006, |
| "learning_rate": 0.0001, |
| "loss": 1.3862, |
| "step": 2148 |
| }, |
| { |
| "epoch": 0.7196918955123912, |
| "grad_norm": 0.5114313364028931, |
| "learning_rate": 0.0001, |
| "loss": 1.6686, |
| "step": 2149 |
| }, |
| { |
| "epoch": 0.7200267916945747, |
| "grad_norm": 0.4642086625099182, |
| "learning_rate": 0.0001, |
| "loss": 1.5078, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.7203616878767582, |
| "grad_norm": 0.48922207951545715, |
| "learning_rate": 0.0001, |
| "loss": 1.4943, |
| "step": 2151 |
| }, |
| { |
| "epoch": 0.7206965840589418, |
| "grad_norm": 0.4588397145271301, |
| "learning_rate": 0.0001, |
| "loss": 1.5204, |
| "step": 2152 |
| }, |
| { |
| "epoch": 0.7210314802411253, |
| "grad_norm": 0.4893192648887634, |
| "learning_rate": 0.0001, |
| "loss": 1.4953, |
| "step": 2153 |
| }, |
| { |
| "epoch": 0.7213663764233088, |
| "grad_norm": 0.4586998224258423, |
| "learning_rate": 0.0001, |
| "loss": 1.5461, |
| "step": 2154 |
| }, |
| { |
| "epoch": 0.7217012726054923, |
| "grad_norm": 0.4510471522808075, |
| "learning_rate": 0.0001, |
| "loss": 1.4525, |
| "step": 2155 |
| }, |
| { |
| "epoch": 0.7220361687876758, |
| "grad_norm": 0.45169365406036377, |
| "learning_rate": 0.0001, |
| "loss": 1.4289, |
| "step": 2156 |
| }, |
| { |
| "epoch": 0.7223710649698594, |
| "grad_norm": 0.46228212118148804, |
| "learning_rate": 0.0001, |
| "loss": 1.4531, |
| "step": 2157 |
| }, |
| { |
| "epoch": 0.7227059611520429, |
| "grad_norm": 0.4868308901786804, |
| "learning_rate": 0.0001, |
| "loss": 1.5175, |
| "step": 2158 |
| }, |
| { |
| "epoch": 0.7230408573342264, |
| "grad_norm": 0.4654078483581543, |
| "learning_rate": 0.0001, |
| "loss": 1.5008, |
| "step": 2159 |
| }, |
| { |
| "epoch": 0.7233757535164099, |
| "grad_norm": 0.4278988838195801, |
| "learning_rate": 0.0001, |
| "loss": 1.4565, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.7237106496985934, |
| "grad_norm": 0.46262645721435547, |
| "learning_rate": 0.0001, |
| "loss": 1.6055, |
| "step": 2161 |
| }, |
| { |
| "epoch": 0.724045545880777, |
| "grad_norm": 0.44615933299064636, |
| "learning_rate": 0.0001, |
| "loss": 1.4298, |
| "step": 2162 |
| }, |
| { |
| "epoch": 0.7243804420629605, |
| "grad_norm": 0.4570693373680115, |
| "learning_rate": 0.0001, |
| "loss": 1.5527, |
| "step": 2163 |
| }, |
| { |
| "epoch": 0.724715338245144, |
| "grad_norm": 0.4507233202457428, |
| "learning_rate": 0.0001, |
| "loss": 1.4555, |
| "step": 2164 |
| }, |
| { |
| "epoch": 0.7250502344273275, |
| "grad_norm": 0.48417288064956665, |
| "learning_rate": 0.0001, |
| "loss": 1.6364, |
| "step": 2165 |
| }, |
| { |
| "epoch": 0.725385130609511, |
| "grad_norm": 0.45867475867271423, |
| "learning_rate": 0.0001, |
| "loss": 1.5087, |
| "step": 2166 |
| }, |
| { |
| "epoch": 0.7257200267916946, |
| "grad_norm": 0.47540226578712463, |
| "learning_rate": 0.0001, |
| "loss": 1.657, |
| "step": 2167 |
| }, |
| { |
| "epoch": 0.7260549229738781, |
| "grad_norm": 0.48101478815078735, |
| "learning_rate": 0.0001, |
| "loss": 1.534, |
| "step": 2168 |
| }, |
| { |
| "epoch": 0.7263898191560616, |
| "grad_norm": 0.47601044178009033, |
| "learning_rate": 0.0001, |
| "loss": 1.6616, |
| "step": 2169 |
| }, |
| { |
| "epoch": 0.7267247153382451, |
| "grad_norm": 0.45919114351272583, |
| "learning_rate": 0.0001, |
| "loss": 1.582, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.7270596115204286, |
| "grad_norm": 0.4518474042415619, |
| "learning_rate": 0.0001, |
| "loss": 1.3507, |
| "step": 2171 |
| }, |
| { |
| "epoch": 0.7273945077026122, |
| "grad_norm": 0.4650135636329651, |
| "learning_rate": 0.0001, |
| "loss": 1.445, |
| "step": 2172 |
| }, |
| { |
| "epoch": 0.7277294038847957, |
| "grad_norm": 0.45777347683906555, |
| "learning_rate": 0.0001, |
| "loss": 1.5559, |
| "step": 2173 |
| }, |
| { |
| "epoch": 0.7280643000669792, |
| "grad_norm": 0.4657188653945923, |
| "learning_rate": 0.0001, |
| "loss": 1.5193, |
| "step": 2174 |
| }, |
| { |
| "epoch": 0.7283991962491627, |
| "grad_norm": 0.4442221224308014, |
| "learning_rate": 0.0001, |
| "loss": 1.4896, |
| "step": 2175 |
| }, |
| { |
| "epoch": 0.7287340924313462, |
| "grad_norm": 0.4653560519218445, |
| "learning_rate": 0.0001, |
| "loss": 1.4669, |
| "step": 2176 |
| }, |
| { |
| "epoch": 0.7290689886135298, |
| "grad_norm": 0.4594500660896301, |
| "learning_rate": 0.0001, |
| "loss": 1.5145, |
| "step": 2177 |
| }, |
| { |
| "epoch": 0.7294038847957133, |
| "grad_norm": 0.4747893214225769, |
| "learning_rate": 0.0001, |
| "loss": 1.6847, |
| "step": 2178 |
| }, |
| { |
| "epoch": 0.7297387809778969, |
| "grad_norm": 0.47377869486808777, |
| "learning_rate": 0.0001, |
| "loss": 1.5533, |
| "step": 2179 |
| }, |
| { |
| "epoch": 0.7300736771600804, |
| "grad_norm": 0.4621736407279968, |
| "learning_rate": 0.0001, |
| "loss": 1.4768, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.730408573342264, |
| "grad_norm": 0.45624223351478577, |
| "learning_rate": 0.0001, |
| "loss": 1.5619, |
| "step": 2181 |
| }, |
| { |
| "epoch": 0.7307434695244475, |
| "grad_norm": 0.46043768525123596, |
| "learning_rate": 0.0001, |
| "loss": 1.4666, |
| "step": 2182 |
| }, |
| { |
| "epoch": 0.731078365706631, |
| "grad_norm": 0.4694119095802307, |
| "learning_rate": 0.0001, |
| "loss": 1.5557, |
| "step": 2183 |
| }, |
| { |
| "epoch": 0.7314132618888145, |
| "grad_norm": 0.46328192949295044, |
| "learning_rate": 0.0001, |
| "loss": 1.5293, |
| "step": 2184 |
| }, |
| { |
| "epoch": 0.731748158070998, |
| "grad_norm": 0.4788912236690521, |
| "learning_rate": 0.0001, |
| "loss": 1.5616, |
| "step": 2185 |
| }, |
| { |
| "epoch": 0.7320830542531815, |
| "grad_norm": 0.4542524814605713, |
| "learning_rate": 0.0001, |
| "loss": 1.5318, |
| "step": 2186 |
| }, |
| { |
| "epoch": 0.7324179504353651, |
| "grad_norm": 0.457428514957428, |
| "learning_rate": 0.0001, |
| "loss": 1.4104, |
| "step": 2187 |
| }, |
| { |
| "epoch": 0.7327528466175486, |
| "grad_norm": 0.4589458405971527, |
| "learning_rate": 0.0001, |
| "loss": 1.5151, |
| "step": 2188 |
| }, |
| { |
| "epoch": 0.7330877427997321, |
| "grad_norm": 0.48504841327667236, |
| "learning_rate": 0.0001, |
| "loss": 1.5749, |
| "step": 2189 |
| }, |
| { |
| "epoch": 0.7334226389819156, |
| "grad_norm": 0.47457072138786316, |
| "learning_rate": 0.0001, |
| "loss": 1.5697, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.7337575351640991, |
| "grad_norm": 0.4697708785533905, |
| "learning_rate": 0.0001, |
| "loss": 1.5097, |
| "step": 2191 |
| }, |
| { |
| "epoch": 0.7340924313462827, |
| "grad_norm": 0.48357653617858887, |
| "learning_rate": 0.0001, |
| "loss": 1.5399, |
| "step": 2192 |
| }, |
| { |
| "epoch": 0.7344273275284662, |
| "grad_norm": 0.45093438029289246, |
| "learning_rate": 0.0001, |
| "loss": 1.5439, |
| "step": 2193 |
| }, |
| { |
| "epoch": 0.7347622237106497, |
| "grad_norm": 0.5057450532913208, |
| "learning_rate": 0.0001, |
| "loss": 1.4819, |
| "step": 2194 |
| }, |
| { |
| "epoch": 0.7350971198928332, |
| "grad_norm": 0.4725852310657501, |
| "learning_rate": 0.0001, |
| "loss": 1.4298, |
| "step": 2195 |
| }, |
| { |
| "epoch": 0.7354320160750167, |
| "grad_norm": 0.48134663701057434, |
| "learning_rate": 0.0001, |
| "loss": 1.5225, |
| "step": 2196 |
| }, |
| { |
| "epoch": 0.7357669122572003, |
| "grad_norm": 0.4459664821624756, |
| "learning_rate": 0.0001, |
| "loss": 1.446, |
| "step": 2197 |
| }, |
| { |
| "epoch": 0.7361018084393838, |
| "grad_norm": 0.44949108362197876, |
| "learning_rate": 0.0001, |
| "loss": 1.534, |
| "step": 2198 |
| }, |
| { |
| "epoch": 0.7364367046215673, |
| "grad_norm": 0.4862693250179291, |
| "learning_rate": 0.0001, |
| "loss": 1.5543, |
| "step": 2199 |
| }, |
| { |
| "epoch": 0.7367716008037508, |
| "grad_norm": 0.45357629656791687, |
| "learning_rate": 0.0001, |
| "loss": 1.5667, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.7371064969859343, |
| "grad_norm": 0.47933268547058105, |
| "learning_rate": 0.0001, |
| "loss": 1.5888, |
| "step": 2201 |
| }, |
| { |
| "epoch": 0.7374413931681179, |
| "grad_norm": 0.4429778754711151, |
| "learning_rate": 0.0001, |
| "loss": 1.5209, |
| "step": 2202 |
| }, |
| { |
| "epoch": 0.7377762893503014, |
| "grad_norm": 0.4456193745136261, |
| "learning_rate": 0.0001, |
| "loss": 1.4034, |
| "step": 2203 |
| }, |
| { |
| "epoch": 0.7381111855324849, |
| "grad_norm": 0.494873583316803, |
| "learning_rate": 0.0001, |
| "loss": 1.581, |
| "step": 2204 |
| }, |
| { |
| "epoch": 0.7384460817146684, |
| "grad_norm": 0.4704873859882355, |
| "learning_rate": 0.0001, |
| "loss": 1.4645, |
| "step": 2205 |
| }, |
| { |
| "epoch": 0.7387809778968519, |
| "grad_norm": 0.5142254829406738, |
| "learning_rate": 0.0001, |
| "loss": 1.5792, |
| "step": 2206 |
| }, |
| { |
| "epoch": 0.7391158740790355, |
| "grad_norm": 0.46415936946868896, |
| "learning_rate": 0.0001, |
| "loss": 1.5007, |
| "step": 2207 |
| }, |
| { |
| "epoch": 0.739450770261219, |
| "grad_norm": 0.5375964641571045, |
| "learning_rate": 0.0001, |
| "loss": 1.5214, |
| "step": 2208 |
| }, |
| { |
| "epoch": 0.7397856664434025, |
| "grad_norm": 0.47598183155059814, |
| "learning_rate": 0.0001, |
| "loss": 1.466, |
| "step": 2209 |
| }, |
| { |
| "epoch": 0.7401205626255861, |
| "grad_norm": 0.49870026111602783, |
| "learning_rate": 0.0001, |
| "loss": 1.5136, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.7404554588077696, |
| "grad_norm": 0.49020954966545105, |
| "learning_rate": 0.0001, |
| "loss": 1.595, |
| "step": 2211 |
| }, |
| { |
| "epoch": 0.7407903549899532, |
| "grad_norm": 0.4574756324291229, |
| "learning_rate": 0.0001, |
| "loss": 1.5294, |
| "step": 2212 |
| }, |
| { |
| "epoch": 0.7411252511721367, |
| "grad_norm": 0.4988451898097992, |
| "learning_rate": 0.0001, |
| "loss": 1.6423, |
| "step": 2213 |
| }, |
| { |
| "epoch": 0.7414601473543202, |
| "grad_norm": 0.46195825934410095, |
| "learning_rate": 0.0001, |
| "loss": 1.5234, |
| "step": 2214 |
| }, |
| { |
| "epoch": 0.7417950435365037, |
| "grad_norm": 0.47354868054389954, |
| "learning_rate": 0.0001, |
| "loss": 1.4892, |
| "step": 2215 |
| }, |
| { |
| "epoch": 0.7421299397186872, |
| "grad_norm": 0.45956993103027344, |
| "learning_rate": 0.0001, |
| "loss": 1.5841, |
| "step": 2216 |
| }, |
| { |
| "epoch": 0.7424648359008708, |
| "grad_norm": 0.46149998903274536, |
| "learning_rate": 0.0001, |
| "loss": 1.5191, |
| "step": 2217 |
| }, |
| { |
| "epoch": 0.7427997320830543, |
| "grad_norm": 0.46643468737602234, |
| "learning_rate": 0.0001, |
| "loss": 1.5481, |
| "step": 2218 |
| }, |
| { |
| "epoch": 0.7431346282652378, |
| "grad_norm": 0.457994282245636, |
| "learning_rate": 0.0001, |
| "loss": 1.4564, |
| "step": 2219 |
| }, |
| { |
| "epoch": 0.7434695244474213, |
| "grad_norm": 0.48355191946029663, |
| "learning_rate": 0.0001, |
| "loss": 1.4359, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.7438044206296048, |
| "grad_norm": 0.4696873724460602, |
| "learning_rate": 0.0001, |
| "loss": 1.5165, |
| "step": 2221 |
| }, |
| { |
| "epoch": 0.7441393168117884, |
| "grad_norm": 0.4898888170719147, |
| "learning_rate": 0.0001, |
| "loss": 1.5201, |
| "step": 2222 |
| }, |
| { |
| "epoch": 0.7444742129939719, |
| "grad_norm": 0.47065985202789307, |
| "learning_rate": 0.0001, |
| "loss": 1.5632, |
| "step": 2223 |
| }, |
| { |
| "epoch": 0.7448091091761554, |
| "grad_norm": 0.47417351603507996, |
| "learning_rate": 0.0001, |
| "loss": 1.4845, |
| "step": 2224 |
| }, |
| { |
| "epoch": 0.7451440053583389, |
| "grad_norm": 0.4841914176940918, |
| "learning_rate": 0.0001, |
| "loss": 1.5638, |
| "step": 2225 |
| }, |
| { |
| "epoch": 0.7454789015405224, |
| "grad_norm": 0.47083160281181335, |
| "learning_rate": 0.0001, |
| "loss": 1.5412, |
| "step": 2226 |
| }, |
| { |
| "epoch": 0.745813797722706, |
| "grad_norm": 0.4515422582626343, |
| "learning_rate": 0.0001, |
| "loss": 1.4791, |
| "step": 2227 |
| }, |
| { |
| "epoch": 0.7461486939048895, |
| "grad_norm": 0.4730348587036133, |
| "learning_rate": 0.0001, |
| "loss": 1.4716, |
| "step": 2228 |
| }, |
| { |
| "epoch": 0.746483590087073, |
| "grad_norm": 0.4598521888256073, |
| "learning_rate": 0.0001, |
| "loss": 1.5684, |
| "step": 2229 |
| }, |
| { |
| "epoch": 0.7468184862692565, |
| "grad_norm": 0.45444709062576294, |
| "learning_rate": 0.0001, |
| "loss": 1.5607, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.74715338245144, |
| "grad_norm": 0.4803113639354706, |
| "learning_rate": 0.0001, |
| "loss": 1.6007, |
| "step": 2231 |
| }, |
| { |
| "epoch": 0.7474882786336235, |
| "grad_norm": 0.515312671661377, |
| "learning_rate": 0.0001, |
| "loss": 1.5927, |
| "step": 2232 |
| }, |
| { |
| "epoch": 0.7478231748158071, |
| "grad_norm": 0.44947949051856995, |
| "learning_rate": 0.0001, |
| "loss": 1.4926, |
| "step": 2233 |
| }, |
| { |
| "epoch": 0.7481580709979906, |
| "grad_norm": 0.5019223093986511, |
| "learning_rate": 0.0001, |
| "loss": 1.5951, |
| "step": 2234 |
| }, |
| { |
| "epoch": 0.7484929671801741, |
| "grad_norm": 0.4727962017059326, |
| "learning_rate": 0.0001, |
| "loss": 1.5391, |
| "step": 2235 |
| }, |
| { |
| "epoch": 0.7488278633623576, |
| "grad_norm": 0.45799902081489563, |
| "learning_rate": 0.0001, |
| "loss": 1.5248, |
| "step": 2236 |
| }, |
| { |
| "epoch": 0.7491627595445411, |
| "grad_norm": 0.4660114049911499, |
| "learning_rate": 0.0001, |
| "loss": 1.5082, |
| "step": 2237 |
| }, |
| { |
| "epoch": 0.7494976557267247, |
| "grad_norm": 0.4819839298725128, |
| "learning_rate": 0.0001, |
| "loss": 1.559, |
| "step": 2238 |
| }, |
| { |
| "epoch": 0.7498325519089082, |
| "grad_norm": 0.47706669569015503, |
| "learning_rate": 0.0001, |
| "loss": 1.5178, |
| "step": 2239 |
| }, |
| { |
| "epoch": 0.7501674480910918, |
| "grad_norm": 0.48049312829971313, |
| "learning_rate": 0.0001, |
| "loss": 1.5086, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.7505023442732753, |
| "grad_norm": 0.4732329547405243, |
| "learning_rate": 0.0001, |
| "loss": 1.4876, |
| "step": 2241 |
| }, |
| { |
| "epoch": 0.7508372404554589, |
| "grad_norm": 0.4817315936088562, |
| "learning_rate": 0.0001, |
| "loss": 1.5802, |
| "step": 2242 |
| }, |
| { |
| "epoch": 0.7511721366376424, |
| "grad_norm": 0.44466477632522583, |
| "learning_rate": 0.0001, |
| "loss": 1.4521, |
| "step": 2243 |
| }, |
| { |
| "epoch": 0.7515070328198259, |
| "grad_norm": 0.4766438603401184, |
| "learning_rate": 0.0001, |
| "loss": 1.559, |
| "step": 2244 |
| }, |
| { |
| "epoch": 0.7518419290020094, |
| "grad_norm": 0.5050947070121765, |
| "learning_rate": 0.0001, |
| "loss": 1.5825, |
| "step": 2245 |
| }, |
| { |
| "epoch": 0.7521768251841929, |
| "grad_norm": 0.4919584393501282, |
| "learning_rate": 0.0001, |
| "loss": 1.6224, |
| "step": 2246 |
| }, |
| { |
| "epoch": 0.7525117213663765, |
| "grad_norm": 0.4567074179649353, |
| "learning_rate": 0.0001, |
| "loss": 1.476, |
| "step": 2247 |
| }, |
| { |
| "epoch": 0.75284661754856, |
| "grad_norm": 0.490669846534729, |
| "learning_rate": 0.0001, |
| "loss": 1.4554, |
| "step": 2248 |
| }, |
| { |
| "epoch": 0.7531815137307435, |
| "grad_norm": 0.4712928533554077, |
| "learning_rate": 0.0001, |
| "loss": 1.4727, |
| "step": 2249 |
| }, |
| { |
| "epoch": 0.753516409912927, |
| "grad_norm": 0.45836925506591797, |
| "learning_rate": 0.0001, |
| "loss": 1.479, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.7538513060951105, |
| "grad_norm": 0.4567963480949402, |
| "learning_rate": 0.0001, |
| "loss": 1.5333, |
| "step": 2251 |
| }, |
| { |
| "epoch": 0.754186202277294, |
| "grad_norm": 0.4772929847240448, |
| "learning_rate": 0.0001, |
| "loss": 1.564, |
| "step": 2252 |
| }, |
| { |
| "epoch": 0.7545210984594776, |
| "grad_norm": 0.45997437834739685, |
| "learning_rate": 0.0001, |
| "loss": 1.5597, |
| "step": 2253 |
| }, |
| { |
| "epoch": 0.7548559946416611, |
| "grad_norm": 0.46403026580810547, |
| "learning_rate": 0.0001, |
| "loss": 1.5363, |
| "step": 2254 |
| }, |
| { |
| "epoch": 0.7551908908238446, |
| "grad_norm": 0.4640863239765167, |
| "learning_rate": 0.0001, |
| "loss": 1.6155, |
| "step": 2255 |
| }, |
| { |
| "epoch": 0.7555257870060281, |
| "grad_norm": 0.46999940276145935, |
| "learning_rate": 0.0001, |
| "loss": 1.459, |
| "step": 2256 |
| }, |
| { |
| "epoch": 0.7558606831882116, |
| "grad_norm": 0.4665103852748871, |
| "learning_rate": 0.0001, |
| "loss": 1.4763, |
| "step": 2257 |
| }, |
| { |
| "epoch": 0.7561955793703952, |
| "grad_norm": 0.471189945936203, |
| "learning_rate": 0.0001, |
| "loss": 1.4699, |
| "step": 2258 |
| }, |
| { |
| "epoch": 0.7565304755525787, |
| "grad_norm": 0.467632532119751, |
| "learning_rate": 0.0001, |
| "loss": 1.5045, |
| "step": 2259 |
| }, |
| { |
| "epoch": 0.7568653717347622, |
| "grad_norm": 0.44308537244796753, |
| "learning_rate": 0.0001, |
| "loss": 1.4245, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.7572002679169457, |
| "grad_norm": 0.46536949276924133, |
| "learning_rate": 0.0001, |
| "loss": 1.4407, |
| "step": 2261 |
| }, |
| { |
| "epoch": 0.7575351640991292, |
| "grad_norm": 0.45661500096321106, |
| "learning_rate": 0.0001, |
| "loss": 1.4973, |
| "step": 2262 |
| }, |
| { |
| "epoch": 0.7578700602813128, |
| "grad_norm": 0.48596176505088806, |
| "learning_rate": 0.0001, |
| "loss": 1.5173, |
| "step": 2263 |
| }, |
| { |
| "epoch": 0.7582049564634963, |
| "grad_norm": 0.47135865688323975, |
| "learning_rate": 0.0001, |
| "loss": 1.5616, |
| "step": 2264 |
| }, |
| { |
| "epoch": 0.7585398526456798, |
| "grad_norm": 0.48448967933654785, |
| "learning_rate": 0.0001, |
| "loss": 1.5076, |
| "step": 2265 |
| }, |
| { |
| "epoch": 0.7588747488278633, |
| "grad_norm": 0.4838770627975464, |
| "learning_rate": 0.0001, |
| "loss": 1.4286, |
| "step": 2266 |
| }, |
| { |
| "epoch": 0.7592096450100468, |
| "grad_norm": 0.4586263597011566, |
| "learning_rate": 0.0001, |
| "loss": 1.505, |
| "step": 2267 |
| }, |
| { |
| "epoch": 0.7595445411922304, |
| "grad_norm": 0.4685727655887604, |
| "learning_rate": 0.0001, |
| "loss": 1.5812, |
| "step": 2268 |
| }, |
| { |
| "epoch": 0.7598794373744139, |
| "grad_norm": 0.47122806310653687, |
| "learning_rate": 0.0001, |
| "loss": 1.5494, |
| "step": 2269 |
| }, |
| { |
| "epoch": 0.7602143335565975, |
| "grad_norm": 0.449631929397583, |
| "learning_rate": 0.0001, |
| "loss": 1.5016, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.760549229738781, |
| "grad_norm": 0.46288001537323, |
| "learning_rate": 0.0001, |
| "loss": 1.5631, |
| "step": 2271 |
| }, |
| { |
| "epoch": 0.7608841259209645, |
| "grad_norm": 0.462016224861145, |
| "learning_rate": 0.0001, |
| "loss": 1.5226, |
| "step": 2272 |
| }, |
| { |
| "epoch": 0.7612190221031481, |
| "grad_norm": 0.4567805230617523, |
| "learning_rate": 0.0001, |
| "loss": 1.4918, |
| "step": 2273 |
| }, |
| { |
| "epoch": 0.7615539182853316, |
| "grad_norm": 0.4616073668003082, |
| "learning_rate": 0.0001, |
| "loss": 1.5156, |
| "step": 2274 |
| }, |
| { |
| "epoch": 0.7618888144675151, |
| "grad_norm": 0.4843860864639282, |
| "learning_rate": 0.0001, |
| "loss": 1.5841, |
| "step": 2275 |
| }, |
| { |
| "epoch": 0.7622237106496986, |
| "grad_norm": 0.4428068995475769, |
| "learning_rate": 0.0001, |
| "loss": 1.4995, |
| "step": 2276 |
| }, |
| { |
| "epoch": 0.7625586068318821, |
| "grad_norm": 0.4614906311035156, |
| "learning_rate": 0.0001, |
| "loss": 1.5775, |
| "step": 2277 |
| }, |
| { |
| "epoch": 0.7628935030140657, |
| "grad_norm": 0.4830222725868225, |
| "learning_rate": 0.0001, |
| "loss": 1.6047, |
| "step": 2278 |
| }, |
| { |
| "epoch": 0.7632283991962492, |
| "grad_norm": 0.4605547785758972, |
| "learning_rate": 0.0001, |
| "loss": 1.5153, |
| "step": 2279 |
| }, |
| { |
| "epoch": 0.7635632953784327, |
| "grad_norm": 0.4535585641860962, |
| "learning_rate": 0.0001, |
| "loss": 1.4303, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.7638981915606162, |
| "grad_norm": 0.4725355803966522, |
| "learning_rate": 0.0001, |
| "loss": 1.584, |
| "step": 2281 |
| }, |
| { |
| "epoch": 0.7642330877427997, |
| "grad_norm": 0.45823484659194946, |
| "learning_rate": 0.0001, |
| "loss": 1.464, |
| "step": 2282 |
| }, |
| { |
| "epoch": 0.7645679839249833, |
| "grad_norm": 0.47183576226234436, |
| "learning_rate": 0.0001, |
| "loss": 1.5424, |
| "step": 2283 |
| }, |
| { |
| "epoch": 0.7649028801071668, |
| "grad_norm": 0.46699899435043335, |
| "learning_rate": 0.0001, |
| "loss": 1.4754, |
| "step": 2284 |
| }, |
| { |
| "epoch": 0.7652377762893503, |
| "grad_norm": 0.4842059016227722, |
| "learning_rate": 0.0001, |
| "loss": 1.5127, |
| "step": 2285 |
| }, |
| { |
| "epoch": 0.7655726724715338, |
| "grad_norm": 0.46355974674224854, |
| "learning_rate": 0.0001, |
| "loss": 1.4563, |
| "step": 2286 |
| }, |
| { |
| "epoch": 0.7659075686537173, |
| "grad_norm": 0.44617757201194763, |
| "learning_rate": 0.0001, |
| "loss": 1.4338, |
| "step": 2287 |
| }, |
| { |
| "epoch": 0.7662424648359009, |
| "grad_norm": 0.4712759852409363, |
| "learning_rate": 0.0001, |
| "loss": 1.5535, |
| "step": 2288 |
| }, |
| { |
| "epoch": 0.7665773610180844, |
| "grad_norm": 0.44124874472618103, |
| "learning_rate": 0.0001, |
| "loss": 1.5111, |
| "step": 2289 |
| }, |
| { |
| "epoch": 0.7669122572002679, |
| "grad_norm": 0.45401328802108765, |
| "learning_rate": 0.0001, |
| "loss": 1.5066, |
| "step": 2290 |
| }, |
| { |
| "epoch": 0.7672471533824514, |
| "grad_norm": 0.4824156165122986, |
| "learning_rate": 0.0001, |
| "loss": 1.5143, |
| "step": 2291 |
| }, |
| { |
| "epoch": 0.7675820495646349, |
| "grad_norm": 0.4732379913330078, |
| "learning_rate": 0.0001, |
| "loss": 1.5505, |
| "step": 2292 |
| }, |
| { |
| "epoch": 0.7679169457468185, |
| "grad_norm": 0.4697463810443878, |
| "learning_rate": 0.0001, |
| "loss": 1.5309, |
| "step": 2293 |
| }, |
| { |
| "epoch": 0.768251841929002, |
| "grad_norm": 0.45176729559898376, |
| "learning_rate": 0.0001, |
| "loss": 1.4686, |
| "step": 2294 |
| }, |
| { |
| "epoch": 0.7685867381111855, |
| "grad_norm": 0.44826486706733704, |
| "learning_rate": 0.0001, |
| "loss": 1.5225, |
| "step": 2295 |
| }, |
| { |
| "epoch": 0.768921634293369, |
| "grad_norm": 0.45865607261657715, |
| "learning_rate": 0.0001, |
| "loss": 1.4993, |
| "step": 2296 |
| }, |
| { |
| "epoch": 0.7692565304755525, |
| "grad_norm": 0.4361908733844757, |
| "learning_rate": 0.0001, |
| "loss": 1.4904, |
| "step": 2297 |
| }, |
| { |
| "epoch": 0.769591426657736, |
| "grad_norm": 0.47969165444374084, |
| "learning_rate": 0.0001, |
| "loss": 1.5102, |
| "step": 2298 |
| }, |
| { |
| "epoch": 0.7699263228399196, |
| "grad_norm": 0.48042747378349304, |
| "learning_rate": 0.0001, |
| "loss": 1.5363, |
| "step": 2299 |
| }, |
| { |
| "epoch": 0.7702612190221031, |
| "grad_norm": 0.452140212059021, |
| "learning_rate": 0.0001, |
| "loss": 1.4566, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.7705961152042867, |
| "grad_norm": 0.48098182678222656, |
| "learning_rate": 0.0001, |
| "loss": 1.5807, |
| "step": 2301 |
| }, |
| { |
| "epoch": 0.7709310113864702, |
| "grad_norm": 0.45571669936180115, |
| "learning_rate": 0.0001, |
| "loss": 1.5332, |
| "step": 2302 |
| }, |
| { |
| "epoch": 0.7712659075686538, |
| "grad_norm": 0.4881724715232849, |
| "learning_rate": 0.0001, |
| "loss": 1.566, |
| "step": 2303 |
| }, |
| { |
| "epoch": 0.7716008037508373, |
| "grad_norm": 0.4681125283241272, |
| "learning_rate": 0.0001, |
| "loss": 1.5146, |
| "step": 2304 |
| }, |
| { |
| "epoch": 0.7719356999330208, |
| "grad_norm": 0.5105034708976746, |
| "learning_rate": 0.0001, |
| "loss": 1.5381, |
| "step": 2305 |
| }, |
| { |
| "epoch": 0.7722705961152043, |
| "grad_norm": 0.4545895457267761, |
| "learning_rate": 0.0001, |
| "loss": 1.4814, |
| "step": 2306 |
| }, |
| { |
| "epoch": 0.7726054922973878, |
| "grad_norm": 0.4493445158004761, |
| "learning_rate": 0.0001, |
| "loss": 1.5643, |
| "step": 2307 |
| }, |
| { |
| "epoch": 0.7729403884795714, |
| "grad_norm": 0.4737282395362854, |
| "learning_rate": 0.0001, |
| "loss": 1.4749, |
| "step": 2308 |
| }, |
| { |
| "epoch": 0.7732752846617549, |
| "grad_norm": 0.47303324937820435, |
| "learning_rate": 0.0001, |
| "loss": 1.5203, |
| "step": 2309 |
| }, |
| { |
| "epoch": 0.7736101808439384, |
| "grad_norm": 0.478311687707901, |
| "learning_rate": 0.0001, |
| "loss": 1.5582, |
| "step": 2310 |
| }, |
| { |
| "epoch": 0.7739450770261219, |
| "grad_norm": 0.5018264651298523, |
| "learning_rate": 0.0001, |
| "loss": 1.6107, |
| "step": 2311 |
| }, |
| { |
| "epoch": 0.7742799732083054, |
| "grad_norm": 0.4647297263145447, |
| "learning_rate": 0.0001, |
| "loss": 1.5217, |
| "step": 2312 |
| }, |
| { |
| "epoch": 0.774614869390489, |
| "grad_norm": 0.4564414918422699, |
| "learning_rate": 0.0001, |
| "loss": 1.5125, |
| "step": 2313 |
| }, |
| { |
| "epoch": 0.7749497655726725, |
| "grad_norm": 0.4897812008857727, |
| "learning_rate": 0.0001, |
| "loss": 1.5987, |
| "step": 2314 |
| }, |
| { |
| "epoch": 0.775284661754856, |
| "grad_norm": 0.47572436928749084, |
| "learning_rate": 0.0001, |
| "loss": 1.3898, |
| "step": 2315 |
| }, |
| { |
| "epoch": 0.7756195579370395, |
| "grad_norm": 0.4664483964443207, |
| "learning_rate": 0.0001, |
| "loss": 1.5198, |
| "step": 2316 |
| }, |
| { |
| "epoch": 0.775954454119223, |
| "grad_norm": 0.4727928936481476, |
| "learning_rate": 0.0001, |
| "loss": 1.5286, |
| "step": 2317 |
| }, |
| { |
| "epoch": 0.7762893503014066, |
| "grad_norm": 0.48919540643692017, |
| "learning_rate": 0.0001, |
| "loss": 1.539, |
| "step": 2318 |
| }, |
| { |
| "epoch": 0.7766242464835901, |
| "grad_norm": 0.46668481826782227, |
| "learning_rate": 0.0001, |
| "loss": 1.5559, |
| "step": 2319 |
| }, |
| { |
| "epoch": 0.7769591426657736, |
| "grad_norm": 0.46336182951927185, |
| "learning_rate": 0.0001, |
| "loss": 1.5859, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.7772940388479571, |
| "grad_norm": 0.45855921506881714, |
| "learning_rate": 0.0001, |
| "loss": 1.4619, |
| "step": 2321 |
| }, |
| { |
| "epoch": 0.7776289350301406, |
| "grad_norm": 0.46973612904548645, |
| "learning_rate": 0.0001, |
| "loss": 1.5, |
| "step": 2322 |
| }, |
| { |
| "epoch": 0.7779638312123242, |
| "grad_norm": 0.44328898191452026, |
| "learning_rate": 0.0001, |
| "loss": 1.4844, |
| "step": 2323 |
| }, |
| { |
| "epoch": 0.7782987273945077, |
| "grad_norm": 0.4648163914680481, |
| "learning_rate": 0.0001, |
| "loss": 1.5545, |
| "step": 2324 |
| }, |
| { |
| "epoch": 0.7786336235766912, |
| "grad_norm": 0.459846556186676, |
| "learning_rate": 0.0001, |
| "loss": 1.4493, |
| "step": 2325 |
| }, |
| { |
| "epoch": 0.7789685197588747, |
| "grad_norm": 0.45208072662353516, |
| "learning_rate": 0.0001, |
| "loss": 1.5098, |
| "step": 2326 |
| }, |
| { |
| "epoch": 0.7793034159410582, |
| "grad_norm": 0.4754399061203003, |
| "learning_rate": 0.0001, |
| "loss": 1.5381, |
| "step": 2327 |
| }, |
| { |
| "epoch": 0.7796383121232418, |
| "grad_norm": 0.48218896985054016, |
| "learning_rate": 0.0001, |
| "loss": 1.6072, |
| "step": 2328 |
| }, |
| { |
| "epoch": 0.7799732083054253, |
| "grad_norm": 0.49892452359199524, |
| "learning_rate": 0.0001, |
| "loss": 1.517, |
| "step": 2329 |
| }, |
| { |
| "epoch": 0.7803081044876088, |
| "grad_norm": 0.49093908071517944, |
| "learning_rate": 0.0001, |
| "loss": 1.5653, |
| "step": 2330 |
| }, |
| { |
| "epoch": 0.7806430006697924, |
| "grad_norm": 0.46677327156066895, |
| "learning_rate": 0.0001, |
| "loss": 1.5399, |
| "step": 2331 |
| }, |
| { |
| "epoch": 0.7809778968519759, |
| "grad_norm": 0.45744234323501587, |
| "learning_rate": 0.0001, |
| "loss": 1.4072, |
| "step": 2332 |
| }, |
| { |
| "epoch": 0.7813127930341595, |
| "grad_norm": 0.48118555545806885, |
| "learning_rate": 0.0001, |
| "loss": 1.4989, |
| "step": 2333 |
| }, |
| { |
| "epoch": 0.781647689216343, |
| "grad_norm": 0.4569566547870636, |
| "learning_rate": 0.0001, |
| "loss": 1.5025, |
| "step": 2334 |
| }, |
| { |
| "epoch": 0.7819825853985265, |
| "grad_norm": 0.4544038772583008, |
| "learning_rate": 0.0001, |
| "loss": 1.5297, |
| "step": 2335 |
| }, |
| { |
| "epoch": 0.78231748158071, |
| "grad_norm": 0.4806601107120514, |
| "learning_rate": 0.0001, |
| "loss": 1.5866, |
| "step": 2336 |
| }, |
| { |
| "epoch": 0.7826523777628935, |
| "grad_norm": 0.47200947999954224, |
| "learning_rate": 0.0001, |
| "loss": 1.5548, |
| "step": 2337 |
| }, |
| { |
| "epoch": 0.7829872739450771, |
| "grad_norm": 0.4612894654273987, |
| "learning_rate": 0.0001, |
| "loss": 1.544, |
| "step": 2338 |
| }, |
| { |
| "epoch": 0.7833221701272606, |
| "grad_norm": 0.4631419777870178, |
| "learning_rate": 0.0001, |
| "loss": 1.527, |
| "step": 2339 |
| }, |
| { |
| "epoch": 0.7836570663094441, |
| "grad_norm": 0.4725373685359955, |
| "learning_rate": 0.0001, |
| "loss": 1.5301, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.7839919624916276, |
| "grad_norm": 0.4709610641002655, |
| "learning_rate": 0.0001, |
| "loss": 1.5305, |
| "step": 2341 |
| }, |
| { |
| "epoch": 0.7843268586738111, |
| "grad_norm": 0.45424067974090576, |
| "learning_rate": 0.0001, |
| "loss": 1.4967, |
| "step": 2342 |
| }, |
| { |
| "epoch": 0.7846617548559947, |
| "grad_norm": 0.4698200225830078, |
| "learning_rate": 0.0001, |
| "loss": 1.5697, |
| "step": 2343 |
| }, |
| { |
| "epoch": 0.7849966510381782, |
| "grad_norm": 0.474651962518692, |
| "learning_rate": 0.0001, |
| "loss": 1.4652, |
| "step": 2344 |
| }, |
| { |
| "epoch": 0.7853315472203617, |
| "grad_norm": 0.4748631417751312, |
| "learning_rate": 0.0001, |
| "loss": 1.6113, |
| "step": 2345 |
| }, |
| { |
| "epoch": 0.7856664434025452, |
| "grad_norm": 0.4516560137271881, |
| "learning_rate": 0.0001, |
| "loss": 1.4746, |
| "step": 2346 |
| }, |
| { |
| "epoch": 0.7860013395847287, |
| "grad_norm": 0.47162577509880066, |
| "learning_rate": 0.0001, |
| "loss": 1.4741, |
| "step": 2347 |
| }, |
| { |
| "epoch": 0.7863362357669123, |
| "grad_norm": 0.4563868045806885, |
| "learning_rate": 0.0001, |
| "loss": 1.5713, |
| "step": 2348 |
| }, |
| { |
| "epoch": 0.7866711319490958, |
| "grad_norm": 0.48005253076553345, |
| "learning_rate": 0.0001, |
| "loss": 1.5462, |
| "step": 2349 |
| }, |
| { |
| "epoch": 0.7870060281312793, |
| "grad_norm": 0.47020062804222107, |
| "learning_rate": 0.0001, |
| "loss": 1.4656, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.7873409243134628, |
| "grad_norm": 0.4505898058414459, |
| "learning_rate": 0.0001, |
| "loss": 1.4406, |
| "step": 2351 |
| }, |
| { |
| "epoch": 0.7876758204956463, |
| "grad_norm": 0.4679824709892273, |
| "learning_rate": 0.0001, |
| "loss": 1.5187, |
| "step": 2352 |
| }, |
| { |
| "epoch": 0.7880107166778298, |
| "grad_norm": 0.4632118344306946, |
| "learning_rate": 0.0001, |
| "loss": 1.4943, |
| "step": 2353 |
| }, |
| { |
| "epoch": 0.7883456128600134, |
| "grad_norm": 0.4546247720718384, |
| "learning_rate": 0.0001, |
| "loss": 1.4485, |
| "step": 2354 |
| }, |
| { |
| "epoch": 0.7886805090421969, |
| "grad_norm": 0.48107215762138367, |
| "learning_rate": 0.0001, |
| "loss": 1.4792, |
| "step": 2355 |
| }, |
| { |
| "epoch": 0.7890154052243804, |
| "grad_norm": 0.473503977060318, |
| "learning_rate": 0.0001, |
| "loss": 1.5215, |
| "step": 2356 |
| }, |
| { |
| "epoch": 0.7893503014065639, |
| "grad_norm": 0.4569712281227112, |
| "learning_rate": 0.0001, |
| "loss": 1.4349, |
| "step": 2357 |
| }, |
| { |
| "epoch": 0.7896851975887474, |
| "grad_norm": 0.44963976740837097, |
| "learning_rate": 0.0001, |
| "loss": 1.464, |
| "step": 2358 |
| }, |
| { |
| "epoch": 0.790020093770931, |
| "grad_norm": 0.4830678701400757, |
| "learning_rate": 0.0001, |
| "loss": 1.6487, |
| "step": 2359 |
| }, |
| { |
| "epoch": 0.7903549899531145, |
| "grad_norm": 0.4756050407886505, |
| "learning_rate": 0.0001, |
| "loss": 1.3998, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.790689886135298, |
| "grad_norm": 0.46634572744369507, |
| "learning_rate": 0.0001, |
| "loss": 1.5183, |
| "step": 2361 |
| }, |
| { |
| "epoch": 0.7910247823174816, |
| "grad_norm": 0.4650283753871918, |
| "learning_rate": 0.0001, |
| "loss": 1.4784, |
| "step": 2362 |
| }, |
| { |
| "epoch": 0.7913596784996652, |
| "grad_norm": 0.45666956901550293, |
| "learning_rate": 0.0001, |
| "loss": 1.3785, |
| "step": 2363 |
| }, |
| { |
| "epoch": 0.7916945746818487, |
| "grad_norm": 0.45927444100379944, |
| "learning_rate": 0.0001, |
| "loss": 1.4673, |
| "step": 2364 |
| }, |
| { |
| "epoch": 0.7920294708640322, |
| "grad_norm": 0.47485730051994324, |
| "learning_rate": 0.0001, |
| "loss": 1.4951, |
| "step": 2365 |
| }, |
| { |
| "epoch": 0.7923643670462157, |
| "grad_norm": 0.4480372369289398, |
| "learning_rate": 0.0001, |
| "loss": 1.5052, |
| "step": 2366 |
| }, |
| { |
| "epoch": 0.7926992632283992, |
| "grad_norm": 0.4613422155380249, |
| "learning_rate": 0.0001, |
| "loss": 1.4325, |
| "step": 2367 |
| }, |
| { |
| "epoch": 0.7930341594105828, |
| "grad_norm": 0.48185497522354126, |
| "learning_rate": 0.0001, |
| "loss": 1.5527, |
| "step": 2368 |
| }, |
| { |
| "epoch": 0.7933690555927663, |
| "grad_norm": 0.46273407340049744, |
| "learning_rate": 0.0001, |
| "loss": 1.5327, |
| "step": 2369 |
| }, |
| { |
| "epoch": 0.7937039517749498, |
| "grad_norm": 0.4855313003063202, |
| "learning_rate": 0.0001, |
| "loss": 1.5299, |
| "step": 2370 |
| }, |
| { |
| "epoch": 0.7940388479571333, |
| "grad_norm": 0.46928760409355164, |
| "learning_rate": 0.0001, |
| "loss": 1.5184, |
| "step": 2371 |
| }, |
| { |
| "epoch": 0.7943737441393168, |
| "grad_norm": 0.4650458097457886, |
| "learning_rate": 0.0001, |
| "loss": 1.4791, |
| "step": 2372 |
| }, |
| { |
| "epoch": 0.7947086403215003, |
| "grad_norm": 0.4902491271495819, |
| "learning_rate": 0.0001, |
| "loss": 1.5162, |
| "step": 2373 |
| }, |
| { |
| "epoch": 0.7950435365036839, |
| "grad_norm": 0.5136062502861023, |
| "learning_rate": 0.0001, |
| "loss": 1.4842, |
| "step": 2374 |
| }, |
| { |
| "epoch": 0.7953784326858674, |
| "grad_norm": 0.4830358028411865, |
| "learning_rate": 0.0001, |
| "loss": 1.5568, |
| "step": 2375 |
| }, |
| { |
| "epoch": 0.7957133288680509, |
| "grad_norm": 0.4639476537704468, |
| "learning_rate": 0.0001, |
| "loss": 1.4308, |
| "step": 2376 |
| }, |
| { |
| "epoch": 0.7960482250502344, |
| "grad_norm": 0.5048962235450745, |
| "learning_rate": 0.0001, |
| "loss": 1.5455, |
| "step": 2377 |
| }, |
| { |
| "epoch": 0.796383121232418, |
| "grad_norm": 0.4968639612197876, |
| "learning_rate": 0.0001, |
| "loss": 1.5439, |
| "step": 2378 |
| }, |
| { |
| "epoch": 0.7967180174146015, |
| "grad_norm": 0.48246777057647705, |
| "learning_rate": 0.0001, |
| "loss": 1.5037, |
| "step": 2379 |
| }, |
| { |
| "epoch": 0.797052913596785, |
| "grad_norm": 0.46209555864334106, |
| "learning_rate": 0.0001, |
| "loss": 1.6492, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.7973878097789685, |
| "grad_norm": 0.4774399995803833, |
| "learning_rate": 0.0001, |
| "loss": 1.4763, |
| "step": 2381 |
| }, |
| { |
| "epoch": 0.797722705961152, |
| "grad_norm": 0.48918208479881287, |
| "learning_rate": 0.0001, |
| "loss": 1.5102, |
| "step": 2382 |
| }, |
| { |
| "epoch": 0.7980576021433355, |
| "grad_norm": 0.48564663529396057, |
| "learning_rate": 0.0001, |
| "loss": 1.5395, |
| "step": 2383 |
| }, |
| { |
| "epoch": 0.7983924983255191, |
| "grad_norm": 0.4565158486366272, |
| "learning_rate": 0.0001, |
| "loss": 1.4679, |
| "step": 2384 |
| }, |
| { |
| "epoch": 0.7987273945077026, |
| "grad_norm": 0.47012779116630554, |
| "learning_rate": 0.0001, |
| "loss": 1.6424, |
| "step": 2385 |
| }, |
| { |
| "epoch": 0.7990622906898861, |
| "grad_norm": 0.4518689513206482, |
| "learning_rate": 0.0001, |
| "loss": 1.5085, |
| "step": 2386 |
| }, |
| { |
| "epoch": 0.7993971868720696, |
| "grad_norm": 0.4934041500091553, |
| "learning_rate": 0.0001, |
| "loss": 1.4546, |
| "step": 2387 |
| }, |
| { |
| "epoch": 0.7997320830542531, |
| "grad_norm": 0.47564956545829773, |
| "learning_rate": 0.0001, |
| "loss": 1.5534, |
| "step": 2388 |
| }, |
| { |
| "epoch": 0.8000669792364367, |
| "grad_norm": 0.4532725214958191, |
| "learning_rate": 0.0001, |
| "loss": 1.3697, |
| "step": 2389 |
| }, |
| { |
| "epoch": 0.8004018754186202, |
| "grad_norm": 0.4541550874710083, |
| "learning_rate": 0.0001, |
| "loss": 1.3951, |
| "step": 2390 |
| }, |
| { |
| "epoch": 0.8007367716008037, |
| "grad_norm": 0.4611206352710724, |
| "learning_rate": 0.0001, |
| "loss": 1.5683, |
| "step": 2391 |
| }, |
| { |
| "epoch": 0.8010716677829873, |
| "grad_norm": 0.46378272771835327, |
| "learning_rate": 0.0001, |
| "loss": 1.4767, |
| "step": 2392 |
| }, |
| { |
| "epoch": 0.8014065639651708, |
| "grad_norm": 0.4524199366569519, |
| "learning_rate": 0.0001, |
| "loss": 1.6076, |
| "step": 2393 |
| }, |
| { |
| "epoch": 0.8017414601473544, |
| "grad_norm": 0.46128812432289124, |
| "learning_rate": 0.0001, |
| "loss": 1.4649, |
| "step": 2394 |
| }, |
| { |
| "epoch": 0.8020763563295379, |
| "grad_norm": 0.4794237017631531, |
| "learning_rate": 0.0001, |
| "loss": 1.5022, |
| "step": 2395 |
| }, |
| { |
| "epoch": 0.8024112525117214, |
| "grad_norm": 0.45057275891304016, |
| "learning_rate": 0.0001, |
| "loss": 1.4823, |
| "step": 2396 |
| }, |
| { |
| "epoch": 0.8027461486939049, |
| "grad_norm": 0.4925333857536316, |
| "learning_rate": 0.0001, |
| "loss": 1.4393, |
| "step": 2397 |
| }, |
| { |
| "epoch": 0.8030810448760884, |
| "grad_norm": 0.4900800287723541, |
| "learning_rate": 0.0001, |
| "loss": 1.5873, |
| "step": 2398 |
| }, |
| { |
| "epoch": 0.803415941058272, |
| "grad_norm": 0.501660168170929, |
| "learning_rate": 0.0001, |
| "loss": 1.4849, |
| "step": 2399 |
| }, |
| { |
| "epoch": 0.8037508372404555, |
| "grad_norm": 0.4713497459888458, |
| "learning_rate": 0.0001, |
| "loss": 1.5155, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.804085733422639, |
| "grad_norm": 0.4639057219028473, |
| "learning_rate": 0.0001, |
| "loss": 1.4363, |
| "step": 2401 |
| }, |
| { |
| "epoch": 0.8044206296048225, |
| "grad_norm": 0.4746173620223999, |
| "learning_rate": 0.0001, |
| "loss": 1.4313, |
| "step": 2402 |
| }, |
| { |
| "epoch": 0.804755525787006, |
| "grad_norm": 0.45180779695510864, |
| "learning_rate": 0.0001, |
| "loss": 1.4934, |
| "step": 2403 |
| }, |
| { |
| "epoch": 0.8050904219691896, |
| "grad_norm": 0.5304993391036987, |
| "learning_rate": 0.0001, |
| "loss": 1.5424, |
| "step": 2404 |
| }, |
| { |
| "epoch": 0.8054253181513731, |
| "grad_norm": 0.4532136917114258, |
| "learning_rate": 0.0001, |
| "loss": 1.5413, |
| "step": 2405 |
| }, |
| { |
| "epoch": 0.8057602143335566, |
| "grad_norm": 0.48047250509262085, |
| "learning_rate": 0.0001, |
| "loss": 1.5941, |
| "step": 2406 |
| }, |
| { |
| "epoch": 0.8060951105157401, |
| "grad_norm": 0.46145549416542053, |
| "learning_rate": 0.0001, |
| "loss": 1.3725, |
| "step": 2407 |
| }, |
| { |
| "epoch": 0.8064300066979236, |
| "grad_norm": 0.4635370969772339, |
| "learning_rate": 0.0001, |
| "loss": 1.3842, |
| "step": 2408 |
| }, |
| { |
| "epoch": 0.8067649028801072, |
| "grad_norm": 0.4741126298904419, |
| "learning_rate": 0.0001, |
| "loss": 1.5344, |
| "step": 2409 |
| }, |
| { |
| "epoch": 0.8070997990622907, |
| "grad_norm": 0.45318159461021423, |
| "learning_rate": 0.0001, |
| "loss": 1.442, |
| "step": 2410 |
| }, |
| { |
| "epoch": 0.8074346952444742, |
| "grad_norm": 0.46751123666763306, |
| "learning_rate": 0.0001, |
| "loss": 1.5001, |
| "step": 2411 |
| }, |
| { |
| "epoch": 0.8077695914266577, |
| "grad_norm": 0.47589755058288574, |
| "learning_rate": 0.0001, |
| "loss": 1.5458, |
| "step": 2412 |
| }, |
| { |
| "epoch": 0.8081044876088412, |
| "grad_norm": 0.4462743103504181, |
| "learning_rate": 0.0001, |
| "loss": 1.4044, |
| "step": 2413 |
| }, |
| { |
| "epoch": 0.8084393837910248, |
| "grad_norm": 0.4586702585220337, |
| "learning_rate": 0.0001, |
| "loss": 1.5026, |
| "step": 2414 |
| }, |
| { |
| "epoch": 0.8087742799732083, |
| "grad_norm": 0.4458579123020172, |
| "learning_rate": 0.0001, |
| "loss": 1.4296, |
| "step": 2415 |
| }, |
| { |
| "epoch": 0.8091091761553918, |
| "grad_norm": 0.46752509474754333, |
| "learning_rate": 0.0001, |
| "loss": 1.4309, |
| "step": 2416 |
| }, |
| { |
| "epoch": 0.8094440723375753, |
| "grad_norm": 0.4684821665287018, |
| "learning_rate": 0.0001, |
| "loss": 1.5132, |
| "step": 2417 |
| }, |
| { |
| "epoch": 0.8097789685197588, |
| "grad_norm": 0.48130378127098083, |
| "learning_rate": 0.0001, |
| "loss": 1.4572, |
| "step": 2418 |
| }, |
| { |
| "epoch": 0.8101138647019424, |
| "grad_norm": 0.45288190245628357, |
| "learning_rate": 0.0001, |
| "loss": 1.3387, |
| "step": 2419 |
| }, |
| { |
| "epoch": 0.8104487608841259, |
| "grad_norm": 0.47424936294555664, |
| "learning_rate": 0.0001, |
| "loss": 1.4783, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.8107836570663094, |
| "grad_norm": 0.4607580006122589, |
| "learning_rate": 0.0001, |
| "loss": 1.4313, |
| "step": 2421 |
| }, |
| { |
| "epoch": 0.8111185532484929, |
| "grad_norm": 0.4597252905368805, |
| "learning_rate": 0.0001, |
| "loss": 1.5051, |
| "step": 2422 |
| }, |
| { |
| "epoch": 0.8114534494306765, |
| "grad_norm": 0.4557250738143921, |
| "learning_rate": 0.0001, |
| "loss": 1.5233, |
| "step": 2423 |
| }, |
| { |
| "epoch": 0.8117883456128601, |
| "grad_norm": 0.46700024604797363, |
| "learning_rate": 0.0001, |
| "loss": 1.5103, |
| "step": 2424 |
| }, |
| { |
| "epoch": 0.8121232417950436, |
| "grad_norm": 0.46530982851982117, |
| "learning_rate": 0.0001, |
| "loss": 1.4654, |
| "step": 2425 |
| }, |
| { |
| "epoch": 0.8124581379772271, |
| "grad_norm": 0.4776183068752289, |
| "learning_rate": 0.0001, |
| "loss": 1.4358, |
| "step": 2426 |
| }, |
| { |
| "epoch": 0.8127930341594106, |
| "grad_norm": 0.4590524733066559, |
| "learning_rate": 0.0001, |
| "loss": 1.5037, |
| "step": 2427 |
| }, |
| { |
| "epoch": 0.8131279303415941, |
| "grad_norm": 0.47458121180534363, |
| "learning_rate": 0.0001, |
| "loss": 1.5376, |
| "step": 2428 |
| }, |
| { |
| "epoch": 0.8134628265237777, |
| "grad_norm": 0.4598158001899719, |
| "learning_rate": 0.0001, |
| "loss": 1.5651, |
| "step": 2429 |
| }, |
| { |
| "epoch": 0.8137977227059612, |
| "grad_norm": 0.4738381505012512, |
| "learning_rate": 0.0001, |
| "loss": 1.5423, |
| "step": 2430 |
| }, |
| { |
| "epoch": 0.8141326188881447, |
| "grad_norm": 0.5065074563026428, |
| "learning_rate": 0.0001, |
| "loss": 1.554, |
| "step": 2431 |
| }, |
| { |
| "epoch": 0.8144675150703282, |
| "grad_norm": 0.45927509665489197, |
| "learning_rate": 0.0001, |
| "loss": 1.5128, |
| "step": 2432 |
| }, |
| { |
| "epoch": 0.8148024112525117, |
| "grad_norm": 0.48357442021369934, |
| "learning_rate": 0.0001, |
| "loss": 1.4922, |
| "step": 2433 |
| }, |
| { |
| "epoch": 0.8151373074346953, |
| "grad_norm": 0.47746044397354126, |
| "learning_rate": 0.0001, |
| "loss": 1.5573, |
| "step": 2434 |
| }, |
| { |
| "epoch": 0.8154722036168788, |
| "grad_norm": 0.4867241382598877, |
| "learning_rate": 0.0001, |
| "loss": 1.4885, |
| "step": 2435 |
| }, |
| { |
| "epoch": 0.8158070997990623, |
| "grad_norm": 0.4920485317707062, |
| "learning_rate": 0.0001, |
| "loss": 1.5621, |
| "step": 2436 |
| }, |
| { |
| "epoch": 0.8161419959812458, |
| "grad_norm": 0.473763644695282, |
| "learning_rate": 0.0001, |
| "loss": 1.5618, |
| "step": 2437 |
| }, |
| { |
| "epoch": 0.8164768921634293, |
| "grad_norm": 0.4919544458389282, |
| "learning_rate": 0.0001, |
| "loss": 1.4857, |
| "step": 2438 |
| }, |
| { |
| "epoch": 0.8168117883456129, |
| "grad_norm": 0.47004011273384094, |
| "learning_rate": 0.0001, |
| "loss": 1.4635, |
| "step": 2439 |
| }, |
| { |
| "epoch": 0.8171466845277964, |
| "grad_norm": 0.4616757035255432, |
| "learning_rate": 0.0001, |
| "loss": 1.5002, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.8174815807099799, |
| "grad_norm": 0.49343329668045044, |
| "learning_rate": 0.0001, |
| "loss": 1.6058, |
| "step": 2441 |
| }, |
| { |
| "epoch": 0.8178164768921634, |
| "grad_norm": 0.47879213094711304, |
| "learning_rate": 0.0001, |
| "loss": 1.6205, |
| "step": 2442 |
| }, |
| { |
| "epoch": 0.8181513730743469, |
| "grad_norm": 0.4748070538043976, |
| "learning_rate": 0.0001, |
| "loss": 1.4841, |
| "step": 2443 |
| }, |
| { |
| "epoch": 0.8184862692565305, |
| "grad_norm": 0.44889041781425476, |
| "learning_rate": 0.0001, |
| "loss": 1.3804, |
| "step": 2444 |
| }, |
| { |
| "epoch": 0.818821165438714, |
| "grad_norm": 0.47727763652801514, |
| "learning_rate": 0.0001, |
| "loss": 1.5209, |
| "step": 2445 |
| }, |
| { |
| "epoch": 0.8191560616208975, |
| "grad_norm": 0.4846794307231903, |
| "learning_rate": 0.0001, |
| "loss": 1.5644, |
| "step": 2446 |
| }, |
| { |
| "epoch": 0.819490957803081, |
| "grad_norm": 0.47240149974823, |
| "learning_rate": 0.0001, |
| "loss": 1.4015, |
| "step": 2447 |
| }, |
| { |
| "epoch": 0.8198258539852645, |
| "grad_norm": 0.4686112701892853, |
| "learning_rate": 0.0001, |
| "loss": 1.5246, |
| "step": 2448 |
| }, |
| { |
| "epoch": 0.820160750167448, |
| "grad_norm": 0.4680188000202179, |
| "learning_rate": 0.0001, |
| "loss": 1.4998, |
| "step": 2449 |
| }, |
| { |
| "epoch": 0.8204956463496316, |
| "grad_norm": 0.4550737142562866, |
| "learning_rate": 0.0001, |
| "loss": 1.5079, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.8208305425318151, |
| "grad_norm": 0.4652599096298218, |
| "learning_rate": 0.0001, |
| "loss": 1.4914, |
| "step": 2451 |
| }, |
| { |
| "epoch": 0.8211654387139986, |
| "grad_norm": 0.46419402956962585, |
| "learning_rate": 0.0001, |
| "loss": 1.5244, |
| "step": 2452 |
| }, |
| { |
| "epoch": 0.8215003348961822, |
| "grad_norm": 0.4659050703048706, |
| "learning_rate": 0.0001, |
| "loss": 1.5253, |
| "step": 2453 |
| }, |
| { |
| "epoch": 0.8218352310783658, |
| "grad_norm": 0.4788404405117035, |
| "learning_rate": 0.0001, |
| "loss": 1.6053, |
| "step": 2454 |
| }, |
| { |
| "epoch": 0.8221701272605493, |
| "grad_norm": 0.4813288450241089, |
| "learning_rate": 0.0001, |
| "loss": 1.5119, |
| "step": 2455 |
| }, |
| { |
| "epoch": 0.8225050234427328, |
| "grad_norm": 0.47103598713874817, |
| "learning_rate": 0.0001, |
| "loss": 1.4902, |
| "step": 2456 |
| }, |
| { |
| "epoch": 0.8228399196249163, |
| "grad_norm": 0.47177964448928833, |
| "learning_rate": 0.0001, |
| "loss": 1.5243, |
| "step": 2457 |
| }, |
| { |
| "epoch": 0.8231748158070998, |
| "grad_norm": 0.4908981919288635, |
| "learning_rate": 0.0001, |
| "loss": 1.5389, |
| "step": 2458 |
| }, |
| { |
| "epoch": 0.8235097119892834, |
| "grad_norm": 0.46003222465515137, |
| "learning_rate": 0.0001, |
| "loss": 1.4344, |
| "step": 2459 |
| }, |
| { |
| "epoch": 0.8238446081714669, |
| "grad_norm": 0.4566117227077484, |
| "learning_rate": 0.0001, |
| "loss": 1.4996, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.8241795043536504, |
| "grad_norm": 0.47622841596603394, |
| "learning_rate": 0.0001, |
| "loss": 1.5919, |
| "step": 2461 |
| }, |
| { |
| "epoch": 0.8245144005358339, |
| "grad_norm": 0.4662770926952362, |
| "learning_rate": 0.0001, |
| "loss": 1.4792, |
| "step": 2462 |
| }, |
| { |
| "epoch": 0.8248492967180174, |
| "grad_norm": 0.44657108187675476, |
| "learning_rate": 0.0001, |
| "loss": 1.4932, |
| "step": 2463 |
| }, |
| { |
| "epoch": 0.825184192900201, |
| "grad_norm": 0.46892106533050537, |
| "learning_rate": 0.0001, |
| "loss": 1.5541, |
| "step": 2464 |
| }, |
| { |
| "epoch": 0.8255190890823845, |
| "grad_norm": 0.47214871644973755, |
| "learning_rate": 0.0001, |
| "loss": 1.5707, |
| "step": 2465 |
| }, |
| { |
| "epoch": 0.825853985264568, |
| "grad_norm": 0.45614370703697205, |
| "learning_rate": 0.0001, |
| "loss": 1.5309, |
| "step": 2466 |
| }, |
| { |
| "epoch": 0.8261888814467515, |
| "grad_norm": 0.46787700057029724, |
| "learning_rate": 0.0001, |
| "loss": 1.4708, |
| "step": 2467 |
| }, |
| { |
| "epoch": 0.826523777628935, |
| "grad_norm": 0.466823548078537, |
| "learning_rate": 0.0001, |
| "loss": 1.5455, |
| "step": 2468 |
| }, |
| { |
| "epoch": 0.8268586738111185, |
| "grad_norm": 0.4862380027770996, |
| "learning_rate": 0.0001, |
| "loss": 1.539, |
| "step": 2469 |
| }, |
| { |
| "epoch": 0.8271935699933021, |
| "grad_norm": 0.45864713191986084, |
| "learning_rate": 0.0001, |
| "loss": 1.4956, |
| "step": 2470 |
| }, |
| { |
| "epoch": 0.8275284661754856, |
| "grad_norm": 0.4562831521034241, |
| "learning_rate": 0.0001, |
| "loss": 1.467, |
| "step": 2471 |
| }, |
| { |
| "epoch": 0.8278633623576691, |
| "grad_norm": 0.4680713415145874, |
| "learning_rate": 0.0001, |
| "loss": 1.5045, |
| "step": 2472 |
| }, |
| { |
| "epoch": 0.8281982585398526, |
| "grad_norm": 0.4776260554790497, |
| "learning_rate": 0.0001, |
| "loss": 1.4162, |
| "step": 2473 |
| }, |
| { |
| "epoch": 0.8285331547220361, |
| "grad_norm": 0.4711471199989319, |
| "learning_rate": 0.0001, |
| "loss": 1.5081, |
| "step": 2474 |
| }, |
| { |
| "epoch": 0.8288680509042197, |
| "grad_norm": 0.4526406526565552, |
| "learning_rate": 0.0001, |
| "loss": 1.5155, |
| "step": 2475 |
| }, |
| { |
| "epoch": 0.8292029470864032, |
| "grad_norm": 0.48618829250335693, |
| "learning_rate": 0.0001, |
| "loss": 1.5989, |
| "step": 2476 |
| }, |
| { |
| "epoch": 0.8295378432685867, |
| "grad_norm": 0.4556841552257538, |
| "learning_rate": 0.0001, |
| "loss": 1.4377, |
| "step": 2477 |
| }, |
| { |
| "epoch": 0.8298727394507702, |
| "grad_norm": 0.47651731967926025, |
| "learning_rate": 0.0001, |
| "loss": 1.4536, |
| "step": 2478 |
| }, |
| { |
| "epoch": 0.8302076356329537, |
| "grad_norm": 0.4996659755706787, |
| "learning_rate": 0.0001, |
| "loss": 1.6023, |
| "step": 2479 |
| }, |
| { |
| "epoch": 0.8305425318151373, |
| "grad_norm": 0.46138516068458557, |
| "learning_rate": 0.0001, |
| "loss": 1.4767, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.8308774279973208, |
| "grad_norm": 0.4543902277946472, |
| "learning_rate": 0.0001, |
| "loss": 1.5485, |
| "step": 2481 |
| }, |
| { |
| "epoch": 0.8312123241795043, |
| "grad_norm": 0.4841060936450958, |
| "learning_rate": 0.0001, |
| "loss": 1.4308, |
| "step": 2482 |
| }, |
| { |
| "epoch": 0.8315472203616878, |
| "grad_norm": 0.4554094076156616, |
| "learning_rate": 0.0001, |
| "loss": 1.4633, |
| "step": 2483 |
| }, |
| { |
| "epoch": 0.8318821165438715, |
| "grad_norm": 0.4548550844192505, |
| "learning_rate": 0.0001, |
| "loss": 1.4629, |
| "step": 2484 |
| }, |
| { |
| "epoch": 0.832217012726055, |
| "grad_norm": 0.4791390299797058, |
| "learning_rate": 0.0001, |
| "loss": 1.5586, |
| "step": 2485 |
| }, |
| { |
| "epoch": 0.8325519089082385, |
| "grad_norm": 0.4800601005554199, |
| "learning_rate": 0.0001, |
| "loss": 1.5139, |
| "step": 2486 |
| }, |
| { |
| "epoch": 0.832886805090422, |
| "grad_norm": 0.4501197040081024, |
| "learning_rate": 0.0001, |
| "loss": 1.4459, |
| "step": 2487 |
| }, |
| { |
| "epoch": 0.8332217012726055, |
| "grad_norm": 0.4678644835948944, |
| "learning_rate": 0.0001, |
| "loss": 1.5499, |
| "step": 2488 |
| }, |
| { |
| "epoch": 0.833556597454789, |
| "grad_norm": 0.47049519419670105, |
| "learning_rate": 0.0001, |
| "loss": 1.5105, |
| "step": 2489 |
| }, |
| { |
| "epoch": 0.8338914936369726, |
| "grad_norm": 0.4660375416278839, |
| "learning_rate": 0.0001, |
| "loss": 1.514, |
| "step": 2490 |
| }, |
| { |
| "epoch": 0.8342263898191561, |
| "grad_norm": 0.4684686064720154, |
| "learning_rate": 0.0001, |
| "loss": 1.4937, |
| "step": 2491 |
| }, |
| { |
| "epoch": 0.8345612860013396, |
| "grad_norm": 0.47460314631462097, |
| "learning_rate": 0.0001, |
| "loss": 1.5526, |
| "step": 2492 |
| }, |
| { |
| "epoch": 0.8348961821835231, |
| "grad_norm": 0.49566179513931274, |
| "learning_rate": 0.0001, |
| "loss": 1.5733, |
| "step": 2493 |
| }, |
| { |
| "epoch": 0.8352310783657066, |
| "grad_norm": 0.4774349331855774, |
| "learning_rate": 0.0001, |
| "loss": 1.5573, |
| "step": 2494 |
| }, |
| { |
| "epoch": 0.8355659745478902, |
| "grad_norm": 0.5103961825370789, |
| "learning_rate": 0.0001, |
| "loss": 1.6416, |
| "step": 2495 |
| }, |
| { |
| "epoch": 0.8359008707300737, |
| "grad_norm": 0.47637876868247986, |
| "learning_rate": 0.0001, |
| "loss": 1.5274, |
| "step": 2496 |
| }, |
| { |
| "epoch": 0.8362357669122572, |
| "grad_norm": 0.4631972908973694, |
| "learning_rate": 0.0001, |
| "loss": 1.4233, |
| "step": 2497 |
| }, |
| { |
| "epoch": 0.8365706630944407, |
| "grad_norm": 0.4680608808994293, |
| "learning_rate": 0.0001, |
| "loss": 1.5582, |
| "step": 2498 |
| }, |
| { |
| "epoch": 0.8369055592766242, |
| "grad_norm": 0.46085795760154724, |
| "learning_rate": 0.0001, |
| "loss": 1.5227, |
| "step": 2499 |
| }, |
| { |
| "epoch": 0.8372404554588078, |
| "grad_norm": 0.47325894236564636, |
| "learning_rate": 0.0001, |
| "loss": 1.4911, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.8375753516409913, |
| "grad_norm": 0.45909950137138367, |
| "learning_rate": 0.0001, |
| "loss": 1.4819, |
| "step": 2501 |
| }, |
| { |
| "epoch": 0.8379102478231748, |
| "grad_norm": 0.43451809883117676, |
| "learning_rate": 0.0001, |
| "loss": 1.3889, |
| "step": 2502 |
| }, |
| { |
| "epoch": 0.8382451440053583, |
| "grad_norm": 0.4574137330055237, |
| "learning_rate": 0.0001, |
| "loss": 1.4686, |
| "step": 2503 |
| }, |
| { |
| "epoch": 0.8385800401875418, |
| "grad_norm": 0.48103952407836914, |
| "learning_rate": 0.0001, |
| "loss": 1.5808, |
| "step": 2504 |
| }, |
| { |
| "epoch": 0.8389149363697254, |
| "grad_norm": 0.5158580541610718, |
| "learning_rate": 0.0001, |
| "loss": 1.5612, |
| "step": 2505 |
| }, |
| { |
| "epoch": 0.8392498325519089, |
| "grad_norm": 0.4528539776802063, |
| "learning_rate": 0.0001, |
| "loss": 1.4579, |
| "step": 2506 |
| }, |
| { |
| "epoch": 0.8395847287340924, |
| "grad_norm": 0.5151484608650208, |
| "learning_rate": 0.0001, |
| "loss": 1.5334, |
| "step": 2507 |
| }, |
| { |
| "epoch": 0.8399196249162759, |
| "grad_norm": 0.4524150490760803, |
| "learning_rate": 0.0001, |
| "loss": 1.4369, |
| "step": 2508 |
| }, |
| { |
| "epoch": 0.8402545210984594, |
| "grad_norm": 0.4746531844139099, |
| "learning_rate": 0.0001, |
| "loss": 1.4458, |
| "step": 2509 |
| }, |
| { |
| "epoch": 0.840589417280643, |
| "grad_norm": 0.4763810336589813, |
| "learning_rate": 0.0001, |
| "loss": 1.55, |
| "step": 2510 |
| }, |
| { |
| "epoch": 0.8409243134628265, |
| "grad_norm": 0.4495566189289093, |
| "learning_rate": 0.0001, |
| "loss": 1.5156, |
| "step": 2511 |
| }, |
| { |
| "epoch": 0.84125920964501, |
| "grad_norm": 0.5042752623558044, |
| "learning_rate": 0.0001, |
| "loss": 1.5954, |
| "step": 2512 |
| }, |
| { |
| "epoch": 0.8415941058271935, |
| "grad_norm": 0.4725266396999359, |
| "learning_rate": 0.0001, |
| "loss": 1.459, |
| "step": 2513 |
| }, |
| { |
| "epoch": 0.8419290020093771, |
| "grad_norm": 0.48124271631240845, |
| "learning_rate": 0.0001, |
| "loss": 1.6029, |
| "step": 2514 |
| }, |
| { |
| "epoch": 0.8422638981915607, |
| "grad_norm": 0.46474096179008484, |
| "learning_rate": 0.0001, |
| "loss": 1.5342, |
| "step": 2515 |
| }, |
| { |
| "epoch": 0.8425987943737442, |
| "grad_norm": 0.48470795154571533, |
| "learning_rate": 0.0001, |
| "loss": 1.4792, |
| "step": 2516 |
| }, |
| { |
| "epoch": 0.8429336905559277, |
| "grad_norm": 0.5086537599563599, |
| "learning_rate": 0.0001, |
| "loss": 1.578, |
| "step": 2517 |
| }, |
| { |
| "epoch": 0.8432685867381112, |
| "grad_norm": 0.4417508542537689, |
| "learning_rate": 0.0001, |
| "loss": 1.4967, |
| "step": 2518 |
| }, |
| { |
| "epoch": 0.8436034829202947, |
| "grad_norm": 0.474541038274765, |
| "learning_rate": 0.0001, |
| "loss": 1.5432, |
| "step": 2519 |
| }, |
| { |
| "epoch": 0.8439383791024783, |
| "grad_norm": 0.46175190806388855, |
| "learning_rate": 0.0001, |
| "loss": 1.4789, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.8442732752846618, |
| "grad_norm": 0.489646852016449, |
| "learning_rate": 0.0001, |
| "loss": 1.5607, |
| "step": 2521 |
| }, |
| { |
| "epoch": 0.8446081714668453, |
| "grad_norm": 0.4702965319156647, |
| "learning_rate": 0.0001, |
| "loss": 1.5672, |
| "step": 2522 |
| }, |
| { |
| "epoch": 0.8449430676490288, |
| "grad_norm": 0.47818228602409363, |
| "learning_rate": 0.0001, |
| "loss": 1.5023, |
| "step": 2523 |
| }, |
| { |
| "epoch": 0.8452779638312123, |
| "grad_norm": 0.4713198244571686, |
| "learning_rate": 0.0001, |
| "loss": 1.5231, |
| "step": 2524 |
| }, |
| { |
| "epoch": 0.8456128600133959, |
| "grad_norm": 0.4650212824344635, |
| "learning_rate": 0.0001, |
| "loss": 1.4677, |
| "step": 2525 |
| }, |
| { |
| "epoch": 0.8459477561955794, |
| "grad_norm": 0.47101250290870667, |
| "learning_rate": 0.0001, |
| "loss": 1.5705, |
| "step": 2526 |
| }, |
| { |
| "epoch": 0.8462826523777629, |
| "grad_norm": 0.4593891203403473, |
| "learning_rate": 0.0001, |
| "loss": 1.5138, |
| "step": 2527 |
| }, |
| { |
| "epoch": 0.8466175485599464, |
| "grad_norm": 0.4726957678794861, |
| "learning_rate": 0.0001, |
| "loss": 1.5812, |
| "step": 2528 |
| }, |
| { |
| "epoch": 0.8469524447421299, |
| "grad_norm": 0.45414677262306213, |
| "learning_rate": 0.0001, |
| "loss": 1.4513, |
| "step": 2529 |
| }, |
| { |
| "epoch": 0.8472873409243135, |
| "grad_norm": 0.46414148807525635, |
| "learning_rate": 0.0001, |
| "loss": 1.5247, |
| "step": 2530 |
| }, |
| { |
| "epoch": 0.847622237106497, |
| "grad_norm": 0.4923802614212036, |
| "learning_rate": 0.0001, |
| "loss": 1.5454, |
| "step": 2531 |
| }, |
| { |
| "epoch": 0.8479571332886805, |
| "grad_norm": 0.4653525650501251, |
| "learning_rate": 0.0001, |
| "loss": 1.4374, |
| "step": 2532 |
| }, |
| { |
| "epoch": 0.848292029470864, |
| "grad_norm": 0.49471646547317505, |
| "learning_rate": 0.0001, |
| "loss": 1.6124, |
| "step": 2533 |
| }, |
| { |
| "epoch": 0.8486269256530475, |
| "grad_norm": 0.48391300439834595, |
| "learning_rate": 0.0001, |
| "loss": 1.5271, |
| "step": 2534 |
| }, |
| { |
| "epoch": 0.8489618218352311, |
| "grad_norm": 0.4469664990901947, |
| "learning_rate": 0.0001, |
| "loss": 1.4229, |
| "step": 2535 |
| }, |
| { |
| "epoch": 0.8492967180174146, |
| "grad_norm": 0.48336634039878845, |
| "learning_rate": 0.0001, |
| "loss": 1.4498, |
| "step": 2536 |
| }, |
| { |
| "epoch": 0.8496316141995981, |
| "grad_norm": 0.48580068349838257, |
| "learning_rate": 0.0001, |
| "loss": 1.5695, |
| "step": 2537 |
| }, |
| { |
| "epoch": 0.8499665103817816, |
| "grad_norm": 0.45114821195602417, |
| "learning_rate": 0.0001, |
| "loss": 1.4471, |
| "step": 2538 |
| }, |
| { |
| "epoch": 0.8503014065639651, |
| "grad_norm": 0.47201669216156006, |
| "learning_rate": 0.0001, |
| "loss": 1.5152, |
| "step": 2539 |
| }, |
| { |
| "epoch": 0.8506363027461487, |
| "grad_norm": 0.49140065908432007, |
| "learning_rate": 0.0001, |
| "loss": 1.4929, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.8509711989283322, |
| "grad_norm": 0.511402428150177, |
| "learning_rate": 0.0001, |
| "loss": 1.6764, |
| "step": 2541 |
| }, |
| { |
| "epoch": 0.8513060951105157, |
| "grad_norm": 0.46448859572410583, |
| "learning_rate": 0.0001, |
| "loss": 1.3813, |
| "step": 2542 |
| }, |
| { |
| "epoch": 0.8516409912926992, |
| "grad_norm": 0.46996361017227173, |
| "learning_rate": 0.0001, |
| "loss": 1.5166, |
| "step": 2543 |
| }, |
| { |
| "epoch": 0.8519758874748827, |
| "grad_norm": 0.482006698846817, |
| "learning_rate": 0.0001, |
| "loss": 1.6143, |
| "step": 2544 |
| }, |
| { |
| "epoch": 0.8523107836570664, |
| "grad_norm": 0.46881818771362305, |
| "learning_rate": 0.0001, |
| "loss": 1.501, |
| "step": 2545 |
| }, |
| { |
| "epoch": 0.8526456798392499, |
| "grad_norm": 0.45807763934135437, |
| "learning_rate": 0.0001, |
| "loss": 1.4859, |
| "step": 2546 |
| }, |
| { |
| "epoch": 0.8529805760214334, |
| "grad_norm": 0.4654110372066498, |
| "learning_rate": 0.0001, |
| "loss": 1.4679, |
| "step": 2547 |
| }, |
| { |
| "epoch": 0.8533154722036169, |
| "grad_norm": 0.47218427062034607, |
| "learning_rate": 0.0001, |
| "loss": 1.5526, |
| "step": 2548 |
| }, |
| { |
| "epoch": 0.8536503683858004, |
| "grad_norm": 0.4757080674171448, |
| "learning_rate": 0.0001, |
| "loss": 1.5045, |
| "step": 2549 |
| }, |
| { |
| "epoch": 0.853985264567984, |
| "grad_norm": 0.4587678909301758, |
| "learning_rate": 0.0001, |
| "loss": 1.5146, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.8543201607501675, |
| "grad_norm": 0.49881237745285034, |
| "learning_rate": 0.0001, |
| "loss": 1.5074, |
| "step": 2551 |
| }, |
| { |
| "epoch": 0.854655056932351, |
| "grad_norm": 0.4800609350204468, |
| "learning_rate": 0.0001, |
| "loss": 1.5654, |
| "step": 2552 |
| }, |
| { |
| "epoch": 0.8549899531145345, |
| "grad_norm": 0.4712275564670563, |
| "learning_rate": 0.0001, |
| "loss": 1.5084, |
| "step": 2553 |
| }, |
| { |
| "epoch": 0.855324849296718, |
| "grad_norm": 0.4913301467895508, |
| "learning_rate": 0.0001, |
| "loss": 1.4832, |
| "step": 2554 |
| }, |
| { |
| "epoch": 0.8556597454789016, |
| "grad_norm": 0.481899619102478, |
| "learning_rate": 0.0001, |
| "loss": 1.4984, |
| "step": 2555 |
| }, |
| { |
| "epoch": 0.8559946416610851, |
| "grad_norm": 0.4633455276489258, |
| "learning_rate": 0.0001, |
| "loss": 1.4273, |
| "step": 2556 |
| }, |
| { |
| "epoch": 0.8563295378432686, |
| "grad_norm": 0.48490163683891296, |
| "learning_rate": 0.0001, |
| "loss": 1.5484, |
| "step": 2557 |
| }, |
| { |
| "epoch": 0.8566644340254521, |
| "grad_norm": 0.47869372367858887, |
| "learning_rate": 0.0001, |
| "loss": 1.5389, |
| "step": 2558 |
| }, |
| { |
| "epoch": 0.8569993302076356, |
| "grad_norm": 0.4712234437465668, |
| "learning_rate": 0.0001, |
| "loss": 1.4553, |
| "step": 2559 |
| }, |
| { |
| "epoch": 0.8573342263898192, |
| "grad_norm": 0.48260584473609924, |
| "learning_rate": 0.0001, |
| "loss": 1.5749, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.8576691225720027, |
| "grad_norm": 0.4660404622554779, |
| "learning_rate": 0.0001, |
| "loss": 1.512, |
| "step": 2561 |
| }, |
| { |
| "epoch": 0.8580040187541862, |
| "grad_norm": 0.47612521052360535, |
| "learning_rate": 0.0001, |
| "loss": 1.4752, |
| "step": 2562 |
| }, |
| { |
| "epoch": 0.8583389149363697, |
| "grad_norm": 0.4593373239040375, |
| "learning_rate": 0.0001, |
| "loss": 1.5109, |
| "step": 2563 |
| }, |
| { |
| "epoch": 0.8586738111185532, |
| "grad_norm": 0.4994997978210449, |
| "learning_rate": 0.0001, |
| "loss": 1.6041, |
| "step": 2564 |
| }, |
| { |
| "epoch": 0.8590087073007368, |
| "grad_norm": 0.46223050355911255, |
| "learning_rate": 0.0001, |
| "loss": 1.4896, |
| "step": 2565 |
| }, |
| { |
| "epoch": 0.8593436034829203, |
| "grad_norm": 0.48477256298065186, |
| "learning_rate": 0.0001, |
| "loss": 1.5081, |
| "step": 2566 |
| }, |
| { |
| "epoch": 0.8596784996651038, |
| "grad_norm": 0.4974391758441925, |
| "learning_rate": 0.0001, |
| "loss": 1.6294, |
| "step": 2567 |
| }, |
| { |
| "epoch": 0.8600133958472873, |
| "grad_norm": 0.48786434531211853, |
| "learning_rate": 0.0001, |
| "loss": 1.4691, |
| "step": 2568 |
| }, |
| { |
| "epoch": 0.8603482920294708, |
| "grad_norm": 0.4549929201602936, |
| "learning_rate": 0.0001, |
| "loss": 1.4901, |
| "step": 2569 |
| }, |
| { |
| "epoch": 0.8606831882116543, |
| "grad_norm": 0.5011422634124756, |
| "learning_rate": 0.0001, |
| "loss": 1.5825, |
| "step": 2570 |
| }, |
| { |
| "epoch": 0.8610180843938379, |
| "grad_norm": 0.475400447845459, |
| "learning_rate": 0.0001, |
| "loss": 1.5564, |
| "step": 2571 |
| }, |
| { |
| "epoch": 0.8613529805760214, |
| "grad_norm": 0.4625644087791443, |
| "learning_rate": 0.0001, |
| "loss": 1.4707, |
| "step": 2572 |
| }, |
| { |
| "epoch": 0.8616878767582049, |
| "grad_norm": 0.46159297227859497, |
| "learning_rate": 0.0001, |
| "loss": 1.4398, |
| "step": 2573 |
| }, |
| { |
| "epoch": 0.8620227729403884, |
| "grad_norm": 0.48057782649993896, |
| "learning_rate": 0.0001, |
| "loss": 1.5164, |
| "step": 2574 |
| }, |
| { |
| "epoch": 0.8623576691225721, |
| "grad_norm": 0.45620816946029663, |
| "learning_rate": 0.0001, |
| "loss": 1.5166, |
| "step": 2575 |
| }, |
| { |
| "epoch": 0.8626925653047556, |
| "grad_norm": 0.4661911427974701, |
| "learning_rate": 0.0001, |
| "loss": 1.4998, |
| "step": 2576 |
| }, |
| { |
| "epoch": 0.8630274614869391, |
| "grad_norm": 0.470892995595932, |
| "learning_rate": 0.0001, |
| "loss": 1.5692, |
| "step": 2577 |
| }, |
| { |
| "epoch": 0.8633623576691226, |
| "grad_norm": 0.47721078991889954, |
| "learning_rate": 0.0001, |
| "loss": 1.558, |
| "step": 2578 |
| }, |
| { |
| "epoch": 0.8636972538513061, |
| "grad_norm": 0.5240944027900696, |
| "learning_rate": 0.0001, |
| "loss": 1.5675, |
| "step": 2579 |
| }, |
| { |
| "epoch": 0.8640321500334897, |
| "grad_norm": 0.48352697491645813, |
| "learning_rate": 0.0001, |
| "loss": 1.513, |
| "step": 2580 |
| }, |
| { |
| "epoch": 0.8643670462156732, |
| "grad_norm": 0.47838255763053894, |
| "learning_rate": 0.0001, |
| "loss": 1.4973, |
| "step": 2581 |
| }, |
| { |
| "epoch": 0.8647019423978567, |
| "grad_norm": 0.4820500314235687, |
| "learning_rate": 0.0001, |
| "loss": 1.5632, |
| "step": 2582 |
| }, |
| { |
| "epoch": 0.8650368385800402, |
| "grad_norm": 0.44904258847236633, |
| "learning_rate": 0.0001, |
| "loss": 1.464, |
| "step": 2583 |
| }, |
| { |
| "epoch": 0.8653717347622237, |
| "grad_norm": 0.4510500133037567, |
| "learning_rate": 0.0001, |
| "loss": 1.4805, |
| "step": 2584 |
| }, |
| { |
| "epoch": 0.8657066309444073, |
| "grad_norm": 0.48458313941955566, |
| "learning_rate": 0.0001, |
| "loss": 1.5889, |
| "step": 2585 |
| }, |
| { |
| "epoch": 0.8660415271265908, |
| "grad_norm": 0.4644976258277893, |
| "learning_rate": 0.0001, |
| "loss": 1.5072, |
| "step": 2586 |
| }, |
| { |
| "epoch": 0.8663764233087743, |
| "grad_norm": 0.47620436549186707, |
| "learning_rate": 0.0001, |
| "loss": 1.4598, |
| "step": 2587 |
| }, |
| { |
| "epoch": 0.8667113194909578, |
| "grad_norm": 0.4634052813053131, |
| "learning_rate": 0.0001, |
| "loss": 1.4223, |
| "step": 2588 |
| }, |
| { |
| "epoch": 0.8670462156731413, |
| "grad_norm": 0.48688220977783203, |
| "learning_rate": 0.0001, |
| "loss": 1.6314, |
| "step": 2589 |
| }, |
| { |
| "epoch": 0.8673811118553248, |
| "grad_norm": 0.4728447198867798, |
| "learning_rate": 0.0001, |
| "loss": 1.5439, |
| "step": 2590 |
| }, |
| { |
| "epoch": 0.8677160080375084, |
| "grad_norm": 0.4576449692249298, |
| "learning_rate": 0.0001, |
| "loss": 1.493, |
| "step": 2591 |
| }, |
| { |
| "epoch": 0.8680509042196919, |
| "grad_norm": 0.47686463594436646, |
| "learning_rate": 0.0001, |
| "loss": 1.5732, |
| "step": 2592 |
| }, |
| { |
| "epoch": 0.8683858004018754, |
| "grad_norm": 0.49087628722190857, |
| "learning_rate": 0.0001, |
| "loss": 1.4753, |
| "step": 2593 |
| }, |
| { |
| "epoch": 0.8687206965840589, |
| "grad_norm": 0.45120036602020264, |
| "learning_rate": 0.0001, |
| "loss": 1.4855, |
| "step": 2594 |
| }, |
| { |
| "epoch": 0.8690555927662424, |
| "grad_norm": 0.458211749792099, |
| "learning_rate": 0.0001, |
| "loss": 1.5443, |
| "step": 2595 |
| }, |
| { |
| "epoch": 0.869390488948426, |
| "grad_norm": 0.4843875765800476, |
| "learning_rate": 0.0001, |
| "loss": 1.4995, |
| "step": 2596 |
| }, |
| { |
| "epoch": 0.8697253851306095, |
| "grad_norm": 0.4657866358757019, |
| "learning_rate": 0.0001, |
| "loss": 1.5781, |
| "step": 2597 |
| }, |
| { |
| "epoch": 0.870060281312793, |
| "grad_norm": 0.4589475691318512, |
| "learning_rate": 0.0001, |
| "loss": 1.4494, |
| "step": 2598 |
| }, |
| { |
| "epoch": 0.8703951774949765, |
| "grad_norm": 0.4740186035633087, |
| "learning_rate": 0.0001, |
| "loss": 1.6285, |
| "step": 2599 |
| }, |
| { |
| "epoch": 0.87073007367716, |
| "grad_norm": 0.49049800634384155, |
| "learning_rate": 0.0001, |
| "loss": 1.5859, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.8710649698593436, |
| "grad_norm": 0.48565539717674255, |
| "learning_rate": 0.0001, |
| "loss": 1.4981, |
| "step": 2601 |
| }, |
| { |
| "epoch": 0.8713998660415271, |
| "grad_norm": 0.4501047134399414, |
| "learning_rate": 0.0001, |
| "loss": 1.4426, |
| "step": 2602 |
| }, |
| { |
| "epoch": 0.8717347622237106, |
| "grad_norm": 0.4414789378643036, |
| "learning_rate": 0.0001, |
| "loss": 1.395, |
| "step": 2603 |
| }, |
| { |
| "epoch": 0.8720696584058941, |
| "grad_norm": 0.49444058537483215, |
| "learning_rate": 0.0001, |
| "loss": 1.4857, |
| "step": 2604 |
| }, |
| { |
| "epoch": 0.8724045545880776, |
| "grad_norm": 0.4695194661617279, |
| "learning_rate": 0.0001, |
| "loss": 1.4485, |
| "step": 2605 |
| }, |
| { |
| "epoch": 0.8727394507702613, |
| "grad_norm": 0.49149829149246216, |
| "learning_rate": 0.0001, |
| "loss": 1.5755, |
| "step": 2606 |
| }, |
| { |
| "epoch": 0.8730743469524448, |
| "grad_norm": 0.4663974940776825, |
| "learning_rate": 0.0001, |
| "loss": 1.4947, |
| "step": 2607 |
| }, |
| { |
| "epoch": 0.8734092431346283, |
| "grad_norm": 0.4728209972381592, |
| "learning_rate": 0.0001, |
| "loss": 1.3464, |
| "step": 2608 |
| }, |
| { |
| "epoch": 0.8737441393168118, |
| "grad_norm": 0.47123968601226807, |
| "learning_rate": 0.0001, |
| "loss": 1.4939, |
| "step": 2609 |
| }, |
| { |
| "epoch": 0.8740790354989953, |
| "grad_norm": 0.5068251490592957, |
| "learning_rate": 0.0001, |
| "loss": 1.5115, |
| "step": 2610 |
| }, |
| { |
| "epoch": 0.8744139316811789, |
| "grad_norm": 0.4849814474582672, |
| "learning_rate": 0.0001, |
| "loss": 1.532, |
| "step": 2611 |
| }, |
| { |
| "epoch": 0.8747488278633624, |
| "grad_norm": 0.5009174346923828, |
| "learning_rate": 0.0001, |
| "loss": 1.6127, |
| "step": 2612 |
| }, |
| { |
| "epoch": 0.8750837240455459, |
| "grad_norm": 0.46775639057159424, |
| "learning_rate": 0.0001, |
| "loss": 1.4938, |
| "step": 2613 |
| }, |
| { |
| "epoch": 0.8754186202277294, |
| "grad_norm": 0.4642925560474396, |
| "learning_rate": 0.0001, |
| "loss": 1.5143, |
| "step": 2614 |
| }, |
| { |
| "epoch": 0.875753516409913, |
| "grad_norm": 0.5046592354774475, |
| "learning_rate": 0.0001, |
| "loss": 1.5009, |
| "step": 2615 |
| }, |
| { |
| "epoch": 0.8760884125920965, |
| "grad_norm": 0.44560351967811584, |
| "learning_rate": 0.0001, |
| "loss": 1.464, |
| "step": 2616 |
| }, |
| { |
| "epoch": 0.87642330877428, |
| "grad_norm": 0.4847624599933624, |
| "learning_rate": 0.0001, |
| "loss": 1.5868, |
| "step": 2617 |
| }, |
| { |
| "epoch": 0.8767582049564635, |
| "grad_norm": 0.4620243310928345, |
| "learning_rate": 0.0001, |
| "loss": 1.478, |
| "step": 2618 |
| }, |
| { |
| "epoch": 0.877093101138647, |
| "grad_norm": 0.48844829201698303, |
| "learning_rate": 0.0001, |
| "loss": 1.4811, |
| "step": 2619 |
| }, |
| { |
| "epoch": 0.8774279973208305, |
| "grad_norm": 0.49304163455963135, |
| "learning_rate": 0.0001, |
| "loss": 1.5897, |
| "step": 2620 |
| }, |
| { |
| "epoch": 0.8777628935030141, |
| "grad_norm": 0.48174530267715454, |
| "learning_rate": 0.0001, |
| "loss": 1.5173, |
| "step": 2621 |
| }, |
| { |
| "epoch": 0.8780977896851976, |
| "grad_norm": 0.47570034861564636, |
| "learning_rate": 0.0001, |
| "loss": 1.4714, |
| "step": 2622 |
| }, |
| { |
| "epoch": 0.8784326858673811, |
| "grad_norm": 0.4666658937931061, |
| "learning_rate": 0.0001, |
| "loss": 1.4931, |
| "step": 2623 |
| }, |
| { |
| "epoch": 0.8787675820495646, |
| "grad_norm": 0.49084752798080444, |
| "learning_rate": 0.0001, |
| "loss": 1.6196, |
| "step": 2624 |
| }, |
| { |
| "epoch": 0.8791024782317481, |
| "grad_norm": 0.4708821475505829, |
| "learning_rate": 0.0001, |
| "loss": 1.4557, |
| "step": 2625 |
| }, |
| { |
| "epoch": 0.8794373744139317, |
| "grad_norm": 0.4662223160266876, |
| "learning_rate": 0.0001, |
| "loss": 1.4757, |
| "step": 2626 |
| }, |
| { |
| "epoch": 0.8797722705961152, |
| "grad_norm": 0.4955317974090576, |
| "learning_rate": 0.0001, |
| "loss": 1.4234, |
| "step": 2627 |
| }, |
| { |
| "epoch": 0.8801071667782987, |
| "grad_norm": 0.4606055021286011, |
| "learning_rate": 0.0001, |
| "loss": 1.4818, |
| "step": 2628 |
| }, |
| { |
| "epoch": 0.8804420629604822, |
| "grad_norm": 0.4685207009315491, |
| "learning_rate": 0.0001, |
| "loss": 1.5082, |
| "step": 2629 |
| }, |
| { |
| "epoch": 0.8807769591426657, |
| "grad_norm": 0.4986380338668823, |
| "learning_rate": 0.0001, |
| "loss": 1.5582, |
| "step": 2630 |
| }, |
| { |
| "epoch": 0.8811118553248493, |
| "grad_norm": 0.4687456786632538, |
| "learning_rate": 0.0001, |
| "loss": 1.4779, |
| "step": 2631 |
| }, |
| { |
| "epoch": 0.8814467515070328, |
| "grad_norm": 0.4698921740055084, |
| "learning_rate": 0.0001, |
| "loss": 1.5505, |
| "step": 2632 |
| }, |
| { |
| "epoch": 0.8817816476892163, |
| "grad_norm": 0.46344539523124695, |
| "learning_rate": 0.0001, |
| "loss": 1.4528, |
| "step": 2633 |
| }, |
| { |
| "epoch": 0.8821165438713998, |
| "grad_norm": 0.4594753384590149, |
| "learning_rate": 0.0001, |
| "loss": 1.5792, |
| "step": 2634 |
| }, |
| { |
| "epoch": 0.8824514400535833, |
| "grad_norm": 0.4745759963989258, |
| "learning_rate": 0.0001, |
| "loss": 1.4701, |
| "step": 2635 |
| }, |
| { |
| "epoch": 0.882786336235767, |
| "grad_norm": 0.4678467810153961, |
| "learning_rate": 0.0001, |
| "loss": 1.509, |
| "step": 2636 |
| }, |
| { |
| "epoch": 0.8831212324179505, |
| "grad_norm": 0.4778388738632202, |
| "learning_rate": 0.0001, |
| "loss": 1.4904, |
| "step": 2637 |
| }, |
| { |
| "epoch": 0.883456128600134, |
| "grad_norm": 0.45584356784820557, |
| "learning_rate": 0.0001, |
| "loss": 1.5583, |
| "step": 2638 |
| }, |
| { |
| "epoch": 0.8837910247823175, |
| "grad_norm": 0.5151787996292114, |
| "learning_rate": 0.0001, |
| "loss": 1.6115, |
| "step": 2639 |
| }, |
| { |
| "epoch": 0.884125920964501, |
| "grad_norm": 0.4784322679042816, |
| "learning_rate": 0.0001, |
| "loss": 1.6164, |
| "step": 2640 |
| }, |
| { |
| "epoch": 0.8844608171466846, |
| "grad_norm": 0.4793466329574585, |
| "learning_rate": 0.0001, |
| "loss": 1.5444, |
| "step": 2641 |
| }, |
| { |
| "epoch": 0.8847957133288681, |
| "grad_norm": 0.477232962846756, |
| "learning_rate": 0.0001, |
| "loss": 1.4949, |
| "step": 2642 |
| }, |
| { |
| "epoch": 0.8851306095110516, |
| "grad_norm": 0.4777398109436035, |
| "learning_rate": 0.0001, |
| "loss": 1.4061, |
| "step": 2643 |
| }, |
| { |
| "epoch": 0.8854655056932351, |
| "grad_norm": 0.4389171004295349, |
| "learning_rate": 0.0001, |
| "loss": 1.4163, |
| "step": 2644 |
| }, |
| { |
| "epoch": 0.8858004018754186, |
| "grad_norm": 0.4764946401119232, |
| "learning_rate": 0.0001, |
| "loss": 1.5869, |
| "step": 2645 |
| }, |
| { |
| "epoch": 0.8861352980576022, |
| "grad_norm": 0.46828514337539673, |
| "learning_rate": 0.0001, |
| "loss": 1.4037, |
| "step": 2646 |
| }, |
| { |
| "epoch": 0.8864701942397857, |
| "grad_norm": 0.4633961319923401, |
| "learning_rate": 0.0001, |
| "loss": 1.5485, |
| "step": 2647 |
| }, |
| { |
| "epoch": 0.8868050904219692, |
| "grad_norm": 0.4534933865070343, |
| "learning_rate": 0.0001, |
| "loss": 1.4125, |
| "step": 2648 |
| }, |
| { |
| "epoch": 0.8871399866041527, |
| "grad_norm": 0.4641197621822357, |
| "learning_rate": 0.0001, |
| "loss": 1.4686, |
| "step": 2649 |
| }, |
| { |
| "epoch": 0.8874748827863362, |
| "grad_norm": 0.4668722450733185, |
| "learning_rate": 0.0001, |
| "loss": 1.5477, |
| "step": 2650 |
| }, |
| { |
| "epoch": 0.8878097789685198, |
| "grad_norm": 0.5055553317070007, |
| "learning_rate": 0.0001, |
| "loss": 1.6096, |
| "step": 2651 |
| }, |
| { |
| "epoch": 0.8881446751507033, |
| "grad_norm": 0.48708033561706543, |
| "learning_rate": 0.0001, |
| "loss": 1.6343, |
| "step": 2652 |
| }, |
| { |
| "epoch": 0.8884795713328868, |
| "grad_norm": 0.468066930770874, |
| "learning_rate": 0.0001, |
| "loss": 1.5492, |
| "step": 2653 |
| }, |
| { |
| "epoch": 0.8888144675150703, |
| "grad_norm": 0.47345656156539917, |
| "learning_rate": 0.0001, |
| "loss": 1.4374, |
| "step": 2654 |
| }, |
| { |
| "epoch": 0.8891493636972538, |
| "grad_norm": 0.46650683879852295, |
| "learning_rate": 0.0001, |
| "loss": 1.4227, |
| "step": 2655 |
| }, |
| { |
| "epoch": 0.8894842598794374, |
| "grad_norm": 0.45780879259109497, |
| "learning_rate": 0.0001, |
| "loss": 1.4844, |
| "step": 2656 |
| }, |
| { |
| "epoch": 0.8898191560616209, |
| "grad_norm": 0.4695964455604553, |
| "learning_rate": 0.0001, |
| "loss": 1.5558, |
| "step": 2657 |
| }, |
| { |
| "epoch": 0.8901540522438044, |
| "grad_norm": 0.4641098380088806, |
| "learning_rate": 0.0001, |
| "loss": 1.561, |
| "step": 2658 |
| }, |
| { |
| "epoch": 0.8904889484259879, |
| "grad_norm": 0.4501829147338867, |
| "learning_rate": 0.0001, |
| "loss": 1.362, |
| "step": 2659 |
| }, |
| { |
| "epoch": 0.8908238446081714, |
| "grad_norm": 0.4778416156768799, |
| "learning_rate": 0.0001, |
| "loss": 1.4539, |
| "step": 2660 |
| }, |
| { |
| "epoch": 0.891158740790355, |
| "grad_norm": 0.4750802516937256, |
| "learning_rate": 0.0001, |
| "loss": 1.5012, |
| "step": 2661 |
| }, |
| { |
| "epoch": 0.8914936369725385, |
| "grad_norm": 0.47056931257247925, |
| "learning_rate": 0.0001, |
| "loss": 1.4979, |
| "step": 2662 |
| }, |
| { |
| "epoch": 0.891828533154722, |
| "grad_norm": 0.4810597002506256, |
| "learning_rate": 0.0001, |
| "loss": 1.4733, |
| "step": 2663 |
| }, |
| { |
| "epoch": 0.8921634293369055, |
| "grad_norm": 0.5094153881072998, |
| "learning_rate": 0.0001, |
| "loss": 1.6202, |
| "step": 2664 |
| }, |
| { |
| "epoch": 0.892498325519089, |
| "grad_norm": 0.5079136490821838, |
| "learning_rate": 0.0001, |
| "loss": 1.6265, |
| "step": 2665 |
| }, |
| { |
| "epoch": 0.8928332217012726, |
| "grad_norm": 0.48242053389549255, |
| "learning_rate": 0.0001, |
| "loss": 1.5071, |
| "step": 2666 |
| }, |
| { |
| "epoch": 0.8931681178834562, |
| "grad_norm": 0.4616110026836395, |
| "learning_rate": 0.0001, |
| "loss": 1.5125, |
| "step": 2667 |
| }, |
| { |
| "epoch": 0.8935030140656397, |
| "grad_norm": 0.4890819787979126, |
| "learning_rate": 0.0001, |
| "loss": 1.5626, |
| "step": 2668 |
| }, |
| { |
| "epoch": 0.8938379102478232, |
| "grad_norm": 0.47077295184135437, |
| "learning_rate": 0.0001, |
| "loss": 1.4627, |
| "step": 2669 |
| }, |
| { |
| "epoch": 0.8941728064300067, |
| "grad_norm": 0.4646243155002594, |
| "learning_rate": 0.0001, |
| "loss": 1.3951, |
| "step": 2670 |
| }, |
| { |
| "epoch": 0.8945077026121903, |
| "grad_norm": 0.48134827613830566, |
| "learning_rate": 0.0001, |
| "loss": 1.4382, |
| "step": 2671 |
| }, |
| { |
| "epoch": 0.8948425987943738, |
| "grad_norm": 0.5001153349876404, |
| "learning_rate": 0.0001, |
| "loss": 1.4841, |
| "step": 2672 |
| }, |
| { |
| "epoch": 0.8951774949765573, |
| "grad_norm": 0.4859412610530853, |
| "learning_rate": 0.0001, |
| "loss": 1.5493, |
| "step": 2673 |
| }, |
| { |
| "epoch": 0.8955123911587408, |
| "grad_norm": 0.4802533984184265, |
| "learning_rate": 0.0001, |
| "loss": 1.539, |
| "step": 2674 |
| }, |
| { |
| "epoch": 0.8958472873409243, |
| "grad_norm": 0.48965784907341003, |
| "learning_rate": 0.0001, |
| "loss": 1.535, |
| "step": 2675 |
| }, |
| { |
| "epoch": 0.8961821835231079, |
| "grad_norm": 0.44885024428367615, |
| "learning_rate": 0.0001, |
| "loss": 1.5096, |
| "step": 2676 |
| }, |
| { |
| "epoch": 0.8965170797052914, |
| "grad_norm": 0.4732823371887207, |
| "learning_rate": 0.0001, |
| "loss": 1.4385, |
| "step": 2677 |
| }, |
| { |
| "epoch": 0.8968519758874749, |
| "grad_norm": 0.4734841585159302, |
| "learning_rate": 0.0001, |
| "loss": 1.4837, |
| "step": 2678 |
| }, |
| { |
| "epoch": 0.8971868720696584, |
| "grad_norm": 0.46776431798934937, |
| "learning_rate": 0.0001, |
| "loss": 1.4658, |
| "step": 2679 |
| }, |
| { |
| "epoch": 0.8975217682518419, |
| "grad_norm": 0.4772953391075134, |
| "learning_rate": 0.0001, |
| "loss": 1.4721, |
| "step": 2680 |
| }, |
| { |
| "epoch": 0.8978566644340255, |
| "grad_norm": 0.4706651568412781, |
| "learning_rate": 0.0001, |
| "loss": 1.4561, |
| "step": 2681 |
| }, |
| { |
| "epoch": 0.898191560616209, |
| "grad_norm": 0.4698059558868408, |
| "learning_rate": 0.0001, |
| "loss": 1.5312, |
| "step": 2682 |
| }, |
| { |
| "epoch": 0.8985264567983925, |
| "grad_norm": 0.468711256980896, |
| "learning_rate": 0.0001, |
| "loss": 1.5379, |
| "step": 2683 |
| }, |
| { |
| "epoch": 0.898861352980576, |
| "grad_norm": 0.479682594537735, |
| "learning_rate": 0.0001, |
| "loss": 1.4411, |
| "step": 2684 |
| }, |
| { |
| "epoch": 0.8991962491627595, |
| "grad_norm": 0.49723494052886963, |
| "learning_rate": 0.0001, |
| "loss": 1.5347, |
| "step": 2685 |
| }, |
| { |
| "epoch": 0.899531145344943, |
| "grad_norm": 0.46617788076400757, |
| "learning_rate": 0.0001, |
| "loss": 1.55, |
| "step": 2686 |
| }, |
| { |
| "epoch": 0.8998660415271266, |
| "grad_norm": 0.44119560718536377, |
| "learning_rate": 0.0001, |
| "loss": 1.5047, |
| "step": 2687 |
| }, |
| { |
| "epoch": 0.9002009377093101, |
| "grad_norm": 0.4710986614227295, |
| "learning_rate": 0.0001, |
| "loss": 1.4465, |
| "step": 2688 |
| }, |
| { |
| "epoch": 0.9005358338914936, |
| "grad_norm": 0.4694075286388397, |
| "learning_rate": 0.0001, |
| "loss": 1.5825, |
| "step": 2689 |
| }, |
| { |
| "epoch": 0.9008707300736771, |
| "grad_norm": 0.4742186963558197, |
| "learning_rate": 0.0001, |
| "loss": 1.5765, |
| "step": 2690 |
| }, |
| { |
| "epoch": 0.9012056262558606, |
| "grad_norm": 0.47852757573127747, |
| "learning_rate": 0.0001, |
| "loss": 1.4914, |
| "step": 2691 |
| }, |
| { |
| "epoch": 0.9015405224380442, |
| "grad_norm": 0.47422271966934204, |
| "learning_rate": 0.0001, |
| "loss": 1.5882, |
| "step": 2692 |
| }, |
| { |
| "epoch": 0.9018754186202277, |
| "grad_norm": 0.4616380035877228, |
| "learning_rate": 0.0001, |
| "loss": 1.5565, |
| "step": 2693 |
| }, |
| { |
| "epoch": 0.9022103148024112, |
| "grad_norm": 0.46155980229377747, |
| "learning_rate": 0.0001, |
| "loss": 1.5269, |
| "step": 2694 |
| }, |
| { |
| "epoch": 0.9025452109845947, |
| "grad_norm": 0.48004859685897827, |
| "learning_rate": 0.0001, |
| "loss": 1.5727, |
| "step": 2695 |
| }, |
| { |
| "epoch": 0.9028801071667782, |
| "grad_norm": 0.5059171915054321, |
| "learning_rate": 0.0001, |
| "loss": 1.5574, |
| "step": 2696 |
| }, |
| { |
| "epoch": 0.9032150033489619, |
| "grad_norm": 0.4653322994709015, |
| "learning_rate": 0.0001, |
| "loss": 1.5277, |
| "step": 2697 |
| }, |
| { |
| "epoch": 0.9035498995311454, |
| "grad_norm": 0.4666169285774231, |
| "learning_rate": 0.0001, |
| "loss": 1.4112, |
| "step": 2698 |
| }, |
| { |
| "epoch": 0.9038847957133289, |
| "grad_norm": 0.5089284777641296, |
| "learning_rate": 0.0001, |
| "loss": 1.5601, |
| "step": 2699 |
| }, |
| { |
| "epoch": 0.9042196918955124, |
| "grad_norm": 0.4853679835796356, |
| "learning_rate": 0.0001, |
| "loss": 1.5193, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.904554588077696, |
| "grad_norm": 0.46001389622688293, |
| "learning_rate": 0.0001, |
| "loss": 1.4143, |
| "step": 2701 |
| }, |
| { |
| "epoch": 0.9048894842598795, |
| "grad_norm": 0.4837956130504608, |
| "learning_rate": 0.0001, |
| "loss": 1.5314, |
| "step": 2702 |
| }, |
| { |
| "epoch": 0.905224380442063, |
| "grad_norm": 0.483035147190094, |
| "learning_rate": 0.0001, |
| "loss": 1.5956, |
| "step": 2703 |
| }, |
| { |
| "epoch": 0.9055592766242465, |
| "grad_norm": 0.4750951826572418, |
| "learning_rate": 0.0001, |
| "loss": 1.5746, |
| "step": 2704 |
| }, |
| { |
| "epoch": 0.90589417280643, |
| "grad_norm": 0.4538957476615906, |
| "learning_rate": 0.0001, |
| "loss": 1.4919, |
| "step": 2705 |
| }, |
| { |
| "epoch": 0.9062290689886136, |
| "grad_norm": 0.4735284447669983, |
| "learning_rate": 0.0001, |
| "loss": 1.4893, |
| "step": 2706 |
| }, |
| { |
| "epoch": 0.9065639651707971, |
| "grad_norm": 0.46414610743522644, |
| "learning_rate": 0.0001, |
| "loss": 1.5412, |
| "step": 2707 |
| }, |
| { |
| "epoch": 0.9068988613529806, |
| "grad_norm": 0.4622376561164856, |
| "learning_rate": 0.0001, |
| "loss": 1.5589, |
| "step": 2708 |
| }, |
| { |
| "epoch": 0.9072337575351641, |
| "grad_norm": 0.4785987138748169, |
| "learning_rate": 0.0001, |
| "loss": 1.3986, |
| "step": 2709 |
| }, |
| { |
| "epoch": 0.9075686537173476, |
| "grad_norm": 0.47775954008102417, |
| "learning_rate": 0.0001, |
| "loss": 1.5207, |
| "step": 2710 |
| }, |
| { |
| "epoch": 0.9079035498995311, |
| "grad_norm": 0.47103944420814514, |
| "learning_rate": 0.0001, |
| "loss": 1.4681, |
| "step": 2711 |
| }, |
| { |
| "epoch": 0.9082384460817147, |
| "grad_norm": 0.4682188928127289, |
| "learning_rate": 0.0001, |
| "loss": 1.5046, |
| "step": 2712 |
| }, |
| { |
| "epoch": 0.9085733422638982, |
| "grad_norm": 0.47409960627555847, |
| "learning_rate": 0.0001, |
| "loss": 1.5121, |
| "step": 2713 |
| }, |
| { |
| "epoch": 0.9089082384460817, |
| "grad_norm": 0.48302337527275085, |
| "learning_rate": 0.0001, |
| "loss": 1.4893, |
| "step": 2714 |
| }, |
| { |
| "epoch": 0.9092431346282652, |
| "grad_norm": 0.46522191166877747, |
| "learning_rate": 0.0001, |
| "loss": 1.5449, |
| "step": 2715 |
| }, |
| { |
| "epoch": 0.9095780308104487, |
| "grad_norm": 0.4832364618778229, |
| "learning_rate": 0.0001, |
| "loss": 1.6024, |
| "step": 2716 |
| }, |
| { |
| "epoch": 0.9099129269926323, |
| "grad_norm": 0.4565618634223938, |
| "learning_rate": 0.0001, |
| "loss": 1.5348, |
| "step": 2717 |
| }, |
| { |
| "epoch": 0.9102478231748158, |
| "grad_norm": 0.4762784540653229, |
| "learning_rate": 0.0001, |
| "loss": 1.5382, |
| "step": 2718 |
| }, |
| { |
| "epoch": 0.9105827193569993, |
| "grad_norm": 0.4576496481895447, |
| "learning_rate": 0.0001, |
| "loss": 1.4464, |
| "step": 2719 |
| }, |
| { |
| "epoch": 0.9109176155391828, |
| "grad_norm": 0.5007464289665222, |
| "learning_rate": 0.0001, |
| "loss": 1.4941, |
| "step": 2720 |
| }, |
| { |
| "epoch": 0.9112525117213663, |
| "grad_norm": 0.461931973695755, |
| "learning_rate": 0.0001, |
| "loss": 1.5397, |
| "step": 2721 |
| }, |
| { |
| "epoch": 0.9115874079035499, |
| "grad_norm": 0.4863291084766388, |
| "learning_rate": 0.0001, |
| "loss": 1.4821, |
| "step": 2722 |
| }, |
| { |
| "epoch": 0.9119223040857334, |
| "grad_norm": 0.4515281319618225, |
| "learning_rate": 0.0001, |
| "loss": 1.4083, |
| "step": 2723 |
| }, |
| { |
| "epoch": 0.9122572002679169, |
| "grad_norm": 0.4881787598133087, |
| "learning_rate": 0.0001, |
| "loss": 1.5151, |
| "step": 2724 |
| }, |
| { |
| "epoch": 0.9125920964501004, |
| "grad_norm": 0.47101062536239624, |
| "learning_rate": 0.0001, |
| "loss": 1.5006, |
| "step": 2725 |
| }, |
| { |
| "epoch": 0.9129269926322839, |
| "grad_norm": 0.4862211048603058, |
| "learning_rate": 0.0001, |
| "loss": 1.5351, |
| "step": 2726 |
| }, |
| { |
| "epoch": 0.9132618888144675, |
| "grad_norm": 0.493637353181839, |
| "learning_rate": 0.0001, |
| "loss": 1.5856, |
| "step": 2727 |
| }, |
| { |
| "epoch": 0.9135967849966511, |
| "grad_norm": 0.4932537078857422, |
| "learning_rate": 0.0001, |
| "loss": 1.5612, |
| "step": 2728 |
| }, |
| { |
| "epoch": 0.9139316811788346, |
| "grad_norm": 0.46137386560440063, |
| "learning_rate": 0.0001, |
| "loss": 1.4894, |
| "step": 2729 |
| }, |
| { |
| "epoch": 0.9142665773610181, |
| "grad_norm": 0.47620338201522827, |
| "learning_rate": 0.0001, |
| "loss": 1.4682, |
| "step": 2730 |
| }, |
| { |
| "epoch": 0.9146014735432016, |
| "grad_norm": 0.45134079456329346, |
| "learning_rate": 0.0001, |
| "loss": 1.4403, |
| "step": 2731 |
| }, |
| { |
| "epoch": 0.9149363697253852, |
| "grad_norm": 0.4865524172782898, |
| "learning_rate": 0.0001, |
| "loss": 1.509, |
| "step": 2732 |
| }, |
| { |
| "epoch": 0.9152712659075687, |
| "grad_norm": 0.46394023299217224, |
| "learning_rate": 0.0001, |
| "loss": 1.4478, |
| "step": 2733 |
| }, |
| { |
| "epoch": 0.9156061620897522, |
| "grad_norm": 0.4625716209411621, |
| "learning_rate": 0.0001, |
| "loss": 1.4191, |
| "step": 2734 |
| }, |
| { |
| "epoch": 0.9159410582719357, |
| "grad_norm": 0.5031517148017883, |
| "learning_rate": 0.0001, |
| "loss": 1.4143, |
| "step": 2735 |
| }, |
| { |
| "epoch": 0.9162759544541192, |
| "grad_norm": 0.4808129370212555, |
| "learning_rate": 0.0001, |
| "loss": 1.5321, |
| "step": 2736 |
| }, |
| { |
| "epoch": 0.9166108506363028, |
| "grad_norm": 0.4948432743549347, |
| "learning_rate": 0.0001, |
| "loss": 1.5454, |
| "step": 2737 |
| }, |
| { |
| "epoch": 0.9169457468184863, |
| "grad_norm": 0.4727959930896759, |
| "learning_rate": 0.0001, |
| "loss": 1.4962, |
| "step": 2738 |
| }, |
| { |
| "epoch": 0.9172806430006698, |
| "grad_norm": 0.5126287937164307, |
| "learning_rate": 0.0001, |
| "loss": 1.6403, |
| "step": 2739 |
| }, |
| { |
| "epoch": 0.9176155391828533, |
| "grad_norm": 0.47310253977775574, |
| "learning_rate": 0.0001, |
| "loss": 1.5214, |
| "step": 2740 |
| }, |
| { |
| "epoch": 0.9179504353650368, |
| "grad_norm": 0.4822677969932556, |
| "learning_rate": 0.0001, |
| "loss": 1.4709, |
| "step": 2741 |
| }, |
| { |
| "epoch": 0.9182853315472204, |
| "grad_norm": 0.44480496644973755, |
| "learning_rate": 0.0001, |
| "loss": 1.4394, |
| "step": 2742 |
| }, |
| { |
| "epoch": 0.9186202277294039, |
| "grad_norm": 0.5021243095397949, |
| "learning_rate": 0.0001, |
| "loss": 1.5516, |
| "step": 2743 |
| }, |
| { |
| "epoch": 0.9189551239115874, |
| "grad_norm": 0.49861401319503784, |
| "learning_rate": 0.0001, |
| "loss": 1.4953, |
| "step": 2744 |
| }, |
| { |
| "epoch": 0.9192900200937709, |
| "grad_norm": 0.49875929951667786, |
| "learning_rate": 0.0001, |
| "loss": 1.4971, |
| "step": 2745 |
| }, |
| { |
| "epoch": 0.9196249162759544, |
| "grad_norm": 0.4642607271671295, |
| "learning_rate": 0.0001, |
| "loss": 1.4477, |
| "step": 2746 |
| }, |
| { |
| "epoch": 0.919959812458138, |
| "grad_norm": 0.49525538086891174, |
| "learning_rate": 0.0001, |
| "loss": 1.5837, |
| "step": 2747 |
| }, |
| { |
| "epoch": 0.9202947086403215, |
| "grad_norm": 0.4807230532169342, |
| "learning_rate": 0.0001, |
| "loss": 1.4708, |
| "step": 2748 |
| }, |
| { |
| "epoch": 0.920629604822505, |
| "grad_norm": 0.48713254928588867, |
| "learning_rate": 0.0001, |
| "loss": 1.4758, |
| "step": 2749 |
| }, |
| { |
| "epoch": 0.9209645010046885, |
| "grad_norm": 0.4638206958770752, |
| "learning_rate": 0.0001, |
| "loss": 1.4854, |
| "step": 2750 |
| }, |
| { |
| "epoch": 0.921299397186872, |
| "grad_norm": 0.4701377749443054, |
| "learning_rate": 0.0001, |
| "loss": 1.5067, |
| "step": 2751 |
| }, |
| { |
| "epoch": 0.9216342933690556, |
| "grad_norm": 0.46651843190193176, |
| "learning_rate": 0.0001, |
| "loss": 1.4635, |
| "step": 2752 |
| }, |
| { |
| "epoch": 0.9219691895512391, |
| "grad_norm": 0.4966948926448822, |
| "learning_rate": 0.0001, |
| "loss": 1.5526, |
| "step": 2753 |
| }, |
| { |
| "epoch": 0.9223040857334226, |
| "grad_norm": 0.4626345932483673, |
| "learning_rate": 0.0001, |
| "loss": 1.5453, |
| "step": 2754 |
| }, |
| { |
| "epoch": 0.9226389819156061, |
| "grad_norm": 0.4624701738357544, |
| "learning_rate": 0.0001, |
| "loss": 1.4544, |
| "step": 2755 |
| }, |
| { |
| "epoch": 0.9229738780977896, |
| "grad_norm": 0.47414034605026245, |
| "learning_rate": 0.0001, |
| "loss": 1.5305, |
| "step": 2756 |
| }, |
| { |
| "epoch": 0.9233087742799732, |
| "grad_norm": 0.4446532726287842, |
| "learning_rate": 0.0001, |
| "loss": 1.464, |
| "step": 2757 |
| }, |
| { |
| "epoch": 0.9236436704621568, |
| "grad_norm": 0.47026002407073975, |
| "learning_rate": 0.0001, |
| "loss": 1.5562, |
| "step": 2758 |
| }, |
| { |
| "epoch": 0.9239785666443403, |
| "grad_norm": 0.4398569166660309, |
| "learning_rate": 0.0001, |
| "loss": 1.4474, |
| "step": 2759 |
| }, |
| { |
| "epoch": 0.9243134628265238, |
| "grad_norm": 0.49884065985679626, |
| "learning_rate": 0.0001, |
| "loss": 1.559, |
| "step": 2760 |
| }, |
| { |
| "epoch": 0.9246483590087073, |
| "grad_norm": 0.4818177819252014, |
| "learning_rate": 0.0001, |
| "loss": 1.5044, |
| "step": 2761 |
| }, |
| { |
| "epoch": 0.9249832551908909, |
| "grad_norm": 0.46590423583984375, |
| "learning_rate": 0.0001, |
| "loss": 1.4717, |
| "step": 2762 |
| }, |
| { |
| "epoch": 0.9253181513730744, |
| "grad_norm": 0.4742557406425476, |
| "learning_rate": 0.0001, |
| "loss": 1.5131, |
| "step": 2763 |
| }, |
| { |
| "epoch": 0.9256530475552579, |
| "grad_norm": 0.4409778118133545, |
| "learning_rate": 0.0001, |
| "loss": 1.4234, |
| "step": 2764 |
| }, |
| { |
| "epoch": 0.9259879437374414, |
| "grad_norm": 0.4703803062438965, |
| "learning_rate": 0.0001, |
| "loss": 1.3772, |
| "step": 2765 |
| }, |
| { |
| "epoch": 0.9263228399196249, |
| "grad_norm": 0.46631020307540894, |
| "learning_rate": 0.0001, |
| "loss": 1.4025, |
| "step": 2766 |
| }, |
| { |
| "epoch": 0.9266577361018085, |
| "grad_norm": 0.4509109556674957, |
| "learning_rate": 0.0001, |
| "loss": 1.4883, |
| "step": 2767 |
| }, |
| { |
| "epoch": 0.926992632283992, |
| "grad_norm": 0.49394387006759644, |
| "learning_rate": 0.0001, |
| "loss": 1.564, |
| "step": 2768 |
| }, |
| { |
| "epoch": 0.9273275284661755, |
| "grad_norm": 0.4773907959461212, |
| "learning_rate": 0.0001, |
| "loss": 1.4377, |
| "step": 2769 |
| }, |
| { |
| "epoch": 0.927662424648359, |
| "grad_norm": 0.47449010610580444, |
| "learning_rate": 0.0001, |
| "loss": 1.5045, |
| "step": 2770 |
| }, |
| { |
| "epoch": 0.9279973208305425, |
| "grad_norm": 0.5142382383346558, |
| "learning_rate": 0.0001, |
| "loss": 1.5277, |
| "step": 2771 |
| }, |
| { |
| "epoch": 0.9283322170127261, |
| "grad_norm": 0.44858208298683167, |
| "learning_rate": 0.0001, |
| "loss": 1.4339, |
| "step": 2772 |
| }, |
| { |
| "epoch": 0.9286671131949096, |
| "grad_norm": 0.4873579442501068, |
| "learning_rate": 0.0001, |
| "loss": 1.5366, |
| "step": 2773 |
| }, |
| { |
| "epoch": 0.9290020093770931, |
| "grad_norm": 0.48021912574768066, |
| "learning_rate": 0.0001, |
| "loss": 1.4933, |
| "step": 2774 |
| }, |
| { |
| "epoch": 0.9293369055592766, |
| "grad_norm": 0.47593623399734497, |
| "learning_rate": 0.0001, |
| "loss": 1.5591, |
| "step": 2775 |
| }, |
| { |
| "epoch": 0.9296718017414601, |
| "grad_norm": 0.46372953057289124, |
| "learning_rate": 0.0001, |
| "loss": 1.5825, |
| "step": 2776 |
| }, |
| { |
| "epoch": 0.9300066979236437, |
| "grad_norm": 0.47707000374794006, |
| "learning_rate": 0.0001, |
| "loss": 1.5242, |
| "step": 2777 |
| }, |
| { |
| "epoch": 0.9303415941058272, |
| "grad_norm": 0.47080013155937195, |
| "learning_rate": 0.0001, |
| "loss": 1.5807, |
| "step": 2778 |
| }, |
| { |
| "epoch": 0.9306764902880107, |
| "grad_norm": 0.46404528617858887, |
| "learning_rate": 0.0001, |
| "loss": 1.4384, |
| "step": 2779 |
| }, |
| { |
| "epoch": 0.9310113864701942, |
| "grad_norm": 0.46434518694877625, |
| "learning_rate": 0.0001, |
| "loss": 1.4945, |
| "step": 2780 |
| }, |
| { |
| "epoch": 0.9313462826523777, |
| "grad_norm": 0.45197415351867676, |
| "learning_rate": 0.0001, |
| "loss": 1.4594, |
| "step": 2781 |
| }, |
| { |
| "epoch": 0.9316811788345613, |
| "grad_norm": 0.4818415939807892, |
| "learning_rate": 0.0001, |
| "loss": 1.5772, |
| "step": 2782 |
| }, |
| { |
| "epoch": 0.9320160750167448, |
| "grad_norm": 0.4950558841228485, |
| "learning_rate": 0.0001, |
| "loss": 1.5536, |
| "step": 2783 |
| }, |
| { |
| "epoch": 0.9323509711989283, |
| "grad_norm": 0.429934561252594, |
| "learning_rate": 0.0001, |
| "loss": 1.3729, |
| "step": 2784 |
| }, |
| { |
| "epoch": 0.9326858673811118, |
| "grad_norm": 0.45653441548347473, |
| "learning_rate": 0.0001, |
| "loss": 1.4731, |
| "step": 2785 |
| }, |
| { |
| "epoch": 0.9330207635632953, |
| "grad_norm": 0.47883328795433044, |
| "learning_rate": 0.0001, |
| "loss": 1.4964, |
| "step": 2786 |
| }, |
| { |
| "epoch": 0.9333556597454788, |
| "grad_norm": 0.45890769362449646, |
| "learning_rate": 0.0001, |
| "loss": 1.4338, |
| "step": 2787 |
| }, |
| { |
| "epoch": 0.9336905559276625, |
| "grad_norm": 0.48469284176826477, |
| "learning_rate": 0.0001, |
| "loss": 1.519, |
| "step": 2788 |
| }, |
| { |
| "epoch": 0.934025452109846, |
| "grad_norm": 0.47782102227211, |
| "learning_rate": 0.0001, |
| "loss": 1.4085, |
| "step": 2789 |
| }, |
| { |
| "epoch": 0.9343603482920295, |
| "grad_norm": 0.4606318473815918, |
| "learning_rate": 0.0001, |
| "loss": 1.4495, |
| "step": 2790 |
| }, |
| { |
| "epoch": 0.934695244474213, |
| "grad_norm": 0.48102056980133057, |
| "learning_rate": 0.0001, |
| "loss": 1.5266, |
| "step": 2791 |
| }, |
| { |
| "epoch": 0.9350301406563966, |
| "grad_norm": 0.505702018737793, |
| "learning_rate": 0.0001, |
| "loss": 1.496, |
| "step": 2792 |
| }, |
| { |
| "epoch": 0.9353650368385801, |
| "grad_norm": 0.48012757301330566, |
| "learning_rate": 0.0001, |
| "loss": 1.5956, |
| "step": 2793 |
| }, |
| { |
| "epoch": 0.9356999330207636, |
| "grad_norm": 0.4581480324268341, |
| "learning_rate": 0.0001, |
| "loss": 1.4656, |
| "step": 2794 |
| }, |
| { |
| "epoch": 0.9360348292029471, |
| "grad_norm": 0.4929927885532379, |
| "learning_rate": 0.0001, |
| "loss": 1.5082, |
| "step": 2795 |
| }, |
| { |
| "epoch": 0.9363697253851306, |
| "grad_norm": 0.525664210319519, |
| "learning_rate": 0.0001, |
| "loss": 1.5383, |
| "step": 2796 |
| }, |
| { |
| "epoch": 0.9367046215673142, |
| "grad_norm": 0.474171906709671, |
| "learning_rate": 0.0001, |
| "loss": 1.5132, |
| "step": 2797 |
| }, |
| { |
| "epoch": 0.9370395177494977, |
| "grad_norm": 0.483628511428833, |
| "learning_rate": 0.0001, |
| "loss": 1.3998, |
| "step": 2798 |
| }, |
| { |
| "epoch": 0.9373744139316812, |
| "grad_norm": 0.49638795852661133, |
| "learning_rate": 0.0001, |
| "loss": 1.4475, |
| "step": 2799 |
| }, |
| { |
| "epoch": 0.9377093101138647, |
| "grad_norm": 0.4912618398666382, |
| "learning_rate": 0.0001, |
| "loss": 1.5874, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.9380442062960482, |
| "grad_norm": 0.45865505933761597, |
| "learning_rate": 0.0001, |
| "loss": 1.3994, |
| "step": 2801 |
| }, |
| { |
| "epoch": 0.9383791024782318, |
| "grad_norm": 0.48046156764030457, |
| "learning_rate": 0.0001, |
| "loss": 1.5055, |
| "step": 2802 |
| }, |
| { |
| "epoch": 0.9387139986604153, |
| "grad_norm": 0.49069395661354065, |
| "learning_rate": 0.0001, |
| "loss": 1.5077, |
| "step": 2803 |
| }, |
| { |
| "epoch": 0.9390488948425988, |
| "grad_norm": 0.47460469603538513, |
| "learning_rate": 0.0001, |
| "loss": 1.468, |
| "step": 2804 |
| }, |
| { |
| "epoch": 0.9393837910247823, |
| "grad_norm": 0.4517071545124054, |
| "learning_rate": 0.0001, |
| "loss": 1.4131, |
| "step": 2805 |
| }, |
| { |
| "epoch": 0.9397186872069658, |
| "grad_norm": 0.5065127015113831, |
| "learning_rate": 0.0001, |
| "loss": 1.5084, |
| "step": 2806 |
| }, |
| { |
| "epoch": 0.9400535833891493, |
| "grad_norm": 0.45778200030326843, |
| "learning_rate": 0.0001, |
| "loss": 1.4749, |
| "step": 2807 |
| }, |
| { |
| "epoch": 0.9403884795713329, |
| "grad_norm": 0.4874265193939209, |
| "learning_rate": 0.0001, |
| "loss": 1.5827, |
| "step": 2808 |
| }, |
| { |
| "epoch": 0.9407233757535164, |
| "grad_norm": 0.5091594457626343, |
| "learning_rate": 0.0001, |
| "loss": 1.5677, |
| "step": 2809 |
| }, |
| { |
| "epoch": 0.9410582719356999, |
| "grad_norm": 0.4859616458415985, |
| "learning_rate": 0.0001, |
| "loss": 1.4978, |
| "step": 2810 |
| }, |
| { |
| "epoch": 0.9413931681178834, |
| "grad_norm": 0.45796576142311096, |
| "learning_rate": 0.0001, |
| "loss": 1.5074, |
| "step": 2811 |
| }, |
| { |
| "epoch": 0.941728064300067, |
| "grad_norm": 0.46981683373451233, |
| "learning_rate": 0.0001, |
| "loss": 1.4518, |
| "step": 2812 |
| }, |
| { |
| "epoch": 0.9420629604822505, |
| "grad_norm": 0.48510855436325073, |
| "learning_rate": 0.0001, |
| "loss": 1.5716, |
| "step": 2813 |
| }, |
| { |
| "epoch": 0.942397856664434, |
| "grad_norm": 0.47368323802948, |
| "learning_rate": 0.0001, |
| "loss": 1.4786, |
| "step": 2814 |
| }, |
| { |
| "epoch": 0.9427327528466175, |
| "grad_norm": 0.464106947183609, |
| "learning_rate": 0.0001, |
| "loss": 1.4443, |
| "step": 2815 |
| }, |
| { |
| "epoch": 0.943067649028801, |
| "grad_norm": 0.4603121876716614, |
| "learning_rate": 0.0001, |
| "loss": 1.5207, |
| "step": 2816 |
| }, |
| { |
| "epoch": 0.9434025452109845, |
| "grad_norm": 0.48097118735313416, |
| "learning_rate": 0.0001, |
| "loss": 1.5198, |
| "step": 2817 |
| }, |
| { |
| "epoch": 0.9437374413931681, |
| "grad_norm": 0.4634473919868469, |
| "learning_rate": 0.0001, |
| "loss": 1.501, |
| "step": 2818 |
| }, |
| { |
| "epoch": 0.9440723375753517, |
| "grad_norm": 0.46776083111763, |
| "learning_rate": 0.0001, |
| "loss": 1.5428, |
| "step": 2819 |
| }, |
| { |
| "epoch": 0.9444072337575352, |
| "grad_norm": 0.46727311611175537, |
| "learning_rate": 0.0001, |
| "loss": 1.5548, |
| "step": 2820 |
| }, |
| { |
| "epoch": 0.9447421299397187, |
| "grad_norm": 0.46500930190086365, |
| "learning_rate": 0.0001, |
| "loss": 1.4424, |
| "step": 2821 |
| }, |
| { |
| "epoch": 0.9450770261219023, |
| "grad_norm": 0.4897436201572418, |
| "learning_rate": 0.0001, |
| "loss": 1.606, |
| "step": 2822 |
| }, |
| { |
| "epoch": 0.9454119223040858, |
| "grad_norm": 0.4767928123474121, |
| "learning_rate": 0.0001, |
| "loss": 1.4874, |
| "step": 2823 |
| }, |
| { |
| "epoch": 0.9457468184862693, |
| "grad_norm": 0.4675102233886719, |
| "learning_rate": 0.0001, |
| "loss": 1.5864, |
| "step": 2824 |
| }, |
| { |
| "epoch": 0.9460817146684528, |
| "grad_norm": 0.480685293674469, |
| "learning_rate": 0.0001, |
| "loss": 1.483, |
| "step": 2825 |
| }, |
| { |
| "epoch": 0.9464166108506363, |
| "grad_norm": 0.483405202627182, |
| "learning_rate": 0.0001, |
| "loss": 1.5395, |
| "step": 2826 |
| }, |
| { |
| "epoch": 0.9467515070328199, |
| "grad_norm": 0.4541187584400177, |
| "learning_rate": 0.0001, |
| "loss": 1.5344, |
| "step": 2827 |
| }, |
| { |
| "epoch": 0.9470864032150034, |
| "grad_norm": 0.4717221260070801, |
| "learning_rate": 0.0001, |
| "loss": 1.4605, |
| "step": 2828 |
| }, |
| { |
| "epoch": 0.9474212993971869, |
| "grad_norm": 0.47321516275405884, |
| "learning_rate": 0.0001, |
| "loss": 1.4989, |
| "step": 2829 |
| }, |
| { |
| "epoch": 0.9477561955793704, |
| "grad_norm": 0.4786660075187683, |
| "learning_rate": 0.0001, |
| "loss": 1.5507, |
| "step": 2830 |
| }, |
| { |
| "epoch": 0.9480910917615539, |
| "grad_norm": 0.47312670946121216, |
| "learning_rate": 0.0001, |
| "loss": 1.5756, |
| "step": 2831 |
| }, |
| { |
| "epoch": 0.9484259879437374, |
| "grad_norm": 0.4898805618286133, |
| "learning_rate": 0.0001, |
| "loss": 1.5134, |
| "step": 2832 |
| }, |
| { |
| "epoch": 0.948760884125921, |
| "grad_norm": 0.47797274589538574, |
| "learning_rate": 0.0001, |
| "loss": 1.5282, |
| "step": 2833 |
| }, |
| { |
| "epoch": 0.9490957803081045, |
| "grad_norm": 0.47154468297958374, |
| "learning_rate": 0.0001, |
| "loss": 1.573, |
| "step": 2834 |
| }, |
| { |
| "epoch": 0.949430676490288, |
| "grad_norm": 0.4796604812145233, |
| "learning_rate": 0.0001, |
| "loss": 1.5892, |
| "step": 2835 |
| }, |
| { |
| "epoch": 0.9497655726724715, |
| "grad_norm": 0.463866263628006, |
| "learning_rate": 0.0001, |
| "loss": 1.4255, |
| "step": 2836 |
| }, |
| { |
| "epoch": 0.950100468854655, |
| "grad_norm": 0.46627679467201233, |
| "learning_rate": 0.0001, |
| "loss": 1.5095, |
| "step": 2837 |
| }, |
| { |
| "epoch": 0.9504353650368386, |
| "grad_norm": 0.4780580699443817, |
| "learning_rate": 0.0001, |
| "loss": 1.5194, |
| "step": 2838 |
| }, |
| { |
| "epoch": 0.9507702612190221, |
| "grad_norm": 0.46096986532211304, |
| "learning_rate": 0.0001, |
| "loss": 1.5223, |
| "step": 2839 |
| }, |
| { |
| "epoch": 0.9511051574012056, |
| "grad_norm": 0.46082839369773865, |
| "learning_rate": 0.0001, |
| "loss": 1.4261, |
| "step": 2840 |
| }, |
| { |
| "epoch": 0.9514400535833891, |
| "grad_norm": 0.4698468744754791, |
| "learning_rate": 0.0001, |
| "loss": 1.4458, |
| "step": 2841 |
| }, |
| { |
| "epoch": 0.9517749497655726, |
| "grad_norm": 0.4854731559753418, |
| "learning_rate": 0.0001, |
| "loss": 1.4575, |
| "step": 2842 |
| }, |
| { |
| "epoch": 0.9521098459477562, |
| "grad_norm": 0.4889122247695923, |
| "learning_rate": 0.0001, |
| "loss": 1.4321, |
| "step": 2843 |
| }, |
| { |
| "epoch": 0.9524447421299397, |
| "grad_norm": 0.48800012469291687, |
| "learning_rate": 0.0001, |
| "loss": 1.544, |
| "step": 2844 |
| }, |
| { |
| "epoch": 0.9527796383121232, |
| "grad_norm": 0.48651984333992004, |
| "learning_rate": 0.0001, |
| "loss": 1.5325, |
| "step": 2845 |
| }, |
| { |
| "epoch": 0.9531145344943067, |
| "grad_norm": 0.47828370332717896, |
| "learning_rate": 0.0001, |
| "loss": 1.5728, |
| "step": 2846 |
| }, |
| { |
| "epoch": 0.9534494306764902, |
| "grad_norm": 0.4760272204875946, |
| "learning_rate": 0.0001, |
| "loss": 1.5307, |
| "step": 2847 |
| }, |
| { |
| "epoch": 0.9537843268586738, |
| "grad_norm": 0.4532026946544647, |
| "learning_rate": 0.0001, |
| "loss": 1.4, |
| "step": 2848 |
| }, |
| { |
| "epoch": 0.9541192230408574, |
| "grad_norm": 0.4741441309452057, |
| "learning_rate": 0.0001, |
| "loss": 1.6349, |
| "step": 2849 |
| }, |
| { |
| "epoch": 0.9544541192230409, |
| "grad_norm": 0.47783204913139343, |
| "learning_rate": 0.0001, |
| "loss": 1.407, |
| "step": 2850 |
| }, |
| { |
| "epoch": 0.9547890154052244, |
| "grad_norm": 0.46502619981765747, |
| "learning_rate": 0.0001, |
| "loss": 1.446, |
| "step": 2851 |
| }, |
| { |
| "epoch": 0.955123911587408, |
| "grad_norm": 0.47991493344306946, |
| "learning_rate": 0.0001, |
| "loss": 1.4288, |
| "step": 2852 |
| }, |
| { |
| "epoch": 0.9554588077695915, |
| "grad_norm": 0.5061444044113159, |
| "learning_rate": 0.0001, |
| "loss": 1.5309, |
| "step": 2853 |
| }, |
| { |
| "epoch": 0.955793703951775, |
| "grad_norm": 0.483979195356369, |
| "learning_rate": 0.0001, |
| "loss": 1.559, |
| "step": 2854 |
| }, |
| { |
| "epoch": 0.9561286001339585, |
| "grad_norm": 0.49346140027046204, |
| "learning_rate": 0.0001, |
| "loss": 1.5161, |
| "step": 2855 |
| }, |
| { |
| "epoch": 0.956463496316142, |
| "grad_norm": 0.47661370038986206, |
| "learning_rate": 0.0001, |
| "loss": 1.5026, |
| "step": 2856 |
| }, |
| { |
| "epoch": 0.9567983924983255, |
| "grad_norm": 0.4530797600746155, |
| "learning_rate": 0.0001, |
| "loss": 1.4322, |
| "step": 2857 |
| }, |
| { |
| "epoch": 0.9571332886805091, |
| "grad_norm": 0.46580278873443604, |
| "learning_rate": 0.0001, |
| "loss": 1.5788, |
| "step": 2858 |
| }, |
| { |
| "epoch": 0.9574681848626926, |
| "grad_norm": 0.4692135751247406, |
| "learning_rate": 0.0001, |
| "loss": 1.5282, |
| "step": 2859 |
| }, |
| { |
| "epoch": 0.9578030810448761, |
| "grad_norm": 0.4626304507255554, |
| "learning_rate": 0.0001, |
| "loss": 1.5596, |
| "step": 2860 |
| }, |
| { |
| "epoch": 0.9581379772270596, |
| "grad_norm": 0.47358620166778564, |
| "learning_rate": 0.0001, |
| "loss": 1.5271, |
| "step": 2861 |
| }, |
| { |
| "epoch": 0.9584728734092431, |
| "grad_norm": 0.4857538938522339, |
| "learning_rate": 0.0001, |
| "loss": 1.5322, |
| "step": 2862 |
| }, |
| { |
| "epoch": 0.9588077695914267, |
| "grad_norm": 0.47148844599723816, |
| "learning_rate": 0.0001, |
| "loss": 1.471, |
| "step": 2863 |
| }, |
| { |
| "epoch": 0.9591426657736102, |
| "grad_norm": 0.48549243807792664, |
| "learning_rate": 0.0001, |
| "loss": 1.5688, |
| "step": 2864 |
| }, |
| { |
| "epoch": 0.9594775619557937, |
| "grad_norm": 0.4802207946777344, |
| "learning_rate": 0.0001, |
| "loss": 1.5284, |
| "step": 2865 |
| }, |
| { |
| "epoch": 0.9598124581379772, |
| "grad_norm": 0.47728025913238525, |
| "learning_rate": 0.0001, |
| "loss": 1.4833, |
| "step": 2866 |
| }, |
| { |
| "epoch": 0.9601473543201607, |
| "grad_norm": 0.5061824321746826, |
| "learning_rate": 0.0001, |
| "loss": 1.5163, |
| "step": 2867 |
| }, |
| { |
| "epoch": 0.9604822505023443, |
| "grad_norm": 0.48417097330093384, |
| "learning_rate": 0.0001, |
| "loss": 1.5739, |
| "step": 2868 |
| }, |
| { |
| "epoch": 0.9608171466845278, |
| "grad_norm": 0.4783802628517151, |
| "learning_rate": 0.0001, |
| "loss": 1.5024, |
| "step": 2869 |
| }, |
| { |
| "epoch": 0.9611520428667113, |
| "grad_norm": 0.4534517526626587, |
| "learning_rate": 0.0001, |
| "loss": 1.4769, |
| "step": 2870 |
| }, |
| { |
| "epoch": 0.9614869390488948, |
| "grad_norm": 0.4999220371246338, |
| "learning_rate": 0.0001, |
| "loss": 1.519, |
| "step": 2871 |
| }, |
| { |
| "epoch": 0.9618218352310783, |
| "grad_norm": 0.4893072247505188, |
| "learning_rate": 0.0001, |
| "loss": 1.5519, |
| "step": 2872 |
| }, |
| { |
| "epoch": 0.9621567314132619, |
| "grad_norm": 0.4914241135120392, |
| "learning_rate": 0.0001, |
| "loss": 1.5377, |
| "step": 2873 |
| }, |
| { |
| "epoch": 0.9624916275954454, |
| "grad_norm": 0.47055432200431824, |
| "learning_rate": 0.0001, |
| "loss": 1.5066, |
| "step": 2874 |
| }, |
| { |
| "epoch": 0.9628265237776289, |
| "grad_norm": 0.4881734848022461, |
| "learning_rate": 0.0001, |
| "loss": 1.5661, |
| "step": 2875 |
| }, |
| { |
| "epoch": 0.9631614199598124, |
| "grad_norm": 0.48922911286354065, |
| "learning_rate": 0.0001, |
| "loss": 1.5827, |
| "step": 2876 |
| }, |
| { |
| "epoch": 0.9634963161419959, |
| "grad_norm": 0.4815993010997772, |
| "learning_rate": 0.0001, |
| "loss": 1.4694, |
| "step": 2877 |
| }, |
| { |
| "epoch": 0.9638312123241795, |
| "grad_norm": 0.5004373788833618, |
| "learning_rate": 0.0001, |
| "loss": 1.5051, |
| "step": 2878 |
| }, |
| { |
| "epoch": 0.964166108506363, |
| "grad_norm": 0.48256343603134155, |
| "learning_rate": 0.0001, |
| "loss": 1.4622, |
| "step": 2879 |
| }, |
| { |
| "epoch": 0.9645010046885466, |
| "grad_norm": 0.47723624110221863, |
| "learning_rate": 0.0001, |
| "loss": 1.5401, |
| "step": 2880 |
| }, |
| { |
| "epoch": 0.9648359008707301, |
| "grad_norm": 0.479966938495636, |
| "learning_rate": 0.0001, |
| "loss": 1.3779, |
| "step": 2881 |
| }, |
| { |
| "epoch": 0.9651707970529136, |
| "grad_norm": 0.47350406646728516, |
| "learning_rate": 0.0001, |
| "loss": 1.5343, |
| "step": 2882 |
| }, |
| { |
| "epoch": 0.9655056932350972, |
| "grad_norm": 0.4866943061351776, |
| "learning_rate": 0.0001, |
| "loss": 1.5302, |
| "step": 2883 |
| }, |
| { |
| "epoch": 0.9658405894172807, |
| "grad_norm": 0.4885713756084442, |
| "learning_rate": 0.0001, |
| "loss": 1.4867, |
| "step": 2884 |
| }, |
| { |
| "epoch": 0.9661754855994642, |
| "grad_norm": 0.4667793810367584, |
| "learning_rate": 0.0001, |
| "loss": 1.4078, |
| "step": 2885 |
| }, |
| { |
| "epoch": 0.9665103817816477, |
| "grad_norm": 0.4988701641559601, |
| "learning_rate": 0.0001, |
| "loss": 1.4917, |
| "step": 2886 |
| }, |
| { |
| "epoch": 0.9668452779638312, |
| "grad_norm": 0.47840380668640137, |
| "learning_rate": 0.0001, |
| "loss": 1.4133, |
| "step": 2887 |
| }, |
| { |
| "epoch": 0.9671801741460148, |
| "grad_norm": 0.5049902200698853, |
| "learning_rate": 0.0001, |
| "loss": 1.6271, |
| "step": 2888 |
| }, |
| { |
| "epoch": 0.9675150703281983, |
| "grad_norm": 0.4824509918689728, |
| "learning_rate": 0.0001, |
| "loss": 1.5561, |
| "step": 2889 |
| }, |
| { |
| "epoch": 0.9678499665103818, |
| "grad_norm": 0.47951459884643555, |
| "learning_rate": 0.0001, |
| "loss": 1.4888, |
| "step": 2890 |
| }, |
| { |
| "epoch": 0.9681848626925653, |
| "grad_norm": 0.4813043177127838, |
| "learning_rate": 0.0001, |
| "loss": 1.4919, |
| "step": 2891 |
| }, |
| { |
| "epoch": 0.9685197588747488, |
| "grad_norm": 0.45737335085868835, |
| "learning_rate": 0.0001, |
| "loss": 1.4508, |
| "step": 2892 |
| }, |
| { |
| "epoch": 0.9688546550569324, |
| "grad_norm": 0.4915395975112915, |
| "learning_rate": 0.0001, |
| "loss": 1.553, |
| "step": 2893 |
| }, |
| { |
| "epoch": 0.9691895512391159, |
| "grad_norm": 0.4985513687133789, |
| "learning_rate": 0.0001, |
| "loss": 1.4996, |
| "step": 2894 |
| }, |
| { |
| "epoch": 0.9695244474212994, |
| "grad_norm": 0.48302069306373596, |
| "learning_rate": 0.0001, |
| "loss": 1.49, |
| "step": 2895 |
| }, |
| { |
| "epoch": 0.9698593436034829, |
| "grad_norm": 0.46924352645874023, |
| "learning_rate": 0.0001, |
| "loss": 1.3829, |
| "step": 2896 |
| }, |
| { |
| "epoch": 0.9701942397856664, |
| "grad_norm": 0.48958954215049744, |
| "learning_rate": 0.0001, |
| "loss": 1.4775, |
| "step": 2897 |
| }, |
| { |
| "epoch": 0.97052913596785, |
| "grad_norm": 0.4941006600856781, |
| "learning_rate": 0.0001, |
| "loss": 1.5184, |
| "step": 2898 |
| }, |
| { |
| "epoch": 0.9708640321500335, |
| "grad_norm": 0.4678502678871155, |
| "learning_rate": 0.0001, |
| "loss": 1.5189, |
| "step": 2899 |
| }, |
| { |
| "epoch": 0.971198928332217, |
| "grad_norm": 0.48216712474823, |
| "learning_rate": 0.0001, |
| "loss": 1.5569, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.9715338245144005, |
| "grad_norm": 0.48112717270851135, |
| "learning_rate": 0.0001, |
| "loss": 1.4676, |
| "step": 2901 |
| }, |
| { |
| "epoch": 0.971868720696584, |
| "grad_norm": 0.48687508702278137, |
| "learning_rate": 0.0001, |
| "loss": 1.495, |
| "step": 2902 |
| }, |
| { |
| "epoch": 0.9722036168787676, |
| "grad_norm": 0.47197338938713074, |
| "learning_rate": 0.0001, |
| "loss": 1.4437, |
| "step": 2903 |
| }, |
| { |
| "epoch": 0.9725385130609511, |
| "grad_norm": 0.4881942868232727, |
| "learning_rate": 0.0001, |
| "loss": 1.5543, |
| "step": 2904 |
| }, |
| { |
| "epoch": 0.9728734092431346, |
| "grad_norm": 0.4662570059299469, |
| "learning_rate": 0.0001, |
| "loss": 1.4929, |
| "step": 2905 |
| }, |
| { |
| "epoch": 0.9732083054253181, |
| "grad_norm": 0.4897606074810028, |
| "learning_rate": 0.0001, |
| "loss": 1.5552, |
| "step": 2906 |
| }, |
| { |
| "epoch": 0.9735432016075016, |
| "grad_norm": 0.4804513454437256, |
| "learning_rate": 0.0001, |
| "loss": 1.5079, |
| "step": 2907 |
| }, |
| { |
| "epoch": 0.9738780977896851, |
| "grad_norm": 0.4756616950035095, |
| "learning_rate": 0.0001, |
| "loss": 1.5442, |
| "step": 2908 |
| }, |
| { |
| "epoch": 0.9742129939718687, |
| "grad_norm": 0.4789727032184601, |
| "learning_rate": 0.0001, |
| "loss": 1.5301, |
| "step": 2909 |
| }, |
| { |
| "epoch": 0.9745478901540523, |
| "grad_norm": 0.46526962518692017, |
| "learning_rate": 0.0001, |
| "loss": 1.5407, |
| "step": 2910 |
| }, |
| { |
| "epoch": 0.9748827863362358, |
| "grad_norm": 0.4653953015804291, |
| "learning_rate": 0.0001, |
| "loss": 1.4906, |
| "step": 2911 |
| }, |
| { |
| "epoch": 0.9752176825184193, |
| "grad_norm": 0.4752448499202728, |
| "learning_rate": 0.0001, |
| "loss": 1.552, |
| "step": 2912 |
| }, |
| { |
| "epoch": 0.9755525787006029, |
| "grad_norm": 0.4925309121608734, |
| "learning_rate": 0.0001, |
| "loss": 1.4738, |
| "step": 2913 |
| }, |
| { |
| "epoch": 0.9758874748827864, |
| "grad_norm": 0.4728720188140869, |
| "learning_rate": 0.0001, |
| "loss": 1.4793, |
| "step": 2914 |
| }, |
| { |
| "epoch": 0.9762223710649699, |
| "grad_norm": 0.4950384795665741, |
| "learning_rate": 0.0001, |
| "loss": 1.5126, |
| "step": 2915 |
| }, |
| { |
| "epoch": 0.9765572672471534, |
| "grad_norm": 0.4991387128829956, |
| "learning_rate": 0.0001, |
| "loss": 1.4999, |
| "step": 2916 |
| }, |
| { |
| "epoch": 0.9768921634293369, |
| "grad_norm": 0.46141117811203003, |
| "learning_rate": 0.0001, |
| "loss": 1.5335, |
| "step": 2917 |
| }, |
| { |
| "epoch": 0.9772270596115205, |
| "grad_norm": 0.4639141261577606, |
| "learning_rate": 0.0001, |
| "loss": 1.5581, |
| "step": 2918 |
| }, |
| { |
| "epoch": 0.977561955793704, |
| "grad_norm": 0.49240928888320923, |
| "learning_rate": 0.0001, |
| "loss": 1.4707, |
| "step": 2919 |
| }, |
| { |
| "epoch": 0.9778968519758875, |
| "grad_norm": 0.5489037036895752, |
| "learning_rate": 0.0001, |
| "loss": 1.5207, |
| "step": 2920 |
| }, |
| { |
| "epoch": 0.978231748158071, |
| "grad_norm": 0.5049098134040833, |
| "learning_rate": 0.0001, |
| "loss": 1.5358, |
| "step": 2921 |
| }, |
| { |
| "epoch": 0.9785666443402545, |
| "grad_norm": 0.4774382710456848, |
| "learning_rate": 0.0001, |
| "loss": 1.4936, |
| "step": 2922 |
| }, |
| { |
| "epoch": 0.978901540522438, |
| "grad_norm": 0.4593014419078827, |
| "learning_rate": 0.0001, |
| "loss": 1.4882, |
| "step": 2923 |
| }, |
| { |
| "epoch": 0.9792364367046216, |
| "grad_norm": 0.47530999779701233, |
| "learning_rate": 0.0001, |
| "loss": 1.5659, |
| "step": 2924 |
| }, |
| { |
| "epoch": 0.9795713328868051, |
| "grad_norm": 0.4622528553009033, |
| "learning_rate": 0.0001, |
| "loss": 1.4154, |
| "step": 2925 |
| }, |
| { |
| "epoch": 0.9799062290689886, |
| "grad_norm": 0.4709389805793762, |
| "learning_rate": 0.0001, |
| "loss": 1.4697, |
| "step": 2926 |
| }, |
| { |
| "epoch": 0.9802411252511721, |
| "grad_norm": 0.4578280746936798, |
| "learning_rate": 0.0001, |
| "loss": 1.4665, |
| "step": 2927 |
| }, |
| { |
| "epoch": 0.9805760214333556, |
| "grad_norm": 0.4711247384548187, |
| "learning_rate": 0.0001, |
| "loss": 1.5215, |
| "step": 2928 |
| }, |
| { |
| "epoch": 0.9809109176155392, |
| "grad_norm": 0.47621962428092957, |
| "learning_rate": 0.0001, |
| "loss": 1.5034, |
| "step": 2929 |
| }, |
| { |
| "epoch": 0.9812458137977227, |
| "grad_norm": 0.47477683424949646, |
| "learning_rate": 0.0001, |
| "loss": 1.4787, |
| "step": 2930 |
| }, |
| { |
| "epoch": 0.9815807099799062, |
| "grad_norm": 0.498028427362442, |
| "learning_rate": 0.0001, |
| "loss": 1.5613, |
| "step": 2931 |
| }, |
| { |
| "epoch": 0.9819156061620897, |
| "grad_norm": 0.47753721475601196, |
| "learning_rate": 0.0001, |
| "loss": 1.5403, |
| "step": 2932 |
| }, |
| { |
| "epoch": 0.9822505023442732, |
| "grad_norm": 0.46745017170906067, |
| "learning_rate": 0.0001, |
| "loss": 1.4648, |
| "step": 2933 |
| }, |
| { |
| "epoch": 0.9825853985264568, |
| "grad_norm": 0.46483781933784485, |
| "learning_rate": 0.0001, |
| "loss": 1.5435, |
| "step": 2934 |
| }, |
| { |
| "epoch": 0.9829202947086403, |
| "grad_norm": 0.46817606687545776, |
| "learning_rate": 0.0001, |
| "loss": 1.5385, |
| "step": 2935 |
| }, |
| { |
| "epoch": 0.9832551908908238, |
| "grad_norm": 0.5240177512168884, |
| "learning_rate": 0.0001, |
| "loss": 1.6504, |
| "step": 2936 |
| }, |
| { |
| "epoch": 0.9835900870730073, |
| "grad_norm": 0.45395007729530334, |
| "learning_rate": 0.0001, |
| "loss": 1.5389, |
| "step": 2937 |
| }, |
| { |
| "epoch": 0.9839249832551908, |
| "grad_norm": 0.5035748481750488, |
| "learning_rate": 0.0001, |
| "loss": 1.5081, |
| "step": 2938 |
| }, |
| { |
| "epoch": 0.9842598794373744, |
| "grad_norm": 0.47561246156692505, |
| "learning_rate": 0.0001, |
| "loss": 1.542, |
| "step": 2939 |
| }, |
| { |
| "epoch": 0.9845947756195579, |
| "grad_norm": 0.48551228642463684, |
| "learning_rate": 0.0001, |
| "loss": 1.5123, |
| "step": 2940 |
| }, |
| { |
| "epoch": 0.9849296718017415, |
| "grad_norm": 0.48080435395240784, |
| "learning_rate": 0.0001, |
| "loss": 1.5054, |
| "step": 2941 |
| }, |
| { |
| "epoch": 0.985264567983925, |
| "grad_norm": 0.4846731126308441, |
| "learning_rate": 0.0001, |
| "loss": 1.4762, |
| "step": 2942 |
| }, |
| { |
| "epoch": 0.9855994641661086, |
| "grad_norm": 0.5035936236381531, |
| "learning_rate": 0.0001, |
| "loss": 1.5832, |
| "step": 2943 |
| }, |
| { |
| "epoch": 0.9859343603482921, |
| "grad_norm": 0.4639035165309906, |
| "learning_rate": 0.0001, |
| "loss": 1.5488, |
| "step": 2944 |
| }, |
| { |
| "epoch": 0.9862692565304756, |
| "grad_norm": 0.4746101200580597, |
| "learning_rate": 0.0001, |
| "loss": 1.5195, |
| "step": 2945 |
| }, |
| { |
| "epoch": 0.9866041527126591, |
| "grad_norm": 0.48923352360725403, |
| "learning_rate": 0.0001, |
| "loss": 1.4401, |
| "step": 2946 |
| }, |
| { |
| "epoch": 0.9869390488948426, |
| "grad_norm": 0.5042362213134766, |
| "learning_rate": 0.0001, |
| "loss": 1.5081, |
| "step": 2947 |
| }, |
| { |
| "epoch": 0.9872739450770261, |
| "grad_norm": 0.48975637555122375, |
| "learning_rate": 0.0001, |
| "loss": 1.5775, |
| "step": 2948 |
| }, |
| { |
| "epoch": 0.9876088412592097, |
| "grad_norm": 0.4755106568336487, |
| "learning_rate": 0.0001, |
| "loss": 1.505, |
| "step": 2949 |
| }, |
| { |
| "epoch": 0.9879437374413932, |
| "grad_norm": 0.47388800978660583, |
| "learning_rate": 0.0001, |
| "loss": 1.5376, |
| "step": 2950 |
| }, |
| { |
| "epoch": 0.9882786336235767, |
| "grad_norm": 0.4691302180290222, |
| "learning_rate": 0.0001, |
| "loss": 1.4887, |
| "step": 2951 |
| }, |
| { |
| "epoch": 0.9886135298057602, |
| "grad_norm": 0.48980018496513367, |
| "learning_rate": 0.0001, |
| "loss": 1.4638, |
| "step": 2952 |
| }, |
| { |
| "epoch": 0.9889484259879437, |
| "grad_norm": 0.4859633147716522, |
| "learning_rate": 0.0001, |
| "loss": 1.4893, |
| "step": 2953 |
| }, |
| { |
| "epoch": 0.9892833221701273, |
| "grad_norm": 0.4973372220993042, |
| "learning_rate": 0.0001, |
| "loss": 1.5081, |
| "step": 2954 |
| }, |
| { |
| "epoch": 0.9896182183523108, |
| "grad_norm": 0.48805660009384155, |
| "learning_rate": 0.0001, |
| "loss": 1.484, |
| "step": 2955 |
| }, |
| { |
| "epoch": 0.9899531145344943, |
| "grad_norm": 0.4861151874065399, |
| "learning_rate": 0.0001, |
| "loss": 1.4613, |
| "step": 2956 |
| }, |
| { |
| "epoch": 0.9902880107166778, |
| "grad_norm": 0.47580957412719727, |
| "learning_rate": 0.0001, |
| "loss": 1.6285, |
| "step": 2957 |
| }, |
| { |
| "epoch": 0.9906229068988613, |
| "grad_norm": 0.46954604983329773, |
| "learning_rate": 0.0001, |
| "loss": 1.5096, |
| "step": 2958 |
| }, |
| { |
| "epoch": 0.9909578030810449, |
| "grad_norm": 0.48852768540382385, |
| "learning_rate": 0.0001, |
| "loss": 1.5095, |
| "step": 2959 |
| }, |
| { |
| "epoch": 0.9912926992632284, |
| "grad_norm": 0.47819429636001587, |
| "learning_rate": 0.0001, |
| "loss": 1.4737, |
| "step": 2960 |
| }, |
| { |
| "epoch": 0.9916275954454119, |
| "grad_norm": 0.4803783893585205, |
| "learning_rate": 0.0001, |
| "loss": 1.4867, |
| "step": 2961 |
| }, |
| { |
| "epoch": 0.9919624916275954, |
| "grad_norm": 0.5112715363502502, |
| "learning_rate": 0.0001, |
| "loss": 1.5335, |
| "step": 2962 |
| }, |
| { |
| "epoch": 0.9922973878097789, |
| "grad_norm": 0.47857457399368286, |
| "learning_rate": 0.0001, |
| "loss": 1.5255, |
| "step": 2963 |
| }, |
| { |
| "epoch": 0.9926322839919625, |
| "grad_norm": 0.48473525047302246, |
| "learning_rate": 0.0001, |
| "loss": 1.4301, |
| "step": 2964 |
| }, |
| { |
| "epoch": 0.992967180174146, |
| "grad_norm": 0.46047571301460266, |
| "learning_rate": 0.0001, |
| "loss": 1.4681, |
| "step": 2965 |
| }, |
| { |
| "epoch": 0.9933020763563295, |
| "grad_norm": 0.48862722516059875, |
| "learning_rate": 0.0001, |
| "loss": 1.5713, |
| "step": 2966 |
| }, |
| { |
| "epoch": 0.993636972538513, |
| "grad_norm": 0.49070534110069275, |
| "learning_rate": 0.0001, |
| "loss": 1.4359, |
| "step": 2967 |
| }, |
| { |
| "epoch": 0.9939718687206965, |
| "grad_norm": 0.46229931712150574, |
| "learning_rate": 0.0001, |
| "loss": 1.3731, |
| "step": 2968 |
| }, |
| { |
| "epoch": 0.9943067649028801, |
| "grad_norm": 0.4823308289051056, |
| "learning_rate": 0.0001, |
| "loss": 1.4629, |
| "step": 2969 |
| }, |
| { |
| "epoch": 0.9946416610850636, |
| "grad_norm": 0.4691035747528076, |
| "learning_rate": 0.0001, |
| "loss": 1.5538, |
| "step": 2970 |
| }, |
| { |
| "epoch": 0.9949765572672472, |
| "grad_norm": 0.4909954071044922, |
| "learning_rate": 0.0001, |
| "loss": 1.4868, |
| "step": 2971 |
| }, |
| { |
| "epoch": 0.9953114534494307, |
| "grad_norm": 0.49325498938560486, |
| "learning_rate": 0.0001, |
| "loss": 1.5534, |
| "step": 2972 |
| }, |
| { |
| "epoch": 0.9956463496316142, |
| "grad_norm": 0.47066375613212585, |
| "learning_rate": 0.0001, |
| "loss": 1.4604, |
| "step": 2973 |
| }, |
| { |
| "epoch": 0.9959812458137978, |
| "grad_norm": 0.5018871426582336, |
| "learning_rate": 0.0001, |
| "loss": 1.5454, |
| "step": 2974 |
| }, |
| { |
| "epoch": 0.9963161419959813, |
| "grad_norm": 0.46382683515548706, |
| "learning_rate": 0.0001, |
| "loss": 1.4127, |
| "step": 2975 |
| }, |
| { |
| "epoch": 0.9966510381781648, |
| "grad_norm": 0.49722546339035034, |
| "learning_rate": 0.0001, |
| "loss": 1.5617, |
| "step": 2976 |
| }, |
| { |
| "epoch": 0.9969859343603483, |
| "grad_norm": 0.4691721498966217, |
| "learning_rate": 0.0001, |
| "loss": 1.5258, |
| "step": 2977 |
| }, |
| { |
| "epoch": 0.9973208305425318, |
| "grad_norm": 0.48319104313850403, |
| "learning_rate": 0.0001, |
| "loss": 1.5848, |
| "step": 2978 |
| }, |
| { |
| "epoch": 0.9976557267247154, |
| "grad_norm": 0.48205214738845825, |
| "learning_rate": 0.0001, |
| "loss": 1.4981, |
| "step": 2979 |
| }, |
| { |
| "epoch": 0.9979906229068989, |
| "grad_norm": 0.4730733036994934, |
| "learning_rate": 0.0001, |
| "loss": 1.5446, |
| "step": 2980 |
| }, |
| { |
| "epoch": 0.9983255190890824, |
| "grad_norm": 0.4865299165248871, |
| "learning_rate": 0.0001, |
| "loss": 1.4452, |
| "step": 2981 |
| }, |
| { |
| "epoch": 0.9986604152712659, |
| "grad_norm": 0.4808069169521332, |
| "learning_rate": 0.0001, |
| "loss": 1.4897, |
| "step": 2982 |
| }, |
| { |
| "epoch": 0.9989953114534494, |
| "grad_norm": 0.504528284072876, |
| "learning_rate": 0.0001, |
| "loss": 1.4669, |
| "step": 2983 |
| }, |
| { |
| "epoch": 0.999330207635633, |
| "grad_norm": 0.4789443910121918, |
| "learning_rate": 0.0001, |
| "loss": 1.4834, |
| "step": 2984 |
| }, |
| { |
| "epoch": 0.9996651038178165, |
| "grad_norm": 0.481677770614624, |
| "learning_rate": 0.0001, |
| "loss": 1.5099, |
| "step": 2985 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.45984122157096863, |
| "learning_rate": 0.0001, |
| "loss": 1.4532, |
| "step": 2986 |
| }, |
| { |
| "epoch": 1.0, |
| "step": 2986, |
| "total_flos": 1.6950937651777634e+19, |
| "train_loss": 1.548020068463749, |
| "train_runtime": 38071.8732, |
| "train_samples_per_second": 0.627, |
| "train_steps_per_second": 0.078 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2986, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 300, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.6950937651777634e+19, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |