{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1029,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00583941605839416,
      "grad_norm": 1.9921875,
      "learning_rate": 1.9230769230769234e-07,
      "loss": 2.052279233932495,
      "step": 2
    },
    {
      "epoch": 0.01167883211678832,
      "grad_norm": 0.71875,
      "learning_rate": 5.76923076923077e-07,
      "loss": 1.9450315237045288,
      "step": 4
    },
    {
      "epoch": 0.017518248175182483,
      "grad_norm": 0.97265625,
      "learning_rate": 9.615384615384617e-07,
      "loss": 2.0441408157348633,
      "step": 6
    },
    {
      "epoch": 0.02335766423357664,
      "grad_norm": 1.390625,
      "learning_rate": 1.3461538461538462e-06,
      "loss": 1.9473555088043213,
      "step": 8
    },
    {
      "epoch": 0.029197080291970802,
      "grad_norm": 0.6875,
      "learning_rate": 1.7307692307692308e-06,
      "loss": 1.9234904050827026,
      "step": 10
    },
    {
      "epoch": 0.035036496350364967,
      "grad_norm": 1.234375,
      "learning_rate": 2.1153846153846155e-06,
      "loss": 2.0447089672088623,
      "step": 12
    },
    {
      "epoch": 0.040875912408759124,
      "grad_norm": 1.171875,
      "learning_rate": 2.5e-06,
      "loss": 1.8234096765518188,
      "step": 14
    },
    {
      "epoch": 0.04671532846715328,
      "grad_norm": 0.51953125,
      "learning_rate": 2.8846153846153845e-06,
      "loss": 1.8641804456710815,
      "step": 16
    },
    {
      "epoch": 0.052554744525547446,
      "grad_norm": 0.6640625,
      "learning_rate": 3.2692307692307696e-06,
      "loss": 1.9037718772888184,
      "step": 18
    },
    {
      "epoch": 0.058394160583941604,
      "grad_norm": 0.7734375,
      "learning_rate": 3.653846153846154e-06,
      "loss": 1.9216861724853516,
      "step": 20
    },
    {
      "epoch": 0.06423357664233577,
      "grad_norm": 1.3125,
      "learning_rate": 4.0384615384615385e-06,
      "loss": 1.9886118173599243,
      "step": 22
    },
    {
      "epoch": 0.07007299270072993,
      "grad_norm": 1.015625,
      "learning_rate": 4.423076923076924e-06,
      "loss": 1.7973345518112183,
      "step": 24
    },
    {
      "epoch": 0.07591240875912408,
      "grad_norm": 0.53515625,
      "learning_rate": 4.807692307692308e-06,
      "loss": 1.8787648677825928,
      "step": 26
    },
    {
      "epoch": 0.08175182481751825,
      "grad_norm": 0.765625,
      "learning_rate": 5.192307692307693e-06,
      "loss": 1.8880407810211182,
      "step": 28
    },
    {
      "epoch": 0.08759124087591241,
      "grad_norm": 0.5,
      "learning_rate": 5.576923076923077e-06,
      "loss": 1.8372446298599243,
      "step": 30
    },
    {
      "epoch": 0.09343065693430656,
      "grad_norm": 0.65625,
      "learning_rate": 5.961538461538462e-06,
      "loss": 1.8118259906768799,
      "step": 32
    },
    {
      "epoch": 0.09927007299270073,
      "grad_norm": 1.09375,
      "learning_rate": 6.3461538461538466e-06,
      "loss": 1.872291922569275,
      "step": 34
    },
    {
      "epoch": 0.10510948905109489,
      "grad_norm": 1.125,
      "learning_rate": 6.730769230769232e-06,
      "loss": 1.9201974868774414,
      "step": 36
    },
    {
      "epoch": 0.11094890510948906,
      "grad_norm": 0.6875,
      "learning_rate": 7.115384615384616e-06,
      "loss": 1.821286678314209,
      "step": 38
    },
    {
      "epoch": 0.11678832116788321,
      "grad_norm": 0.361328125,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.6810513734817505,
      "step": 40
    },
    {
      "epoch": 0.12262773722627737,
      "grad_norm": 0.50390625,
      "learning_rate": 7.884615384615384e-06,
      "loss": 1.7490516901016235,
      "step": 42
    },
    {
      "epoch": 0.12846715328467154,
      "grad_norm": 0.400390625,
      "learning_rate": 8.26923076923077e-06,
      "loss": 1.6871154308319092,
      "step": 44
    },
    {
      "epoch": 0.1343065693430657,
      "grad_norm": 0.9765625,
      "learning_rate": 8.653846153846155e-06,
      "loss": 1.8664501905441284,
      "step": 46
    },
    {
      "epoch": 0.14014598540145987,
      "grad_norm": 0.57421875,
      "learning_rate": 9.03846153846154e-06,
      "loss": 1.76151442527771,
      "step": 48
    },
    {
      "epoch": 0.145985401459854,
      "grad_norm": 0.6953125,
      "learning_rate": 9.423076923076923e-06,
      "loss": 1.7157841920852661,
      "step": 50
    },
    {
      "epoch": 0.15182481751824817,
      "grad_norm": 0.474609375,
      "learning_rate": 9.807692307692308e-06,
      "loss": 1.7865409851074219,
      "step": 52
    },
    {
      "epoch": 0.15766423357664233,
      "grad_norm": 0.43359375,
      "learning_rate": 9.999976735551495e-06,
      "loss": 1.7557110786437988,
      "step": 54
    },
    {
      "epoch": 0.1635036496350365,
      "grad_norm": 0.359375,
      "learning_rate": 9.999790621406734e-06,
      "loss": 1.6408637762069702,
      "step": 56
    },
    {
      "epoch": 0.16934306569343066,
      "grad_norm": 0.62890625,
      "learning_rate": 9.999418400814693e-06,
      "loss": 1.7107805013656616,
      "step": 58
    },
    {
      "epoch": 0.17518248175182483,
      "grad_norm": 11.9375,
      "learning_rate": 9.998860089170008e-06,
      "loss": 1.6930475234985352,
      "step": 60
    },
    {
      "epoch": 0.181021897810219,
      "grad_norm": 0.90625,
      "learning_rate": 9.998115709563843e-06,
      "loss": 1.8085753917694092,
      "step": 62
    },
    {
      "epoch": 0.18686131386861313,
      "grad_norm": 0.54296875,
      "learning_rate": 9.997185292782932e-06,
      "loss": 1.6834930181503296,
      "step": 64
    },
    {
      "epoch": 0.1927007299270073,
      "grad_norm": 0.80859375,
      "learning_rate": 9.996068877308297e-06,
      "loss": 1.5944346189498901,
      "step": 66
    },
    {
      "epoch": 0.19854014598540146,
      "grad_norm": 0.80859375,
      "learning_rate": 9.994766509313667e-06,
      "loss": 1.7082901000976562,
      "step": 68
    },
    {
      "epoch": 0.20437956204379562,
      "grad_norm": 0.3671875,
      "learning_rate": 9.993278242663572e-06,
      "loss": 1.6360015869140625,
      "step": 70
    },
    {
      "epoch": 0.21021897810218979,
      "grad_norm": 0.419921875,
      "learning_rate": 9.9916041389111e-06,
      "loss": 1.7114381790161133,
      "step": 72
    },
    {
      "epoch": 0.21605839416058395,
      "grad_norm": 0.4296875,
      "learning_rate": 9.989744267295359e-06,
      "loss": 1.5103245973587036,
      "step": 74
    },
    {
      "epoch": 0.22189781021897811,
      "grad_norm": 0.46875,
      "learning_rate": 9.987698704738617e-06,
      "loss": 1.4727965593338013,
      "step": 76
    },
    {
      "epoch": 0.22773722627737225,
      "grad_norm": 0.71875,
      "learning_rate": 9.985467535843121e-06,
      "loss": 1.6391849517822266,
      "step": 78
    },
    {
      "epoch": 0.23357664233576642,
      "grad_norm": 3.625,
      "learning_rate": 9.983050852887587e-06,
      "loss": 1.7767753601074219,
      "step": 80
    },
    {
      "epoch": 0.23941605839416058,
      "grad_norm": 0.57421875,
      "learning_rate": 9.9804487558234e-06,
      "loss": 1.6842796802520752,
      "step": 82
    },
    {
      "epoch": 0.24525547445255474,
      "grad_norm": 0.8515625,
      "learning_rate": 9.977661352270457e-06,
      "loss": 1.6517852544784546,
      "step": 84
    },
    {
      "epoch": 0.2510948905109489,
      "grad_norm": 0.72265625,
      "learning_rate": 9.974688757512742e-06,
      "loss": 1.501247763633728,
      "step": 86
    },
    {
      "epoch": 0.2569343065693431,
      "grad_norm": 0.462890625,
      "learning_rate": 9.971531094493541e-06,
      "loss": 1.5306718349456787,
      "step": 88
    },
    {
      "epoch": 0.26277372262773724,
      "grad_norm": 0.3671875,
      "learning_rate": 9.968188493810365e-06,
      "loss": 1.6086313724517822,
      "step": 90
    },
    {
      "epoch": 0.2686131386861314,
      "grad_norm": 0.59375,
      "learning_rate": 9.964661093709545e-06,
      "loss": 1.5198537111282349,
      "step": 92
    },
    {
      "epoch": 0.27445255474452557,
      "grad_norm": 0.9375,
      "learning_rate": 9.960949040080507e-06,
      "loss": 1.5648375749588013,
      "step": 94
    },
    {
      "epoch": 0.28029197080291973,
      "grad_norm": 0.5390625,
      "learning_rate": 9.95705248644976e-06,
      "loss": 1.6112207174301147,
      "step": 96
    },
    {
      "epoch": 0.28613138686131384,
      "grad_norm": 0.38671875,
      "learning_rate": 9.95297159397452e-06,
      "loss": 1.575645923614502,
      "step": 98
    },
    {
      "epoch": 0.291970802919708,
      "grad_norm": 0.455078125,
      "learning_rate": 9.948706531436067e-06,
      "loss": 1.4316264390945435,
      "step": 100
    },
    {
      "epoch": 0.29781021897810217,
      "grad_norm": 0.365234375,
      "learning_rate": 9.944257475232746e-06,
      "loss": 1.4882372617721558,
      "step": 102
    },
    {
      "epoch": 0.30364963503649633,
      "grad_norm": 0.392578125,
      "learning_rate": 9.939624609372686e-06,
      "loss": 1.4284008741378784,
      "step": 104
    },
    {
      "epoch": 0.3094890510948905,
      "grad_norm": 0.43359375,
      "learning_rate": 9.934808125466181e-06,
      "loss": 1.468398928642273,
      "step": 106
    },
    {
      "epoch": 0.31532846715328466,
      "grad_norm": 0.37109375,
      "learning_rate": 9.929808222717769e-06,
      "loss": 1.465457797050476,
      "step": 108
    },
    {
      "epoch": 0.32116788321167883,
      "grad_norm": 0.359375,
      "learning_rate": 9.924625107917991e-06,
      "loss": 1.425096035003662,
      "step": 110
    },
    {
      "epoch": 0.327007299270073,
      "grad_norm": 0.337890625,
      "learning_rate": 9.919258995434838e-06,
      "loss": 1.4254180192947388,
      "step": 112
    },
    {
      "epoch": 0.33284671532846716,
      "grad_norm": 0.349609375,
      "learning_rate": 9.913710107204888e-06,
      "loss": 1.4670501947402954,
      "step": 114
    },
    {
      "epoch": 0.3386861313868613,
      "grad_norm": 0.41796875,
      "learning_rate": 9.907978672724125e-06,
      "loss": 1.549682378768921,
      "step": 116
    },
    {
      "epoch": 0.3445255474452555,
      "grad_norm": 0.322265625,
      "learning_rate": 9.902064929038447e-06,
      "loss": 1.448873519897461,
      "step": 118
    },
    {
      "epoch": 0.35036496350364965,
      "grad_norm": 0.40625,
      "learning_rate": 9.89596912073386e-06,
      "loss": 1.4342598915100098,
      "step": 120
    },
    {
      "epoch": 0.3562043795620438,
      "grad_norm": 0.396484375,
      "learning_rate": 9.889691499926372e-06,
      "loss": 1.424146294593811,
      "step": 122
    },
    {
      "epoch": 0.362043795620438,
      "grad_norm": 0.337890625,
      "learning_rate": 9.883232326251553e-06,
      "loss": 1.3757034540176392,
      "step": 124
    },
    {
      "epoch": 0.3678832116788321,
      "grad_norm": 0.7421875,
      "learning_rate": 9.876591866853798e-06,
      "loss": 1.5297839641571045,
      "step": 126
    },
    {
      "epoch": 0.37372262773722625,
      "grad_norm": 0.443359375,
      "learning_rate": 9.869770396375291e-06,
      "loss": 1.4363529682159424,
      "step": 128
    },
    {
      "epoch": 0.3795620437956204,
      "grad_norm": 0.515625,
      "learning_rate": 9.862768196944636e-06,
      "loss": 1.5066875219345093,
      "step": 130
    },
    {
      "epoch": 0.3854014598540146,
      "grad_norm": 0.609375,
      "learning_rate": 9.855585558165181e-06,
      "loss": 1.4090046882629395,
      "step": 132
    },
    {
      "epoch": 0.39124087591240875,
      "grad_norm": 0.46875,
      "learning_rate": 9.848222777103063e-06,
      "loss": 1.5839899778366089,
      "step": 134
    },
    {
      "epoch": 0.3970802919708029,
      "grad_norm": 0.7421875,
      "learning_rate": 9.840680158274895e-06,
      "loss": 1.425782561302185,
      "step": 136
    },
    {
      "epoch": 0.4029197080291971,
      "grad_norm": 0.376953125,
      "learning_rate": 9.832958013635195e-06,
      "loss": 1.3894191980361938,
      "step": 138
    },
    {
      "epoch": 0.40875912408759124,
      "grad_norm": 0.322265625,
      "learning_rate": 9.825056662563464e-06,
      "loss": 1.5015205144882202,
      "step": 140
    },
    {
      "epoch": 0.4145985401459854,
      "grad_norm": 1.234375,
      "learning_rate": 9.816976431850994e-06,
      "loss": 1.3953148126602173,
      "step": 142
    },
    {
      "epoch": 0.42043795620437957,
      "grad_norm": 0.66015625,
      "learning_rate": 9.808717655687344e-06,
      "loss": 1.3676221370697021,
      "step": 144
    },
    {
      "epoch": 0.42627737226277373,
      "grad_norm": 1.046875,
      "learning_rate": 9.800280675646508e-06,
      "loss": 1.4441938400268555,
      "step": 146
    },
    {
      "epoch": 0.4321167883211679,
      "grad_norm": 0.609375,
      "learning_rate": 9.791665840672813e-06,
      "loss": 1.3372529745101929,
      "step": 148
    },
    {
      "epoch": 0.43795620437956206,
      "grad_norm": 0.4765625,
      "learning_rate": 9.782873507066466e-06,
      "loss": 1.3514975309371948,
      "step": 150
    },
    {
      "epoch": 0.44379562043795623,
      "grad_norm": 0.515625,
      "learning_rate": 9.773904038468822e-06,
      "loss": 1.3599399328231812,
      "step": 152
    },
    {
      "epoch": 0.44963503649635034,
      "grad_norm": 0.5546875,
      "learning_rate": 9.764757805847352e-06,
      "loss": 1.4376461505889893,
      "step": 154
    },
    {
      "epoch": 0.4554744525547445,
      "grad_norm": 0.443359375,
      "learning_rate": 9.755435187480288e-06,
      "loss": 1.4733713865280151,
      "step": 156
    },
    {
      "epoch": 0.46131386861313867,
      "grad_norm": 0.51171875,
      "learning_rate": 9.745936568940992e-06,
      "loss": 1.4277862310409546,
      "step": 158
    },
    {
      "epoch": 0.46715328467153283,
      "grad_norm": 0.8046875,
      "learning_rate": 9.736262343081998e-06,
      "loss": 1.4021371603012085,
      "step": 160
    },
    {
      "epoch": 0.472992700729927,
      "grad_norm": 0.63671875,
      "learning_rate": 9.726412910018768e-06,
      "loss": 1.4141305685043335,
      "step": 162
    },
    {
      "epoch": 0.47883211678832116,
      "grad_norm": 0.53125,
      "learning_rate": 9.716388677113145e-06,
      "loss": 1.3931472301483154,
      "step": 164
    },
    {
      "epoch": 0.4846715328467153,
      "grad_norm": 0.38671875,
      "learning_rate": 9.7061900589565e-06,
      "loss": 1.3798588514328003,
      "step": 166
    },
    {
      "epoch": 0.4905109489051095,
      "grad_norm": 0.34375,
      "learning_rate": 9.695817477352593e-06,
      "loss": 1.3861608505249023,
      "step": 168
    },
    {
      "epoch": 0.49635036496350365,
      "grad_norm": 0.4453125,
      "learning_rate": 9.685271361300117e-06,
      "loss": 1.4044109582901,
      "step": 170
    },
    {
      "epoch": 0.5021897810218978,
      "grad_norm": 0.345703125,
      "learning_rate": 9.67455214697497e-06,
      "loss": 1.392524003982544,
      "step": 172
    },
    {
      "epoch": 0.5080291970802919,
      "grad_norm": 0.73046875,
      "learning_rate": 9.663660277712194e-06,
      "loss": 1.4445301294326782,
      "step": 174
    },
    {
      "epoch": 0.5138686131386861,
      "grad_norm": 0.4453125,
      "learning_rate": 9.652596203987667e-06,
      "loss": 1.47966468334198,
      "step": 176
    },
    {
      "epoch": 0.5197080291970803,
      "grad_norm": 0.431640625,
      "learning_rate": 9.64136038339944e-06,
      "loss": 1.392290711402893,
      "step": 178
    },
    {
      "epoch": 0.5255474452554745,
      "grad_norm": 0.390625,
      "learning_rate": 9.62995328064884e-06,
      "loss": 1.5381602048873901,
      "step": 180
    },
    {
      "epoch": 0.5313868613138686,
      "grad_norm": 0.36328125,
      "learning_rate": 9.618375367521232e-06,
      "loss": 1.3802764415740967,
      "step": 182
    },
    {
      "epoch": 0.5372262773722628,
      "grad_norm": 0.453125,
      "learning_rate": 9.606627122866513e-06,
      "loss": 1.4806022644042969,
      "step": 184
    },
    {
      "epoch": 0.5430656934306569,
      "grad_norm": 0.546875,
      "learning_rate": 9.594709032579307e-06,
      "loss": 1.4255928993225098,
      "step": 186
    },
    {
      "epoch": 0.5489051094890511,
      "grad_norm": 0.73828125,
      "learning_rate": 9.582621589578873e-06,
      "loss": 1.336686372756958,
      "step": 188
    },
    {
      "epoch": 0.5547445255474452,
      "grad_norm": 0.51171875,
      "learning_rate": 9.570365293788707e-06,
      "loss": 1.4040147066116333,
      "step": 190
    },
    {
      "epoch": 0.5605839416058395,
      "grad_norm": 0.337890625,
      "learning_rate": 9.557940652115875e-06,
      "loss": 1.416895866394043,
      "step": 192
    },
    {
      "epoch": 0.5664233576642336,
      "grad_norm": 0.455078125,
      "learning_rate": 9.545348178430051e-06,
      "loss": 1.3130196332931519,
      "step": 194
    },
    {
      "epoch": 0.5722627737226277,
      "grad_norm": 0.3671875,
      "learning_rate": 9.53258839354225e-06,
      "loss": 1.3405402898788452,
      "step": 196
    },
    {
      "epoch": 0.5781021897810219,
      "grad_norm": 0.337890625,
      "learning_rate": 9.519661825183303e-06,
      "loss": 1.4502712488174438,
      "step": 198
    },
    {
      "epoch": 0.583941605839416,
      "grad_norm": 0.9765625,
      "learning_rate": 9.506569007982023e-06,
      "loss": 1.4316283464431763,
      "step": 200
    },
    {
      "epoch": 0.5897810218978102,
      "grad_norm": 1.0859375,
      "learning_rate": 9.493310483443089e-06,
      "loss": 1.4556365013122559,
      "step": 202
    },
    {
      "epoch": 0.5956204379562043,
      "grad_norm": 0.376953125,
      "learning_rate": 9.479886799924663e-06,
      "loss": 1.413832426071167,
      "step": 204
    },
    {
      "epoch": 0.6014598540145986,
      "grad_norm": 0.56640625,
      "learning_rate": 9.466298512615697e-06,
      "loss": 1.252786636352539,
      "step": 206
    },
    {
      "epoch": 0.6072992700729927,
      "grad_norm": 0.4765625,
      "learning_rate": 9.45254618351298e-06,
      "loss": 1.369625210762024,
      "step": 208
    },
    {
      "epoch": 0.6131386861313869,
      "grad_norm": 1.40625,
      "learning_rate": 9.43863038139789e-06,
      "loss": 1.3296895027160645,
      "step": 210
    },
    {
      "epoch": 0.618978102189781,
      "grad_norm": 0.388671875,
      "learning_rate": 9.424551681812871e-06,
      "loss": 1.4407665729522705,
      "step": 212
    },
    {
      "epoch": 0.6248175182481752,
      "grad_norm": 0.400390625,
      "learning_rate": 9.41031066703763e-06,
      "loss": 1.363607406616211,
      "step": 214
    },
    {
      "epoch": 0.6306569343065693,
      "grad_norm": 0.6953125,
      "learning_rate": 9.395907926065052e-06,
      "loss": 1.4068963527679443,
      "step": 216
    },
    {
      "epoch": 0.6364963503649635,
      "grad_norm": 0.484375,
      "learning_rate": 9.381344054576845e-06,
      "loss": 1.4279429912567139,
      "step": 218
    },
    {
      "epoch": 0.6423357664233577,
      "grad_norm": 0.4375,
      "learning_rate": 9.366619654918895e-06,
      "loss": 1.4228907823562622,
      "step": 220
    },
    {
      "epoch": 0.6481751824817519,
      "grad_norm": 0.330078125,
      "learning_rate": 9.351735336076363e-06,
      "loss": 1.4166944026947021,
      "step": 222
    },
    {
      "epoch": 0.654014598540146,
      "grad_norm": 0.6171875,
      "learning_rate": 9.33669171364849e-06,
      "loss": 1.438708782196045,
      "step": 224
    },
    {
      "epoch": 0.6598540145985401,
      "grad_norm": 0.59375,
      "learning_rate": 9.32148940982315e-06,
      "loss": 1.3473215103149414,
      "step": 226
    },
    {
      "epoch": 0.6656934306569343,
      "grad_norm": 0.326171875,
      "learning_rate": 9.306129053351087e-06,
      "loss": 1.3401542901992798,
      "step": 228
    },
    {
      "epoch": 0.6715328467153284,
      "grad_norm": 0.59765625,
      "learning_rate": 9.290611279519952e-06,
      "loss": 1.4278738498687744,
      "step": 230
    },
    {
      "epoch": 0.6773722627737226,
      "grad_norm": 0.47265625,
      "learning_rate": 9.274936730127991e-06,
      "loss": 1.4194695949554443,
      "step": 232
    },
    {
      "epoch": 0.6832116788321168,
      "grad_norm": 0.59375,
      "learning_rate": 9.259106053457529e-06,
      "loss": 1.4373456239700317,
      "step": 234
    },
    {
      "epoch": 0.689051094890511,
      "grad_norm": 0.51171875,
      "learning_rate": 9.243119904248136e-06,
      "loss": 1.3994792699813843,
      "step": 236
    },
    {
      "epoch": 0.6948905109489051,
      "grad_norm": 0.36328125,
      "learning_rate": 9.226978943669562e-06,
      "loss": 1.3777506351470947,
      "step": 238
    },
    {
      "epoch": 0.7007299270072993,
      "grad_norm": 0.37890625,
      "learning_rate": 9.210683839294386e-06,
      "loss": 1.3514622449874878,
      "step": 240
    },
    {
      "epoch": 0.7065693430656934,
      "grad_norm": 0.5390625,
      "learning_rate": 9.194235265070406e-06,
      "loss": 1.4253668785095215,
      "step": 242
    },
    {
      "epoch": 0.7124087591240876,
      "grad_norm": 0.470703125,
      "learning_rate": 9.177633901292767e-06,
      "loss": 1.3638951778411865,
      "step": 244
    },
    {
      "epoch": 0.7182481751824817,
      "grad_norm": 0.310546875,
      "learning_rate": 9.160880434575823e-06,
      "loss": 1.3718801736831665,
      "step": 246
    },
    {
      "epoch": 0.724087591240876,
      "grad_norm": 0.46484375,
      "learning_rate": 9.143975557824743e-06,
      "loss": 1.3095505237579346,
      "step": 248
    },
    {
      "epoch": 0.7299270072992701,
      "grad_norm": 0.56640625,
      "learning_rate": 9.126919970206843e-06,
      "loss": 1.3069571256637573,
      "step": 250
    },
    {
      "epoch": 0.7357664233576642,
      "grad_norm": 0.27734375,
      "learning_rate": 9.109714377122685e-06,
      "loss": 1.341015100479126,
      "step": 252
    },
    {
      "epoch": 0.7416058394160584,
      "grad_norm": 0.5859375,
      "learning_rate": 9.092359490176892e-06,
      "loss": 1.3410066366195679,
      "step": 254
    },
    {
      "epoch": 0.7474452554744525,
      "grad_norm": 0.396484375,
      "learning_rate": 9.074856027148715e-06,
      "loss": 1.265282154083252,
      "step": 256
    },
    {
      "epoch": 0.7532846715328467,
      "grad_norm": 0.349609375,
      "learning_rate": 9.057204711962352e-06,
      "loss": 1.2000367641448975,
      "step": 258
    },
    {
      "epoch": 0.7591240875912408,
      "grad_norm": 0.50390625,
      "learning_rate": 9.03940627465701e-06,
      "loss": 1.4280476570129395,
      "step": 260
    },
    {
      "epoch": 0.7649635036496351,
      "grad_norm": 0.51171875,
      "learning_rate": 9.021461451356695e-06,
      "loss": 1.3344112634658813,
      "step": 262
    },
    {
      "epoch": 0.7708029197080292,
      "grad_norm": 0.859375,
      "learning_rate": 9.003370984239795e-06,
      "loss": 1.4776599407196045,
      "step": 264
    },
    {
      "epoch": 0.7766423357664234,
      "grad_norm": 0.6328125,
      "learning_rate": 8.985135621508355e-06,
      "loss": 1.4331424236297607,
      "step": 266
    },
    {
      "epoch": 0.7824817518248175,
      "grad_norm": 0.314453125,
      "learning_rate": 8.966756117357152e-06,
      "loss": 1.3084166049957275,
      "step": 268
    },
    {
      "epoch": 0.7883211678832117,
      "grad_norm": 2.640625,
      "learning_rate": 8.948233231942489e-06,
      "loss": 1.3645600080490112,
      "step": 270
    },
    {
      "epoch": 0.7941605839416058,
      "grad_norm": 0.703125,
      "learning_rate": 8.929567731350767e-06,
      "loss": 1.3116084337234497,
      "step": 272
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.65625,
      "learning_rate": 8.9107603875668e-06,
      "loss": 1.363153338432312,
      "step": 274
    },
    {
      "epoch": 0.8058394160583942,
      "grad_norm": 0.4140625,
      "learning_rate": 8.891811978441871e-06,
      "loss": 1.3285009860992432,
      "step": 276
    },
    {
      "epoch": 0.8116788321167884,
      "grad_norm": 0.40234375,
      "learning_rate": 8.87272328766158e-06,
      "loss": 1.4433071613311768,
      "step": 278
    },
    {
      "epoch": 0.8175182481751825,
      "grad_norm": 1.09375,
      "learning_rate": 8.85349510471342e-06,
      "loss": 1.4583128690719604,
      "step": 280
    },
    {
      "epoch": 0.8233576642335766,
      "grad_norm": 0.298828125,
      "learning_rate": 8.834128224854133e-06,
      "loss": 1.3218973875045776,
      "step": 282
    },
    {
      "epoch": 0.8291970802919708,
      "grad_norm": 0.97265625,
      "learning_rate": 8.814623449076809e-06,
      "loss": 1.291555404663086,
      "step": 284
    },
    {
      "epoch": 0.8350364963503649,
      "grad_norm": 0.337890625,
      "learning_rate": 8.794981584077767e-06,
      "loss": 1.3497284650802612,
      "step": 286
    },
    {
      "epoch": 0.8408759124087591,
      "grad_norm": 0.322265625,
      "learning_rate": 8.775203442223186e-06,
      "loss": 1.2834782600402832,
      "step": 288
    },
    {
      "epoch": 0.8467153284671532,
      "grad_norm": 0.35546875,
      "learning_rate": 8.755289841515507e-06,
      "loss": 1.3291265964508057,
      "step": 290
    },
    {
      "epoch": 0.8525547445255475,
      "grad_norm": 0.373046875,
      "learning_rate": 8.735241605559603e-06,
      "loss": 1.3475388288497925,
      "step": 292
    },
    {
      "epoch": 0.8583941605839416,
      "grad_norm": 0.349609375,
      "learning_rate": 8.715059563528713e-06,
      "loss": 1.37130606174469,
      "step": 294
    },
    {
      "epoch": 0.8642335766423358,
      "grad_norm": 0.443359375,
      "learning_rate": 8.694744550130153e-06,
      "loss": 1.4104875326156616,
      "step": 296
    },
    {
      "epoch": 0.8700729927007299,
      "grad_norm": 1.1875,
      "learning_rate": 8.674297405570782e-06,
      "loss": 1.3223466873168945,
      "step": 298
    },
    {
      "epoch": 0.8759124087591241,
      "grad_norm": 0.52734375,
      "learning_rate": 8.653718975522271e-06,
      "loss": 1.3034943342208862,
      "step": 300
    },
    {
      "epoch": 0.8817518248175182,
      "grad_norm": 0.46875,
      "learning_rate": 8.63301011108611e-06,
      "loss": 1.4011861085891724,
      "step": 302
    },
    {
      "epoch": 0.8875912408759125,
      "grad_norm": 0.7578125,
      "learning_rate": 8.612171668758413e-06,
      "loss": 1.4873169660568237,
      "step": 304
    },
    {
      "epoch": 0.8934306569343066,
      "grad_norm": 0.4296875,
      "learning_rate": 8.591204510394498e-06,
      "loss": 1.2987056970596313,
      "step": 306
    },
    {
      "epoch": 0.8992700729927007,
      "grad_norm": 0.61328125,
      "learning_rate": 8.570109503173233e-06,
      "loss": 1.3402009010314941,
      "step": 308
    },
    {
      "epoch": 0.9051094890510949,
      "grad_norm": 0.5859375,
      "learning_rate": 8.548887519561184e-06,
      "loss": 1.2947343587875366,
      "step": 310
    },
    {
      "epoch": 0.910948905109489,
      "grad_norm": 0.89453125,
      "learning_rate": 8.527539437276515e-06,
      "loss": 1.3383480310440063,
      "step": 312
    },
    {
      "epoch": 0.9167883211678832,
      "grad_norm": 0.72265625,
      "learning_rate": 8.506066139252696e-06,
      "loss": 1.3967725038528442,
      "step": 314
    },
    {
      "epoch": 0.9226277372262773,
      "grad_norm": 0.3828125,
      "learning_rate": 8.484468513601987e-06,
      "loss": 1.2097219228744507,
      "step": 316
    },
    {
      "epoch": 0.9284671532846716,
      "grad_norm": 1.546875,
      "learning_rate": 8.462747453578698e-06,
      "loss": 1.3612643480300903,
      "step": 318
    },
    {
      "epoch": 0.9343065693430657,
      "grad_norm": 0.578125,
      "learning_rate": 8.440903857542253e-06,
      "loss": 1.3607382774353027,
      "step": 320
    },
    {
      "epoch": 0.9401459854014599,
      "grad_norm": 0.384765625,
      "learning_rate": 8.418938628920038e-06,
      "loss": 1.276464819908142,
      "step": 322
    },
    {
      "epoch": 0.945985401459854,
      "grad_norm": 0.5390625,
      "learning_rate": 8.396852676170018e-06,
      "loss": 1.3733280897140503,
      "step": 324
    },
    {
      "epoch": 0.9518248175182482,
      "grad_norm": 0.6328125,
      "learning_rate": 8.374646912743187e-06,
      "loss": 1.365743637084961,
      "step": 326
    },
    {
      "epoch": 0.9576642335766423,
      "grad_norm": 0.373046875,
      "learning_rate": 8.352322257045775e-06,
      "loss": 1.3169546127319336,
      "step": 328
    },
    {
      "epoch": 0.9635036496350365,
      "grad_norm": 0.49609375,
      "learning_rate": 8.329879632401265e-06,
      "loss": 1.3615431785583496,
      "step": 330
    },
    {
      "epoch": 0.9693430656934306,
      "grad_norm": 0.341796875,
      "learning_rate": 8.307319967012211e-06,
      "loss": 1.3420912027359009,
      "step": 332
    },
    {
      "epoch": 0.9751824817518249,
      "grad_norm": 0.8203125,
      "learning_rate": 8.284644193921848e-06,
      "loss": 1.28525710105896,
      "step": 334
    },
    {
      "epoch": 0.981021897810219,
      "grad_norm": 0.48046875,
      "learning_rate": 8.261853250975487e-06,
      "loss": 1.3461776971817017,
      "step": 336
    },
    {
      "epoch": 0.9868613138686131,
      "grad_norm": 0.546875,
      "learning_rate": 8.238948080781752e-06,
      "loss": 1.3771872520446777,
      "step": 338
    },
    {
      "epoch": 0.9927007299270073,
      "grad_norm": 0.71484375,
      "learning_rate": 8.215929630673573e-06,
      "loss": 1.1941386461257935,
      "step": 340
    },
    {
      "epoch": 0.9985401459854014,
      "grad_norm": 0.3359375,
      "learning_rate": 8.192798852669016e-06,
      "loss": 1.3167439699172974,
      "step": 342
    },
    {
      "epoch": 1.0029197080291972,
      "grad_norm": 0.291015625,
      "learning_rate": 8.169556703431916e-06,
      "loss": 1.196553111076355,
      "step": 344
    },
    {
      "epoch": 1.0087591240875913,
      "grad_norm": 0.376953125,
      "learning_rate": 8.146204144232285e-06,
      "loss": 1.3300058841705322,
      "step": 346
    },
    {
      "epoch": 1.0145985401459854,
      "grad_norm": 0.388671875,
      "learning_rate": 8.122742140906585e-06,
      "loss": 1.3176811933517456,
      "step": 348
    },
    {
      "epoch": 1.0204379562043795,
      "grad_norm": 0.486328125,
      "learning_rate": 8.099171663817758e-06,
      "loss": 1.2936145067214966,
      "step": 350
    },
    {
      "epoch": 1.0262773722627738,
      "grad_norm": 0.375,
      "learning_rate": 8.07549368781511e-06,
      "loss": 1.2210314273834229,
      "step": 352
    },
    {
      "epoch": 1.032116788321168,
      "grad_norm": 0.625,
      "learning_rate": 8.05170919219398e-06,
      "loss": 1.2909876108169556,
      "step": 354
    },
    {
      "epoch": 1.037956204379562,
      "grad_norm": 0.58203125,
      "learning_rate": 8.027819160655248e-06,
      "loss": 1.2588341236114502,
      "step": 356
    },
    {
      "epoch": 1.0437956204379562,
      "grad_norm": 0.267578125,
      "learning_rate": 8.003824581264639e-06,
      "loss": 1.2091121673583984,
      "step": 358
    },
    {
      "epoch": 1.0496350364963503,
      "grad_norm": 0.259765625,
      "learning_rate": 7.979726446411868e-06,
      "loss": 1.2468533515930176,
      "step": 360
    },
    {
      "epoch": 1.0554744525547446,
      "grad_norm": 0.703125,
      "learning_rate": 7.955525752769588e-06,
      "loss": 1.2384498119354248,
      "step": 362
    },
    {
      "epoch": 1.0613138686131387,
      "grad_norm": 0.447265625,
      "learning_rate": 7.931223501252176e-06,
      "loss": 1.2103887796401978,
      "step": 364
    },
    {
      "epoch": 1.0671532846715328,
      "grad_norm": 0.375,
      "learning_rate": 7.906820696974325e-06,
      "loss": 1.2475626468658447,
      "step": 366
    },
    {
      "epoch": 1.072992700729927,
      "grad_norm": 0.474609375,
      "learning_rate": 7.882318349209492e-06,
      "loss": 1.3049901723861694,
      "step": 368
    },
    {
      "epoch": 1.0788321167883212,
      "grad_norm": 0.478515625,
      "learning_rate": 7.857717471348135e-06,
      "loss": 1.3034197092056274,
      "step": 370
    },
    {
      "epoch": 1.0846715328467154,
      "grad_norm": 0.5078125,
      "learning_rate": 7.833019080855812e-06,
      "loss": 1.177399754524231,
      "step": 372
    },
    {
      "epoch": 1.0905109489051095,
      "grad_norm": 0.69921875,
      "learning_rate": 7.808224199231099e-06,
      "loss": 1.2648054361343384,
      "step": 374
    },
    {
      "epoch": 1.0963503649635036,
      "grad_norm": 0.349609375,
      "learning_rate": 7.783333851963337e-06,
      "loss": 1.1547051668167114,
      "step": 376
    },
    {
      "epoch": 1.102189781021898,
      "grad_norm": 0.337890625,
      "learning_rate": 7.75834906849023e-06,
      "loss": 1.2063677310943604,
      "step": 378
    },
    {
      "epoch": 1.108029197080292,
      "grad_norm": 0.431640625,
      "learning_rate": 7.733270882155252e-06,
      "loss": 1.294480323791504,
      "step": 380
    },
    {
      "epoch": 1.1138686131386861,
      "grad_norm": 0.5078125,
      "learning_rate": 7.708100330164922e-06,
      "loss": 1.1963160037994385,
      "step": 382
    },
    {
      "epoch": 1.1197080291970802,
      "grad_norm": 0.82421875,
      "learning_rate": 7.682838453545898e-06,
      "loss": 1.2486681938171387,
      "step": 384
    },
    {
      "epoch": 1.1255474452554743,
      "grad_norm": 0.294921875,
      "learning_rate": 7.657486297101933e-06,
      "loss": 1.2629107236862183,
      "step": 386
    },
    {
      "epoch": 1.1313868613138687,
      "grad_norm": 0.486328125,
      "learning_rate": 7.63204490937065e-06,
      "loss": 1.200867772102356,
      "step": 388
    },
    {
      "epoch": 1.1372262773722628,
      "grad_norm": 0.53125,
      "learning_rate": 7.606515342580181e-06,
      "loss": 1.2637931108474731,
      "step": 390
    },
    {
      "epoch": 1.143065693430657,
      "grad_norm": 0.369140625,
      "learning_rate": 7.580898652605649e-06,
      "loss": 1.2616581916809082,
      "step": 392
    },
    {
      "epoch": 1.148905109489051,
      "grad_norm": 0.48828125,
      "learning_rate": 7.555195898925497e-06,
      "loss": 1.3153358697891235,
      "step": 394
    },
    {
      "epoch": 1.1547445255474453,
      "grad_norm": 0.58203125,
      "learning_rate": 7.529408144577673e-06,
      "loss": 1.3324670791625977,
      "step": 396
    },
    {
      "epoch": 1.1605839416058394,
      "grad_norm": 0.283203125,
      "learning_rate": 7.503536456115653e-06,
      "loss": 1.226647138595581,
      "step": 398
    },
    {
      "epoch": 1.1664233576642336,
      "grad_norm": 1.125,
      "learning_rate": 7.47758190356434e-06,
      "loss": 1.2243517637252808,
      "step": 400
    },
    {
      "epoch": 1.1722627737226277,
      "grad_norm": 0.75,
      "learning_rate": 7.451545560375808e-06,
      "loss": 1.2697927951812744,
      "step": 402
    },
    {
      "epoch": 1.178102189781022,
      "grad_norm": 1.0703125,
      "learning_rate": 7.425428503384898e-06,
      "loss": 1.2327685356140137,
      "step": 404
    },
    {
      "epoch": 1.183941605839416,
      "grad_norm": 1.390625,
      "learning_rate": 7.399231812764695e-06,
      "loss": 1.3016244173049927,
      "step": 406
    },
    {
      "epoch": 1.1897810218978102,
      "grad_norm": 0.3046875,
      "learning_rate": 7.372956571981831e-06,
      "loss": 1.189962387084961,
      "step": 408
    },
    {
      "epoch": 1.1956204379562043,
      "grad_norm": 2.15625,
      "learning_rate": 7.346603867751698e-06,
      "loss": 1.235486626625061,
      "step": 410
    },
    {
      "epoch": 1.2014598540145984,
      "grad_norm": 1.0703125,
      "learning_rate": 7.32017478999349e-06,
      "loss": 1.2581639289855957,
      "step": 412
    },
    {
      "epoch": 1.2072992700729928,
      "grad_norm": 0.44140625,
      "learning_rate": 7.293670431785123e-06,
      "loss": 1.2778874635696411,
      "step": 414
    },
    {
      "epoch": 1.2131386861313869,
      "grad_norm": 0.66015625,
      "learning_rate": 7.267091889318033e-06,
      "loss": 1.3071149587631226,
      "step": 416
    },
    {
      "epoch": 1.218978102189781,
      "grad_norm": 0.404296875,
      "learning_rate": 7.240440261851839e-06,
      "loss": 1.2892022132873535,
      "step": 418
    },
    {
      "epoch": 1.224817518248175,
      "grad_norm": 0.828125,
      "learning_rate": 7.21371665166887e-06,
      "loss": 1.1825231313705444,
      "step": 420
    },
    {
      "epoch": 1.2306569343065694,
      "grad_norm": 0.703125,
      "learning_rate": 7.186922164028589e-06,
      "loss": 1.2527143955230713,
      "step": 422
    },
    {
      "epoch": 1.2364963503649635,
      "grad_norm": 0.71875,
      "learning_rate": 7.160057907121868e-06,
      "loss": 1.1351896524429321,
      "step": 424
    },
    {
      "epoch": 1.2423357664233576,
      "grad_norm": 0.47265625,
      "learning_rate": 7.133124992025161e-06,
      "loss": 1.2882559299468994,
      "step": 426
    },
    {
      "epoch": 1.2481751824817517,
      "grad_norm": 2.796875,
      "learning_rate": 7.106124532654553e-06,
      "loss": 1.3231326341629028,
      "step": 428
    },
    {
      "epoch": 1.254014598540146,
      "grad_norm": 0.328125,
      "learning_rate": 7.079057645719686e-06,
      "loss": 1.138244867324829,
      "step": 430
    },
    {
      "epoch": 1.2598540145985402,
      "grad_norm": 0.39453125,
      "learning_rate": 7.051925450677568e-06,
      "loss": 1.3060168027877808,
      "step": 432
    },
    {
      "epoch": 1.2656934306569343,
      "grad_norm": 0.484375,
      "learning_rate": 7.024729069686288e-06,
      "loss": 1.2885905504226685,
      "step": 434
    },
    {
      "epoch": 1.2715328467153284,
      "grad_norm": 0.2890625,
      "learning_rate": 6.997469627558591e-06,
      "loss": 1.2515441179275513,
      "step": 436
    },
    {
      "epoch": 1.2773722627737225,
      "grad_norm": 0.41796875,
      "learning_rate": 6.970148251715363e-06,
      "loss": 1.2661933898925781,
      "step": 438
    },
    {
      "epoch": 1.2832116788321168,
      "grad_norm": 0.90625,
      "learning_rate": 6.942766072139e-06,
      "loss": 1.2927354574203491,
      "step": 440
    },
    {
      "epoch": 1.289051094890511,
      "grad_norm": 0.62890625,
      "learning_rate": 6.9153242213266745e-06,
      "loss": 1.2908434867858887,
      "step": 442
    },
    {
      "epoch": 1.294890510948905,
      "grad_norm": 0.482421875,
      "learning_rate": 6.887823834243501e-06,
      "loss": 1.2376095056533813,
      "step": 444
    },
    {
      "epoch": 1.3007299270072994,
      "grad_norm": 0.36328125,
      "learning_rate": 6.860266048275585e-06,
      "loss": 1.2220968008041382,
      "step": 446
    },
    {
      "epoch": 1.3065693430656935,
      "grad_norm": 0.52734375,
      "learning_rate": 6.832652003182989e-06,
      "loss": 1.2590991258621216,
      "step": 448
    },
    {
      "epoch": 1.3124087591240876,
      "grad_norm": 0.5234375,
      "learning_rate": 6.804982841052593e-06,
      "loss": 1.2670376300811768,
      "step": 450
    },
    {
      "epoch": 1.3182481751824817,
      "grad_norm": 1.3984375,
      "learning_rate": 6.777259706250856e-06,
      "loss": 1.3193187713623047,
      "step": 452
    },
    {
      "epoch": 1.3240875912408758,
      "grad_norm": 1.0234375,
      "learning_rate": 6.749483745376489e-06,
      "loss": 1.2359987497329712,
      "step": 454
    },
    {
      "epoch": 1.3299270072992702,
      "grad_norm": 0.546875,
      "learning_rate": 6.721656107213032e-06,
      "loss": 1.2815067768096924,
      "step": 456
    },
    {
      "epoch": 1.3357664233576643,
      "grad_norm": 0.486328125,
      "learning_rate": 6.693777942681339e-06,
      "loss": 1.2145709991455078,
      "step": 458
    },
    {
      "epoch": 1.3416058394160584,
      "grad_norm": 0.435546875,
      "learning_rate": 6.665850404791986e-06,
      "loss": 1.2475862503051758,
      "step": 460
    },
    {
      "epoch": 1.3474452554744525,
      "grad_norm": 0.4453125,
      "learning_rate": 6.637874648597571e-06,
      "loss": 1.2392385005950928,
      "step": 462
    },
    {
      "epoch": 1.3532846715328466,
      "grad_norm": 1.1875,
      "learning_rate": 6.60985183114495e-06,
      "loss": 1.2735258340835571,
      "step": 464
    },
    {
      "epoch": 1.359124087591241,
      "grad_norm": 0.34765625,
      "learning_rate": 6.5817831114273845e-06,
      "loss": 1.26645827293396,
      "step": 466
    },
    {
      "epoch": 1.364963503649635,
      "grad_norm": 0.396484375,
      "learning_rate": 6.553669650336599e-06,
      "loss": 1.2172024250030518,
      "step": 468
    },
    {
      "epoch": 1.3708029197080291,
      "grad_norm": 0.37890625,
      "learning_rate": 6.5255126106147785e-06,
      "loss": 1.3016058206558228,
      "step": 470
    },
    {
      "epoch": 1.3766423357664235,
      "grad_norm": 0.400390625,
      "learning_rate": 6.497313156806467e-06,
      "loss": 1.3059654235839844,
      "step": 472
    },
    {
      "epoch": 1.3824817518248176,
      "grad_norm": 0.7421875,
      "learning_rate": 6.469072455210414e-06,
      "loss": 1.2682950496673584,
      "step": 474
    },
    {
      "epoch": 1.3883211678832117,
      "grad_norm": 0.41015625,
      "learning_rate": 6.4407916738313256e-06,
      "loss": 1.190187931060791,
      "step": 476
    },
    {
      "epoch": 1.3941605839416058,
      "grad_norm": 0.51953125,
      "learning_rate": 6.412471982331574e-06,
      "loss": 1.2461662292480469,
      "step": 478
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.5,
      "learning_rate": 6.384114551982804e-06,
      "loss": 1.2167097330093384,
      "step": 480
    },
    {
      "epoch": 1.4058394160583942,
      "grad_norm": 0.390625,
      "learning_rate": 6.3557205556175e-06,
      "loss": 1.1439507007598877,
      "step": 482
    },
    {
      "epoch": 1.4116788321167884,
      "grad_norm": 1.1328125,
      "learning_rate": 6.327291167580478e-06,
      "loss": 1.207546353340149,
      "step": 484
    },
    {
      "epoch": 1.4175182481751825,
      "grad_norm": 0.486328125,
      "learning_rate": 6.298827563680314e-06,
      "loss": 1.292868971824646,
      "step": 486
    },
    {
      "epoch": 1.4233576642335766,
      "grad_norm": 0.42578125,
      "learning_rate": 6.270330921140718e-06,
      "loss": 1.3323169946670532,
      "step": 488
    },
    {
      "epoch": 1.4291970802919707,
      "grad_norm": 0.458984375,
      "learning_rate": 6.241802418551838e-06,
      "loss": 1.19215726852417,
      "step": 490
    },
    {
      "epoch": 1.435036496350365,
      "grad_norm": 0.5703125,
      "learning_rate": 6.21324323582152e-06,
      "loss": 1.2297883033752441,
      "step": 492
    },
    {
      "epoch": 1.4408759124087591,
      "grad_norm": 0.33984375,
      "learning_rate": 6.1846545541265115e-06,
      "loss": 1.3055942058563232,
      "step": 494
    },
    {
      "epoch": 1.4467153284671532,
      "grad_norm": 0.4140625,
      "learning_rate": 6.156037555863597e-06,
      "loss": 1.2372398376464844,
      "step": 496
    },
    {
      "epoch": 1.4525547445255476,
      "grad_norm": 0.7265625,
      "learning_rate": 6.1273934246007105e-06,
      "loss": 1.3352288007736206,
      "step": 498
    },
    {
      "epoch": 1.4583941605839417,
      "grad_norm": 0.5390625,
      "learning_rate": 6.098723345027977e-06,
      "loss": 1.2846771478652954,
      "step": 500
    },
    {
      "epoch": 1.4642335766423358,
      "grad_norm": 0.5703125,
      "learning_rate": 6.070028502908709e-06,
      "loss": 1.2137339115142822,
      "step": 502
    },
    {
      "epoch": 1.4700729927007299,
      "grad_norm": 0.470703125,
      "learning_rate": 6.041310085030378e-06,
      "loss": 1.2333766222000122,
      "step": 504
    },
    {
      "epoch": 1.475912408759124,
      "grad_norm": 0.400390625,
      "learning_rate": 6.012569279155521e-06,
      "loss": 1.217051386833191,
      "step": 506
    },
    {
      "epoch": 1.4817518248175183,
      "grad_norm": 1.078125,
      "learning_rate": 5.983807273972617e-06,
      "loss": 1.1864327192306519,
      "step": 508
    },
    {
      "epoch": 1.4875912408759124,
      "grad_norm": 0.46484375,
      "learning_rate": 5.955025259046927e-06,
      "loss": 1.2389144897460938,
      "step": 510
    },
    {
      "epoch": 1.4934306569343065,
      "grad_norm": 0.71875,
      "learning_rate": 5.926224424771291e-06,
      "loss": 1.2382243871688843,
      "step": 512
    },
    {
      "epoch": 1.4992700729927007,
      "grad_norm": 0.4921875,
      "learning_rate": 5.897405962316899e-06,
      "loss": 1.208208680152893,
      "step": 514
    },
    {
      "epoch": 1.5051094890510948,
      "grad_norm": 0.44921875,
      "learning_rate": 5.868571063584023e-06,
      "loss": 1.286102533340454,
      "step": 516
    },
    {
      "epoch": 1.510948905109489,
      "grad_norm": 0.494140625,
      "learning_rate": 5.83972092115272e-06,
      "loss": 1.2459356784820557,
      "step": 518
    },
    {
      "epoch": 1.5167883211678832,
      "grad_norm": 0.53515625,
      "learning_rate": 5.810856728233513e-06,
      "loss": 1.276041865348816,
      "step": 520
    },
    {
      "epoch": 1.5226277372262773,
      "grad_norm": 0.734375,
      "learning_rate": 5.781979678618033e-06,
      "loss": 1.27890944480896,
      "step": 522
    },
    {
      "epoch": 1.5284671532846716,
      "grad_norm": 1.7265625,
      "learning_rate": 5.753090966629654e-06,
      "loss": 1.1399537324905396,
      "step": 524
    },
    {
      "epoch": 1.5343065693430655,
      "grad_norm": 0.388671875,
      "learning_rate": 5.7241917870740935e-06,
      "loss": 1.2000788450241089,
      "step": 526
    },
    {
      "epoch": 1.5401459854014599,
      "grad_norm": 0.451171875,
      "learning_rate": 5.695283335189992e-06,
      "loss": 1.3109184503555298,
      "step": 528
    },
    {
      "epoch": 1.545985401459854,
      "grad_norm": 0.5859375,
      "learning_rate": 5.666366806599488e-06,
      "loss": 1.2306164503097534,
      "step": 530
    },
    {
      "epoch": 1.551824817518248,
      "grad_norm": 0.3359375,
      "learning_rate": 5.63744339725876e-06,
      "loss": 1.2434693574905396,
      "step": 532
    },
    {
      "epoch": 1.5576642335766424,
      "grad_norm": 0.8125,
      "learning_rate": 5.608514303408572e-06,
      "loss": 1.1840685606002808,
      "step": 534
    },
    {
      "epoch": 1.5635036496350365,
      "grad_norm": 0.90234375,
      "learning_rate": 5.579580721524786e-06,
      "loss": 1.2241274118423462,
      "step": 536
    },
    {
      "epoch": 1.5693430656934306,
      "grad_norm": 0.478515625,
      "learning_rate": 5.550643848268889e-06,
      "loss": 1.206296682357788,
      "step": 538
    },
    {
      "epoch": 1.575182481751825,
      "grad_norm": 1.6484375,
      "learning_rate": 5.5217048804384945e-06,
      "loss": 1.2080531120300293,
      "step": 540
    },
    {
      "epoch": 1.5810218978102188,
      "grad_norm": 0.498046875,
      "learning_rate": 5.492765014917845e-06,
      "loss": 1.207125186920166,
      "step": 542
    },
    {
      "epoch": 1.5868613138686132,
      "grad_norm": 1.0546875,
      "learning_rate": 5.463825448628314e-06,
      "loss": 1.218297004699707,
      "step": 544
    },
    {
      "epoch": 1.5927007299270073,
      "grad_norm": 0.431640625,
      "learning_rate": 5.434887378478892e-06,
      "loss": 1.1724162101745605,
      "step": 546
    },
    {
      "epoch": 1.5985401459854014,
      "grad_norm": 0.4765625,
      "learning_rate": 5.405952001316697e-06,
      "loss": 1.215348720550537,
      "step": 548
    },
    {
      "epoch": 1.6043795620437957,
      "grad_norm": 0.3359375,
      "learning_rate": 5.377020513877463e-06,
      "loss": 1.2883554697036743,
      "step": 550
    },
    {
      "epoch": 1.6102189781021898,
      "grad_norm": 0.490234375,
      "learning_rate": 5.348094112736057e-06,
      "loss": 1.253550410270691,
      "step": 552
    },
    {
      "epoch": 1.616058394160584,
      "grad_norm": 0.4921875,
      "learning_rate": 5.319173994256973e-06,
      "loss": 1.29800546169281,
      "step": 554
    },
    {
      "epoch": 1.621897810218978,
      "grad_norm": 0.390625,
      "learning_rate": 5.290261354544867e-06,
      "loss": 1.1871838569641113,
      "step": 556
    },
    {
      "epoch": 1.6277372262773722,
      "grad_norm": 0.65625,
      "learning_rate": 5.261357389395078e-06,
      "loss": 1.1837831735610962,
      "step": 558
    },
    {
      "epoch": 1.6335766423357665,
      "grad_norm": 1.125,
      "learning_rate": 5.232463294244178e-06,
      "loss": 1.2513107061386108,
      "step": 560
    },
    {
      "epoch": 1.6394160583941606,
      "grad_norm": 0.52734375,
      "learning_rate": 5.203580264120521e-06,
      "loss": 1.3240464925765991,
      "step": 562
    },
    {
      "epoch": 1.6452554744525547,
      "grad_norm": 0.52734375,
      "learning_rate": 5.1747094935948325e-06,
      "loss": 1.2752066850662231,
      "step": 564
    },
    {
      "epoch": 1.651094890510949,
      "grad_norm": 0.546875,
      "learning_rate": 5.145852176730786e-06,
      "loss": 1.1872999668121338,
      "step": 566
    },
    {
      "epoch": 1.656934306569343,
      "grad_norm": 0.69140625,
      "learning_rate": 5.117009507035628e-06,
      "loss": 1.2202606201171875,
      "step": 568
    },
    {
      "epoch": 1.6627737226277373,
      "grad_norm": 0.625,
      "learning_rate": 5.088182677410819e-06,
      "loss": 1.2224456071853638,
      "step": 570
    },
    {
      "epoch": 1.6686131386861314,
      "grad_norm": 0.6328125,
      "learning_rate": 5.059372880102683e-06,
      "loss": 1.2351001501083374,
      "step": 572
    },
    {
      "epoch": 1.6744525547445255,
      "grad_norm": 0.609375,
      "learning_rate": 5.0305813066531136e-06,
      "loss": 1.2855942249298096,
      "step": 574
    },
    {
      "epoch": 1.6802919708029198,
      "grad_norm": 0.98828125,
      "learning_rate": 5.001809147850282e-06,
      "loss": 1.2410361766815186,
      "step": 576
    },
    {
      "epoch": 1.686131386861314,
      "grad_norm": 0.431640625,
      "learning_rate": 4.97305759367939e-06,
      "loss": 1.2131555080413818,
      "step": 578
    },
    {
      "epoch": 1.691970802919708,
      "grad_norm": 0.4375,
      "learning_rate": 4.944327833273456e-06,
      "loss": 1.2389878034591675,
      "step": 580
    },
    {
      "epoch": 1.6978102189781021,
      "grad_norm": 0.46484375,
      "learning_rate": 4.9156210548641315e-06,
      "loss": 1.2704704999923706,
      "step": 582
    },
    {
      "epoch": 1.7036496350364962,
      "grad_norm": 0.50390625,
      "learning_rate": 4.886938445732557e-06,
      "loss": 1.2079095840454102,
      "step": 584
    },
    {
      "epoch": 1.7094890510948906,
      "grad_norm": 0.5703125,
      "learning_rate": 4.858281192160258e-06,
      "loss": 1.3086167573928833,
      "step": 586
    },
    {
      "epoch": 1.7153284671532847,
      "grad_norm": 0.37109375,
      "learning_rate": 4.82965047938008e-06,
      "loss": 1.1482274532318115,
      "step": 588
    },
    {
      "epoch": 1.7211678832116788,
      "grad_norm": 0.349609375,
      "learning_rate": 4.801047491527176e-06,
      "loss": 1.316062331199646,
      "step": 590
    },
    {
      "epoch": 1.7270072992700731,
      "grad_norm": 0.8359375,
      "learning_rate": 4.772473411590022e-06,
      "loss": 1.271776556968689,
      "step": 592
    },
    {
      "epoch": 1.732846715328467,
      "grad_norm": 0.427734375,
      "learning_rate": 4.743929421361492e-06,
      "loss": 1.1389895677566528,
      "step": 594
    },
    {
      "epoch": 1.7386861313868613,
      "grad_norm": 7.53125,
      "learning_rate": 4.715416701389985e-06,
      "loss": 1.196537733078003,
      "step": 596
    },
    {
      "epoch": 1.7445255474452555,
      "grad_norm": 0.333984375,
      "learning_rate": 4.686936430930597e-06,
      "loss": 1.2567116022109985,
      "step": 598
    },
    {
      "epoch": 1.7503649635036496,
      "grad_norm": 0.427734375,
      "learning_rate": 4.658489787896346e-06,
      "loss": 1.3270188570022583,
      "step": 600
    },
    {
      "epoch": 1.756204379562044,
      "grad_norm": 0.458984375,
      "learning_rate": 4.630077948809457e-06,
      "loss": 1.2750407457351685,
      "step": 602
    },
    {
      "epoch": 1.762043795620438,
      "grad_norm": 0.447265625,
      "learning_rate": 4.601702088752702e-06,
      "loss": 1.2777055501937866,
      "step": 604
    },
    {
      "epoch": 1.7678832116788321,
      "grad_norm": 0.361328125,
      "learning_rate": 4.573363381320795e-06,
      "loss": 1.1461223363876343,
      "step": 606
    },
    {
      "epoch": 1.7737226277372264,
      "grad_norm": 0.498046875,
      "learning_rate": 4.5450629985718655e-06,
      "loss": 1.2125712633132935,
      "step": 608
    },
    {
      "epoch": 1.7795620437956203,
      "grad_norm": 0.453125,
      "learning_rate": 4.516802110978968e-06,
      "loss": 1.259176254272461,
      "step": 610
    },
    {
      "epoch": 1.7854014598540147,
      "grad_norm": 1.3203125,
      "learning_rate": 4.488581887381689e-06,
      "loss": 1.1314493417739868,
      "step": 612
    },
    {
      "epoch": 1.7912408759124088,
      "grad_norm": 0.39453125,
      "learning_rate": 4.460403494937787e-06,
      "loss": 1.2192261219024658,
      "step": 614
    },
    {
      "epoch": 1.7970802919708029,
      "grad_norm": 0.5859375,
      "learning_rate": 4.4322680990749365e-06,
      "loss": 1.1592247486114502,
      "step": 616
    },
    {
      "epoch": 1.8029197080291972,
      "grad_norm": 0.404296875,
      "learning_rate": 4.404176863442515e-06,
      "loss": 1.300370454788208,
      "step": 618
    },
    {
      "epoch": 1.808759124087591,
      "grad_norm": 1.65625,
      "learning_rate": 4.376130949863487e-06,
      "loss": 1.2521164417266846,
      "step": 620
    },
    {
      "epoch": 1.8145985401459854,
      "grad_norm": 0.5703125,
      "learning_rate": 4.3481315182863405e-06,
      "loss": 1.1387872695922852,
      "step": 622
    },
    {
      "epoch": 1.8204379562043795,
      "grad_norm": 0.58984375,
      "learning_rate": 4.320179726737118e-06,
      "loss": 1.2439383268356323,
      "step": 624
    },
    {
      "epoch": 1.8262773722627736,
      "grad_norm": 0.859375,
      "learning_rate": 4.292276731271528e-06,
      "loss": 1.1349395513534546,
      "step": 626
    },
    {
      "epoch": 1.832116788321168,
      "grad_norm": 0.51953125,
      "learning_rate": 4.264423685927123e-06,
      "loss": 1.2615232467651367,
      "step": 628
    },
    {
      "epoch": 1.837956204379562,
      "grad_norm": 1.1328125,
      "learning_rate": 4.236621742675572e-06,
      "loss": 1.297728419303894,
      "step": 630
    },
    {
      "epoch": 1.8437956204379562,
      "grad_norm": 0.89453125,
      "learning_rate": 4.208872051375016e-06,
      "loss": 1.3184691667556763,
      "step": 632
    },
    {
      "epoch": 1.8496350364963505,
      "grad_norm": 0.4140625,
      "learning_rate": 4.1811757597225154e-06,
      "loss": 1.2532655000686646,
      "step": 634
    },
    {
      "epoch": 1.8554744525547444,
      "grad_norm": 0.7421875,
      "learning_rate": 4.1535340132065775e-06,
      "loss": 1.2408058643341064,
      "step": 636
    },
    {
      "epoch": 1.8613138686131387,
      "grad_norm": 0.52734375,
      "learning_rate": 4.125947955059781e-06,
      "loss": 1.298876404762268,
      "step": 638
    },
    {
      "epoch": 1.8671532846715329,
      "grad_norm": 0.58203125,
      "learning_rate": 4.098418726211497e-06,
      "loss": 1.1866939067840576,
      "step": 640
    },
    {
      "epoch": 1.872992700729927,
      "grad_norm": 0.515625,
      "learning_rate": 4.070947465240695e-06,
      "loss": 1.2406346797943115,
      "step": 642
    },
    {
      "epoch": 1.8788321167883213,
      "grad_norm": 0.45703125,
      "learning_rate": 4.043535308328861e-06,
      "loss": 1.2553025484085083,
      "step": 644
    },
    {
      "epoch": 1.8846715328467152,
      "grad_norm": 0.50390625,
      "learning_rate": 4.0161833892129944e-06,
      "loss": 1.2756036520004272,
      "step": 646
    },
    {
      "epoch": 1.8905109489051095,
      "grad_norm": 0.546875,
      "learning_rate": 3.988892839138731e-06,
      "loss": 1.185132622718811,
      "step": 648
    },
    {
      "epoch": 1.8963503649635036,
      "grad_norm": 0.83203125,
      "learning_rate": 3.96166478681355e-06,
      "loss": 1.221949815750122,
      "step": 650
    },
    {
      "epoch": 1.9021897810218977,
      "grad_norm": 0.63671875,
      "learning_rate": 3.934500358360085e-06,
      "loss": 1.2973777055740356,
      "step": 652
    },
    {
      "epoch": 1.908029197080292,
      "grad_norm": 0.373046875,
      "learning_rate": 3.907400677269563e-06,
      "loss": 1.2753914594650269,
      "step": 654
    },
    {
      "epoch": 1.9138686131386862,
      "grad_norm": 0.4609375,
      "learning_rate": 3.880366864355325e-06,
      "loss": 1.2708187103271484,
      "step": 656
    },
    {
      "epoch": 1.9197080291970803,
      "grad_norm": 0.380859375,
      "learning_rate": 3.853400037706478e-06,
      "loss": 1.2944742441177368,
      "step": 658
    },
    {
      "epoch": 1.9255474452554746,
      "grad_norm": 0.41015625,
      "learning_rate": 3.826501312641655e-06,
      "loss": 1.2255587577819824,
      "step": 660
    },
    {
      "epoch": 1.9313868613138685,
      "grad_norm": 1.4609375,
      "learning_rate": 3.7996718016628697e-06,
      "loss": 1.168819546699524,
      "step": 662
    },
    {
      "epoch": 1.9372262773722628,
      "grad_norm": 0.62109375,
      "learning_rate": 3.772912614409526e-06,
      "loss": 1.141089916229248,
      "step": 664
    },
    {
      "epoch": 1.943065693430657,
      "grad_norm": 0.388671875,
      "learning_rate": 3.7462248576125125e-06,
      "loss": 1.2469316720962524,
      "step": 666
    },
    {
      "epoch": 1.948905109489051,
      "grad_norm": 0.53125,
      "learning_rate": 3.719609635048431e-06,
      "loss": 1.222388505935669,
      "step": 668
    },
    {
      "epoch": 1.9547445255474454,
      "grad_norm": 0.47265625,
      "learning_rate": 3.6930680474939486e-06,
      "loss": 1.1344715356826782,
      "step": 670
    },
    {
      "epoch": 1.9605839416058393,
      "grad_norm": 0.5625,
      "learning_rate": 3.6666011926802647e-06,
      "loss": 1.2021689414978027,
      "step": 672
    },
    {
      "epoch": 1.9664233576642336,
      "grad_norm": 0.51171875,
      "learning_rate": 3.6402101652477163e-06,
      "loss": 1.251044750213623,
      "step": 674
    },
    {
      "epoch": 1.9722627737226277,
      "grad_norm": 0.578125,
      "learning_rate": 3.613896056700502e-06,
      "loss": 1.258431315422058,
      "step": 676
    },
    {
      "epoch": 1.9781021897810218,
      "grad_norm": 0.51171875,
      "learning_rate": 3.58765995536154e-06,
      "loss": 1.2117798328399658,
      "step": 678
    },
    {
      "epoch": 1.9839416058394161,
      "grad_norm": 1.390625,
      "learning_rate": 3.561502946327452e-06,
      "loss": 1.2486507892608643,
      "step": 680
    },
    {
      "epoch": 1.9897810218978103,
      "grad_norm": 0.73046875,
      "learning_rate": 3.535426111423691e-06,
      "loss": 1.1538894176483154,
      "step": 682
    },
    {
      "epoch": 1.9956204379562044,
      "grad_norm": 0.5703125,
      "learning_rate": 3.509430529159794e-06,
      "loss": 1.2577342987060547,
      "step": 684
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.2421875,
      "learning_rate": 3.483517274684778e-06,
      "loss": 1.3043779134750366,
      "step": 686
    },
    {
      "epoch": 2.0058394160583943,
      "grad_norm": 0.8359375,
      "learning_rate": 3.457687419742668e-06,
      "loss": 1.1992483139038086,
      "step": 688
    },
    {
      "epoch": 2.011678832116788,
      "grad_norm": 0.42578125,
      "learning_rate": 3.431942032628184e-06,
      "loss": 1.2118330001831055,
      "step": 690
    },
    {
      "epoch": 2.0175182481751825,
      "grad_norm": 0.34375,
      "learning_rate": 3.4062821781425402e-06,
      "loss": 1.1599669456481934,
      "step": 692
    },
    {
      "epoch": 2.0233576642335764,
      "grad_norm": 0.30078125,
      "learning_rate": 3.3807089175494175e-06,
      "loss": 1.1471362113952637,
      "step": 694
    },
    {
      "epoch": 2.0291970802919708,
      "grad_norm": 0.421875,
      "learning_rate": 3.355223308531066e-06,
      "loss": 1.3080707788467407,
      "step": 696
    },
    {
      "epoch": 2.035036496350365,
      "grad_norm": 0.55859375,
      "learning_rate": 3.3298264051445655e-06,
      "loss": 1.2860289812088013,
      "step": 698
    },
    {
      "epoch": 2.040875912408759,
      "grad_norm": 0.4921875,
      "learning_rate": 3.3045192577782214e-06,
      "loss": 1.1995092630386353,
      "step": 700
    },
    {
      "epoch": 2.0467153284671533,
      "grad_norm": 0.5078125,
      "learning_rate": 3.2793029131081335e-06,
      "loss": 1.1927409172058105,
      "step": 702
    },
    {
      "epoch": 2.0525547445255476,
      "grad_norm": 0.38671875,
      "learning_rate": 3.254178414054896e-06,
      "loss": 1.1319977045059204,
      "step": 704
    },
    {
      "epoch": 2.0583941605839415,
      "grad_norm": 0.41015625,
      "learning_rate": 3.2291467997404747e-06,
      "loss": 1.238900899887085,
      "step": 706
    },
    {
      "epoch": 2.064233576642336,
      "grad_norm": 0.439453125,
      "learning_rate": 3.2042091054452175e-06,
      "loss": 1.1581428050994873,
      "step": 708
    },
    {
      "epoch": 2.0700729927007298,
      "grad_norm": 0.31640625,
      "learning_rate": 3.1793663625650444e-06,
      "loss": 1.2304531335830688,
      "step": 710
    },
    {
      "epoch": 2.075912408759124,
      "grad_norm": 0.37109375,
      "learning_rate": 3.154619598568789e-06,
      "loss": 1.2318830490112305,
      "step": 712
    },
    {
      "epoch": 2.0817518248175184,
      "grad_norm": 0.52734375,
      "learning_rate": 3.1299698369557026e-06,
      "loss": 1.2299096584320068,
      "step": 714
    },
    {
      "epoch": 2.0875912408759123,
      "grad_norm": 0.52734375,
      "learning_rate": 3.105418097213121e-06,
      "loss": 1.154068112373352,
      "step": 716
    },
    {
      "epoch": 2.0934306569343066,
      "grad_norm": 0.41796875,
      "learning_rate": 3.0809653947743044e-06,
      "loss": 1.0612831115722656,
      "step": 718
    },
    {
      "epoch": 2.0992700729927005,
      "grad_norm": 0.298828125,
      "learning_rate": 3.0566127409764377e-06,
      "loss": 1.154129147529602,
      "step": 720
    },
    {
      "epoch": 2.105109489051095,
      "grad_norm": 0.7890625,
      "learning_rate": 3.0323611430188026e-06,
      "loss": 1.1408308744430542,
      "step": 722
    },
    {
      "epoch": 2.110948905109489,
      "grad_norm": 0.734375,
      "learning_rate": 3.008211603921118e-06,
      "loss": 1.1843266487121582,
      "step": 724
    },
    {
      "epoch": 2.116788321167883,
      "grad_norm": 0.41015625,
      "learning_rate": 2.9841651224820656e-06,
      "loss": 1.0983469486236572,
      "step": 726
    },
    {
      "epoch": 2.1226277372262774,
| "grad_norm": 0.361328125, |
| "learning_rate": 2.96022269323797e-06, |
| "loss": 1.2294151782989502, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.1284671532846717, |
| "grad_norm": 0.400390625, |
| "learning_rate": 2.9363853064216706e-06, |
| "loss": 1.3076847791671753, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.1343065693430656, |
| "grad_norm": 0.61328125, |
| "learning_rate": 2.912653947921567e-06, |
| "loss": 1.185577392578125, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.14014598540146, |
| "grad_norm": 0.408203125, |
| "learning_rate": 2.8890295992408425e-06, |
| "loss": 1.1940393447875977, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.145985401459854, |
| "grad_norm": 0.4296875, |
| "learning_rate": 2.86551323745687e-06, |
| "loss": 1.1575770378112793, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.151824817518248, |
| "grad_norm": 0.35546875, |
| "learning_rate": 2.8421058351808055e-06, |
| "loss": 1.1326178312301636, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.1576642335766425, |
| "grad_norm": 0.6171875, |
| "learning_rate": 2.818808360517349e-06, |
| "loss": 1.167670726776123, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.1635036496350364, |
| "grad_norm": 0.482421875, |
| "learning_rate": 2.7956217770247262e-06, |
| "loss": 1.3139435052871704, |
| "step": 742 |
| }, |
| { |
| "epoch": 2.1693430656934307, |
| "grad_norm": 0.46875, |
| "learning_rate": 2.7725470436748165e-06, |
| "loss": 1.12082839012146, |
| "step": 744 |
| }, |
| { |
| "epoch": 2.1751824817518246, |
| "grad_norm": 0.53125, |
| "learning_rate": 2.7495851148135005e-06, |
| "loss": 1.1051329374313354, |
| "step": 746 |
| }, |
| { |
| "epoch": 2.181021897810219, |
| "grad_norm": 0.93359375, |
| "learning_rate": 2.7267369401211895e-06, |
| "loss": 1.1831530332565308, |
| "step": 748 |
| }, |
| { |
| "epoch": 2.1868613138686133, |
| "grad_norm": 0.36328125, |
| "learning_rate": 2.704003464573544e-06, |
| "loss": 1.1686221361160278, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.192700729927007, |
| "grad_norm": 0.326171875, |
| "learning_rate": 2.6813856284023943e-06, |
| "loss": 1.1672006845474243, |
| "step": 752 |
| }, |
| { |
| "epoch": 2.1985401459854015, |
| "grad_norm": 0.404296875, |
| "learning_rate": 2.6588843670568505e-06, |
| "loss": 1.254637360572815, |
| "step": 754 |
| }, |
| { |
| "epoch": 2.204379562043796, |
| "grad_norm": 0.52734375, |
| "learning_rate": 2.6365006111646152e-06, |
| "loss": 1.1789019107818604, |
| "step": 756 |
| }, |
| { |
| "epoch": 2.2102189781021897, |
| "grad_norm": 0.474609375, |
| "learning_rate": 2.614235286493494e-06, |
| "loss": 1.1955108642578125, |
| "step": 758 |
| }, |
| { |
| "epoch": 2.216058394160584, |
| "grad_norm": 0.87109375, |
| "learning_rate": 2.5920893139131043e-06, |
| "loss": 1.2021061182022095, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.221897810218978, |
| "grad_norm": 0.451171875, |
| "learning_rate": 2.570063609356791e-06, |
| "loss": 1.2603737115859985, |
| "step": 762 |
| }, |
| { |
| "epoch": 2.2277372262773723, |
| "grad_norm": 0.431640625, |
| "learning_rate": 2.5481590837837477e-06, |
| "loss": 1.1586928367614746, |
| "step": 764 |
| }, |
| { |
| "epoch": 2.2335766423357666, |
| "grad_norm": 0.443359375, |
| "learning_rate": 2.5263766431413327e-06, |
| "loss": 1.3295210599899292, |
| "step": 766 |
| }, |
| { |
| "epoch": 2.2394160583941605, |
| "grad_norm": 0.69140625, |
| "learning_rate": 2.5047171883276065e-06, |
| "loss": 1.116708517074585, |
| "step": 768 |
| }, |
| { |
| "epoch": 2.245255474452555, |
| "grad_norm": 0.37890625, |
| "learning_rate": 2.4831816151540682e-06, |
| "loss": 1.0877715349197388, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.2510948905109487, |
| "grad_norm": 0.49609375, |
| "learning_rate": 2.4617708143086085e-06, |
| "loss": 1.3073028326034546, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.256934306569343, |
| "grad_norm": 0.7578125, |
| "learning_rate": 2.440485671318668e-06, |
| "loss": 1.2231556177139282, |
| "step": 774 |
| }, |
| { |
| "epoch": 2.2627737226277373, |
| "grad_norm": 0.796875, |
| "learning_rate": 2.41932706651462e-06, |
| "loss": 1.1234259605407715, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.2686131386861312, |
| "grad_norm": 0.443359375, |
| "learning_rate": 2.398295874993347e-06, |
| "loss": 1.1716923713684082, |
| "step": 778 |
| }, |
| { |
| "epoch": 2.2744525547445256, |
| "grad_norm": 0.65625, |
| "learning_rate": 2.3773929665820662e-06, |
| "loss": 1.2562777996063232, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.28029197080292, |
| "grad_norm": 0.458984375, |
| "learning_rate": 2.35661920580234e-06, |
| "loss": 1.2565009593963623, |
| "step": 782 |
| }, |
| { |
| "epoch": 2.286131386861314, |
| "grad_norm": 1.140625, |
| "learning_rate": 2.3359754518343255e-06, |
| "loss": 1.2188069820404053, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.291970802919708, |
| "grad_norm": 1.3125, |
| "learning_rate": 2.315462558481241e-06, |
| "loss": 1.1537082195281982, |
| "step": 786 |
| }, |
| { |
| "epoch": 2.297810218978102, |
| "grad_norm": 0.357421875, |
| "learning_rate": 2.29508137413405e-06, |
| "loss": 1.0930520296096802, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.3036496350364963, |
| "grad_norm": 0.5078125, |
| "learning_rate": 2.274832741736376e-06, |
| "loss": 1.107776165008545, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.3094890510948907, |
| "grad_norm": 0.40234375, |
| "learning_rate": 2.254717498749638e-06, |
| "loss": 1.291731357574463, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.3153284671532846, |
| "grad_norm": 0.396484375, |
| "learning_rate": 2.234736477118413e-06, |
| "loss": 1.1373404264450073, |
| "step": 794 |
| }, |
| { |
| "epoch": 2.321167883211679, |
| "grad_norm": 0.380859375, |
| "learning_rate": 2.2148905032360305e-06, |
| "loss": 1.1539984941482544, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.3270072992700728, |
| "grad_norm": 0.46875, |
| "learning_rate": 2.195180397910389e-06, |
| "loss": 1.1758666038513184, |
| "step": 798 |
| }, |
| { |
| "epoch": 2.332846715328467, |
| "grad_norm": 1.8984375, |
| "learning_rate": 2.1756069763300158e-06, |
| "loss": 1.2303135395050049, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.3386861313868614, |
| "grad_norm": 0.546875, |
| "learning_rate": 2.1561710480303435e-06, |
| "loss": 1.1783814430236816, |
| "step": 802 |
| }, |
| { |
| "epoch": 2.3445255474452553, |
| "grad_norm": 0.345703125, |
| "learning_rate": 2.1368734168602318e-06, |
| "loss": 1.2231682538986206, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.3503649635036497, |
| "grad_norm": 0.4453125, |
| "learning_rate": 2.1177148809487237e-06, |
| "loss": 1.1812344789505005, |
| "step": 806 |
| }, |
| { |
| "epoch": 2.356204379562044, |
| "grad_norm": 1.5546875, |
| "learning_rate": 2.0986962326720328e-06, |
| "loss": 1.1925325393676758, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.362043795620438, |
| "grad_norm": 0.4140625, |
| "learning_rate": 2.079818258620771e-06, |
| "loss": 1.1922979354858398, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.367883211678832, |
| "grad_norm": 1.7578125, |
| "learning_rate": 2.0610817395674197e-06, |
| "loss": 1.2363067865371704, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.373722627737226, |
| "grad_norm": 0.41015625, |
| "learning_rate": 2.042487450434033e-06, |
| "loss": 1.1505279541015625, |
| "step": 814 |
| }, |
| { |
| "epoch": 2.3795620437956204, |
| "grad_norm": 0.765625, |
| "learning_rate": 2.0240361602601906e-06, |
| "loss": 1.1627535820007324, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.3854014598540147, |
| "grad_norm": 0.33203125, |
| "learning_rate": 2.0057286321711924e-06, |
| "loss": 1.155818223953247, |
| "step": 818 |
| }, |
| { |
| "epoch": 2.3912408759124086, |
| "grad_norm": 0.5078125, |
| "learning_rate": 1.987565623346492e-06, |
| "loss": 1.2554137706756592, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.397080291970803, |
| "grad_norm": 0.546875, |
| "learning_rate": 1.9695478849883867e-06, |
| "loss": 1.1215136051177979, |
| "step": 822 |
| }, |
| { |
| "epoch": 2.402919708029197, |
| "grad_norm": 0.3515625, |
| "learning_rate": 1.9516761622909423e-06, |
| "loss": 1.1090680360794067, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.408759124087591, |
| "grad_norm": 0.416015625, |
| "learning_rate": 1.9339511944091773e-06, |
| "loss": 1.1436755657196045, |
| "step": 826 |
| }, |
| { |
| "epoch": 2.4145985401459855, |
| "grad_norm": 0.48046875, |
| "learning_rate": 1.91637371442849e-06, |
| "loss": 1.140608549118042, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.4204379562043794, |
| "grad_norm": 0.54296875, |
| "learning_rate": 1.8989444493343401e-06, |
| "loss": 1.2412205934524536, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.4262773722627737, |
| "grad_norm": 0.71484375, |
| "learning_rate": 1.8816641199821797e-06, |
| "loss": 1.2433428764343262, |
| "step": 832 |
| }, |
| { |
| "epoch": 2.432116788321168, |
| "grad_norm": 0.4140625, |
| "learning_rate": 1.8645334410676413e-06, |
| "loss": 1.2072176933288574, |
| "step": 834 |
| }, |
| { |
| "epoch": 2.437956204379562, |
| "grad_norm": 0.5703125, |
| "learning_rate": 1.8475531210969766e-06, |
| "loss": 1.174179196357727, |
| "step": 836 |
| }, |
| { |
| "epoch": 2.4437956204379563, |
| "grad_norm": 0.87890625, |
| "learning_rate": 1.8307238623577588e-06, |
| "loss": 1.2336622476577759, |
| "step": 838 |
| }, |
| { |
| "epoch": 2.44963503649635, |
| "grad_norm": 0.40625, |
| "learning_rate": 1.814046360889829e-06, |
| "loss": 1.2075152397155762, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.4554744525547445, |
| "grad_norm": 0.3515625, |
| "learning_rate": 1.7975213064565136e-06, |
| "loss": 1.1945725679397583, |
| "step": 842 |
| }, |
| { |
| "epoch": 2.461313868613139, |
| "grad_norm": 0.5703125, |
| "learning_rate": 1.7811493825160952e-06, |
| "loss": 1.255374789237976, |
| "step": 844 |
| }, |
| { |
| "epoch": 2.4671532846715327, |
| "grad_norm": 0.326171875, |
| "learning_rate": 1.7649312661935463e-06, |
| "loss": 1.0935304164886475, |
| "step": 846 |
| }, |
| { |
| "epoch": 2.472992700729927, |
| "grad_norm": 1.2109375, |
| "learning_rate": 1.7488676282525236e-06, |
| "loss": 1.1871508359909058, |
| "step": 848 |
| }, |
| { |
| "epoch": 2.478832116788321, |
| "grad_norm": 0.515625, |
| "learning_rate": 1.7329591330676255e-06, |
| "loss": 1.236677646636963, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.4846715328467153, |
| "grad_norm": 0.59375, |
| "learning_rate": 1.7172064385969144e-06, |
| "loss": 1.1770851612091064, |
| "step": 852 |
| }, |
| { |
| "epoch": 2.4905109489051096, |
| "grad_norm": 0.76953125, |
| "learning_rate": 1.7016101963547063e-06, |
| "loss": 1.1361277103424072, |
| "step": 854 |
| }, |
| { |
| "epoch": 2.4963503649635035, |
| "grad_norm": 0.5078125, |
| "learning_rate": 1.68617105138462e-06, |
| "loss": 1.1203017234802246, |
| "step": 856 |
| }, |
| { |
| "epoch": 2.502189781021898, |
| "grad_norm": 0.453125, |
| "learning_rate": 1.6708896422329056e-06, |
| "loss": 1.2030003070831299, |
| "step": 858 |
| }, |
| { |
| "epoch": 2.508029197080292, |
| "grad_norm": 0.5234375, |
| "learning_rate": 1.6557666009220274e-06, |
| "loss": 1.194663405418396, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.513868613138686, |
| "grad_norm": 0.74609375, |
| "learning_rate": 1.6408025529245324e-06, |
| "loss": 1.3168246746063232, |
| "step": 862 |
| }, |
| { |
| "epoch": 2.5197080291970804, |
| "grad_norm": 0.43359375, |
| "learning_rate": 1.6259981171371736e-06, |
| "loss": 1.1622366905212402, |
| "step": 864 |
| }, |
| { |
| "epoch": 2.5255474452554747, |
| "grad_norm": 0.333984375, |
| "learning_rate": 1.611353905855317e-06, |
| "loss": 1.1341776847839355, |
| "step": 866 |
| }, |
| { |
| "epoch": 2.5313868613138686, |
| "grad_norm": 0.47265625, |
| "learning_rate": 1.5968705247476192e-06, |
| "loss": 1.1897858381271362, |
| "step": 868 |
| }, |
| { |
| "epoch": 2.537226277372263, |
| "grad_norm": 0.486328125, |
| "learning_rate": 1.582548572830973e-06, |
| "loss": 1.1871055364608765, |
| "step": 870 |
| }, |
| { |
| "epoch": 2.543065693430657, |
| "grad_norm": 0.55859375, |
| "learning_rate": 1.5683886424457373e-06, |
| "loss": 1.1016008853912354, |
| "step": 872 |
| }, |
| { |
| "epoch": 2.548905109489051, |
| "grad_norm": 0.7734375, |
| "learning_rate": 1.5543913192312373e-06, |
| "loss": 1.1918286085128784, |
| "step": 874 |
| }, |
| { |
| "epoch": 2.554744525547445, |
| "grad_norm": 0.5078125, |
| "learning_rate": 1.5405571821015402e-06, |
| "loss": 1.2634363174438477, |
| "step": 876 |
| }, |
| { |
| "epoch": 2.5605839416058394, |
| "grad_norm": 0.376953125, |
| "learning_rate": 1.5268868032215152e-06, |
| "loss": 1.1621180772781372, |
| "step": 878 |
| }, |
| { |
| "epoch": 2.5664233576642337, |
| "grad_norm": 0.4765625, |
| "learning_rate": 1.513380747983169e-06, |
| "loss": 1.1664128303527832, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.5722627737226276, |
| "grad_norm": 0.609375, |
| "learning_rate": 1.5000395749822597e-06, |
| "loss": 1.1880872249603271, |
| "step": 882 |
| }, |
| { |
| "epoch": 2.578102189781022, |
| "grad_norm": 0.4921875, |
| "learning_rate": 1.4868638359951963e-06, |
| "loss": 1.2146211862564087, |
| "step": 884 |
| }, |
| { |
| "epoch": 2.5839416058394162, |
| "grad_norm": 0.55859375, |
| "learning_rate": 1.4738540759562175e-06, |
| "loss": 1.1511411666870117, |
| "step": 886 |
| }, |
| { |
| "epoch": 2.58978102189781, |
| "grad_norm": 0.515625, |
| "learning_rate": 1.4610108329348514e-06, |
| "loss": 1.2108086347579956, |
| "step": 888 |
| }, |
| { |
| "epoch": 2.5956204379562045, |
| "grad_norm": 0.400390625, |
| "learning_rate": 1.4483346381136653e-06, |
| "loss": 1.0855056047439575, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.601459854014599, |
| "grad_norm": 0.3671875, |
| "learning_rate": 1.4358260157662928e-06, |
| "loss": 1.1662517786026, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.6072992700729927, |
| "grad_norm": 0.79296875, |
| "learning_rate": 1.4234854832357536e-06, |
| "loss": 1.195744514465332, |
| "step": 894 |
| }, |
| { |
| "epoch": 2.613138686131387, |
| "grad_norm": 0.53515625, |
| "learning_rate": 1.4113135509130547e-06, |
| "loss": 1.2275428771972656, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.618978102189781, |
| "grad_norm": 0.50390625, |
| "learning_rate": 1.3993107222160817e-06, |
| "loss": 1.2926044464111328, |
| "step": 898 |
| }, |
| { |
| "epoch": 2.624817518248175, |
| "grad_norm": 0.60546875, |
| "learning_rate": 1.3874774935687782e-06, |
| "loss": 1.2193105220794678, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.630656934306569, |
| "grad_norm": 0.5859375, |
| "learning_rate": 1.3758143543806143e-06, |
| "loss": 1.227401852607727, |
| "step": 902 |
| }, |
| { |
| "epoch": 2.6364963503649634, |
| "grad_norm": 0.86328125, |
| "learning_rate": 1.3643217870263448e-06, |
| "loss": 1.2654823064804077, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.6423357664233578, |
| "grad_norm": 0.373046875, |
| "learning_rate": 1.35300026682606e-06, |
| "loss": 1.1640398502349854, |
| "step": 906 |
| }, |
| { |
| "epoch": 2.6481751824817517, |
| "grad_norm": 0.625, |
| "learning_rate": 1.341850262025524e-06, |
| "loss": 1.1758366823196411, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.654014598540146, |
| "grad_norm": 0.462890625, |
| "learning_rate": 1.330872233776811e-06, |
| "loss": 1.1808586120605469, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.6598540145985403, |
| "grad_norm": 0.458984375, |
| "learning_rate": 1.3200666361192349e-06, |
| "loss": 1.1935012340545654, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.665693430656934, |
| "grad_norm": 0.396484375, |
| "learning_rate": 1.3094339159605627e-06, |
| "loss": 1.1293021440505981, |
| "step": 914 |
| }, |
| { |
| "epoch": 2.6715328467153285, |
| "grad_norm": 0.875, |
| "learning_rate": 1.2989745130585407e-06, |
| "loss": 1.2715429067611694, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.677372262773723, |
| "grad_norm": 0.462890625, |
| "learning_rate": 1.2886888600026983e-06, |
| "loss": 1.1346051692962646, |
| "step": 918 |
| }, |
| { |
| "epoch": 2.6832116788321168, |
| "grad_norm": 0.412109375, |
| "learning_rate": 1.2785773821964636e-06, |
| "loss": 1.2460830211639404, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.689051094890511, |
| "grad_norm": 1.6171875, |
| "learning_rate": 1.2686404978395626e-06, |
| "loss": 1.1654274463653564, |
| "step": 922 |
| }, |
| { |
| "epoch": 2.694890510948905, |
| "grad_norm": 1.65625, |
| "learning_rate": 1.2588786179107279e-06, |
| "loss": 1.253066897392273, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.7007299270072993, |
| "grad_norm": 0.42578125, |
| "learning_rate": 1.2492921461506994e-06, |
| "loss": 1.188564658164978, |
| "step": 926 |
| }, |
| { |
| "epoch": 2.706569343065693, |
| "grad_norm": 0.466796875, |
| "learning_rate": 1.2398814790455244e-06, |
| "loss": 1.244341254234314, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.7124087591240875, |
| "grad_norm": 0.443359375, |
| "learning_rate": 1.2306470058101612e-06, |
| "loss": 1.1333363056182861, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.718248175182482, |
| "grad_norm": 0.66015625, |
| "learning_rate": 1.2215891083723842e-06, |
| "loss": 1.263460397720337, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.7240875912408757, |
| "grad_norm": 0.359375, |
| "learning_rate": 1.2127081613569809e-06, |
| "loss": 1.2082524299621582, |
| "step": 934 |
| }, |
| { |
| "epoch": 2.72992700729927, |
| "grad_norm": 0.408203125, |
| "learning_rate": 1.2040045320702626e-06, |
| "loss": 1.1242833137512207, |
| "step": 936 |
| }, |
| { |
| "epoch": 2.7357664233576644, |
| "grad_norm": 0.63671875, |
| "learning_rate": 1.1954785804848739e-06, |
| "loss": 1.14191734790802, |
| "step": 938 |
| }, |
| { |
| "epoch": 2.7416058394160583, |
| "grad_norm": 1.65625, |
| "learning_rate": 1.1871306592249008e-06, |
| "loss": 1.1483235359191895, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.7474452554744526, |
| "grad_norm": 0.5546875, |
| "learning_rate": 1.1789611135512903e-06, |
| "loss": 1.1981879472732544, |
| "step": 942 |
| }, |
| { |
| "epoch": 2.753284671532847, |
| "grad_norm": 1.171875, |
| "learning_rate": 1.1709702813475674e-06, |
| "loss": 1.1760236024856567, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.759124087591241, |
| "grad_norm": 0.380859375, |
| "learning_rate": 1.163158493105864e-06, |
| "loss": 1.2009891271591187, |
| "step": 946 |
| }, |
| { |
| "epoch": 2.764963503649635, |
| "grad_norm": 0.65234375, |
| "learning_rate": 1.1555260719132474e-06, |
| "loss": 1.1685845851898193, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.770802919708029, |
| "grad_norm": 0.423828125, |
| "learning_rate": 1.1480733334383599e-06, |
| "loss": 1.2095947265625, |
| "step": 950 |
| }, |
| { |
| "epoch": 2.7766423357664234, |
| "grad_norm": 0.357421875, |
| "learning_rate": 1.1408005859183595e-06, |
| "loss": 1.1101715564727783, |
| "step": 952 |
| }, |
| { |
| "epoch": 2.7824817518248173, |
| "grad_norm": 0.69921875, |
| "learning_rate": 1.1337081301461775e-06, |
| "loss": 1.2515991926193237, |
| "step": 954 |
| }, |
| { |
| "epoch": 2.7883211678832116, |
| "grad_norm": 1.9609375, |
| "learning_rate": 1.1267962594580712e-06, |
| "loss": 1.2288193702697754, |
| "step": 956 |
| }, |
| { |
| "epoch": 2.794160583941606, |
| "grad_norm": 0.90234375, |
| "learning_rate": 1.1200652597214982e-06, |
| "loss": 1.1902008056640625, |
| "step": 958 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.458984375, |
| "learning_rate": 1.1135154093232868e-06, |
| "loss": 1.1392861604690552, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.805839416058394, |
| "grad_norm": 0.625, |
| "learning_rate": 1.107146979158129e-06, |
| "loss": 1.1379271745681763, |
| "step": 962 |
| }, |
| { |
| "epoch": 2.8116788321167885, |
| "grad_norm": 0.57421875, |
| "learning_rate": 1.100960232617371e-06, |
| "loss": 1.274213433265686, |
| "step": 964 |
| }, |
| { |
| "epoch": 2.8175182481751824, |
| "grad_norm": 0.984375, |
| "learning_rate": 1.094955425578123e-06, |
| "loss": 1.2406063079833984, |
| "step": 966 |
| }, |
| { |
| "epoch": 2.8233576642335767, |
| "grad_norm": 1.0859375, |
| "learning_rate": 1.0891328063926742e-06, |
| "loss": 1.1469717025756836, |
| "step": 968 |
| }, |
| { |
| "epoch": 2.829197080291971, |
| "grad_norm": 0.3046875, |
| "learning_rate": 1.0834926158782235e-06, |
| "loss": 1.2098904848098755, |
| "step": 970 |
| }, |
| { |
| "epoch": 2.835036496350365, |
| "grad_norm": 0.640625, |
| "learning_rate": 1.0780350873069184e-06, |
| "loss": 1.1746724843978882, |
| "step": 972 |
| }, |
| { |
| "epoch": 2.8408759124087593, |
| "grad_norm": 0.37109375, |
| "learning_rate": 1.0727604463962058e-06, |
| "loss": 1.187951683998108, |
| "step": 974 |
| }, |
| { |
| "epoch": 2.846715328467153, |
| "grad_norm": 0.546875, |
| "learning_rate": 1.0676689112994977e-06, |
| "loss": 1.1875951290130615, |
| "step": 976 |
| }, |
| { |
| "epoch": 2.8525547445255475, |
| "grad_norm": 0.50390625, |
| "learning_rate": 1.06276069259715e-06, |
| "loss": 1.1863131523132324, |
| "step": 978 |
| }, |
| { |
| "epoch": 2.8583941605839414, |
| "grad_norm": 0.3984375, |
| "learning_rate": 1.0580359932877516e-06, |
| "loss": 1.1864418983459473, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.8642335766423357, |
| "grad_norm": 0.66796875, |
| "learning_rate": 1.0534950087797282e-06, |
| "loss": 1.255091667175293, |
| "step": 982 |
| }, |
| { |
| "epoch": 2.87007299270073, |
| "grad_norm": 0.357421875, |
| "learning_rate": 1.049137926883261e-06, |
| "loss": 1.1269922256469727, |
| "step": 984 |
| }, |
| { |
| "epoch": 2.875912408759124, |
| "grad_norm": 0.64453125, |
| "learning_rate": 1.0449649278025208e-06, |
| "loss": 1.1925872564315796, |
| "step": 986 |
| }, |
| { |
| "epoch": 2.8817518248175182, |
| "grad_norm": 0.3828125, |
| "learning_rate": 1.0409761841282112e-06, |
| "loss": 1.1946667432785034, |
| "step": 988 |
| }, |
| { |
| "epoch": 2.8875912408759126, |
| "grad_norm": 0.408203125, |
| "learning_rate": 1.037171860830434e-06, |
| "loss": 1.141740083694458, |
| "step": 990 |
| }, |
| { |
| "epoch": 2.8934306569343065, |
| "grad_norm": 0.4921875, |
| "learning_rate": 1.0335521152518637e-06, |
| "loss": 1.0821702480316162, |
| "step": 992 |
| }, |
| { |
| "epoch": 2.899270072992701, |
| "grad_norm": 0.453125, |
| "learning_rate": 1.0301170971012418e-06, |
| "loss": 1.1304783821105957, |
| "step": 994 |
| }, |
| { |
| "epoch": 2.905109489051095, |
| "grad_norm": 0.84765625, |
| "learning_rate": 1.0268669484471839e-06, |
| "loss": 1.1460580825805664, |
| "step": 996 |
| }, |
| { |
| "epoch": 2.910948905109489, |
| "grad_norm": 0.58984375, |
| "learning_rate": 1.0238018037123042e-06, |
| "loss": 1.216837763786316, |
| "step": 998 |
| }, |
| { |
| "epoch": 2.9167883211678833, |
| "grad_norm": 0.46484375, |
| "learning_rate": 1.020921789667656e-06, |
| "loss": 1.217172622680664, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.9226277372262772, |
| "grad_norm": 0.400390625, |
| "learning_rate": 1.0182270254274888e-06, |
| "loss": 1.2125324010849, |
| "step": 1002 |
| }, |
| { |
| "epoch": 2.9284671532846716, |
| "grad_norm": 0.83984375, |
| "learning_rate": 1.015717622444321e-06, |
| "loss": 1.1410317420959473, |
| "step": 1004 |
| }, |
| { |
| "epoch": 2.9343065693430654, |
| "grad_norm": 0.578125, |
| "learning_rate": 1.0133936845043322e-06, |
| "loss": 1.2082126140594482, |
| "step": 1006 |
| }, |
| { |
| "epoch": 2.9401459854014598, |
| "grad_norm": 0.6484375, |
| "learning_rate": 1.011255307723068e-06, |
| "loss": 1.2457588911056519, |
| "step": 1008 |
| }, |
| { |
| "epoch": 2.945985401459854, |
| "grad_norm": 1.140625, |
| "learning_rate": 1.0093025805414676e-06, |
| "loss": 1.1455327272415161, |
| "step": 1010 |
| }, |
| { |
| "epoch": 2.951824817518248, |
| "grad_norm": 0.5546875, |
| "learning_rate": 1.007535583722203e-06, |
| "loss": 1.1338199377059937, |
| "step": 1012 |
| }, |
| { |
| "epoch": 2.9576642335766423, |
| "grad_norm": 0.419921875, |
| "learning_rate": 1.0059543903463426e-06, |
| "loss": 1.2006813287734985, |
| "step": 1014 |
| }, |
| { |
| "epoch": 2.9635036496350367, |
| "grad_norm": 0.392578125, |
| "learning_rate": 1.004559065810324e-06, |
| "loss": 1.2222992181777954, |
| "step": 1016 |
| }, |
| { |
| "epoch": 2.9693430656934305, |
| "grad_norm": 0.349609375, |
| "learning_rate": 1.003349667823253e-06, |
| "loss": 1.1396572589874268, |
| "step": 1018 |
| }, |
| { |
| "epoch": 2.975182481751825, |
| "grad_norm": 0.4453125, |
| "learning_rate": 1.0023262464045155e-06, |
| "loss": 1.2639334201812744, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.981021897810219, |
| "grad_norm": 0.640625, |
| "learning_rate": 1.0014888438817083e-06, |
| "loss": 1.206993579864502, |
| "step": 1022 |
| }, |
| { |
| "epoch": 2.986861313868613, |
| "grad_norm": 0.58203125, |
| "learning_rate": 1.0008374948888896e-06, |
| "loss": 1.1673427820205688, |
| "step": 1024 |
| }, |
| { |
| "epoch": 2.9927007299270074, |
| "grad_norm": 0.39453125, |
| "learning_rate": 1.0003722263651458e-06, |
| "loss": 1.2105653285980225, |
| "step": 1026 |
| }, |
| { |
| "epoch": 2.9985401459854013, |
| "grad_norm": 0.78125, |
| "learning_rate": 1.0000930575534762e-06, |
| "loss": 1.1724438667297363, |
| "step": 1028 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 1029, |
| "total_flos": 2.909158325117518e+18, |
| "train_loss": 1.307906815331462, |
| "train_runtime": 13215.9511, |
| "train_samples_per_second": 2.488, |
| "train_steps_per_second": 0.078 |
| } |
| ], |
| "logging_steps": 2, |
| "max_steps": 1029, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 9999999, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.909158325117518e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
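The file above has the shape of a Hugging Face `Trainer` state log (`trainer_state.json`): a `log_history` array of per-step records that ends with an aggregate summary (1029 steps over 3 epochs, mean train loss ≈ 1.308). Below is a minimal sketch of how such a log could be inspected; the file name `trainer_state.json` and the printed fields are illustrative assumptions, not part of the original log.

```python
import json

# Load the trainer-state log shown above (assumed file name).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step records that carry a "loss" value; the final
# summary record (epoch 3.0) stores aggregates like "train_loss"
# and "train_runtime" instead of a step-wise "loss".
records = [r for r in state["log_history"] if "loss" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

print(f"logged points: {len(records)}")
print(f"last logged loss: {losses[-1]:.3f} at step {steps[-1]}")
print(f"final learning rate: {lrs[-1]:.3e}")
```

With the log above, the last record before the summary would report a loss of about 1.172 at step 1028, with the learning rate decayed to roughly 1.0e-06.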