| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9878213802435725, |
| "eval_steps": 500, |
| "global_step": 276, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.010825439783491205, |
| "grad_norm": 5.808782195509016, |
| "learning_rate": 2.8571428571428573e-06, |
| "loss": 0.8566, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.02165087956698241, |
| "grad_norm": 5.813205553053294, |
| "learning_rate": 5.7142857142857145e-06, |
| "loss": 0.8603, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.03247631935047361, |
| "grad_norm": 5.437266634937425, |
| "learning_rate": 8.571428571428571e-06, |
| "loss": 0.8524, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.04330175913396482, |
| "grad_norm": 2.304459428183892, |
| "learning_rate": 1.1428571428571429e-05, |
| "loss": 0.7759, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.05412719891745602, |
| "grad_norm": 3.927953214676238, |
| "learning_rate": 1.4285714285714287e-05, |
| "loss": 0.7544, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.06495263870094722, |
| "grad_norm": 4.210566313942864, |
| "learning_rate": 1.7142857142857142e-05, |
| "loss": 0.751, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.07577807848443843, |
| "grad_norm": 4.355428233675389, |
| "learning_rate": 2e-05, |
| "loss": 0.7344, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.08660351826792964, |
| "grad_norm": 2.9135046061222485, |
| "learning_rate": 2.2857142857142858e-05, |
| "loss": 0.7289, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.09742895805142084, |
| "grad_norm": 2.7435977316235043, |
| "learning_rate": 2.5714285714285718e-05, |
| "loss": 0.672, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.10825439783491204, |
| "grad_norm": 2.124176021674874, |
| "learning_rate": 2.8571428571428574e-05, |
| "loss": 0.6658, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.11907983761840325, |
| "grad_norm": 1.560974058082246, |
| "learning_rate": 3.142857142857143e-05, |
| "loss": 0.6378, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.12990527740189445, |
| "grad_norm": 1.356421259435607, |
| "learning_rate": 3.4285714285714284e-05, |
| "loss": 0.6303, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.14073071718538566, |
| "grad_norm": 1.5660206311423615, |
| "learning_rate": 3.714285714285715e-05, |
| "loss": 0.614, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.15155615696887687, |
| "grad_norm": 1.2014017650689, |
| "learning_rate": 4e-05, |
| "loss": 0.6243, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.16238159675236807, |
| "grad_norm": 1.4027226467953162, |
| "learning_rate": 4.2857142857142856e-05, |
| "loss": 0.6077, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.17320703653585928, |
| "grad_norm": 1.1683655505452553, |
| "learning_rate": 4.5714285714285716e-05, |
| "loss": 0.5975, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.18403247631935046, |
| "grad_norm": 1.835593785225974, |
| "learning_rate": 4.857142857142857e-05, |
| "loss": 0.596, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.19485791610284167, |
| "grad_norm": 1.1345154883647737, |
| "learning_rate": 5.1428571428571436e-05, |
| "loss": 0.5762, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.20568335588633288, |
| "grad_norm": 1.824887208898125, |
| "learning_rate": 5.4285714285714295e-05, |
| "loss": 0.572, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.2165087956698241, |
| "grad_norm": 1.6126198323929144, |
| "learning_rate": 5.714285714285715e-05, |
| "loss": 0.5762, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.2273342354533153, |
| "grad_norm": 1.5918620480095331, |
| "learning_rate": 6.000000000000001e-05, |
| "loss": 0.5692, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2381596752368065, |
| "grad_norm": 1.4250264018215846, |
| "learning_rate": 6.285714285714286e-05, |
| "loss": 0.5693, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.2489851150202977, |
| "grad_norm": 1.6782182483587103, |
| "learning_rate": 6.571428571428571e-05, |
| "loss": 0.5601, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.2598105548037889, |
| "grad_norm": 1.2505969451610879, |
| "learning_rate": 6.857142857142857e-05, |
| "loss": 0.5655, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.2706359945872801, |
| "grad_norm": 2.1118955155765673, |
| "learning_rate": 7.142857142857143e-05, |
| "loss": 0.5609, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.2814614343707713, |
| "grad_norm": 1.4121413273832522, |
| "learning_rate": 7.42857142857143e-05, |
| "loss": 0.5389, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.2922868741542625, |
| "grad_norm": 2.0766991334479408, |
| "learning_rate": 7.714285714285715e-05, |
| "loss": 0.547, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.30311231393775373, |
| "grad_norm": 1.4556064554736243, |
| "learning_rate": 8e-05, |
| "loss": 0.5524, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.31393775372124494, |
| "grad_norm": 1.6077536748554087, |
| "learning_rate": 7.999679062421315e-05, |
| "loss": 0.5492, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.32476319350473615, |
| "grad_norm": 1.5101627731283567, |
| "learning_rate": 7.998716301185722e-05, |
| "loss": 0.5436, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.33558863328822736, |
| "grad_norm": 1.984676597011515, |
| "learning_rate": 7.997111870786354e-05, |
| "loss": 0.5451, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.34641407307171856, |
| "grad_norm": 1.21014550417124, |
| "learning_rate": 7.994866028684212e-05, |
| "loss": 0.5388, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.3572395128552097, |
| "grad_norm": 1.95150099944423, |
| "learning_rate": 7.991979135266861e-05, |
| "loss": 0.5414, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.3680649526387009, |
| "grad_norm": 1.5787618301801385, |
| "learning_rate": 7.98845165379059e-05, |
| "loss": 0.5454, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.37889039242219213, |
| "grad_norm": 1.0690263609490216, |
| "learning_rate": 7.984284150306085e-05, |
| "loss": 0.5239, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.38971583220568334, |
| "grad_norm": 1.7977915032515326, |
| "learning_rate": 7.97947729356758e-05, |
| "loss": 0.5253, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.40054127198917455, |
| "grad_norm": 2.120705589307475, |
| "learning_rate": 7.974031854925562e-05, |
| "loss": 0.535, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.41136671177266576, |
| "grad_norm": 1.1488904564251323, |
| "learning_rate": 7.967948708202972e-05, |
| "loss": 0.5189, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.42219215155615697, |
| "grad_norm": 2.7953953760475363, |
| "learning_rate": 7.961228829555003e-05, |
| "loss": 0.5404, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.4330175913396482, |
| "grad_norm": 2.211003461793142, |
| "learning_rate": 7.953873297312447e-05, |
| "loss": 0.5388, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.4438430311231394, |
| "grad_norm": 1.6204765603156832, |
| "learning_rate": 7.945883291808655e-05, |
| "loss": 0.5234, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.4546684709066306, |
| "grad_norm": 1.560460098341002, |
| "learning_rate": 7.937260095190137e-05, |
| "loss": 0.5191, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.4654939106901218, |
| "grad_norm": 1.3057494076782257, |
| "learning_rate": 7.928005091210817e-05, |
| "loss": 0.514, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.476319350473613, |
| "grad_norm": 1.3681049413609492, |
| "learning_rate": 7.918119765009979e-05, |
| "loss": 0.5172, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.4871447902571042, |
| "grad_norm": 1.3087285372962363, |
| "learning_rate": 7.907605702873948e-05, |
| "loss": 0.5166, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.4979702300405954, |
| "grad_norm": 1.5892591530023001, |
| "learning_rate": 7.896464591981549e-05, |
| "loss": 0.5135, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.5087956698240866, |
| "grad_norm": 0.9146670461290698, |
| "learning_rate": 7.884698220133357e-05, |
| "loss": 0.5093, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.5196211096075778, |
| "grad_norm": 1.2871074236378093, |
| "learning_rate": 7.872308475464818e-05, |
| "loss": 0.5066, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.530446549391069, |
| "grad_norm": 1.187151202508197, |
| "learning_rate": 7.859297346143258e-05, |
| "loss": 0.5058, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.5412719891745602, |
| "grad_norm": 1.5765769924422004, |
| "learning_rate": 7.84566692004885e-05, |
| "loss": 0.5027, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.5520974289580515, |
| "grad_norm": 1.1216495666926116, |
| "learning_rate": 7.831419384439565e-05, |
| "loss": 0.4992, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.5629228687415426, |
| "grad_norm": 1.22301226354295, |
| "learning_rate": 7.816557025600196e-05, |
| "loss": 0.494, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.5737483085250338, |
| "grad_norm": 1.164455112281458, |
| "learning_rate": 7.80108222847547e-05, |
| "loss": 0.4938, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.584573748308525, |
| "grad_norm": 1.4650064133181142, |
| "learning_rate": 7.784997476287349e-05, |
| "loss": 0.5074, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.5953991880920162, |
| "grad_norm": 1.22737056565541, |
| "learning_rate": 7.76830535013654e-05, |
| "loss": 0.494, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.6062246278755075, |
| "grad_norm": 0.8673329147060569, |
| "learning_rate": 7.751008528588322e-05, |
| "loss": 0.4958, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.6170500676589986, |
| "grad_norm": 1.427111220332804, |
| "learning_rate": 7.733109787242708e-05, |
| "loss": 0.5, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.6278755074424899, |
| "grad_norm": 1.2086609324924107, |
| "learning_rate": 7.71461199828905e-05, |
| "loss": 0.4938, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.638700947225981, |
| "grad_norm": 1.2921503295566135, |
| "learning_rate": 7.695518130045147e-05, |
| "loss": 0.4841, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.6495263870094723, |
| "grad_norm": 1.2045601060877222, |
| "learning_rate": 7.675831246480923e-05, |
| "loss": 0.4928, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.6603518267929634, |
| "grad_norm": 1.2673683923210595, |
| "learning_rate": 7.655554506726747e-05, |
| "loss": 0.502, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.6711772665764547, |
| "grad_norm": 1.1256001884788949, |
| "learning_rate": 7.6346911645665e-05, |
| "loss": 0.4889, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.6820027063599459, |
| "grad_norm": 0.8830494632014936, |
| "learning_rate": 7.61324456791544e-05, |
| "loss": 0.4955, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.6928281461434371, |
| "grad_norm": 0.9599498090811002, |
| "learning_rate": 7.591218158282968e-05, |
| "loss": 0.491, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.7036535859269283, |
| "grad_norm": 1.1534591144602964, |
| "learning_rate": 7.568615470220369e-05, |
| "loss": 0.4882, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.7144790257104194, |
| "grad_norm": 1.5164318002685795, |
| "learning_rate": 7.545440130753634e-05, |
| "loss": 0.4939, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.7253044654939107, |
| "grad_norm": 0.6951797833665687, |
| "learning_rate": 7.52169585880143e-05, |
| "loss": 0.4885, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.7361299052774019, |
| "grad_norm": 1.2588621776669873, |
| "learning_rate": 7.497386464578329e-05, |
| "loss": 0.5013, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.7469553450608931, |
| "grad_norm": 1.4705912459647563, |
| "learning_rate": 7.472515848983394e-05, |
| "loss": 0.4907, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.7577807848443843, |
| "grad_norm": 0.6781089249417709, |
| "learning_rate": 7.447088002974199e-05, |
| "loss": 0.4737, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.7686062246278755, |
| "grad_norm": 1.3533306719892424, |
| "learning_rate": 7.421107006926408e-05, |
| "loss": 0.4795, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.7794316644113667, |
| "grad_norm": 1.0711976523099835, |
| "learning_rate": 7.394577029979004e-05, |
| "loss": 0.4878, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.790257104194858, |
| "grad_norm": 1.7828745135486515, |
| "learning_rate": 7.367502329365268e-05, |
| "loss": 0.4975, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.8010825439783491, |
| "grad_norm": 0.9406459074769725, |
| "learning_rate": 7.33988724972963e-05, |
| "loss": 0.4912, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.8119079837618404, |
| "grad_norm": 1.350705792808143, |
| "learning_rate": 7.311736222430487e-05, |
| "loss": 0.4909, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.8227334235453315, |
| "grad_norm": 0.7667816696092303, |
| "learning_rate": 7.283053764829106e-05, |
| "loss": 0.48, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.8335588633288228, |
| "grad_norm": 1.3313352979799213, |
| "learning_rate": 7.253844479564737e-05, |
| "loss": 0.4817, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.8443843031123139, |
| "grad_norm": 0.7190091824384284, |
| "learning_rate": 7.224113053816021e-05, |
| "loss": 0.4839, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.8552097428958051, |
| "grad_norm": 1.09462918815019, |
| "learning_rate": 7.193864258548855e-05, |
| "loss": 0.489, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.8660351826792964, |
| "grad_norm": 1.0916277655373101, |
| "learning_rate": 7.163102947750794e-05, |
| "loss": 0.483, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.8768606224627875, |
| "grad_norm": 1.390674863582184, |
| "learning_rate": 7.131834057652142e-05, |
| "loss": 0.4911, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.8876860622462788, |
| "grad_norm": 0.8966944634191916, |
| "learning_rate": 7.100062605933835e-05, |
| "loss": 0.48, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.8985115020297699, |
| "grad_norm": 0.8382478308469653, |
| "learning_rate": 7.067793690922268e-05, |
| "loss": 0.4811, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.9093369418132612, |
| "grad_norm": 0.8056638959260214, |
| "learning_rate": 7.035032490771165e-05, |
| "loss": 0.483, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.9201623815967523, |
| "grad_norm": 0.8261415511137054, |
| "learning_rate": 7.001784262630652e-05, |
| "loss": 0.478, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.9309878213802436, |
| "grad_norm": 0.8582439051300673, |
| "learning_rate": 6.968054341803644e-05, |
| "loss": 0.4837, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.9418132611637348, |
| "grad_norm": 1.1350401399820995, |
| "learning_rate": 6.933848140889705e-05, |
| "loss": 0.4803, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.952638700947226, |
| "grad_norm": 1.48619526541436, |
| "learning_rate": 6.89917114891648e-05, |
| "loss": 0.4687, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.9634641407307172, |
| "grad_norm": 0.742051335326601, |
| "learning_rate": 6.864028930458892e-05, |
| "loss": 0.4788, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.9742895805142084, |
| "grad_norm": 0.6782228718743897, |
| "learning_rate": 6.828427124746191e-05, |
| "loss": 0.4775, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.9851150202976996, |
| "grad_norm": 1.462261986108606, |
| "learning_rate": 6.792371444757037e-05, |
| "loss": 0.4848, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.9959404600811907, |
| "grad_norm": 1.1720362746872328, |
| "learning_rate": 6.755867676302747e-05, |
| "loss": 0.4811, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.006765899864682, |
| "grad_norm": 1.3982192683722436, |
| "learning_rate": 6.718921677098853e-05, |
| "loss": 0.7468, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.0175913396481733, |
| "grad_norm": 1.6335922067721735, |
| "learning_rate": 6.681539375825115e-05, |
| "loss": 0.4665, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.0284167794316643, |
| "grad_norm": 0.8967439830099186, |
| "learning_rate": 6.643726771174164e-05, |
| "loss": 0.4556, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.0392422192151556, |
| "grad_norm": 1.7895944548607083, |
| "learning_rate": 6.60548993088889e-05, |
| "loss": 0.4659, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.0500676589986468, |
| "grad_norm": 1.168440641440536, |
| "learning_rate": 6.56683499078876e-05, |
| "loss": 0.4487, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.060893098782138, |
| "grad_norm": 1.7145903444920905, |
| "learning_rate": 6.527768153785216e-05, |
| "loss": 0.4679, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.0717185385656292, |
| "grad_norm": 1.7706267980698807, |
| "learning_rate": 6.488295688886295e-05, |
| "loss": 0.4602, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.0825439783491204, |
| "grad_norm": 0.8279569864134981, |
| "learning_rate": 6.448423930190653e-05, |
| "loss": 0.451, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.0933694181326117, |
| "grad_norm": 1.4178241026761327, |
| "learning_rate": 6.408159275871132e-05, |
| "loss": 0.4524, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.104194857916103, |
| "grad_norm": 0.855662456588115, |
| "learning_rate": 6.36750818714807e-05, |
| "loss": 0.4501, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.115020297699594, |
| "grad_norm": 1.2774070048056905, |
| "learning_rate": 6.326477187252455e-05, |
| "loss": 0.4562, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.1258457374830853, |
| "grad_norm": 0.9647061679734499, |
| "learning_rate": 6.28507286037917e-05, |
| "loss": 0.4455, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.1366711772665765, |
| "grad_norm": 0.8152001814683185, |
| "learning_rate": 6.243301850630419e-05, |
| "loss": 0.4498, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.1474966170500678, |
| "grad_norm": 0.8148817931032433, |
| "learning_rate": 6.201170860949565e-05, |
| "loss": 0.4526, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.1583220568335588, |
| "grad_norm": 0.7054191999169396, |
| "learning_rate": 6.15868665204552e-05, |
| "loss": 0.4469, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.16914749661705, |
| "grad_norm": 0.7721709008206348, |
| "learning_rate": 6.11585604130785e-05, |
| "loss": 0.4414, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.1799729364005414, |
| "grad_norm": 0.7332249667079027, |
| "learning_rate": 6.072685901712808e-05, |
| "loss": 0.4468, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.1907983761840324, |
| "grad_norm": 0.7820399298347422, |
| "learning_rate": 6.02918316072043e-05, |
| "loss": 0.4519, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.2016238159675237, |
| "grad_norm": 0.6492150545629941, |
| "learning_rate": 5.9853547991628967e-05, |
| "loss": 0.4498, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.212449255751015, |
| "grad_norm": 0.6404997166391087, |
| "learning_rate": 5.941207850124325e-05, |
| "loss": 0.4433, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.2232746955345062, |
| "grad_norm": 0.573153411012465, |
| "learning_rate": 5.896749397812181e-05, |
| "loss": 0.4393, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.2341001353179972, |
| "grad_norm": 0.48292527952470066, |
| "learning_rate": 5.8519865764204834e-05, |
| "loss": 0.4463, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.2449255751014885, |
| "grad_norm": 0.5776286655791839, |
| "learning_rate": 5.8069265689849884e-05, |
| "loss": 0.4519, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.2557510148849798, |
| "grad_norm": 0.4552967048568162, |
| "learning_rate": 5.761576606230538e-05, |
| "loss": 0.4438, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.266576454668471, |
| "grad_norm": 0.4974927705723956, |
| "learning_rate": 5.7159439654107506e-05, |
| "loss": 0.4409, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.277401894451962, |
| "grad_norm": 0.4434973022939995, |
| "learning_rate": 5.6700359691402533e-05, |
| "loss": 0.441, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.2882273342354533, |
| "grad_norm": 0.45448947368164877, |
| "learning_rate": 5.6238599842196285e-05, |
| "loss": 0.4449, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.2990527740189446, |
| "grad_norm": 0.4481602723894844, |
| "learning_rate": 5.5774234204532746e-05, |
| "loss": 0.4368, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.3098782138024356, |
| "grad_norm": 0.4642402176711816, |
| "learning_rate": 5.5307337294603595e-05, |
| "loss": 0.4439, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.320703653585927, |
| "grad_norm": 0.38843059591219814, |
| "learning_rate": 5.483798403479072e-05, |
| "loss": 0.4395, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.3315290933694182, |
| "grad_norm": 0.37537289111217187, |
| "learning_rate": 5.436624974164349e-05, |
| "loss": 0.4426, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.3423545331529092, |
| "grad_norm": 0.33092526529989286, |
| "learning_rate": 5.389221011379281e-05, |
| "loss": 0.4397, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.3531799729364005, |
| "grad_norm": 0.3372237449147885, |
| "learning_rate": 5.3415941219803895e-05, |
| "loss": 0.44, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.3640054127198917, |
| "grad_norm": 0.33360582606351236, |
| "learning_rate": 5.2937519485969525e-05, |
| "loss": 0.4392, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.374830852503383, |
| "grad_norm": 0.32080810729314085, |
| "learning_rate": 5.245702168404616e-05, |
| "loss": 0.4321, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.3856562922868743, |
| "grad_norm": 0.33377674127634516, |
| "learning_rate": 5.1974524918934336e-05, |
| "loss": 0.4347, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.3964817320703653, |
| "grad_norm": 0.3265043304083045, |
| "learning_rate": 5.14901066163058e-05, |
| "loss": 0.4338, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.4073071718538566, |
| "grad_norm": 0.405023433971877, |
| "learning_rate": 5.1003844510179126e-05, |
| "loss": 0.4385, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.4181326116373478, |
| "grad_norm": 0.358827748620467, |
| "learning_rate": 5.0515816630445795e-05, |
| "loss": 0.4403, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.4289580514208389, |
| "grad_norm": 0.3971700589793911, |
| "learning_rate": 5.002610129034883e-05, |
| "loss": 0.4416, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.4397834912043301, |
| "grad_norm": 0.5549803120974646, |
| "learning_rate": 4.953477707391597e-05, |
| "loss": 0.4353, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.4506089309878214, |
| "grad_norm": 0.7247883056299059, |
| "learning_rate": 4.90419228233494e-05, |
| "loss": 0.4374, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.4614343707713127, |
| "grad_norm": 0.7920917923543059, |
| "learning_rate": 4.854761762637403e-05, |
| "loss": 0.4379, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.472259810554804, |
| "grad_norm": 0.8481877343768299, |
| "learning_rate": 4.805194080354641e-05, |
| "loss": 0.4371, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.483085250338295, |
| "grad_norm": 0.8347261592537676, |
| "learning_rate": 4.7554971895526175e-05, |
| "loss": 0.4338, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.4939106901217862, |
| "grad_norm": 0.7621817880915904, |
| "learning_rate": 4.705679065031235e-05, |
| "loss": 0.4402, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.5047361299052775, |
| "grad_norm": 0.5219445352980245, |
| "learning_rate": 4.6557477010446206e-05, |
| "loss": 0.4389, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.5155615696887685, |
| "grad_norm": 0.24046891519988917, |
| "learning_rate": 4.605711110018307e-05, |
| "loss": 0.4379, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.5263870094722598, |
| "grad_norm": 0.5809161771814877, |
| "learning_rate": 4.555577321263477e-05, |
| "loss": 0.4345, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.537212449255751, |
| "grad_norm": 0.7872874905802484, |
| "learning_rate": 4.505354379688518e-05, |
| "loss": 0.4355, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.548037889039242, |
| "grad_norm": 0.6611587307034023, |
| "learning_rate": 4.4550503445080606e-05, |
| "loss": 0.4416, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.5588633288227334, |
| "grad_norm": 0.35150127822824473, |
| "learning_rate": 4.4046732879497295e-05, |
| "loss": 0.4333, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.5696887686062246, |
| "grad_norm": 0.25191342410729517, |
| "learning_rate": 4.354231293958801e-05, |
| "loss": 0.4314, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.5805142083897157, |
| "grad_norm": 0.3964569105966751, |
| "learning_rate": 4.3037324569009854e-05, |
| "loss": 0.4315, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.5913396481732072, |
| "grad_norm": 0.4506273296490119, |
| "learning_rate": 4.2531848802635264e-05, |
| "loss": 0.4279, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.6021650879566982, |
| "grad_norm": 0.3792095172740697, |
| "learning_rate": 4.202596675354851e-05, |
| "loss": 0.4476, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.6129905277401895, |
| "grad_norm": 0.25609624572619266, |
| "learning_rate": 4.151975960002958e-05, |
| "loss": 0.4296, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.6238159675236807, |
| "grad_norm": 0.29780854410214036, |
| "learning_rate": 4.101330857252752e-05, |
| "loss": 0.4365, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.6346414073071718, |
| "grad_norm": 0.4435006947829412, |
| "learning_rate": 4.050669494062561e-05, |
| "loss": 0.437, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.645466847090663, |
| "grad_norm": 0.309041123839106, |
| "learning_rate": 4e-05, |
| "loss": 0.4305, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.6562922868741543, |
| "grad_norm": 0.2570236992212162, |
| "learning_rate": 3.9493305059374405e-05, |
| "loss": 0.4433, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.6671177266576453, |
| "grad_norm": 0.32770704547134333, |
| "learning_rate": 3.8986691427472496e-05, |
| "loss": 0.4356, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.6779431664411368, |
| "grad_norm": 0.3077253319134328, |
| "learning_rate": 3.8480240399970436e-05, |
| "loss": 0.4287, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.6887686062246279, |
| "grad_norm": 0.25299607930186807, |
| "learning_rate": 3.7974033246451496e-05, |
| "loss": 0.4368, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.699594046008119, |
| "grad_norm": 0.22312926404966466, |
| "learning_rate": 3.746815119736475e-05, |
| "loss": 0.4229, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.7104194857916104, |
| "grad_norm": 0.2417526738867689, |
| "learning_rate": 3.696267543099016e-05, |
| "loss": 0.4342, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.7212449255751014, |
| "grad_norm": 0.302965747482617, |
| "learning_rate": 3.6457687060412e-05, |
| "loss": 0.4408, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.7320703653585927, |
| "grad_norm": 0.20920899613389016, |
| "learning_rate": 3.595326712050272e-05, |
| "loss": 0.4347, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.742895805142084, |
| "grad_norm": 0.25748536345096484, |
| "learning_rate": 3.5449496554919414e-05, |
| "loss": 0.4316, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.753721244925575, |
| "grad_norm": 0.2442961455059911, |
| "learning_rate": 3.494645620311484e-05, |
| "loss": 0.4369, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.7645466847090663, |
| "grad_norm": 0.2193742738754088, |
| "learning_rate": 3.444422678736525e-05, |
| "loss": 0.4307, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.7753721244925575, |
| "grad_norm": 0.17715305671292078, |
| "learning_rate": 3.394288889981695e-05, |
| "loss": 0.4281, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.7861975642760486, |
| "grad_norm": 0.24502275884699662, |
| "learning_rate": 3.34425229895538e-05, |
| "loss": 0.4355, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.79702300405954, |
| "grad_norm": 0.2192086541292272, |
| "learning_rate": 3.294320934968768e-05, |
| "loss": 0.4282, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.8078484438430311, |
| "grad_norm": 0.21694854030399527, |
| "learning_rate": 3.2445028104473845e-05, |
| "loss": 0.4338, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.8186738836265224, |
| "grad_norm": 0.2255599010213592, |
| "learning_rate": 3.194805919645359e-05, |
| "loss": 0.4306, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.8294993234100136, |
| "grad_norm": 0.16169324703842375, |
| "learning_rate": 3.145238237362596e-05, |
| "loss": 0.4386, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.8403247631935047, |
| "grad_norm": 0.19307322773361396, |
| "learning_rate": 3.0958077176650606e-05, |
| "loss": 0.4296, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.851150202976996, |
| "grad_norm": 0.193815929380636, |
| "learning_rate": 3.0465222926084036e-05, |
| "loss": 0.4258, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.8619756427604872, |
| "grad_norm": 0.17043868718230246, |
| "learning_rate": 2.997389870965118e-05, |
| "loss": 0.427, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.8728010825439783, |
| "grad_norm": 0.23106100080622996, |
| "learning_rate": 2.948418336955421e-05, |
| "loss": 0.4209, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.8836265223274695, |
| "grad_norm": 0.19492435702475458, |
| "learning_rate": 2.899615548982088e-05, |
| "loss": 0.4335, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.8944519621109608, |
| "grad_norm": 0.18373241681022587, |
| "learning_rate": 2.8509893383694213e-05, |
| "loss": 0.4273, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.9052774018944518, |
| "grad_norm": 0.21935000179277805, |
| "learning_rate": 2.8025475081065684e-05, |
| "loss": 0.4226, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.9161028416779433, |
| "grad_norm": 0.2081378352848529, |
| "learning_rate": 2.754297831595385e-05, |
| "loss": 0.424, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.9269282814614344, |
| "grad_norm": 0.2191334718271232, |
| "learning_rate": 2.7062480514030478e-05, |
| "loss": 0.4339, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.9377537212449256, |
| "grad_norm": 0.24225915258539527, |
| "learning_rate": 2.658405878019612e-05, |
| "loss": 0.4279, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.9485791610284169, |
| "grad_norm": 0.18137087946413424, |
| "learning_rate": 2.6107789886207195e-05, |
| "loss": 0.4306, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.959404600811908, |
| "grad_norm": 0.19279540790814204, |
| "learning_rate": 2.563375025835652e-05, |
| "loss": 0.4288, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.9702300405953992, |
| "grad_norm": 0.18225770391566753, |
| "learning_rate": 2.5162015965209295e-05, |
| "loss": 0.4257, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.9810554803788905, |
| "grad_norm": 0.1567588227927359, |
| "learning_rate": 2.4692662705396412e-05, |
| "loss": 0.4332, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.9918809201623815, |
| "grad_norm": 0.18452714103356302, |
| "learning_rate": 2.4225765795467267e-05, |
| "loss": 0.434, |
| "step": 184 |
| }, |
| { |
| "epoch": 2.002706359945873, |
| "grad_norm": 0.3038807377018833, |
| "learning_rate": 2.376140015780372e-05, |
| "loss": 0.6821, |
| "step": 185 |
| }, |
| { |
| "epoch": 2.013531799729364, |
| "grad_norm": 0.28735864762072005, |
| "learning_rate": 2.3299640308597487e-05, |
| "loss": 0.4037, |
| "step": 186 |
| }, |
| { |
| "epoch": 2.024357239512855, |
| "grad_norm": 0.18652870105936184, |
| "learning_rate": 2.2840560345892518e-05, |
| "loss": 0.4023, |
| "step": 187 |
| }, |
| { |
| "epoch": 2.0351826792963466, |
| "grad_norm": 0.26262420244447227, |
| "learning_rate": 2.2384233937694626e-05, |
| "loss": 0.4028, |
| "step": 188 |
| }, |
| { |
| "epoch": 2.0460081190798376, |
| "grad_norm": 0.21554665169315776, |
| "learning_rate": 2.1930734310150116e-05, |
| "loss": 0.3969, |
| "step": 189 |
| }, |
| { |
| "epoch": 2.0568335588633286, |
| "grad_norm": 0.2365327578302819, |
| "learning_rate": 2.1480134235795173e-05, |
| "loss": 0.4016, |
| "step": 190 |
| }, |
| { |
| "epoch": 2.06765899864682, |
| "grad_norm": 0.18757983371061362, |
| "learning_rate": 2.10325060218782e-05, |
| "loss": 0.4064, |
| "step": 191 |
| }, |
| { |
| "epoch": 2.078484438430311, |
| "grad_norm": 0.26228528185975086, |
| "learning_rate": 2.0587921498756768e-05, |
| "loss": 0.4103, |
| "step": 192 |
| }, |
| { |
| "epoch": 2.089309878213802, |
| "grad_norm": 0.21513017222473524, |
| "learning_rate": 2.014645200837105e-05, |
| "loss": 0.403, |
| "step": 193 |
| }, |
| { |
| "epoch": 2.1001353179972937, |
| "grad_norm": 0.21886351501773119, |
| "learning_rate": 1.9708168392795718e-05, |
| "loss": 0.3949, |
| "step": 194 |
| }, |
| { |
| "epoch": 2.1109607577807847, |
| "grad_norm": 0.21693742682509493, |
| "learning_rate": 1.9273140982871936e-05, |
| "loss": 0.4042, |
| "step": 195 |
| }, |
| { |
| "epoch": 2.121786197564276, |
| "grad_norm": 0.23546290407137777, |
| "learning_rate": 1.8841439586921515e-05, |
| "loss": 0.4029, |
| "step": 196 |
| }, |
| { |
| "epoch": 2.1326116373477673, |
| "grad_norm": 0.21178864909912662, |
| "learning_rate": 1.841313347954482e-05, |
| "loss": 0.398, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.1434370771312583, |
| "grad_norm": 0.18726986326173004, |
| "learning_rate": 1.7988291390504348e-05, |
| "loss": 0.3971, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.15426251691475, |
| "grad_norm": 0.18271534224017974, |
| "learning_rate": 1.7566981493695828e-05, |
| "loss": 0.404, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.165087956698241, |
| "grad_norm": 0.2147844040709664, |
| "learning_rate": 1.71492713962083e-05, |
| "loss": 0.4065, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.175913396481732, |
| "grad_norm": 0.13272282917253114, |
| "learning_rate": 1.673522812747544e-05, |
| "loss": 0.4019, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.1867388362652234, |
| "grad_norm": 0.1639997741865366, |
| "learning_rate": 1.6324918128519306e-05, |
| "loss": 0.3989, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.1975642760487144, |
| "grad_norm": 0.17720118189615242, |
| "learning_rate": 1.5918407241288678e-05, |
| "loss": 0.4055, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.208389715832206, |
| "grad_norm": 0.138122932939748, |
| "learning_rate": 1.5515760698093485e-05, |
| "loss": 0.4039, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.219215155615697, |
| "grad_norm": 0.17125985703791913, |
| "learning_rate": 1.511704311113705e-05, |
| "loss": 0.4016, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.230040595399188, |
| "grad_norm": 0.1587790419110655, |
| "learning_rate": 1.4722318462147844e-05, |
| "loss": 0.4021, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.2408660351826795, |
| "grad_norm": 0.15781237529891373, |
| "learning_rate": 1.4331650092112406e-05, |
| "loss": 0.4065, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.2516914749661705, |
| "grad_norm": 0.15382743811108052, |
| "learning_rate": 1.394510069111112e-05, |
| "loss": 0.3967, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.2625169147496615, |
| "grad_norm": 0.16072099415521288, |
| "learning_rate": 1.3562732288258377e-05, |
| "loss": 0.4022, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.273342354533153, |
| "grad_norm": 0.1263113164901036, |
| "learning_rate": 1.3184606241748857e-05, |
| "loss": 0.3976, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.284167794316644, |
| "grad_norm": 0.15013101349012392, |
| "learning_rate": 1.2810783229011486e-05, |
| "loss": 0.3988, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.2949932341001356, |
| "grad_norm": 0.14736474180918321, |
| "learning_rate": 1.2441323236972536e-05, |
| "loss": 0.3994, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.3058186738836266, |
| "grad_norm": 0.13153103120475906, |
| "learning_rate": 1.2076285552429642e-05, |
| "loss": 0.4068, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.3166441136671176, |
| "grad_norm": 0.13114498339206726, |
| "learning_rate": 1.1715728752538103e-05, |
| "loss": 0.4013, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.3274695534506087, |
| "grad_norm": 0.13515923306986954, |
| "learning_rate": 1.1359710695411086e-05, |
| "loss": 0.4023, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.3382949932341, |
| "grad_norm": 0.13810939452372453, |
| "learning_rate": 1.100828851083521e-05, |
| "loss": 0.408, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.349120433017591, |
| "grad_norm": 0.13827451287992423, |
| "learning_rate": 1.0661518591102973e-05, |
| "loss": 0.4015, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.3599458728010827, |
| "grad_norm": 0.13316022171250266, |
| "learning_rate": 1.0319456581963578e-05, |
| "loss": 0.4094, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.3707713125845737, |
| "grad_norm": 0.13455083798097392, |
| "learning_rate": 9.982157373693502e-06, |
| "loss": 0.4064, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.381596752368065, |
| "grad_norm": 0.13687639487286918, |
| "learning_rate": 9.649675092288366e-06, |
| "loss": 0.3935, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.3924221921515563, |
| "grad_norm": 0.13576731970561884, |
| "learning_rate": 9.322063090777331e-06, |
| "loss": 0.4013, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.4032476319350473, |
| "grad_norm": 0.12695332075481375, |
| "learning_rate": 8.99937394066165e-06, |
| "loss": 0.4074, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.414073071718539, |
| "grad_norm": 0.12992836433950913, |
| "learning_rate": 8.681659423478587e-06, |
| "loss": 0.4068, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.42489851150203, |
| "grad_norm": 0.13614933415740577, |
| "learning_rate": 8.368970522492064e-06, |
| "loss": 0.4038, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.435723951285521, |
| "grad_norm": 0.1299611166958906, |
| "learning_rate": 8.06135741451146e-06, |
| "loss": 0.4018, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.4465493910690124, |
| "grad_norm": 0.12922732037406734, |
| "learning_rate": 7.758869461839808e-06, |
| "loss": 0.4049, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.4573748308525034, |
| "grad_norm": 0.11409530501821215, |
| "learning_rate": 7.461555204352655e-06, |
| "loss": 0.3983, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.4682002706359945, |
| "grad_norm": 0.13109519380435983, |
| "learning_rate": 7.169462351708958e-06, |
| "loss": 0.4004, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.479025710419486, |
| "grad_norm": 0.1096916737922998, |
| "learning_rate": 6.882637775695147e-06, |
| "loss": 0.3968, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.489851150202977, |
| "grad_norm": 0.10726936604984846, |
| "learning_rate": 6.60112750270371e-06, |
| "loss": 0.3982, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.500676589986468, |
| "grad_norm": 0.11479168890681679, |
| "learning_rate": 6.324976706347317e-06, |
| "loss": 0.4073, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.5115020297699595, |
| "grad_norm": 0.10858787032052403, |
| "learning_rate": 6.054229700209959e-06, |
| "loss": 0.4048, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.5223274695534506, |
| "grad_norm": 0.10800689312775234, |
| "learning_rate": 5.788929930735916e-06, |
| "loss": 0.3997, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.533152909336942, |
| "grad_norm": 0.1015329347217822, |
| "learning_rate": 5.529119970258014e-06, |
| "loss": 0.4028, |
| "step": 234 |
| }, |
| { |
| "epoch": 2.543978349120433, |
| "grad_norm": 0.10284628660982224, |
| "learning_rate": 5.274841510166062e-06, |
| "loss": 0.3953, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.554803788903924, |
| "grad_norm": 0.1011980332411963, |
| "learning_rate": 5.026135354216717e-06, |
| "loss": 0.3948, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.565629228687415, |
| "grad_norm": 0.09800734193393426, |
| "learning_rate": 4.783041411985716e-06, |
| "loss": 0.4021, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.5764546684709067, |
| "grad_norm": 0.09700053551886881, |
| "learning_rate": 4.545598692463675e-06, |
| "loss": 0.4007, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.5872801082543977, |
| "grad_norm": 0.10037984607100636, |
| "learning_rate": 4.3138452977963266e-06, |
| "loss": 0.3982, |
| "step": 239 |
| }, |
| { |
| "epoch": 2.598105548037889, |
| "grad_norm": 0.1014350319561999, |
| "learning_rate": 4.087818417170337e-06, |
| "loss": 0.4053, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.60893098782138, |
| "grad_norm": 0.09850776364380359, |
| "learning_rate": 3.867554320845601e-06, |
| "loss": 0.4079, |
| "step": 241 |
| }, |
| { |
| "epoch": 2.6197564276048713, |
| "grad_norm": 0.09833019554471717, |
| "learning_rate": 3.6530883543350038e-06, |
| "loss": 0.4072, |
| "step": 242 |
| }, |
| { |
| "epoch": 2.6305818673883627, |
| "grad_norm": 0.09477548233477305, |
| "learning_rate": 3.4444549327325325e-06, |
| "loss": 0.3981, |
| "step": 243 |
| }, |
| { |
| "epoch": 2.641407307171854, |
| "grad_norm": 0.10863775134112984, |
| "learning_rate": 3.241687535190776e-06, |
| "loss": 0.3965, |
| "step": 244 |
| }, |
| { |
| "epoch": 2.6522327469553453, |
| "grad_norm": 0.1059231743088096, |
| "learning_rate": 3.0448186995485307e-06, |
| "loss": 0.3973, |
| "step": 245 |
| }, |
| { |
| "epoch": 2.6630581867388363, |
| "grad_norm": 0.0984891746140367, |
| "learning_rate": 2.853880017109516e-06, |
| "loss": 0.3981, |
| "step": 246 |
| }, |
| { |
| "epoch": 2.6738836265223274, |
| "grad_norm": 0.09601668207701894, |
| "learning_rate": 2.6689021275729366e-06, |
| "loss": 0.4039, |
| "step": 247 |
| }, |
| { |
| "epoch": 2.6847090663058184, |
| "grad_norm": 0.10112232940930609, |
| "learning_rate": 2.489914714116788e-06, |
| "loss": 0.3992, |
| "step": 248 |
| }, |
| { |
| "epoch": 2.69553450608931, |
| "grad_norm": 0.10142024172351762, |
| "learning_rate": 2.316946498634605e-06, |
| "loss": 0.3952, |
| "step": 249 |
| }, |
| { |
| "epoch": 2.706359945872801, |
| "grad_norm": 0.0969754986462005, |
| "learning_rate": 2.1500252371265253e-06, |
| "loss": 0.4065, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.7171853856562924, |
| "grad_norm": 0.09689017109227589, |
| "learning_rate": 1.989177715245307e-06, |
| "loss": 0.4023, |
| "step": 251 |
| }, |
| { |
| "epoch": 2.7280108254397835, |
| "grad_norm": 0.08996806789637721, |
| "learning_rate": 1.8344297439980475e-06, |
| "loss": 0.4063, |
| "step": 252 |
| }, |
| { |
| "epoch": 2.7388362652232745, |
| "grad_norm": 0.09222182196227491, |
| "learning_rate": 1.685806155604346e-06, |
| "loss": 0.4049, |
| "step": 253 |
| }, |
| { |
| "epoch": 2.749661705006766, |
| "grad_norm": 0.09210760604624076, |
| "learning_rate": 1.5433307995115043e-06, |
| "loss": 0.399, |
| "step": 254 |
| }, |
| { |
| "epoch": 2.760487144790257, |
| "grad_norm": 0.08811433949347404, |
| "learning_rate": 1.4070265385674176e-06, |
| "loss": 0.3946, |
| "step": 255 |
| }, |
| { |
| "epoch": 2.7713125845737485, |
| "grad_norm": 0.09107211149204227, |
| "learning_rate": 1.276915245351833e-06, |
| "loss": 0.3933, |
| "step": 256 |
| }, |
| { |
| "epoch": 2.7821380243572396, |
| "grad_norm": 0.08673352921830904, |
| "learning_rate": 1.1530177986664425e-06, |
| "loss": 0.4071, |
| "step": 257 |
| }, |
| { |
| "epoch": 2.7929634641407306, |
| "grad_norm": 0.08666404171970574, |
| "learning_rate": 1.0353540801845229e-06, |
| "loss": 0.3998, |
| "step": 258 |
| }, |
| { |
| "epoch": 2.803788903924222, |
| "grad_norm": 0.089018119071375, |
| "learning_rate": 9.239429712605274e-07, |
| "loss": 0.3985, |
| "step": 259 |
| }, |
| { |
| "epoch": 2.814614343707713, |
| "grad_norm": 0.08297666448416004, |
| "learning_rate": 8.188023499002206e-07, |
| "loss": 0.4038, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.825439783491204, |
| "grad_norm": 0.08536059982474015, |
| "learning_rate": 7.199490878918314e-07, |
| "loss": 0.3938, |
| "step": 261 |
| }, |
| { |
| "epoch": 2.8362652232746957, |
| "grad_norm": 0.08291161380222195, |
| "learning_rate": 6.273990480986314e-07, |
| "loss": 0.399, |
| "step": 262 |
| }, |
| { |
| "epoch": 2.8470906630581867, |
| "grad_norm": 0.08470090318649044, |
| "learning_rate": 5.411670819134651e-07, |
| "loss": 0.4034, |
| "step": 263 |
| }, |
| { |
| "epoch": 2.8579161028416777, |
| "grad_norm": 0.08630391724430683, |
| "learning_rate": 4.6126702687554483e-07, |
| "loss": 0.3989, |
| "step": 264 |
| }, |
| { |
| "epoch": 2.8687415426251692, |
| "grad_norm": 0.08359943242372093, |
| "learning_rate": 3.8771170444996895e-07, |
| "loss": 0.4029, |
| "step": 265 |
| }, |
| { |
| "epoch": 2.8795669824086603, |
| "grad_norm": 0.08397753527566773, |
| "learning_rate": 3.2051291797027925e-07, |
| "loss": 0.4017, |
| "step": 266 |
| }, |
| { |
| "epoch": 2.8903924221921518, |
| "grad_norm": 0.08527296501600641, |
| "learning_rate": 2.59681450744389e-07, |
| "loss": 0.397, |
| "step": 267 |
| }, |
| { |
| "epoch": 2.901217861975643, |
| "grad_norm": 0.08082037771426254, |
| "learning_rate": 2.0522706432419382e-07, |
| "loss": 0.3986, |
| "step": 268 |
| }, |
| { |
| "epoch": 2.912043301759134, |
| "grad_norm": 0.08133863221177037, |
| "learning_rate": 1.5715849693916264e-07, |
| "loss": 0.4004, |
| "step": 269 |
| }, |
| { |
| "epoch": 2.9228687415426253, |
| "grad_norm": 0.08213439600809583, |
| "learning_rate": 1.1548346209410366e-07, |
| "loss": 0.4017, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.9336941813261164, |
| "grad_norm": 0.0840670086317821, |
| "learning_rate": 8.020864733140343e-08, |
| "loss": 0.3987, |
| "step": 271 |
| }, |
| { |
| "epoch": 2.944519621109608, |
| "grad_norm": 0.08197353679069497, |
| "learning_rate": 5.133971315788966e-08, |
| "loss": 0.4047, |
| "step": 272 |
| }, |
| { |
| "epoch": 2.955345060893099, |
| "grad_norm": 0.08476922218479478, |
| "learning_rate": 2.8881292136468952e-08, |
| "loss": 0.4019, |
| "step": 273 |
| }, |
| { |
| "epoch": 2.96617050067659, |
| "grad_norm": 0.08609409784588085, |
| "learning_rate": 1.2836988142779228e-08, |
| "loss": 0.4006, |
| "step": 274 |
| }, |
| { |
| "epoch": 2.976995940460081, |
| "grad_norm": 0.0821345909006518, |
| "learning_rate": 3.209375786856761e-09, |
| "loss": 0.4089, |
| "step": 275 |
| }, |
| { |
| "epoch": 2.9878213802435725, |
| "grad_norm": 0.07978409748230415, |
| "learning_rate": 0.0, |
| "loss": 0.3962, |
| "step": 276 |
| }, |
| { |
| "epoch": 2.9878213802435725, |
| "step": 276, |
| "total_flos": 7.10230108051551e+18, |
| "train_loss": 0.464179332813491, |
| "train_runtime": 60729.3117, |
| "train_samples_per_second": 2.336, |
| "train_steps_per_second": 0.005 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 276, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 7.10230108051551e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |