| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 690, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.008695652173913044, |
| "grad_norm": 0.732024610042572, |
| "learning_rate": 8.571428571428571e-07, |
| "loss": 2.46012020111084, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.017391304347826087, |
| "grad_norm": 1.1184877157211304, |
| "learning_rate": 2.5714285714285716e-06, |
| "loss": 2.301504135131836, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.02608695652173913, |
| "grad_norm": 3.1666553020477295, |
| "learning_rate": 4.2857142857142855e-06, |
| "loss": 2.9570844173431396, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.034782608695652174, |
| "grad_norm": 0.25166502594947815, |
| "learning_rate": 6e-06, |
| "loss": 1.8961513042449951, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.043478260869565216, |
| "grad_norm": 0.20189805328845978, |
| "learning_rate": 7.714285714285714e-06, |
| "loss": 3.0871455669403076, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.05217391304347826, |
| "grad_norm": 0.20353659987449646, |
| "learning_rate": 9.428571428571428e-06, |
| "loss": 2.292584180831909, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.06086956521739131, |
| "grad_norm": 0.6789005994796753, |
| "learning_rate": 1.1142857142857143e-05, |
| "loss": 2.5501410961151123, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.06956521739130435, |
| "grad_norm": 0.2317086160182953, |
| "learning_rate": 1.2857142857142857e-05, |
| "loss": 1.931159257888794, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.0782608695652174, |
| "grad_norm": 1.7764248847961426, |
| "learning_rate": 1.4571428571428571e-05, |
| "loss": 2.571274518966675, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.08695652173913043, |
| "grad_norm": 0.4352605640888214, |
| "learning_rate": 1.6285714285714283e-05, |
| "loss": 1.9101747274398804, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.09565217391304348, |
| "grad_norm": 0.18403619527816772, |
| "learning_rate": 1.8e-05, |
| "loss": 1.8424170017242432, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.10434782608695652, |
| "grad_norm": 0.16127634048461914, |
| "learning_rate": 1.9714285714285714e-05, |
| "loss": 5.551755905151367, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.11304347826086956, |
| "grad_norm": 0.1956806778907776, |
| "learning_rate": 2.1428571428571428e-05, |
| "loss": 1.828694224357605, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.12173913043478261, |
| "grad_norm": 2.0333733558654785, |
| "learning_rate": 2.3142857142857145e-05, |
| "loss": 2.4214303493499756, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.13043478260869565, |
| "grad_norm": 0.24648341536521912, |
| "learning_rate": 2.485714285714286e-05, |
| "loss": 3.56320858001709, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.1391304347826087, |
| "grad_norm": 0.17006301879882812, |
| "learning_rate": 2.657142857142857e-05, |
| "loss": 1.7686303853988647, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.14782608695652175, |
| "grad_norm": 1.2820721864700317, |
| "learning_rate": 2.8285714285714287e-05, |
| "loss": 1.9711768627166748, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.1565217391304348, |
| "grad_norm": 1.2253106832504272, |
| "learning_rate": 3e-05, |
| "loss": 1.890865683555603, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.16521739130434782, |
| "grad_norm": 0.15150298178195953, |
| "learning_rate": 2.9994720778760907e-05, |
| "loss": 1.682741641998291, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.17391304347826086, |
| "grad_norm": 0.14206662774085999, |
| "learning_rate": 2.9978887486836082e-05, |
| "loss": 1.8588255643844604, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.1826086956521739, |
| "grad_norm": 0.18157699704170227, |
| "learning_rate": 2.9952513235982553e-05, |
| "loss": 1.7579265832901, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.19130434782608696, |
| "grad_norm": 0.13783733546733856, |
| "learning_rate": 2.991561986706388e-05, |
| "loss": 3.504922389984131, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 0.4945126473903656, |
| "learning_rate": 2.9868237931963492e-05, |
| "loss": 1.5656976699829102, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.20869565217391303, |
| "grad_norm": 1.3513635396957397, |
| "learning_rate": 2.9810406668284216e-05, |
| "loss": 1.2987524271011353, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.21739130434782608, |
| "grad_norm": 0.3287999629974365, |
| "learning_rate": 2.9742173966855167e-05, |
| "loss": 1.316739797592163, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.22608695652173913, |
| "grad_norm": 0.6960898041725159, |
| "learning_rate": 2.9663596332072664e-05, |
| "loss": 1.020365595817566, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.23478260869565218, |
| "grad_norm": 0.3621000051498413, |
| "learning_rate": 2.9574738835108235e-05, |
| "loss": 0.9612956047058105, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.24347826086956523, |
| "grad_norm": 0.36931225657463074, |
| "learning_rate": 2.94756750600223e-05, |
| "loss": 1.4254815578460693, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.25217391304347825, |
| "grad_norm": 0.11330584436655045, |
| "learning_rate": 2.9366487042828275e-05, |
| "loss": 1.4714879989624023, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.2608695652173913, |
| "grad_norm": 0.13919749855995178, |
| "learning_rate": 2.9247265203557428e-05, |
| "loss": 1.8500466346740723, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.26956521739130435, |
| "grad_norm": 1.1177152395248413, |
| "learning_rate": 2.9118108271380923e-05, |
| "loss": 1.0420849323272705, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.2782608695652174, |
| "grad_norm": 0.14357954263687134, |
| "learning_rate": 2.8979123202850823e-05, |
| "loss": 1.2155001163482666, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.28695652173913044, |
| "grad_norm": 0.29200610518455505, |
| "learning_rate": 2.8830425093328024e-05, |
| "loss": 0.5274585485458374, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.2956521739130435, |
| "grad_norm": 0.14065444469451904, |
| "learning_rate": 2.8672137081670264e-05, |
| "loss": 0.9751191735267639, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.30434782608695654, |
| "grad_norm": 1.254546046257019, |
| "learning_rate": 2.850439024825919e-05, |
| "loss": 0.45779138803482056, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.3130434782608696, |
| "grad_norm": 2.3761823177337646, |
| "learning_rate": 2.8327323506450993e-05, |
| "loss": 1.0098928213119507, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.3217391304347826, |
| "grad_norm": 0.19490118324756622, |
| "learning_rate": 2.8141083487540432e-05, |
| "loss": 1.4895820617675781, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.33043478260869563, |
| "grad_norm": 1.0330501794815063, |
| "learning_rate": 2.794582441933354e-05, |
| "loss": 0.339144766330719, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.3391304347826087, |
| "grad_norm": 0.17919474840164185, |
| "learning_rate": 2.7741707998429524e-05, |
| "loss": 1.4342169761657715, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.34782608695652173, |
| "grad_norm": 0.189740389585495, |
| "learning_rate": 2.7528903256317714e-05, |
| "loss": 1.676353931427002, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.3565217391304348, |
| "grad_norm": 0.152068629860878, |
| "learning_rate": 2.7307586419400365e-05, |
| "loss": 1.4120248556137085, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.3652173913043478, |
| "grad_norm": 0.11617185920476913, |
| "learning_rate": 2.707794076305724e-05, |
| "loss": 1.4109995365142822, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.3739130434782609, |
| "grad_norm": 0.5076650977134705, |
| "learning_rate": 2.6840156459872887e-05, |
| "loss": 1.8156630992889404, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.3826086956521739, |
| "grad_norm": 0.3797202408313751, |
| "learning_rate": 2.6594430422152184e-05, |
| "loss": 1.2095890045166016, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.391304347826087, |
| "grad_norm": 0.1064012348651886, |
| "learning_rate": 2.6340966138854725e-05, |
| "loss": 1.3283032178878784, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 0.2617345452308655, |
| "learning_rate": 2.607997350708289e-05, |
| "loss": 0.3713761866092682, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.40869565217391307, |
| "grad_norm": 0.14992359280586243, |
| "learning_rate": 2.5811668658263345e-05, |
| "loss": 1.2495781183242798, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.41739130434782606, |
| "grad_norm": 0.8707973957061768, |
| "learning_rate": 2.5536273779165707e-05, |
| "loss": 0.892501950263977, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.4260869565217391, |
| "grad_norm": 0.18348021805286407, |
| "learning_rate": 2.5254016927906833e-05, |
| "loss": 0.99811190366745, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.43478260869565216, |
| "grad_norm": 0.28490695357322693, |
| "learning_rate": 2.4965131845092846e-05, |
| "loss": 0.9439688324928284, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.4434782608695652, |
| "grad_norm": 0.19018062949180603, |
| "learning_rate": 2.466985776025548e-05, |
| "loss": 1.434984803199768, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.45217391304347826, |
| "grad_norm": 0.3262655436992645, |
| "learning_rate": 2.4368439193742983e-05, |
| "loss": 1.6902875900268555, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.4608695652173913, |
| "grad_norm": 0.1134955883026123, |
| "learning_rate": 2.4061125754229576e-05, |
| "loss": 0.3920702636241913, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.46956521739130436, |
| "grad_norm": 0.15943671762943268, |
| "learning_rate": 2.3748171932011248e-05, |
| "loss": 1.1952660083770752, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.4782608695652174, |
| "grad_norm": 0.17980021238327026, |
| "learning_rate": 2.3429836888259e-05, |
| "loss": 1.0627365112304688, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.48695652173913045, |
| "grad_norm": 0.20388175547122955, |
| "learning_rate": 2.310638424040406e-05, |
| "loss": 1.4079805612564087, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.4956521739130435, |
| "grad_norm": 0.0773826614022255, |
| "learning_rate": 2.2778081843832856e-05, |
| "loss": 0.8712098002433777, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.5043478260869565, |
| "grad_norm": 0.1742291897535324, |
| "learning_rate": 2.2445201570072414e-05, |
| "loss": 1.3538367748260498, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.5130434782608696, |
| "grad_norm": 0.15458093583583832, |
| "learning_rate": 2.2108019081649993e-05, |
| "loss": 1.612799048423767, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.5217391304347826, |
| "grad_norm": 0.5128660202026367, |
| "learning_rate": 2.1766813603813334e-05, |
| "loss": 0.8653756380081177, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.5304347826086957, |
| "grad_norm": 0.6170827150344849, |
| "learning_rate": 2.1421867693300552e-05, |
| "loss": 0.7663818597793579, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.5391304347826087, |
| "grad_norm": 0.17489704489707947, |
| "learning_rate": 2.1073467004351202e-05, |
| "loss": 0.7496420741081238, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.5478260869565217, |
| "grad_norm": 0.7850053906440735, |
| "learning_rate": 2.0721900052152227e-05, |
| "loss": 0.7726188898086548, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.5565217391304348, |
| "grad_norm": 0.09144008904695511, |
| "learning_rate": 2.036745797391477e-05, |
| "loss": 0.7957323789596558, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.5652173913043478, |
| "grad_norm": 0.11362311989068985, |
| "learning_rate": 2.0010434287779607e-05, |
| "loss": 1.3306431770324707, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.5739130434782609, |
| "grad_norm": 0.13353990018367767, |
| "learning_rate": 1.965112464975093e-05, |
| "loss": 1.3570564985275269, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.5826086956521739, |
| "grad_norm": 0.16489805281162262, |
| "learning_rate": 1.928982660885971e-05, |
| "loss": 1.3512859344482422, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.591304347826087, |
| "grad_norm": 0.12622074782848358, |
| "learning_rate": 1.8926839360759467e-05, |
| "loss": 1.3253053426742554, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 0.18052835762500763, |
| "learning_rate": 1.856246349995838e-05, |
| "loss": 1.363409161567688, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.6086956521739131, |
| "grad_norm": 1.0225669145584106, |
| "learning_rate": 1.8197000770893056e-05, |
| "loss": 0.9811846613883972, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.6173913043478261, |
| "grad_norm": 0.12899279594421387, |
| "learning_rate": 1.7830753818049987e-05, |
| "loss": 1.3022255897521973, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.6260869565217392, |
| "grad_norm": 0.15879316627979279, |
| "learning_rate": 1.7464025935341674e-05, |
| "loss": 0.7987528443336487, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.6347826086956522, |
| "grad_norm": 0.15264292061328888, |
| "learning_rate": 1.709712081494495e-05, |
| "loss": 0.790221095085144, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.6434782608695652, |
| "grad_norm": 0.24214515089988708, |
| "learning_rate": 1.673034229580953e-05, |
| "loss": 0.8017470240592957, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.6521739130434783, |
| "grad_norm": 0.6817665696144104, |
| "learning_rate": 1.6363994112044934e-05, |
| "loss": 0.8057210445404053, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.6608695652173913, |
| "grad_norm": 0.2915583848953247, |
| "learning_rate": 1.5998379641394322e-05, |
| "loss": 0.8728779554367065, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.6695652173913044, |
| "grad_norm": 0.22204452753067017, |
| "learning_rate": 1.5633801654003383e-05, |
| "loss": 1.3450850248336792, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.6782608695652174, |
| "grad_norm": 0.13754141330718994, |
| "learning_rate": 1.5270562061692413e-05, |
| "loss": 1.295073390007019, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.6869565217391305, |
| "grad_norm": 0.1301906257867813, |
| "learning_rate": 1.4908961667939148e-05, |
| "loss": 1.3460811376571655, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.6956521739130435, |
| "grad_norm": 0.2238963395357132, |
| "learning_rate": 1.4549299918779476e-05, |
| "loss": 1.4722148180007935, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.7043478260869566, |
| "grad_norm": 0.1082434207201004, |
| "learning_rate": 1.4191874654832224e-05, |
| "loss": 1.301584243774414, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.7130434782608696, |
| "grad_norm": 0.29728466272354126, |
| "learning_rate": 1.3836981864653448e-05, |
| "loss": 0.8648190498352051, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.7217391304347827, |
| "grad_norm": 0.12261836230754852, |
| "learning_rate": 1.3484915439624399e-05, |
| "loss": 1.2689242362976074, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.7304347826086957, |
| "grad_norm": 0.09543546289205551, |
| "learning_rate": 1.3135966930576233e-05, |
| "loss": 0.9420109987258911, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.7391304347826086, |
| "grad_norm": 0.1176115870475769, |
| "learning_rate": 1.2790425306352913e-05, |
| "loss": 1.3050771951675415, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.7478260869565218, |
| "grad_norm": 0.16124227643013, |
| "learning_rate": 1.2448576714512348e-05, |
| "loss": 1.160625696182251, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.7565217391304347, |
| "grad_norm": 0.45197612047195435, |
| "learning_rate": 1.2110704244363801e-05, |
| "loss": 0.5953898429870605, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.7652173913043478, |
| "grad_norm": 0.09739059209823608, |
| "learning_rate": 1.1777087692537941e-05, |
| "loss": 0.6910260915756226, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.7739130434782608, |
| "grad_norm": 0.1326562613248825, |
| "learning_rate": 1.1448003331283567e-05, |
| "loss": 1.332924723625183, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.782608695652174, |
| "grad_norm": 0.3839710056781769, |
| "learning_rate": 1.1123723679682947e-05, |
| "loss": 0.37987953424453735, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.7913043478260869, |
| "grad_norm": 0.20606131851673126, |
| "learning_rate": 1.0804517277975145e-05, |
| "loss": 0.6722633242607117, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.2714339792728424, |
| "learning_rate": 1.0490648465174374e-05, |
| "loss": 0.6781936883926392, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.808695652173913, |
| "grad_norm": 0.12319911271333694, |
| "learning_rate": 1.0182377160167355e-05, |
| "loss": 1.3093880414962769, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.8173913043478261, |
| "grad_norm": 0.19812533259391785, |
| "learning_rate": 9.87995864647107e-06, |
| "loss": 1.0597083568572998, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.8260869565217391, |
| "grad_norm": 0.18480421602725983, |
| "learning_rate": 9.583643360829146e-06, |
| "loss": 1.113531231880188, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.8347826086956521, |
| "grad_norm": 0.170735701918602, |
| "learning_rate": 9.293676685821879e-06, |
| "loss": 0.7899190783500671, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.8434782608695652, |
| "grad_norm": 0.16350802779197693, |
| "learning_rate": 9.010298746661722e-06, |
| "loss": 1.299129605293274, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.8521739130434782, |
| "grad_norm": 0.18414396047592163, |
| "learning_rate": 8.733744212342408e-06, |
| "loss": 1.3935397863388062, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.8608695652173913, |
| "grad_norm": 0.2395719736814499, |
| "learning_rate": 8.46424210130651e-06, |
| "loss": 1.3050575256347656, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.8695652173913043, |
| "grad_norm": 0.33917221426963806, |
| "learning_rate": 8.202015591792223e-06, |
| "loss": 0.6221288442611694, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.8782608695652174, |
| "grad_norm": 0.24979734420776367, |
| "learning_rate": 7.947281837016573e-06, |
| "loss": 0.8170241117477417, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.8869565217391304, |
| "grad_norm": 0.06852871179580688, |
| "learning_rate": 7.70025178534795e-06, |
| "loss": 0.729548990726471, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.8956521739130435, |
| "grad_norm": 3.180227756500244, |
| "learning_rate": 7.461130005617023e-06, |
| "loss": 0.3102116882801056, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.9043478260869565, |
| "grad_norm": 0.5475708246231079, |
| "learning_rate": 7.230114517710568e-06, |
| "loss": 1.066127061843872, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.9130434782608695, |
| "grad_norm": 0.21181726455688477, |
| "learning_rate": 7.007396628588649e-06, |
| "loss": 1.1609755754470825, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.9217391304347826, |
| "grad_norm": 0.3930780589580536, |
| "learning_rate": 6.793160773860759e-06, |
| "loss": 1.2955363988876343, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.9304347826086956, |
| "grad_norm": 0.2640330195426941, |
| "learning_rate": 6.587584365052308e-06, |
| "loss": 0.22745762765407562, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.9391304347826087, |
| "grad_norm": 0.5837603807449341, |
| "learning_rate": 6.390837642687785e-06, |
| "loss": 0.25208932161331177, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.9478260869565217, |
| "grad_norm": 0.09962380677461624, |
| "learning_rate": 6.203083535312356e-06, |
| "loss": 0.7900155186653137, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.9565217391304348, |
| "grad_norm": 0.6199167966842651, |
| "learning_rate": 6.024477524568591e-06, |
| "loss": 0.8592709302902222, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.9652173913043478, |
| "grad_norm": 0.2959963381290436, |
| "learning_rate": 5.855167516440076e-06, |
| "loss": 1.3669737577438354, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.9739130434782609, |
| "grad_norm": 0.13669747114181519, |
| "learning_rate": 5.695293718768518e-06, |
| "loss": 1.4519469738006592, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.9826086956521739, |
| "grad_norm": 0.15183264017105103, |
| "learning_rate": 5.5449885251458e-06, |
| "loss": 1.3087224960327148, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.991304347826087, |
| "grad_norm": 0.2697318196296692, |
| "learning_rate": 5.4043764052771e-06, |
| "loss": 1.3946113586425781, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.14553771913051605, |
| "learning_rate": 5.273573801905901e-06, |
| "loss": 1.330439805984497, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.008695652173913, |
| "grad_norm": 0.16245104372501373, |
| "learning_rate": 5.152689034386213e-06, |
| "loss": 1.014844536781311, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.017391304347826, |
| "grad_norm": 0.2378685176372528, |
| "learning_rate": 5.041822208981912e-06, |
| "loss": 0.32737967371940613, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.0260869565217392, |
| "grad_norm": 0.20699624717235565, |
| "learning_rate": 4.941065135967408e-06, |
| "loss": 0.769230842590332, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.0347826086956522, |
| "grad_norm": 0.12227105349302292, |
| "learning_rate": 4.850501253598363e-06, |
| "loss": 1.2364915609359741, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.0434782608695652, |
| "grad_norm": 0.17606157064437866, |
| "learning_rate": 4.770205559015373e-06, |
| "loss": 1.2600164413452148, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.0521739130434782, |
| "grad_norm": 0.24245694279670715, |
| "learning_rate": 4.700244546137851e-06, |
| "loss": 0.7779691815376282, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.0608695652173914, |
| "grad_norm": 0.2091401368379593, |
| "learning_rate": 4.640676150599536e-06, |
| "loss": 1.0165550708770752, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.0695652173913044, |
| "grad_norm": 0.12267211079597473, |
| "learning_rate": 4.5915497017712385e-06, |
| "loss": 1.0626590251922607, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.0782608695652174, |
| "grad_norm": 0.1428578644990921, |
| "learning_rate": 4.552905881910531e-06, |
| "loss": 0.778892993927002, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.0869565217391304, |
| "grad_norm": 0.12415514886379242, |
| "learning_rate": 4.524776692472253e-06, |
| "loss": 0.8480257391929626, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.0956521739130434, |
| "grad_norm": 0.311179518699646, |
| "learning_rate": 4.507185427607667e-06, |
| "loss": 0.15872906148433685, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.1043478260869566, |
| "grad_norm": 0.26307570934295654, |
| "learning_rate": 4.500146654874281e-06, |
| "loss": 1.3229949474334717, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.1130434782608696, |
| "grad_norm": 0.12678803503513336, |
| "learning_rate": 4.50366620317225e-06, |
| "loss": 1.2751566171646118, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.1217391304347826, |
| "grad_norm": 0.12393500655889511, |
| "learning_rate": 4.517741157917401e-06, |
| "loss": 0.7979919910430908, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.1304347826086956, |
| "grad_norm": 0.11631522327661514, |
| "learning_rate": 4.542359863454825e-06, |
| "loss": 1.2487938404083252, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.1391304347826088, |
| "grad_norm": 0.1201433464884758, |
| "learning_rate": 4.577501932711088e-06, |
| "loss": 1.503760814666748, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.1478260869565218, |
| "grad_norm": 0.16790546476840973, |
| "learning_rate": 4.623138264077027e-06, |
| "loss": 1.349209189414978, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.1565217391304348, |
| "grad_norm": 0.12929458916187286, |
| "learning_rate": 4.679231065507181e-06, |
| "loss": 1.3860739469528198, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.1652173913043478, |
| "grad_norm": 1.2671191692352295, |
| "learning_rate": 4.745733885815875e-06, |
| "loss": 0.7751848101615906, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.1739130434782608, |
| "grad_norm": 2.0448155403137207, |
| "learning_rate": 4.822591653144063e-06, |
| "loss": 0.8347904086112976, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.182608695652174, |
| "grad_norm": 0.12114833295345306, |
| "learning_rate": 4.90974072056505e-06, |
| "loss": 1.281104564666748, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.191304347826087, |
| "grad_norm": 0.1411694884300232, |
| "learning_rate": 5.0071089187913535e-06, |
| "loss": 1.2953712940216064, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.2, |
| "grad_norm": 0.12283624708652496, |
| "learning_rate": 5.114615615939042e-06, |
| "loss": 1.0042020082473755, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.208695652173913, |
| "grad_norm": 0.5199421048164368, |
| "learning_rate": 5.232171784300044e-06, |
| "loss": 0.8485981225967407, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.2173913043478262, |
| "grad_norm": 0.25646281242370605, |
| "learning_rate": 5.359680074067178e-06, |
| "loss": 0.7609320282936096, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.2260869565217392, |
| "grad_norm": 0.17641501128673553, |
| "learning_rate": 5.4970348939507965e-06, |
| "loss": 1.2442917823791504, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.2347826086956522, |
| "grad_norm": 0.12078163027763367, |
| "learning_rate": 5.644122498620322e-06, |
| "loss": 0.7213406562805176, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.2434782608695651, |
| "grad_norm": 0.2121657282114029, |
| "learning_rate": 5.800821082898257e-06, |
| "loss": 1.2524417638778687, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.2521739130434781, |
| "grad_norm": 0.17032301425933838, |
| "learning_rate": 5.9670008826286325e-06, |
| "loss": 0.7535306215286255, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.2608695652173914, |
| "grad_norm": 0.1459963470697403, |
| "learning_rate": 6.142524282136437e-06, |
| "loss": 1.0591073036193848, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.2695652173913043, |
| "grad_norm": 0.10334640741348267, |
| "learning_rate": 6.327245928188936e-06, |
| "loss": 1.2697545289993286, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.2782608695652173, |
| "grad_norm": 0.6387373805046082, |
| "learning_rate": 6.52101285036462e-06, |
| "loss": 0.8120895624160767, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.2869565217391306, |
| "grad_norm": 0.09141097217798233, |
| "learning_rate": 6.7236645877299985e-06, |
| "loss": 0.16801220178604126, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.2956521739130435, |
| "grad_norm": 0.14008603990077972, |
| "learning_rate": 6.935033321719419e-06, |
| "loss": 0.6829316020011902, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.3043478260869565, |
| "grad_norm": 0.13336972892284393, |
| "learning_rate": 7.15494401510782e-06, |
| "loss": 0.24555981159210205, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.3130434782608695, |
| "grad_norm": 0.15153667330741882, |
| "learning_rate": 7.383214556961368e-06, |
| "loss": 0.8401697874069214, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.3217391304347825, |
| "grad_norm": 0.39388230443000793, |
| "learning_rate": 7.619655913445883e-06, |
| "loss": 1.6956262588500977, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.3304347826086955, |
| "grad_norm": 0.0983978733420372, |
| "learning_rate": 7.864072284368284e-06, |
| "loss": 0.7533000111579895, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.3391304347826087, |
| "grad_norm": 0.27145856618881226, |
| "learning_rate": 8.11626126532126e-06, |
| "loss": 1.0896899700164795, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.3478260869565217, |
| "grad_norm": 0.16908416152000427, |
| "learning_rate": 8.376014015297057e-06, |
| "loss": 1.1746755838394165, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.3565217391304347, |
| "grad_norm": 0.5621324777603149, |
| "learning_rate": 8.643115429631413e-06, |
| "loss": 0.28953975439071655, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.365217391304348, |
| "grad_norm": 0.1343858242034912, |
| "learning_rate": 8.917344318134601e-06, |
| "loss": 1.2791374921798706, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.373913043478261, |
| "grad_norm": 0.12179627269506454, |
| "learning_rate": 9.198473588261906e-06, |
| "loss": 1.5186599493026733, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.382608695652174, |
| "grad_norm": 0.6460802555084229, |
| "learning_rate": 9.48627043317196e-06, |
| "loss": 0.7900709509849548, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.391304347826087, |
| "grad_norm": 0.1162571832537651, |
| "learning_rate": 9.780496524517107e-06, |
| "loss": 1.5395351648330688, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.4, |
| "grad_norm": 0.26111987233161926, |
| "learning_rate": 1.0080908209806292e-05, |
| "loss": 0.7073770761489868, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.4086956521739131, |
| "grad_norm": 0.21174176037311554, |
| "learning_rate": 1.0387256714176855e-05, |
| "loss": 1.3696300983428955, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.4173913043478261, |
| "grad_norm": 0.18966367840766907, |
| "learning_rate": 1.0699288346408284e-05, |
| "loss": 0.7893473505973816, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.4260869565217391, |
| "grad_norm": 0.28794315457344055, |
| "learning_rate": 1.1016744709007203e-05, |
| "loss": 1.0100047588348389, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.434782608695652, |
| "grad_norm": 0.26700359582901, |
| "learning_rate": 1.1339362912189803e-05, |
| "loss": 0.8350800275802612, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.4434782608695653, |
| "grad_norm": 0.34453895688056946, |
| "learning_rate": 1.1666875791584241e-05, |
| "loss": 1.2977509498596191, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.4521739130434783, |
| "grad_norm": 0.12350393086671829, |
| "learning_rate": 1.1999012129473044e-05, |
| "loss": 0.7434523105621338, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.4608695652173913, |
| "grad_norm": 0.12331763654947281, |
| "learning_rate": 1.2335496879392006e-05, |
| "loss": 0.9966728091239929, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.4695652173913043, |
| "grad_norm": 0.12660405039787292, |
| "learning_rate": 1.267605139389983e-05, |
| "loss": 1.2345221042633057, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.4782608695652173, |
| "grad_norm": 0.5481416583061218, |
| "learning_rate": 1.3020393655329656e-05, |
| "loss": 0.4076097905635834, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.4869565217391305, |
| "grad_norm": 0.1473967432975769, |
| "learning_rate": 1.3368238509331685e-05, |
| "loss": 1.0471733808517456, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.4956521739130435, |
| "grad_norm": 0.14102469384670258, |
| "learning_rate": 1.3719297901013182e-05, |
| "loss": 1.2860357761383057, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.5043478260869565, |
| "grad_norm": 0.29168060421943665, |
| "learning_rate": 1.4073281113480556e-05, |
| "loss": 1.0981141328811646, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.5130434782608697, |
| "grad_norm": 0.12084399163722992, |
| "learning_rate": 1.4429895008585773e-05, |
| "loss": 1.5693000555038452, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.5217391304347827, |
| "grad_norm": 0.3121376931667328, |
| "learning_rate": 1.4788844269678e-05, |
| "loss": 0.15966485440731049, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.5304347826086957, |
| "grad_norm": 0.7553597092628479, |
| "learning_rate": 1.514983164615912e-05, |
| "loss": 0.7595433592796326, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.5391304347826087, |
| "grad_norm": 0.4193861484527588, |
| "learning_rate": 1.5512558199640922e-05, |
| "loss": 0.8960363268852234, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.5478260869565217, |
| "grad_norm": 2.7174839973449707, |
| "learning_rate": 1.587672355149983e-05, |
| "loss": 0.7889991998672485, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.5565217391304347, |
| "grad_norm": 0.24500806629657745, |
| "learning_rate": 1.6242026131624477e-05, |
| "loss": 0.74623703956604, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.5652173913043477, |
| "grad_norm": 0.12118079513311386, |
| "learning_rate": 1.6608163428149812e-05, |
| "loss": 1.2382371425628662, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.5739130434782609, |
| "grad_norm": 0.21043698489665985, |
| "learning_rate": 1.697483223797118e-05, |
| "loss": 0.7253771424293518, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.5826086956521739, |
| "grad_norm": 0.1428501456975937, |
| "learning_rate": 1.734172891783075e-05, |
| "loss": 0.6049482822418213, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.591304347826087, |
| "grad_norm": 0.24034124612808228, |
| "learning_rate": 1.7708549635768553e-05, |
| "loss": 0.658636212348938, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.15072523057460785, |
| "learning_rate": 1.8074990622729676e-05, |
| "loss": 1.2467550039291382, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.608695652173913, |
| "grad_norm": 0.29888659715652466, |
| "learning_rate": 1.8440748424119435e-05, |
| "loss": 0.332572340965271, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.617391304347826, |
| "grad_norm": 0.4121609926223755, |
| "learning_rate": 1.8805520151098043e-05, |
| "loss": 0.7649952173233032, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.626086956521739, |
| "grad_norm": 0.14216536283493042, |
| "learning_rate": 1.916900373140691e-05, |
| "loss": 0.8258912563323975, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.634782608695652, |
| "grad_norm": 0.12106224149465561, |
| "learning_rate": 1.9530898159518646e-05, |
| "loss": 1.243992805480957, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.643478260869565, |
| "grad_norm": 0.1799318641424179, |
| "learning_rate": 1.9890903745903662e-05, |
| "loss": 0.7369431853294373, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.6521739130434783, |
| "grad_norm": 0.23679573833942413, |
| "learning_rate": 2.0248722365206985e-05, |
| "loss": 1.2425155639648438, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.6608695652173913, |
| "grad_norm": 0.16253520548343658, |
| "learning_rate": 2.060405770312981e-05, |
| "loss": 0.8069741129875183, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.6695652173913045, |
| "grad_norm": 0.12619397044181824, |
| "learning_rate": 2.0956615501811323e-05, |
| "loss": 0.7547456622123718, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.6782608695652175, |
| "grad_norm": 0.2577739953994751, |
| "learning_rate": 2.1306103803507364e-05, |
| "loss": 1.0529491901397705, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.6869565217391305, |
| "grad_norm": 0.32050907611846924, |
| "learning_rate": 2.1652233192364604e-05, |
| "loss": 1.4707834720611572, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.6956521739130435, |
| "grad_norm": 0.1691839098930359, |
| "learning_rate": 2.1994717034089493e-05, |
| "loss": 0.7649831771850586, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.7043478260869565, |
| "grad_norm": 0.11284446716308594, |
| "learning_rate": 2.2333271713314037e-05, |
| "loss": 1.237856388092041, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.7130434782608694, |
| "grad_norm": 0.15681013464927673, |
| "learning_rate": 2.2667616868461238e-05, |
| "loss": 1.2670764923095703, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.7217391304347827, |
| "grad_norm": 0.40354123711586, |
| "learning_rate": 2.299747562391633e-05, |
| "loss": 1.2364072799682617, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.7304347826086957, |
| "grad_norm": 0.1340729296207428, |
| "learning_rate": 2.332257481931103e-05, |
| "loss": 0.7167130708694458, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.7391304347826086, |
| "grad_norm": 0.11962085962295532, |
| "learning_rate": 2.3642645235731285e-05, |
| "loss": 0.7211905717849731, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.7478260869565219, |
| "grad_norm": 0.1367277204990387, |
| "learning_rate": 2.3957421818660958e-05, |
| "loss": 0.7387893795967102, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.7565217391304349, |
| "grad_norm": 0.12049642205238342, |
| "learning_rate": 2.4266643897476975e-05, |
| "loss": 1.2366782426834106, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.7652173913043478, |
| "grad_norm": 0.19714173674583435, |
| "learning_rate": 2.457005540131405e-05, |
| "loss": 0.7809978127479553, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.7739130434782608, |
| "grad_norm": 0.18713834881782532, |
| "learning_rate": 2.4867405071120378e-05, |
| "loss": 1.1861432790756226, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.7826086956521738, |
| "grad_norm": 0.18612703680992126, |
| "learning_rate": 2.5158446667728563e-05, |
| "loss": 1.2835782766342163, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.7913043478260868, |
| "grad_norm": 0.5197505354881287, |
| "learning_rate": 2.5442939175769457e-05, |
| "loss": 0.26566827297210693, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.16716937720775604, |
| "learning_rate": 2.5720647003260177e-05, |
| "loss": 1.253092646598816, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.808695652173913, |
| "grad_norm": 0.16784952580928802, |
| "learning_rate": 2.5991340176700945e-05, |
| "loss": 1.298421859741211, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.8173913043478263, |
| "grad_norm": 0.12985093891620636, |
| "learning_rate": 2.6254794531519122e-05, |
| "loss": 1.5727579593658447, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.8260869565217392, |
| "grad_norm": 0.30004414916038513, |
| "learning_rate": 2.6510791897702884e-05, |
| "loss": 1.372650146484375, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.8347826086956522, |
| "grad_norm": 0.16396737098693848, |
| "learning_rate": 2.675912028047062e-05, |
| "loss": 1.5493252277374268, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.8434782608695652, |
| "grad_norm": 0.14453765749931335, |
| "learning_rate": 2.6999574035826618e-05, |
| "loss": 0.7752541899681091, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.8521739130434782, |
| "grad_norm": 0.3888326585292816, |
| "learning_rate": 2.7231954040857604e-05, |
| "loss": 1.3314841985702515, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.8608695652173912, |
| "grad_norm": 0.4329335391521454, |
| "learning_rate": 2.745606785862895e-05, |
| "loss": 0.36442431807518005, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.8695652173913042, |
| "grad_norm": 0.19990986585617065, |
| "learning_rate": 2.767172989754432e-05, |
| "loss": 0.6660992503166199, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.8782608695652174, |
| "grad_norm": 0.11110001057386398, |
| "learning_rate": 2.7878761565036436e-05, |
| "loss": 1.5394024848937988, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.8869565217391304, |
| "grad_norm": 0.1121024340391159, |
| "learning_rate": 2.807699141546199e-05, |
| "loss": 0.7698279619216919, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.8956521739130436, |
| "grad_norm": 0.15248660743236542, |
| "learning_rate": 2.826625529207797e-05, |
| "loss": 1.2329353094100952, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.9043478260869566, |
| "grad_norm": 0.10831912606954575, |
| "learning_rate": 2.844639646298208e-05, |
| "loss": 1.2454890012741089, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.9130434782608696, |
| "grad_norm": 0.1683008074760437, |
| "learning_rate": 2.8617265750904434e-05, |
| "loss": 1.2287697792053223, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.9217391304347826, |
| "grad_norm": 0.08259912580251694, |
| "learning_rate": 2.8778721656743252e-05, |
| "loss": 0.7477931976318359, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.9304347826086956, |
| "grad_norm": 0.11279872059822083, |
| "learning_rate": 2.893063047674209e-05, |
| "loss": 1.2804430723190308, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.9391304347826086, |
| "grad_norm": 0.16371870040893555, |
| "learning_rate": 2.90728664132117e-05, |
| "loss": 0.7825669646263123, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.9478260869565216, |
| "grad_norm": 0.08895144611597061, |
| "learning_rate": 2.9205311678704742e-05, |
| "loss": 0.6581708192825317, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.9565217391304348, |
| "grad_norm": 0.3610806465148926, |
| "learning_rate": 2.932785659355714e-05, |
| "loss": 0.7231730222702026, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.9652173913043478, |
| "grad_norm": 0.15365619957447052, |
| "learning_rate": 2.944039967671519e-05, |
| "loss": 0.7730036973953247, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.973913043478261, |
| "grad_norm": 0.13084781169891357, |
| "learning_rate": 2.9542847729773423e-05, |
| "loss": 0.736638069152832, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.982608695652174, |
| "grad_norm": 0.18188992142677307, |
| "learning_rate": 2.9635115914153388e-05, |
| "loss": 0.9782851934432983, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.991304347826087, |
| "grad_norm": 0.18510757386684418, |
| "learning_rate": 2.971712782135961e-05, |
| "loss": 1.236344337463379, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.4175490140914917, |
| "learning_rate": 2.978881553625446e-05, |
| "loss": 1.1338064670562744, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.008695652173913, |
| "grad_norm": 0.18492577970027924, |
| "learning_rate": 2.9850119693299577e-05, |
| "loss": 1.1988855600357056, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.017391304347826, |
| "grad_norm": 0.24838964641094208, |
| "learning_rate": 2.9900989525717243e-05, |
| "loss": 0.1962333619594574, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.026086956521739, |
| "grad_norm": 0.9311656951904297, |
| "learning_rate": 2.9941382907531037e-05, |
| "loss": 0.12543362379074097, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.034782608695652, |
| "grad_norm": 0.22585399448871613, |
| "learning_rate": 2.9971266388450884e-05, |
| "loss": 0.7844517827033997, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.0434782608695654, |
| "grad_norm": 0.29823383688926697, |
| "learning_rate": 2.9990615221573707e-05, |
| "loss": 0.7644298076629639, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.0521739130434784, |
| "grad_norm": 0.1510343700647354, |
| "learning_rate": 2.999941338387663e-05, |
| "loss": 1.479148268699646, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.0608695652173914, |
| "grad_norm": 0.7294149994850159, |
| "learning_rate": 2.999765358948589e-05, |
| "loss": 0.2585853636264801, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.0695652173913044, |
| "grad_norm": 0.1448172628879547, |
| "learning_rate": 2.998533729571033e-05, |
| "loss": 1.2844960689544678, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.0782608695652174, |
| "grad_norm": 0.25449809432029724, |
| "learning_rate": 2.9962474701834602e-05, |
| "loss": 0.7490485906600952, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.0869565217391304, |
| "grad_norm": 0.1117057353258133, |
| "learning_rate": 2.9929084740673027e-05, |
| "loss": 1.1616085767745972, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.0956521739130434, |
| "grad_norm": 0.5241631269454956, |
| "learning_rate": 2.9885195062891023e-05, |
| "loss": 1.7963777780532837, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.1043478260869564, |
| "grad_norm": 0.16556978225708008, |
| "learning_rate": 2.9830842014107273e-05, |
| "loss": 1.2623823881149292, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.1130434782608694, |
| "grad_norm": 0.5851399302482605, |
| "learning_rate": 2.9766070604795444e-05, |
| "loss": 0.681275486946106, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.121739130434783, |
| "grad_norm": 0.4062120020389557, |
| "learning_rate": 2.969093447301039e-05, |
| "loss": 0.6947782039642334, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.130434782608696, |
| "grad_norm": 0.14548048377037048, |
| "learning_rate": 2.9605495839969793e-05, |
| "loss": 0.6653367877006531, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.139130434782609, |
| "grad_norm": 0.1378212869167328, |
| "learning_rate": 2.9509825458527955e-05, |
| "loss": 1.178207278251648, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.1478260869565218, |
| "grad_norm": 0.5637624263763428, |
| "learning_rate": 2.940400255458444e-05, |
| "loss": 0.19959603250026703, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.1565217391304348, |
| "grad_norm": 0.12706081569194794, |
| "learning_rate": 2.9288114761476036e-05, |
| "loss": 1.1562106609344482, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.1652173913043478, |
| "grad_norm": 0.15716660022735596, |
| "learning_rate": 2.916225804740647e-05, |
| "loss": 1.1385471820831299, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.1739130434782608, |
| "grad_norm": 0.11568231880664825, |
| "learning_rate": 2.9026536635973857e-05, |
| "loss": 0.49375131726264954, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.1826086956521737, |
| "grad_norm": 0.16767719388008118, |
| "learning_rate": 2.888106291986178e-05, |
| "loss": 0.736272394657135, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.1913043478260867, |
| "grad_norm": 0.1770874410867691, |
| "learning_rate": 2.872595736776543e-05, |
| "loss": 0.8101549744606018, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.2, |
| "grad_norm": 0.8240585327148438, |
| "learning_rate": 2.8561348424629893e-05, |
| "loss": 0.2909260392189026, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.208695652173913, |
| "grad_norm": 0.1195528656244278, |
| "learning_rate": 2.83873724052832e-05, |
| "loss": 0.7722839713096619, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.217391304347826, |
| "grad_norm": 0.8490219712257385, |
| "learning_rate": 2.8204173381552252e-05, |
| "loss": 0.5638793110847473, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.226086956521739, |
| "grad_norm": 0.3193603754043579, |
| "learning_rate": 2.8011903062955016e-05, |
| "loss": 0.31869935989379883, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.234782608695652, |
| "grad_norm": 0.12175068259239197, |
| "learning_rate": 2.7810720671067893e-05, |
| "loss": 1.131230354309082, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.243478260869565, |
| "grad_norm": 0.1269245743751526, |
| "learning_rate": 2.7600792807672248e-05, |
| "loss": 1.208681344985962, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.252173913043478, |
| "grad_norm": 0.18002073466777802, |
| "learning_rate": 2.7382293316789273e-05, |
| "loss": 1.3903454542160034, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.260869565217391, |
| "grad_norm": 0.6649693250656128, |
| "learning_rate": 2.7155403140717488e-05, |
| "loss": 0.7094644904136658, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.269565217391304, |
| "grad_norm": 0.13027699291706085, |
| "learning_rate": 2.6920310170192024e-05, |
| "loss": 0.7558539509773254, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.2782608695652176, |
| "grad_norm": 0.13726165890693665, |
| "learning_rate": 2.6677209088789823e-05, |
| "loss": 1.140393853187561, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.2869565217391306, |
| "grad_norm": 0.21993081271648407, |
| "learning_rate": 2.642630121170968e-05, |
| "loss": 1.1117931604385376, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.2956521739130435, |
| "grad_norm": 0.1521347314119339, |
| "learning_rate": 2.616779431906042e-05, |
| "loss": 0.6197381019592285, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.3043478260869565, |
| "grad_norm": 0.17388762533664703, |
| "learning_rate": 2.5901902483795464e-05, |
| "loss": 1.1404460668563843, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.3130434782608695, |
| "grad_norm": 0.19341905415058136, |
| "learning_rate": 2.5628845894436232e-05, |
| "loss": 1.110768437385559, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.3217391304347825, |
| "grad_norm": 0.11132687330245972, |
| "learning_rate": 2.5348850672731114e-05, |
| "loss": 0.18440936505794525, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.3304347826086955, |
| "grad_norm": 0.12755344808101654, |
| "learning_rate": 2.506214868640111e-05, |
| "loss": 0.6152647137641907, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.3391304347826085, |
| "grad_norm": 0.07117603719234467, |
| "learning_rate": 2.4768977357127068e-05, |
| "loss": 0.6943525075912476, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.3478260869565215, |
| "grad_norm": 0.22992900013923645, |
| "learning_rate": 2.4469579463937736e-05, |
| "loss": 1.155451774597168, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.356521739130435, |
| "grad_norm": 0.13336700201034546, |
| "learning_rate": 2.4164202942161202e-05, |
| "loss": 0.6562126874923706, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.365217391304348, |
| "grad_norm": 0.17184697091579437, |
| "learning_rate": 2.385310067810646e-05, |
| "loss": 1.1728577613830566, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.373913043478261, |
| "grad_norm": 0.24630305171012878, |
| "learning_rate": 2.353653029964486e-05, |
| "loss": 0.7028173208236694, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.382608695652174, |
| "grad_norm": 0.4979984164237976, |
| "learning_rate": 2.3214753962865128e-05, |
| "loss": 1.0240296125411987, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.391304347826087, |
| "grad_norm": 0.786608099937439, |
| "learning_rate": 2.2888038134978446e-05, |
| "loss": 0.6851329207420349, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.21292582154273987, |
| "learning_rate": 2.2556653373653506e-05, |
| "loss": 1.1324046850204468, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.408695652173913, |
| "grad_norm": 0.07219722867012024, |
| "learning_rate": 2.2220874102964066e-05, |
| "loss": 0.5204906463623047, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.417391304347826, |
| "grad_norm": 0.37272965908050537, |
| "learning_rate": 2.1880978386134822e-05, |
| "loss": 1.3587431907653809, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.426086956521739, |
| "grad_norm": 0.1551341414451599, |
| "learning_rate": 2.1537247695273588e-05, |
| "loss": 0.15599098801612854, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.4347826086956523, |
| "grad_norm": 0.10612637549638748, |
| "learning_rate": 2.1189966678280585e-05, |
| "loss": 1.1518821716308594, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.4434782608695653, |
| "grad_norm": 0.2193143665790558, |
| "learning_rate": 2.0839422923127686e-05, |
| "loss": 0.5007261037826538, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.4521739130434783, |
| "grad_norm": 0.45728591084480286, |
| "learning_rate": 2.0485906719703126e-05, |
| "loss": 1.1414357423782349, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.4608695652173913, |
| "grad_norm": 0.19980177283287048, |
| "learning_rate": 2.0129710819418574e-05, |
| "loss": 0.6164332032203674, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.4695652173913043, |
| "grad_norm": 0.12282229214906693, |
| "learning_rate": 1.9771130192777892e-05, |
| "loss": 1.0142958164215088, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.4782608695652173, |
| "grad_norm": 0.4857296347618103, |
| "learning_rate": 1.9410461785108178e-05, |
| "loss": 0.7631978392601013, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.4869565217391303, |
| "grad_norm": 0.2022370547056198, |
| "learning_rate": 1.9048004270655354e-05, |
| "loss": 1.2384475469589233, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.4956521739130437, |
| "grad_norm": 1.394151210784912, |
| "learning_rate": 1.868405780524824e-05, |
| "loss": 0.38715630769729614, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.5043478260869563, |
| "grad_norm": 0.1383918970823288, |
| "learning_rate": 1.831892377773547e-05, |
| "loss": 1.1841545104980469, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.5130434782608697, |
| "grad_norm": 0.19289281964302063, |
| "learning_rate": 1.795290456040148e-05, |
| "loss": 1.3352640867233276, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.5217391304347827, |
| "grad_norm": 0.26483142375946045, |
| "learning_rate": 1.7586303258567964e-05, |
| "loss": 0.6390936374664307, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.5304347826086957, |
| "grad_norm": 0.4453805088996887, |
| "learning_rate": 1.7219423459588513e-05, |
| "loss": 0.9683659672737122, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.5391304347826087, |
| "grad_norm": 0.12072495371103287, |
| "learning_rate": 1.6852568981443806e-05, |
| "loss": 1.1052292585372925, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.5478260869565217, |
| "grad_norm": 0.13637158274650574, |
| "learning_rate": 1.648604362114606e-05, |
| "loss": 0.708678126335144, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.5565217391304347, |
| "grad_norm": 0.10113801807165146, |
| "learning_rate": 1.6120150903160605e-05, |
| "loss": 0.06356725841760635, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.5652173913043477, |
| "grad_norm": 0.1465790867805481, |
| "learning_rate": 1.5755193828053435e-05, |
| "loss": 0.694472074508667, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.573913043478261, |
| "grad_norm": 0.4859929382801056, |
| "learning_rate": 1.539147462157235e-05, |
| "loss": 0.09391395002603531, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.5826086956521737, |
| "grad_norm": 0.4653010368347168, |
| "learning_rate": 1.5029294484369874e-05, |
| "loss": 1.3817299604415894, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.591304347826087, |
| "grad_norm": 0.23374710977077484, |
| "learning_rate": 1.4668953342575074e-05, |
| "loss": 0.28024399280548096, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.6, |
| "grad_norm": 0.19562578201293945, |
| "learning_rate": 1.4310749599420711e-05, |
| "loss": 0.4380612373352051, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.608695652173913, |
| "grad_norm": 0.11721507459878922, |
| "learning_rate": 1.3954979888131745e-05, |
| "loss": 1.1383816003799438, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.617391304347826, |
| "grad_norm": 0.12313393503427505, |
| "learning_rate": 1.3601938826279298e-05, |
| "loss": 0.6719468235969543, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.626086956521739, |
| "grad_norm": 0.2674733102321625, |
| "learning_rate": 1.325191877180408e-05, |
| "loss": 0.8287115097045898, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.634782608695652, |
| "grad_norm": 0.1634570211172104, |
| "learning_rate": 1.2905209580910897e-05, |
| "loss": 0.6804332137107849, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.643478260869565, |
| "grad_norm": 0.2214396893978119, |
| "learning_rate": 1.2562098368034954e-05, |
| "loss": 1.1206378936767578, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.6521739130434785, |
| "grad_norm": 0.17025478184223175, |
| "learning_rate": 1.2222869268078575e-05, |
| "loss": 0.7674025297164917, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.660869565217391, |
| "grad_norm": 0.17491810023784637, |
| "learning_rate": 1.1887803201115396e-05, |
| "loss": 1.1401374340057373, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.6695652173913045, |
| "grad_norm": 0.12606146931648254, |
| "learning_rate": 1.1557177639756725e-05, |
| "loss": 1.3836885690689087, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.6782608695652175, |
| "grad_norm": 0.1093195304274559, |
| "learning_rate": 1.1231266379372912e-05, |
| "loss": 0.737335205078125, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.6869565217391305, |
| "grad_norm": 0.14942651987075806, |
| "learning_rate": 1.0910339311359677e-05, |
| "loss": 0.6436473727226257, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.6956521739130435, |
| "grad_norm": 0.6913372874259949, |
| "learning_rate": 1.0594662199637597e-05, |
| "loss": 0.7471338510513306, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.7043478260869565, |
| "grad_norm": 0.14125873148441315, |
| "learning_rate": 1.028449646056948e-05, |
| "loss": 1.1643157005310059, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.7130434782608694, |
| "grad_norm": 0.13380275666713715, |
| "learning_rate": 9.980098946478024e-06, |
| "loss": 1.1436272859573364, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.7217391304347824, |
| "grad_norm": 0.1280951350927353, |
| "learning_rate": 9.68172173294313e-06, |
| "loss": 1.134834885597229, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.730434782608696, |
| "grad_norm": 0.15834616124629974, |
| "learning_rate": 9.389611910054602e-06, |
| "loss": 1.237627625465393, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.7391304347826084, |
| "grad_norm": 0.20524626970291138, |
| "learning_rate": 9.10401137779385e-06, |
| "loss": 1.1352347135543823, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.747826086956522, |
| "grad_norm": 0.6567396521568298, |
| "learning_rate": 8.825156645713136e-06, |
| "loss": 0.46068328619003296, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.756521739130435, |
| "grad_norm": 0.11645974218845367, |
| "learning_rate": 8.553278637079007e-06, |
| "loss": 0.6736262440681458, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.765217391304348, |
| "grad_norm": 0.14792583882808685, |
| "learning_rate": 8.28860249764168e-06, |
| "loss": 0.6648976802825928, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.773913043478261, |
| "grad_norm": 0.17419379949569702, |
| "learning_rate": 8.031347409188885e-06, |
| "loss": 1.132703423500061, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.782608695652174, |
| "grad_norm": 0.323537677526474, |
| "learning_rate": 7.78172640803854e-06, |
| "loss": 1.2038984298706055, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.791304347826087, |
| "grad_norm": 0.2815156877040863, |
| "learning_rate": 7.539946208620638e-06, |
| "loss": 1.206749677658081, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.5039319396018982, |
| "learning_rate": 7.306207032294216e-06, |
| "loss": 0.7191698551177979, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.8086956521739133, |
| "grad_norm": 0.18908892571926117, |
| "learning_rate": 7.08070244154148e-06, |
| "loss": 0.7605520486831665, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.8173913043478263, |
| "grad_norm": 0.2733303904533386, |
| "learning_rate": 6.8636191796761585e-06, |
| "loss": 0.6928582787513733, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.8260869565217392, |
| "grad_norm": 0.30275958776474, |
| "learning_rate": 6.655137016198907e-06, |
| "loss": 1.3508784770965576, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.8347826086956522, |
| "grad_norm": 0.1958470344543457, |
| "learning_rate": 6.455428597927829e-06, |
| "loss": 0.9979486465454102, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.8434782608695652, |
| "grad_norm": 0.14218895137310028, |
| "learning_rate": 6.264659306027376e-06, |
| "loss": 1.139778971672058, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.8521739130434782, |
| "grad_norm": 0.17221413552761078, |
| "learning_rate": 6.082987119054014e-06, |
| "loss": 0.8573166728019714, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.860869565217391, |
| "grad_norm": 0.13826826214790344, |
| "learning_rate": 5.910562482132162e-06, |
| "loss": 1.1786668300628662, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.869565217391304, |
| "grad_norm": 0.3460427522659302, |
| "learning_rate": 5.747528182368537e-06, |
| "loss": 1.1484174728393555, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.878260869565217, |
| "grad_norm": 0.21556347608566284, |
| "learning_rate": 5.594019230608316e-06, |
| "loss": 1.2150859832763672, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.8869565217391306, |
| "grad_norm": 0.17000877857208252, |
| "learning_rate": 5.4501627496308754e-06, |
| "loss": 1.1722112894058228, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.8956521739130436, |
| "grad_norm": 0.1485866904258728, |
| "learning_rate": 5.316077868877737e-06, |
| "loss": 1.1153019666671753, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.9043478260869566, |
| "grad_norm": 0.12437450885772705, |
| "learning_rate": 5.191875625799956e-06, |
| "loss": 1.164955496788025, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.9130434782608696, |
| "grad_norm": 0.14834971725940704, |
| "learning_rate": 5.077658873906456e-06, |
| "loss": 1.1394230127334595, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.9217391304347826, |
| "grad_norm": 1.5700373649597168, |
| "learning_rate": 4.973522197589804e-06, |
| "loss": 0.6004053354263306, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.9304347826086956, |
| "grad_norm": 0.3607974648475647, |
| "learning_rate": 4.87955183379954e-06, |
| "loss": 0.8902421593666077, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.9391304347826086, |
| "grad_norm": 0.12654371559619904, |
| "learning_rate": 4.795825600628273e-06, |
| "loss": 1.1788089275360107, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.9478260869565216, |
| "grad_norm": 0.18413525819778442, |
| "learning_rate": 4.722412832869478e-06, |
| "loss": 1.0090527534484863, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.9565217391304346, |
| "grad_norm": 0.13298968970775604, |
| "learning_rate": 4.659374324600457e-06, |
| "loss": 1.156622290611267, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.965217391304348, |
| "grad_norm": 0.27358752489089966, |
| "learning_rate": 4.606762278837947e-06, |
| "loss": 0.44529619812965393, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.973913043478261, |
| "grad_norm": 0.2820883095264435, |
| "learning_rate": 4.5646202643081405e-06, |
| "loss": 0.7129748463630676, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.982608695652174, |
| "grad_norm": 0.09756142646074295, |
| "learning_rate": 4.532983179366819e-06, |
| "loss": 0.8244656920433044, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.991304347826087, |
| "grad_norm": 0.19994956254959106, |
| "learning_rate": 4.511877223099601e-06, |
| "loss": 1.285622477531433, |
| "step": 688 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.058878399431705475, |
| "learning_rate": 4.501319873626102e-06, |
| "loss": 0.6546279191970825, |
| "step": 690 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 690, |
| "total_flos": 2.5683697677065257e+18, |
| "train_loss": 1.0660493817018426, |
| "train_runtime": 18433.9416, |
| "train_samples_per_second": 2.396, |
| "train_steps_per_second": 0.037 |
| } |
| ], |
| "logging_steps": 2, |
| "max_steps": 690, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 99999, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.5683697677065257e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |