| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.995983935742972, |
| "eval_steps": 500, |
| "global_step": 1866, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.001606425702811245, |
| "grad_norm": 55.425456981828255, |
| "learning_rate": 0.0, |
| "loss": 11.0914, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.00321285140562249, |
| "grad_norm": 55.94565165274405, |
| "learning_rate": 2.6737967914438503e-07, |
| "loss": 11.072, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.004819277108433735, |
| "grad_norm": 55.85336231763491, |
| "learning_rate": 5.347593582887701e-07, |
| "loss": 11.1735, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.00642570281124498, |
| "grad_norm": 57.38149685878335, |
| "learning_rate": 8.021390374331552e-07, |
| "loss": 11.1121, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.008032128514056224, |
| "grad_norm": 53.734533962442185, |
| "learning_rate": 1.0695187165775401e-06, |
| "loss": 11.1462, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.00963855421686747, |
| "grad_norm": 56.856357722018124, |
| "learning_rate": 1.3368983957219252e-06, |
| "loss": 11.0398, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.011244979919678716, |
| "grad_norm": 63.94394813269617, |
| "learning_rate": 1.6042780748663105e-06, |
| "loss": 10.6269, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.01285140562248996, |
| "grad_norm": 59.69388329116394, |
| "learning_rate": 1.8716577540106951e-06, |
| "loss": 10.7207, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.014457831325301205, |
| "grad_norm": 65.08501918743596, |
| "learning_rate": 2.1390374331550802e-06, |
| "loss": 10.6603, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.01606425702811245, |
| "grad_norm": 90.6773969037033, |
| "learning_rate": 2.4064171122994653e-06, |
| "loss": 9.3899, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.017670682730923693, |
| "grad_norm": 100.40100812807319, |
| "learning_rate": 2.6737967914438504e-06, |
| "loss": 9.067, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.01927710843373494, |
| "grad_norm": 102.64034759457971, |
| "learning_rate": 2.9411764705882355e-06, |
| "loss": 8.8161, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.020883534136546186, |
| "grad_norm": 63.9375793920087, |
| "learning_rate": 3.208556149732621e-06, |
| "loss": 3.6047, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.02248995983935743, |
| "grad_norm": 55.14440358777841, |
| "learning_rate": 3.4759358288770056e-06, |
| "loss": 3.2931, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.024096385542168676, |
| "grad_norm": 38.756404211435886, |
| "learning_rate": 3.7433155080213903e-06, |
| "loss": 2.633, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.02570281124497992, |
| "grad_norm": 32.624445944408805, |
| "learning_rate": 4.010695187165775e-06, |
| "loss": 2.347, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.027309236947791166, |
| "grad_norm": 6.743482970548288, |
| "learning_rate": 4.2780748663101604e-06, |
| "loss": 1.455, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.02891566265060241, |
| "grad_norm": 4.939939222809209, |
| "learning_rate": 4.5454545454545455e-06, |
| "loss": 1.2889, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.030522088353413655, |
| "grad_norm": 4.037313717639722, |
| "learning_rate": 4.812834224598931e-06, |
| "loss": 1.2466, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.0321285140562249, |
| "grad_norm": 2.8978325234814135, |
| "learning_rate": 5.080213903743316e-06, |
| "loss": 1.1727, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.033734939759036145, |
| "grad_norm": 2.4292778275916858, |
| "learning_rate": 5.347593582887701e-06, |
| "loss": 1.1498, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.035341365461847386, |
| "grad_norm": 2.0750809208626078, |
| "learning_rate": 5.614973262032086e-06, |
| "loss": 1.1131, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.036947791164658635, |
| "grad_norm": 1.38050141256256, |
| "learning_rate": 5.882352941176471e-06, |
| "loss": 0.969, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.03855421686746988, |
| "grad_norm": 76.44314104688057, |
| "learning_rate": 6.149732620320856e-06, |
| "loss": 1.0004, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.040160642570281124, |
| "grad_norm": 20.7727103372233, |
| "learning_rate": 6.417112299465242e-06, |
| "loss": 0.9133, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.04176706827309237, |
| "grad_norm": 2.4294991120332226, |
| "learning_rate": 6.684491978609626e-06, |
| "loss": 0.937, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.043373493975903614, |
| "grad_norm": 1.2141284131568895, |
| "learning_rate": 6.951871657754011e-06, |
| "loss": 0.868, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.04497991967871486, |
| "grad_norm": 1.066098750215553, |
| "learning_rate": 7.2192513368983955e-06, |
| "loss": 0.8941, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.046586345381526104, |
| "grad_norm": 0.8592318109384306, |
| "learning_rate": 7.4866310160427806e-06, |
| "loss": 0.8108, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.04819277108433735, |
| "grad_norm": 0.8600957750100187, |
| "learning_rate": 7.754010695187166e-06, |
| "loss": 0.8223, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.04979919678714859, |
| "grad_norm": 0.7679060410994321, |
| "learning_rate": 8.02139037433155e-06, |
| "loss": 0.7769, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.05140562248995984, |
| "grad_norm": 0.8453908066226332, |
| "learning_rate": 8.288770053475937e-06, |
| "loss": 0.7396, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.05301204819277108, |
| "grad_norm": 0.8000437895485839, |
| "learning_rate": 8.556149732620321e-06, |
| "loss": 0.7268, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.05461847389558233, |
| "grad_norm": 0.5961380612903852, |
| "learning_rate": 8.823529411764707e-06, |
| "loss": 0.7434, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.05622489959839357, |
| "grad_norm": 0.7023665082451603, |
| "learning_rate": 9.090909090909091e-06, |
| "loss": 0.7352, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.05783132530120482, |
| "grad_norm": 0.6795471325789469, |
| "learning_rate": 9.358288770053477e-06, |
| "loss": 0.692, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.05943775100401606, |
| "grad_norm": 0.6267269031521756, |
| "learning_rate": 9.625668449197861e-06, |
| "loss": 0.7032, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.06104417670682731, |
| "grad_norm": 0.5038575166572804, |
| "learning_rate": 9.893048128342247e-06, |
| "loss": 0.6996, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.06265060240963856, |
| "grad_norm": 0.5695324600161978, |
| "learning_rate": 1.0160427807486631e-05, |
| "loss": 0.7056, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.0642570281124498, |
| "grad_norm": 0.5077700319850176, |
| "learning_rate": 1.0427807486631017e-05, |
| "loss": 0.6679, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.06586345381526104, |
| "grad_norm": 0.4743877655225355, |
| "learning_rate": 1.0695187165775402e-05, |
| "loss": 0.6673, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.06746987951807229, |
| "grad_norm": 0.5244542863377575, |
| "learning_rate": 1.0962566844919786e-05, |
| "loss": 0.6671, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.06907630522088354, |
| "grad_norm": 0.419545810897641, |
| "learning_rate": 1.1229946524064172e-05, |
| "loss": 0.6545, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.07068273092369477, |
| "grad_norm": 0.37783127229950486, |
| "learning_rate": 1.1497326203208558e-05, |
| "loss": 0.6576, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.07228915662650602, |
| "grad_norm": 0.4178580350161024, |
| "learning_rate": 1.1764705882352942e-05, |
| "loss": 0.643, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.07389558232931727, |
| "grad_norm": 0.43845663225475845, |
| "learning_rate": 1.2032085561497326e-05, |
| "loss": 0.6346, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.07550200803212852, |
| "grad_norm": 0.44070415442299, |
| "learning_rate": 1.2299465240641712e-05, |
| "loss": 0.6688, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.07710843373493977, |
| "grad_norm": 0.3669910492352237, |
| "learning_rate": 1.2566844919786098e-05, |
| "loss": 0.6454, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.078714859437751, |
| "grad_norm": 0.362801619015921, |
| "learning_rate": 1.2834224598930484e-05, |
| "loss": 0.6185, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.08032128514056225, |
| "grad_norm": 0.36003109712794723, |
| "learning_rate": 1.3101604278074866e-05, |
| "loss": 0.6082, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.0819277108433735, |
| "grad_norm": 0.3790196621978306, |
| "learning_rate": 1.3368983957219252e-05, |
| "loss": 0.6196, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.08353413654618475, |
| "grad_norm": 0.32881533358374376, |
| "learning_rate": 1.3636363636363637e-05, |
| "loss": 0.6379, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.08514056224899598, |
| "grad_norm": 0.2897536087349657, |
| "learning_rate": 1.3903743315508022e-05, |
| "loss": 0.6166, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.08674698795180723, |
| "grad_norm": 0.3347710929583827, |
| "learning_rate": 1.4171122994652408e-05, |
| "loss": 0.5807, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.08835341365461848, |
| "grad_norm": 0.3393972794432162, |
| "learning_rate": 1.4438502673796791e-05, |
| "loss": 0.5832, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.08995983935742972, |
| "grad_norm": 0.29350372494642607, |
| "learning_rate": 1.4705882352941177e-05, |
| "loss": 0.5927, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.09156626506024096, |
| "grad_norm": 0.3252911723032582, |
| "learning_rate": 1.4973262032085561e-05, |
| "loss": 0.619, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.09317269076305221, |
| "grad_norm": 0.3149875163427349, |
| "learning_rate": 1.5240641711229947e-05, |
| "loss": 0.6133, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.09477911646586346, |
| "grad_norm": 0.3108517007068939, |
| "learning_rate": 1.5508021390374333e-05, |
| "loss": 0.582, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.0963855421686747, |
| "grad_norm": 0.28641822750047513, |
| "learning_rate": 1.5775401069518716e-05, |
| "loss": 0.6001, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.09799196787148594, |
| "grad_norm": 0.26760169263753647, |
| "learning_rate": 1.60427807486631e-05, |
| "loss": 0.5834, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.09959839357429719, |
| "grad_norm": 0.2982920467349389, |
| "learning_rate": 1.6310160427807487e-05, |
| "loss": 0.5869, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.10120481927710843, |
| "grad_norm": 0.2671097597027451, |
| "learning_rate": 1.6577540106951873e-05, |
| "loss": 0.5573, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.10281124497991968, |
| "grad_norm": 0.2676703133591729, |
| "learning_rate": 1.684491978609626e-05, |
| "loss": 0.5798, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.10441767068273092, |
| "grad_norm": 0.26495729621046843, |
| "learning_rate": 1.7112299465240642e-05, |
| "loss": 0.5617, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.10602409638554217, |
| "grad_norm": 0.26227499331646464, |
| "learning_rate": 1.7379679144385028e-05, |
| "loss": 0.5616, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.10763052208835341, |
| "grad_norm": 0.2648120721870536, |
| "learning_rate": 1.7647058823529414e-05, |
| "loss": 0.5634, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.10923694779116466, |
| "grad_norm": 0.26215156916312604, |
| "learning_rate": 1.7914438502673796e-05, |
| "loss": 0.5684, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.1108433734939759, |
| "grad_norm": 0.27157578337184746, |
| "learning_rate": 1.8181818181818182e-05, |
| "loss": 0.5913, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.11244979919678715, |
| "grad_norm": 0.27163024369026484, |
| "learning_rate": 1.8449197860962568e-05, |
| "loss": 0.566, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.1140562248995984, |
| "grad_norm": 0.26979630288410955, |
| "learning_rate": 1.8716577540106954e-05, |
| "loss": 0.5656, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.11566265060240964, |
| "grad_norm": 0.2633626931673852, |
| "learning_rate": 1.898395721925134e-05, |
| "loss": 0.5393, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.11726907630522089, |
| "grad_norm": 0.24409011774218942, |
| "learning_rate": 1.9251336898395722e-05, |
| "loss": 0.5563, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.11887550200803212, |
| "grad_norm": 0.25395135149818904, |
| "learning_rate": 1.951871657754011e-05, |
| "loss": 0.5489, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.12048192771084337, |
| "grad_norm": 0.2550278467892386, |
| "learning_rate": 1.9786096256684494e-05, |
| "loss": 0.5614, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.12208835341365462, |
| "grad_norm": 0.38496356216000965, |
| "learning_rate": 2.0053475935828877e-05, |
| "loss": 0.5704, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.12369477911646587, |
| "grad_norm": 0.2567139059670428, |
| "learning_rate": 2.0320855614973263e-05, |
| "loss": 0.5459, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.12530120481927712, |
| "grad_norm": 0.25038957425452213, |
| "learning_rate": 2.058823529411765e-05, |
| "loss": 0.5454, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.12690763052208837, |
| "grad_norm": 0.23452705979435273, |
| "learning_rate": 2.0855614973262035e-05, |
| "loss": 0.5492, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.1285140562248996, |
| "grad_norm": 0.24323287410165828, |
| "learning_rate": 2.112299465240642e-05, |
| "loss": 0.5301, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.13012048192771083, |
| "grad_norm": 0.26334529962321623, |
| "learning_rate": 2.1390374331550803e-05, |
| "loss": 0.523, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.13172690763052208, |
| "grad_norm": 0.22329323094624642, |
| "learning_rate": 2.165775401069519e-05, |
| "loss": 0.5592, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.13333333333333333, |
| "grad_norm": 0.2474076406178327, |
| "learning_rate": 2.192513368983957e-05, |
| "loss": 0.5437, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.13493975903614458, |
| "grad_norm": 0.24355733442576052, |
| "learning_rate": 2.2192513368983957e-05, |
| "loss": 0.542, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.13654618473895583, |
| "grad_norm": 0.2577406108651445, |
| "learning_rate": 2.2459893048128343e-05, |
| "loss": 0.5287, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.13815261044176708, |
| "grad_norm": 0.2512287959204346, |
| "learning_rate": 2.272727272727273e-05, |
| "loss": 0.5527, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.13975903614457832, |
| "grad_norm": 0.24412043857047824, |
| "learning_rate": 2.2994652406417115e-05, |
| "loss": 0.5288, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.14136546184738955, |
| "grad_norm": 0.24004859620662336, |
| "learning_rate": 2.32620320855615e-05, |
| "loss": 0.521, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.1429718875502008, |
| "grad_norm": 0.2685831450802228, |
| "learning_rate": 2.3529411764705884e-05, |
| "loss": 0.553, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.14457831325301204, |
| "grad_norm": 0.22208558557827937, |
| "learning_rate": 2.379679144385027e-05, |
| "loss": 0.5273, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.1461847389558233, |
| "grad_norm": 0.25800455920009974, |
| "learning_rate": 2.4064171122994652e-05, |
| "loss": 0.5292, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.14779116465863454, |
| "grad_norm": 0.26150864060963513, |
| "learning_rate": 2.4331550802139038e-05, |
| "loss": 0.5344, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.1493975903614458, |
| "grad_norm": 0.2503576394708978, |
| "learning_rate": 2.4598930481283424e-05, |
| "loss": 0.558, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.15100401606425704, |
| "grad_norm": 0.2723029245292207, |
| "learning_rate": 2.4866310160427807e-05, |
| "loss": 0.5225, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.15261044176706828, |
| "grad_norm": 0.2677703323128444, |
| "learning_rate": 2.5133689839572196e-05, |
| "loss": 0.5051, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.15421686746987953, |
| "grad_norm": 0.2750945129448115, |
| "learning_rate": 2.5401069518716582e-05, |
| "loss": 0.538, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.15582329317269075, |
| "grad_norm": 0.26505576602859154, |
| "learning_rate": 2.5668449197860968e-05, |
| "loss": 0.5183, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.157429718875502, |
| "grad_norm": 0.28571711863479626, |
| "learning_rate": 2.5935828877005347e-05, |
| "loss": 0.4988, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.15903614457831325, |
| "grad_norm": 0.2806910619639985, |
| "learning_rate": 2.6203208556149733e-05, |
| "loss": 0.5251, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.1606425702811245, |
| "grad_norm": 0.2738624463968397, |
| "learning_rate": 2.647058823529412e-05, |
| "loss": 0.5186, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.16224899598393575, |
| "grad_norm": 0.3061159832393734, |
| "learning_rate": 2.6737967914438505e-05, |
| "loss": 0.5365, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.163855421686747, |
| "grad_norm": 0.24613646526070415, |
| "learning_rate": 2.700534759358289e-05, |
| "loss": 0.516, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.16546184738955824, |
| "grad_norm": 0.29918830859130285, |
| "learning_rate": 2.7272727272727273e-05, |
| "loss": 0.5269, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.1670682730923695, |
| "grad_norm": 0.2954328993387257, |
| "learning_rate": 2.754010695187166e-05, |
| "loss": 0.5518, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.1686746987951807, |
| "grad_norm": 0.2839510653050071, |
| "learning_rate": 2.7807486631016045e-05, |
| "loss": 0.5218, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.17028112449799196, |
| "grad_norm": 0.2880614346246641, |
| "learning_rate": 2.807486631016043e-05, |
| "loss": 0.5333, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.1718875502008032, |
| "grad_norm": 0.2724366108411872, |
| "learning_rate": 2.8342245989304817e-05, |
| "loss": 0.538, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.17349397590361446, |
| "grad_norm": 0.2589102891755998, |
| "learning_rate": 2.8609625668449196e-05, |
| "loss": 0.5103, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.1751004016064257, |
| "grad_norm": 0.2785106034490977, |
| "learning_rate": 2.8877005347593582e-05, |
| "loss": 0.5262, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.17670682730923695, |
| "grad_norm": 0.282537837948337, |
| "learning_rate": 2.9144385026737968e-05, |
| "loss": 0.5457, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.1783132530120482, |
| "grad_norm": 0.29229050692264746, |
| "learning_rate": 2.9411764705882354e-05, |
| "loss": 0.5176, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.17991967871485945, |
| "grad_norm": 0.28743219881720417, |
| "learning_rate": 2.9679144385026743e-05, |
| "loss": 0.5373, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.18152610441767067, |
| "grad_norm": 0.29397840241802925, |
| "learning_rate": 2.9946524064171122e-05, |
| "loss": 0.5105, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.18313253012048192, |
| "grad_norm": 0.2556368263796127, |
| "learning_rate": 3.0213903743315508e-05, |
| "loss": 0.5078, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.18473895582329317, |
| "grad_norm": 0.2585019833123394, |
| "learning_rate": 3.0481283422459894e-05, |
| "loss": 0.5084, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.18634538152610441, |
| "grad_norm": 0.3141816091328503, |
| "learning_rate": 3.0748663101604283e-05, |
| "loss": 0.4973, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.18795180722891566, |
| "grad_norm": 0.25636118440595357, |
| "learning_rate": 3.1016042780748666e-05, |
| "loss": 0.514, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.1895582329317269, |
| "grad_norm": 0.2876061400917617, |
| "learning_rate": 3.128342245989305e-05, |
| "loss": 0.5213, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.19116465863453816, |
| "grad_norm": 0.34868750999339, |
| "learning_rate": 3.155080213903743e-05, |
| "loss": 0.5071, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.1927710843373494, |
| "grad_norm": 0.3015203596477017, |
| "learning_rate": 3.181818181818182e-05, |
| "loss": 0.5435, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.19437751004016066, |
| "grad_norm": 0.35119720538788096, |
| "learning_rate": 3.20855614973262e-05, |
| "loss": 0.522, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.19598393574297188, |
| "grad_norm": 0.3477752312107028, |
| "learning_rate": 3.235294117647059e-05, |
| "loss": 0.5123, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.19759036144578312, |
| "grad_norm": 0.29202999960570547, |
| "learning_rate": 3.2620320855614975e-05, |
| "loss": 0.5003, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.19919678714859437, |
| "grad_norm": 0.35278041495114454, |
| "learning_rate": 3.288770053475936e-05, |
| "loss": 0.53, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.20080321285140562, |
| "grad_norm": 0.3142151416610959, |
| "learning_rate": 3.3155080213903747e-05, |
| "loss": 0.4988, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.20240963855421687, |
| "grad_norm": 0.2996067451911012, |
| "learning_rate": 3.342245989304813e-05, |
| "loss": 0.5052, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.20401606425702812, |
| "grad_norm": 0.29176643730247437, |
| "learning_rate": 3.368983957219252e-05, |
| "loss": 0.5114, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.20562248995983937, |
| "grad_norm": 0.30502573267866906, |
| "learning_rate": 3.39572192513369e-05, |
| "loss": 0.519, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.20722891566265061, |
| "grad_norm": 0.3602982254540244, |
| "learning_rate": 3.4224598930481284e-05, |
| "loss": 0.4895, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.20883534136546184, |
| "grad_norm": 0.3631477481903701, |
| "learning_rate": 3.4491978609625666e-05, |
| "loss": 0.5127, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.21044176706827308, |
| "grad_norm": 0.30786368287839666, |
| "learning_rate": 3.4759358288770055e-05, |
| "loss": 0.4864, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.21204819277108433, |
| "grad_norm": 0.43529055398718036, |
| "learning_rate": 3.5026737967914445e-05, |
| "loss": 0.5196, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.21365461847389558, |
| "grad_norm": 0.3322195243932612, |
| "learning_rate": 3.529411764705883e-05, |
| "loss": 0.5045, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.21526104417670683, |
| "grad_norm": 0.3158713023648389, |
| "learning_rate": 3.556149732620321e-05, |
| "loss": 0.5044, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.21686746987951808, |
| "grad_norm": 0.5110582437494932, |
| "learning_rate": 3.582887700534759e-05, |
| "loss": 0.5133, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.21847389558232932, |
| "grad_norm": 0.40979685812262173, |
| "learning_rate": 3.609625668449198e-05, |
| "loss": 0.522, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.22008032128514057, |
| "grad_norm": 0.297856947051714, |
| "learning_rate": 3.6363636363636364e-05, |
| "loss": 0.5046, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.2216867469879518, |
| "grad_norm": 0.40746351543085246, |
| "learning_rate": 3.6631016042780753e-05, |
| "loss": 0.4896, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.22329317269076304, |
| "grad_norm": 0.34013676583539143, |
| "learning_rate": 3.6898395721925136e-05, |
| "loss": 0.4802, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.2248995983935743, |
| "grad_norm": 0.32168863523210933, |
| "learning_rate": 3.716577540106952e-05, |
| "loss": 0.521, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.22650602409638554, |
| "grad_norm": 0.4805805507577635, |
| "learning_rate": 3.743315508021391e-05, |
| "loss": 0.5087, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.2281124497991968, |
| "grad_norm": 0.4250197536795807, |
| "learning_rate": 3.770053475935829e-05, |
| "loss": 0.4834, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.22971887550200804, |
| "grad_norm": 0.32342622533227194, |
| "learning_rate": 3.796791443850268e-05, |
| "loss": 0.4782, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.23132530120481928, |
| "grad_norm": 0.507215711582196, |
| "learning_rate": 3.8235294117647055e-05, |
| "loss": 0.5074, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.23293172690763053, |
| "grad_norm": 0.3529654058092393, |
| "learning_rate": 3.8502673796791445e-05, |
| "loss": 0.4936, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.23453815261044178, |
| "grad_norm": 0.3652515364897037, |
| "learning_rate": 3.877005347593583e-05, |
| "loss": 0.4939, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.236144578313253, |
| "grad_norm": 0.4676716273810472, |
| "learning_rate": 3.903743315508022e-05, |
| "loss": 0.5188, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.23775100401606425, |
| "grad_norm": 0.2695003287936738, |
| "learning_rate": 3.93048128342246e-05, |
| "loss": 0.5001, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.2393574297188755, |
| "grad_norm": 0.4286990558745755, |
| "learning_rate": 3.957219251336899e-05, |
| "loss": 0.4756, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.24096385542168675, |
| "grad_norm": 0.323374315472829, |
| "learning_rate": 3.983957219251337e-05, |
| "loss": 0.4936, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.242570281124498, |
| "grad_norm": 0.3297390702151962, |
| "learning_rate": 4.0106951871657754e-05, |
| "loss": 0.48, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.24417670682730924, |
| "grad_norm": 0.400561308543879, |
| "learning_rate": 4.037433155080214e-05, |
| "loss": 0.5215, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.2457831325301205, |
| "grad_norm": 0.32182070624523856, |
| "learning_rate": 4.0641711229946525e-05, |
| "loss": 0.4903, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.24738955823293174, |
| "grad_norm": 0.3388648123975653, |
| "learning_rate": 4.0909090909090915e-05, |
| "loss": 0.507, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.24899598393574296, |
| "grad_norm": 0.3793550770714379, |
| "learning_rate": 4.11764705882353e-05, |
| "loss": 0.4987, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.25060240963855424, |
| "grad_norm": 0.2589296639211757, |
| "learning_rate": 4.144385026737968e-05, |
| "loss": 0.4902, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.25220883534136546, |
| "grad_norm": 0.35110307031028004, |
| "learning_rate": 4.171122994652407e-05, |
| "loss": 0.4842, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.25381526104417673, |
| "grad_norm": 0.2981715234028958, |
| "learning_rate": 4.197860962566845e-05, |
| "loss": 0.4898, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.25542168674698795, |
| "grad_norm": 0.2950222833943252, |
| "learning_rate": 4.224598930481284e-05, |
| "loss": 0.494, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.2570281124497992, |
| "grad_norm": 0.3257791358359003, |
| "learning_rate": 4.251336898395722e-05, |
| "loss": 0.4968, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.25863453815261045, |
| "grad_norm": 0.30157818953474835, |
| "learning_rate": 4.2780748663101606e-05, |
| "loss": 0.4917, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.26024096385542167, |
| "grad_norm": 0.3575465752435161, |
| "learning_rate": 4.304812834224599e-05, |
| "loss": 0.478, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.26184738955823295, |
| "grad_norm": 0.31606947548536135, |
| "learning_rate": 4.331550802139038e-05, |
| "loss": 0.5003, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.26345381526104417, |
| "grad_norm": 0.305902408372958, |
| "learning_rate": 4.358288770053476e-05, |
| "loss": 0.496, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.26506024096385544, |
| "grad_norm": 0.32628577417972293, |
| "learning_rate": 4.385026737967914e-05, |
| "loss": 0.4685, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.26666666666666666, |
| "grad_norm": 0.30821982802871856, |
| "learning_rate": 4.411764705882353e-05, |
| "loss": 0.4836, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.26827309236947794, |
| "grad_norm": 0.4087606869865735, |
| "learning_rate": 4.4385026737967915e-05, |
| "loss": 0.4841, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.26987951807228916, |
| "grad_norm": 0.3551707415888165, |
| "learning_rate": 4.4652406417112304e-05, |
| "loss": 0.4831, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.2714859437751004, |
| "grad_norm": 0.41179453792210313, |
| "learning_rate": 4.491978609625669e-05, |
| "loss": 0.4961, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.27309236947791166, |
| "grad_norm": 0.4108455418244695, |
| "learning_rate": 4.518716577540107e-05, |
| "loss": 0.5044, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.2746987951807229, |
| "grad_norm": 0.2896753775037168, |
| "learning_rate": 4.545454545454546e-05, |
| "loss": 0.5103, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.27630522088353415, |
| "grad_norm": 0.3758505698298356, |
| "learning_rate": 4.572192513368984e-05, |
| "loss": 0.5014, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.2779116465863454, |
| "grad_norm": 0.29588516880323346, |
| "learning_rate": 4.598930481283423e-05, |
| "loss": 0.4885, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.27951807228915665, |
| "grad_norm": 0.3147164922030141, |
| "learning_rate": 4.625668449197861e-05, |
| "loss": 0.4745, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.28112449799196787, |
| "grad_norm": 0.3573489846573228, |
| "learning_rate": 4.6524064171123e-05, |
| "loss": 0.4842, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.2827309236947791, |
| "grad_norm": 0.28544250545044564, |
| "learning_rate": 4.679144385026738e-05, |
| "loss": 0.4914, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.28433734939759037, |
| "grad_norm": 0.37401209553179027, |
| "learning_rate": 4.705882352941177e-05, |
| "loss": 0.5144, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.2859437751004016, |
| "grad_norm": 0.301831763757364, |
| "learning_rate": 4.732620320855615e-05, |
| "loss": 0.5014, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.28755020080321286, |
| "grad_norm": 0.3211966241616229, |
| "learning_rate": 4.759358288770054e-05, |
| "loss": 0.4773, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.2891566265060241, |
| "grad_norm": 0.32307860861614235, |
| "learning_rate": 4.786096256684492e-05, |
| "loss": 0.4859, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.29076305220883536, |
| "grad_norm": 0.2828104733952161, |
| "learning_rate": 4.8128342245989304e-05, |
| "loss": 0.4655, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.2923694779116466, |
| "grad_norm": 0.33271877867676497, |
| "learning_rate": 4.8395721925133694e-05, |
| "loss": 0.4765, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.29397590361445786, |
| "grad_norm": 0.3260829000408802, |
| "learning_rate": 4.8663101604278076e-05, |
| "loss": 0.514, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.2955823293172691, |
| "grad_norm": 0.3493692790433782, |
| "learning_rate": 4.8930481283422465e-05, |
| "loss": 0.4988, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.2971887550200803, |
| "grad_norm": 0.32066390126257877, |
| "learning_rate": 4.919786096256685e-05, |
| "loss": 0.4717, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.2987951807228916, |
| "grad_norm": 0.30294593832444744, |
| "learning_rate": 4.946524064171123e-05, |
| "loss": 0.4987, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.3004016064257028, |
| "grad_norm": 0.28922416659203676, |
| "learning_rate": 4.973262032085561e-05, |
| "loss": 0.5041, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.30200803212851407, |
| "grad_norm": 0.30600524056610523, |
| "learning_rate": 5e-05, |
| "loss": 0.4886, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.3036144578313253, |
| "grad_norm": 0.31028179281939255, |
| "learning_rate": 4.997022036926742e-05, |
| "loss": 0.4908, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.30522088353413657, |
| "grad_norm": 0.3598166672734698, |
| "learning_rate": 4.994044073853485e-05, |
| "loss": 0.4882, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.3068273092369478, |
| "grad_norm": 0.3327095437744154, |
| "learning_rate": 4.9910661107802266e-05, |
| "loss": 0.4721, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.30843373493975906, |
| "grad_norm": 0.33567656707078364, |
| "learning_rate": 4.9880881477069685e-05, |
| "loss": 0.5082, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.3100401606425703, |
| "grad_norm": 0.30629964580097546, |
| "learning_rate": 4.9851101846337103e-05, |
| "loss": 0.4787, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.3116465863453815, |
| "grad_norm": 0.4036803078419087, |
| "learning_rate": 4.982132221560453e-05, |
| "loss": 0.4864, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.3132530120481928, |
| "grad_norm": 0.28293884051805157, |
| "learning_rate": 4.979154258487195e-05, |
| "loss": 0.4511, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.314859437751004, |
| "grad_norm": 0.37218469846708774, |
| "learning_rate": 4.9761762954139374e-05, |
| "loss": 0.4775, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.3164658634538153, |
| "grad_norm": 0.33300859531440663, |
| "learning_rate": 4.973198332340679e-05, |
| "loss": 0.4686, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.3180722891566265, |
| "grad_norm": 0.3032456489439544, |
| "learning_rate": 4.970220369267422e-05, |
| "loss": 0.5017, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.3196787148594378, |
| "grad_norm": 0.3382732850661887, |
| "learning_rate": 4.967242406194164e-05, |
| "loss": 0.4827, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.321285140562249, |
| "grad_norm": 0.27738294797571994, |
| "learning_rate": 4.9642644431209056e-05, |
| "loss": 0.4825, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.3228915662650602, |
| "grad_norm": 0.34032502157492933, |
| "learning_rate": 4.9612864800476475e-05, |
| "loss": 0.4684, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.3244979919678715, |
| "grad_norm": 0.30558882899916945, |
| "learning_rate": 4.95830851697439e-05, |
| "loss": 0.4708, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.3261044176706827, |
| "grad_norm": 0.33223396277492057, |
| "learning_rate": 4.955330553901132e-05, |
| "loss": 0.4721, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.327710843373494, |
| "grad_norm": 0.35686857251795423, |
| "learning_rate": 4.952352590827874e-05, |
| "loss": 0.5025, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.3293172690763052, |
| "grad_norm": 0.3144168287969755, |
| "learning_rate": 4.949374627754616e-05, |
| "loss": 0.4891, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.3309236947791165, |
| "grad_norm": 0.36830817356433687, |
| "learning_rate": 4.946396664681358e-05, |
| "loss": 0.4821, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.3325301204819277, |
| "grad_norm": 0.33504115804761303, |
| "learning_rate": 4.9434187016081e-05, |
| "loss": 0.4582, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.334136546184739, |
| "grad_norm": 0.3374864014399646, |
| "learning_rate": 4.940440738534842e-05, |
| "loss": 0.4583, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.3357429718875502, |
| "grad_norm": 0.32010693614220614, |
| "learning_rate": 4.9374627754615846e-05, |
| "loss": 0.476, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.3373493975903614, |
| "grad_norm": 0.3921796646269442, |
| "learning_rate": 4.9344848123883265e-05, |
| "loss": 0.4856, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.3389558232931727, |
| "grad_norm": 0.3583231598969687, |
| "learning_rate": 4.9315068493150684e-05, |
| "loss": 0.4861, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.3405622489959839, |
| "grad_norm": 0.3197993185697981, |
| "learning_rate": 4.928528886241811e-05, |
| "loss": 0.4685, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.3421686746987952, |
| "grad_norm": 0.3447752045592113, |
| "learning_rate": 4.9255509231685535e-05, |
| "loss": 0.478, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.3437751004016064, |
| "grad_norm": 0.3070127440753511, |
| "learning_rate": 4.9225729600952954e-05, |
| "loss": 0.4906, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.3453815261044177, |
| "grad_norm": 0.32721230883556446, |
| "learning_rate": 4.919594997022037e-05, |
| "loss": 0.4623, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.3469879518072289, |
| "grad_norm": 0.3032610735626838, |
| "learning_rate": 4.916617033948779e-05, |
| "loss": 0.4727, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.3485943775100402, |
| "grad_norm": 0.36030842044565586, |
| "learning_rate": 4.913639070875522e-05, |
| "loss": 0.4948, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.3502008032128514, |
| "grad_norm": 0.3578112671065048, |
| "learning_rate": 4.9106611078022636e-05, |
| "loss": 0.5095, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.35180722891566263, |
| "grad_norm": 0.29519131738013554, |
| "learning_rate": 4.9076831447290055e-05, |
| "loss": 0.4938, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.3534136546184739, |
| "grad_norm": 0.3647464193576054, |
| "learning_rate": 4.9047051816557474e-05, |
| "loss": 0.4679, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.3550200803212851, |
| "grad_norm": 0.28402742866330677, |
| "learning_rate": 4.90172721858249e-05, |
| "loss": 0.4889, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.3566265060240964, |
| "grad_norm": 0.35954979625647304, |
| "learning_rate": 4.898749255509232e-05, |
| "loss": 0.4645, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.3582329317269076, |
| "grad_norm": 0.32086331610979696, |
| "learning_rate": 4.895771292435974e-05, |
| "loss": 0.468, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.3598393574297189, |
| "grad_norm": 0.3436497487869299, |
| "learning_rate": 4.892793329362716e-05, |
| "loss": 0.4791, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.3614457831325301, |
| "grad_norm": 0.3487700049222101, |
| "learning_rate": 4.889815366289458e-05, |
| "loss": 0.4736, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.36305220883534134, |
| "grad_norm": 0.31931610499502094, |
| "learning_rate": 4.8868374032162e-05, |
| "loss": 0.4513, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.3646586345381526, |
| "grad_norm": 0.3402642317758613, |
| "learning_rate": 4.8838594401429426e-05, |
| "loss": 0.4464, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.36626506024096384, |
| "grad_norm": 0.3420992789401369, |
| "learning_rate": 4.8808814770696845e-05, |
| "loss": 0.4391, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.3678714859437751, |
| "grad_norm": 0.31932568158310554, |
| "learning_rate": 4.877903513996427e-05, |
| "loss": 0.4725, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.36947791164658633, |
| "grad_norm": 0.3208368137704536, |
| "learning_rate": 4.874925550923169e-05, |
| "loss": 0.478, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.3710843373493976, |
| "grad_norm": 0.3461690668941253, |
| "learning_rate": 4.871947587849911e-05, |
| "loss": 0.4954, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.37269076305220883, |
| "grad_norm": 0.30920224902950433, |
| "learning_rate": 4.8689696247766534e-05, |
| "loss": 0.4585, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.3742971887550201, |
| "grad_norm": 0.2968003609562749, |
| "learning_rate": 4.865991661703395e-05, |
| "loss": 0.46, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.3759036144578313, |
| "grad_norm": 0.3822740069427355, |
| "learning_rate": 4.863013698630137e-05, |
| "loss": 0.4684, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.37751004016064255, |
| "grad_norm": 0.28524554577386263, |
| "learning_rate": 4.860035735556879e-05, |
| "loss": 0.4593, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.3791164658634538, |
| "grad_norm": 0.3972725136832213, |
| "learning_rate": 4.8570577724836216e-05, |
| "loss": 0.4696, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.38072289156626504, |
| "grad_norm": 0.3054040969686469, |
| "learning_rate": 4.8540798094103635e-05, |
| "loss": 0.4606, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.3823293172690763, |
| "grad_norm": 0.4114177747549895, |
| "learning_rate": 4.8511018463371054e-05, |
| "loss": 0.4704, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.38393574297188754, |
| "grad_norm": 0.35567661029453534, |
| "learning_rate": 4.848123883263847e-05, |
| "loss": 0.4867, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.3855421686746988, |
| "grad_norm": 0.4375083408829605, |
| "learning_rate": 4.84514592019059e-05, |
| "loss": 0.4777, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.38714859437751004, |
| "grad_norm": 0.355462219685631, |
| "learning_rate": 4.842167957117332e-05, |
| "loss": 0.4755, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.3887550200803213, |
| "grad_norm": 0.3868787344983709, |
| "learning_rate": 4.839189994044074e-05, |
| "loss": 0.4789, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.39036144578313253, |
| "grad_norm": 0.36707388510401423, |
| "learning_rate": 4.836212030970816e-05, |
| "loss": 0.4716, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.39196787148594375, |
| "grad_norm": 0.40507444450377067, |
| "learning_rate": 4.833234067897559e-05, |
| "loss": 0.4979, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.39357429718875503, |
| "grad_norm": 0.41739786566121945, |
| "learning_rate": 4.8302561048243006e-05, |
| "loss": 0.4548, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.39518072289156625, |
| "grad_norm": 0.3215725343767495, |
| "learning_rate": 4.8272781417510425e-05, |
| "loss": 0.4658, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.3967871485943775, |
| "grad_norm": 0.39159227056027734, |
| "learning_rate": 4.824300178677785e-05, |
| "loss": 0.4591, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.39839357429718875, |
| "grad_norm": 0.3084653643465216, |
| "learning_rate": 4.821322215604527e-05, |
| "loss": 0.4773, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 0.38462131026029667, |
| "learning_rate": 4.818344252531269e-05, |
| "loss": 0.4822, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.40160642570281124, |
| "grad_norm": 0.277347167260431, |
| "learning_rate": 4.815366289458011e-05, |
| "loss": 0.4794, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.40321285140562246, |
| "grad_norm": 0.3530776967566136, |
| "learning_rate": 4.812388326384753e-05, |
| "loss": 0.4797, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.40481927710843374, |
| "grad_norm": 0.32131209625672547, |
| "learning_rate": 4.809410363311495e-05, |
| "loss": 0.4656, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.40642570281124496, |
| "grad_norm": 0.28272814685756176, |
| "learning_rate": 4.806432400238237e-05, |
| "loss": 0.4661, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.40803212851405624, |
| "grad_norm": 0.3060278443090253, |
| "learning_rate": 4.803454437164979e-05, |
| "loss": 0.4656, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.40963855421686746, |
| "grad_norm": 0.297979403125008, |
| "learning_rate": 4.8004764740917215e-05, |
| "loss": 0.4731, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.41124497991967873, |
| "grad_norm": 0.2858924170890451, |
| "learning_rate": 4.7974985110184634e-05, |
| "loss": 0.4703, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.41285140562248995, |
| "grad_norm": 0.2504985080692676, |
| "learning_rate": 4.794520547945205e-05, |
| "loss": 0.4528, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.41445783132530123, |
| "grad_norm": 0.2624859429254907, |
| "learning_rate": 4.791542584871948e-05, |
| "loss": 0.4698, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.41606425702811245, |
| "grad_norm": 0.2642208647062898, |
| "learning_rate": 4.7885646217986904e-05, |
| "loss": 0.4663, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.41767068273092367, |
| "grad_norm": 0.27787708065649286, |
| "learning_rate": 4.785586658725432e-05, |
| "loss": 0.4824, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.41927710843373495, |
| "grad_norm": 0.26698431800536987, |
| "learning_rate": 4.782608695652174e-05, |
| "loss": 0.4648, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.42088353413654617, |
| "grad_norm": 0.28516211486766074, |
| "learning_rate": 4.779630732578916e-05, |
| "loss": 0.4507, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.42248995983935744, |
| "grad_norm": 0.31276639788022564, |
| "learning_rate": 4.7766527695056586e-05, |
| "loss": 0.4667, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.42409638554216866, |
| "grad_norm": 0.32259830537808853, |
| "learning_rate": 4.7736748064324005e-05, |
| "loss": 0.47, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.42570281124497994, |
| "grad_norm": 0.3118440200761508, |
| "learning_rate": 4.7706968433591424e-05, |
| "loss": 0.4617, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.42730923694779116, |
| "grad_norm": 0.2813632911379893, |
| "learning_rate": 4.767718880285885e-05, |
| "loss": 0.4573, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.42891566265060244, |
| "grad_norm": 0.29321679658663186, |
| "learning_rate": 4.764740917212627e-05, |
| "loss": 0.4715, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.43052208835341366, |
| "grad_norm": 0.22982181765765133, |
| "learning_rate": 4.761762954139369e-05, |
| "loss": 0.4409, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.4321285140562249, |
| "grad_norm": 0.3006542887568241, |
| "learning_rate": 4.7587849910661106e-05, |
| "loss": 0.452, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.43373493975903615, |
| "grad_norm": 0.29535878661480397, |
| "learning_rate": 4.755807027992853e-05, |
| "loss": 0.4778, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.4353413654618474, |
| "grad_norm": 0.28772801070152304, |
| "learning_rate": 4.752829064919595e-05, |
| "loss": 0.4649, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.43694779116465865, |
| "grad_norm": 0.2898158678126117, |
| "learning_rate": 4.749851101846337e-05, |
| "loss": 0.4686, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.43855421686746987, |
| "grad_norm": 0.3234748814438192, |
| "learning_rate": 4.7468731387730795e-05, |
| "loss": 0.4675, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.44016064257028115, |
| "grad_norm": 0.3492017937374251, |
| "learning_rate": 4.743895175699822e-05, |
| "loss": 0.4619, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.44176706827309237, |
| "grad_norm": 0.3814195544285374, |
| "learning_rate": 4.740917212626564e-05, |
| "loss": 0.4438, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.4433734939759036, |
| "grad_norm": 0.3585304162384897, |
| "learning_rate": 4.737939249553306e-05, |
| "loss": 0.4683, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.44497991967871486, |
| "grad_norm": 0.3956072839161844, |
| "learning_rate": 4.734961286480048e-05, |
| "loss": 0.4601, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.4465863453815261, |
| "grad_norm": 0.3911749897730973, |
| "learning_rate": 4.73198332340679e-05, |
| "loss": 0.4771, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.44819277108433736, |
| "grad_norm": 0.3089732227604252, |
| "learning_rate": 4.729005360333532e-05, |
| "loss": 0.454, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.4497991967871486, |
| "grad_norm": 0.39150802953689284, |
| "learning_rate": 4.726027397260274e-05, |
| "loss": 0.4565, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.45140562248995986, |
| "grad_norm": 0.29879509492587897, |
| "learning_rate": 4.723049434187016e-05, |
| "loss": 0.4594, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.4530120481927711, |
| "grad_norm": 0.36082290350674623, |
| "learning_rate": 4.7200714711137585e-05, |
| "loss": 0.4713, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.45461847389558235, |
| "grad_norm": 0.35711098350802967, |
| "learning_rate": 4.7170935080405004e-05, |
| "loss": 0.4793, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.4562248995983936, |
| "grad_norm": 0.2941919814125454, |
| "learning_rate": 4.714115544967242e-05, |
| "loss": 0.463, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.4578313253012048, |
| "grad_norm": 0.33048721274799503, |
| "learning_rate": 4.711137581893985e-05, |
| "loss": 0.4801, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.45943775100401607, |
| "grad_norm": 0.30614689618327984, |
| "learning_rate": 4.708159618820727e-05, |
| "loss": 0.473, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.4610441767068273, |
| "grad_norm": 0.31807094823468063, |
| "learning_rate": 4.7051816557474686e-05, |
| "loss": 0.4736, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.46265060240963857, |
| "grad_norm": 0.3790486412902381, |
| "learning_rate": 4.7022036926742105e-05, |
| "loss": 0.4693, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.4642570281124498, |
| "grad_norm": 0.2811084565359757, |
| "learning_rate": 4.699225729600954e-05, |
| "loss": 0.4818, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.46586345381526106, |
| "grad_norm": 0.3257486496872889, |
| "learning_rate": 4.6962477665276956e-05, |
| "loss": 0.4733, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.4674698795180723, |
| "grad_norm": 0.26923234391566453, |
| "learning_rate": 4.6932698034544375e-05, |
| "loss": 0.4695, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.46907630522088356, |
| "grad_norm": 0.37080256454192484, |
| "learning_rate": 4.6902918403811794e-05, |
| "loss": 0.4569, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.4706827309236948, |
| "grad_norm": 0.30586107586982886, |
| "learning_rate": 4.687313877307922e-05, |
| "loss": 0.4654, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.472289156626506, |
| "grad_norm": 0.36870299166682075, |
| "learning_rate": 4.684335914234664e-05, |
| "loss": 0.4407, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.4738955823293173, |
| "grad_norm": 0.28377325722930746, |
| "learning_rate": 4.681357951161406e-05, |
| "loss": 0.4674, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.4755020080321285, |
| "grad_norm": 0.2953311214919539, |
| "learning_rate": 4.6783799880881476e-05, |
| "loss": 0.4641, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.4771084337349398, |
| "grad_norm": 0.3117438841349859, |
| "learning_rate": 4.67540202501489e-05, |
| "loss": 0.4601, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.478714859437751, |
| "grad_norm": 0.256254866185249, |
| "learning_rate": 4.672424061941632e-05, |
| "loss": 0.4488, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.48032128514056227, |
| "grad_norm": 0.3212937484737991, |
| "learning_rate": 4.669446098868374e-05, |
| "loss": 0.463, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.4819277108433735, |
| "grad_norm": 0.2605989012252139, |
| "learning_rate": 4.6664681357951165e-05, |
| "loss": 0.4488, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.4835341365461847, |
| "grad_norm": 0.36926616999987727, |
| "learning_rate": 4.6634901727218584e-05, |
| "loss": 0.4678, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.485140562248996, |
| "grad_norm": 0.27259057184527724, |
| "learning_rate": 4.6605122096486e-05, |
| "loss": 0.4675, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.4867469879518072, |
| "grad_norm": 0.332589334481269, |
| "learning_rate": 4.657534246575342e-05, |
| "loss": 0.4431, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.4883534136546185, |
| "grad_norm": 0.28195976257236244, |
| "learning_rate": 4.654556283502085e-05, |
| "loss": 0.45, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.4899598393574297, |
| "grad_norm": 0.3311412168331424, |
| "learning_rate": 4.651578320428827e-05, |
| "loss": 0.4865, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.491566265060241, |
| "grad_norm": 0.2547234216247052, |
| "learning_rate": 4.648600357355569e-05, |
| "loss": 0.4507, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.4931726907630522, |
| "grad_norm": 0.2993718374438959, |
| "learning_rate": 4.645622394282311e-05, |
| "loss": 0.4731, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.4947791164658635, |
| "grad_norm": 0.25532758970228964, |
| "learning_rate": 4.6426444312090536e-05, |
| "loss": 0.4286, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.4963855421686747, |
| "grad_norm": 0.3178442593811434, |
| "learning_rate": 4.6396664681357955e-05, |
| "loss": 0.4434, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.4979919678714859, |
| "grad_norm": 0.3057512261403026, |
| "learning_rate": 4.6366885050625374e-05, |
| "loss": 0.4562, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.4995983935742972, |
| "grad_norm": 0.3030624790962943, |
| "learning_rate": 4.633710541989279e-05, |
| "loss": 0.4863, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.5012048192771085, |
| "grad_norm": 0.2979287221067525, |
| "learning_rate": 4.630732578916022e-05, |
| "loss": 0.4421, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.5028112449799197, |
| "grad_norm": 0.31752192755216657, |
| "learning_rate": 4.627754615842764e-05, |
| "loss": 0.471, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.5044176706827309, |
| "grad_norm": 0.2659253763839392, |
| "learning_rate": 4.6247766527695056e-05, |
| "loss": 0.4595, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.5060240963855421, |
| "grad_norm": 0.3255884377064916, |
| "learning_rate": 4.6217986896962475e-05, |
| "loss": 0.4571, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.5076305220883535, |
| "grad_norm": 0.2882963346908339, |
| "learning_rate": 4.61882072662299e-05, |
| "loss": 0.4629, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.5092369477911647, |
| "grad_norm": 0.30168124050054673, |
| "learning_rate": 4.615842763549732e-05, |
| "loss": 0.4647, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.5108433734939759, |
| "grad_norm": 0.3068238071113629, |
| "learning_rate": 4.612864800476474e-05, |
| "loss": 0.4602, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.5124497991967871, |
| "grad_norm": 0.26522047223534434, |
| "learning_rate": 4.6098868374032164e-05, |
| "loss": 0.4504, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.5140562248995983, |
| "grad_norm": 0.332945130105212, |
| "learning_rate": 4.606908874329959e-05, |
| "loss": 0.4427, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.5156626506024097, |
| "grad_norm": 0.34353665143726153, |
| "learning_rate": 4.603930911256701e-05, |
| "loss": 0.4529, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.5172690763052209, |
| "grad_norm": 0.35658461465036295, |
| "learning_rate": 4.600952948183443e-05, |
| "loss": 0.4475, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.5188755020080321, |
| "grad_norm": 0.31267607053571417, |
| "learning_rate": 4.5979749851101846e-05, |
| "loss": 0.4453, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.5204819277108433, |
| "grad_norm": 0.27347014445709883, |
| "learning_rate": 4.594997022036927e-05, |
| "loss": 0.4601, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.5220883534136547, |
| "grad_norm": 0.3477235744237479, |
| "learning_rate": 4.592019058963669e-05, |
| "loss": 0.4543, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.5236947791164659, |
| "grad_norm": 0.332561502304534, |
| "learning_rate": 4.589041095890411e-05, |
| "loss": 0.4442, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.5253012048192771, |
| "grad_norm": 0.2809644940810856, |
| "learning_rate": 4.5860631328171535e-05, |
| "loss": 0.4535, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.5269076305220883, |
| "grad_norm": 0.3576928200471685, |
| "learning_rate": 4.5830851697438954e-05, |
| "loss": 0.4749, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.5285140562248996, |
| "grad_norm": 0.2703626548975125, |
| "learning_rate": 4.580107206670637e-05, |
| "loss": 0.4638, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.5301204819277109, |
| "grad_norm": 0.3636772731014924, |
| "learning_rate": 4.577129243597379e-05, |
| "loss": 0.4788, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.5317269076305221, |
| "grad_norm": 0.26520757679211987, |
| "learning_rate": 4.574151280524122e-05, |
| "loss": 0.4634, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.5333333333333333, |
| "grad_norm": 0.3402903862345153, |
| "learning_rate": 4.5711733174508637e-05, |
| "loss": 0.4703, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.5349397590361445, |
| "grad_norm": 0.27480422946520894, |
| "learning_rate": 4.5681953543776055e-05, |
| "loss": 0.4555, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.5365461847389559, |
| "grad_norm": 0.2797169554300838, |
| "learning_rate": 4.565217391304348e-05, |
| "loss": 0.4534, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.5381526104417671, |
| "grad_norm": 0.269091260800782, |
| "learning_rate": 4.562239428231091e-05, |
| "loss": 0.4494, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.5397590361445783, |
| "grad_norm": 0.27190306586871754, |
| "learning_rate": 4.5592614651578326e-05, |
| "loss": 0.4522, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.5413654618473895, |
| "grad_norm": 0.31334311480552396, |
| "learning_rate": 4.5562835020845744e-05, |
| "loss": 0.4475, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.5429718875502008, |
| "grad_norm": 0.2845123465716489, |
| "learning_rate": 4.553305539011316e-05, |
| "loss": 0.4533, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.5445783132530121, |
| "grad_norm": 0.3345213805522309, |
| "learning_rate": 4.550327575938059e-05, |
| "loss": 0.4546, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.5461847389558233, |
| "grad_norm": 0.3299456597920992, |
| "learning_rate": 4.547349612864801e-05, |
| "loss": 0.4549, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.5477911646586345, |
| "grad_norm": 0.31320544671321066, |
| "learning_rate": 4.5443716497915427e-05, |
| "loss": 0.448, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.5493975903614458, |
| "grad_norm": 0.31255596857244, |
| "learning_rate": 4.541393686718285e-05, |
| "loss": 0.4694, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.551004016064257, |
| "grad_norm": 0.25871783218229927, |
| "learning_rate": 4.538415723645027e-05, |
| "loss": 0.4543, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.5526104417670683, |
| "grad_norm": 0.304269822025114, |
| "learning_rate": 4.535437760571769e-05, |
| "loss": 0.4507, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.5542168674698795, |
| "grad_norm": 0.2656090175745257, |
| "learning_rate": 4.532459797498511e-05, |
| "loss": 0.4502, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.5558232931726907, |
| "grad_norm": 0.2907242019133886, |
| "learning_rate": 4.5294818344252534e-05, |
| "loss": 0.4587, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.557429718875502, |
| "grad_norm": 0.28206137237730466, |
| "learning_rate": 4.526503871351995e-05, |
| "loss": 0.4351, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.5590361445783133, |
| "grad_norm": 0.28586719298856994, |
| "learning_rate": 4.523525908278737e-05, |
| "loss": 0.4681, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.5606425702811245, |
| "grad_norm": 0.28463207414012487, |
| "learning_rate": 4.520547945205479e-05, |
| "loss": 0.4487, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.5622489959839357, |
| "grad_norm": 0.28954112182874076, |
| "learning_rate": 4.5175699821322223e-05, |
| "loss": 0.4486, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.563855421686747, |
| "grad_norm": 0.26885455709526246, |
| "learning_rate": 4.514592019058964e-05, |
| "loss": 0.4299, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.5654618473895582, |
| "grad_norm": 0.2396609192525509, |
| "learning_rate": 4.511614055985706e-05, |
| "loss": 0.4486, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.5670682730923695, |
| "grad_norm": 0.26759074416116824, |
| "learning_rate": 4.508636092912448e-05, |
| "loss": 0.4616, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.5686746987951807, |
| "grad_norm": 0.24351279249189578, |
| "learning_rate": 4.5056581298391906e-05, |
| "loss": 0.4469, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.570281124497992, |
| "grad_norm": 0.299896902235091, |
| "learning_rate": 4.5026801667659325e-05, |
| "loss": 0.4651, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.5718875502008032, |
| "grad_norm": 0.25008325396684505, |
| "learning_rate": 4.499702203692674e-05, |
| "loss": 0.4741, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.5734939759036145, |
| "grad_norm": 0.27752152084274156, |
| "learning_rate": 4.496724240619416e-05, |
| "loss": 0.4428, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.5751004016064257, |
| "grad_norm": 0.28830683956457576, |
| "learning_rate": 4.493746277546159e-05, |
| "loss": 0.4503, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.576706827309237, |
| "grad_norm": 0.2803688735487852, |
| "learning_rate": 4.490768314472901e-05, |
| "loss": 0.4577, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.5783132530120482, |
| "grad_norm": 0.2782744621460348, |
| "learning_rate": 4.4877903513996426e-05, |
| "loss": 0.4612, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.5799196787148594, |
| "grad_norm": 0.2587035764907811, |
| "learning_rate": 4.484812388326385e-05, |
| "loss": 0.422, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.5815261044176707, |
| "grad_norm": 0.3402519963361588, |
| "learning_rate": 4.481834425253127e-05, |
| "loss": 0.4531, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.5831325301204819, |
| "grad_norm": 0.4367512894100571, |
| "learning_rate": 4.478856462179869e-05, |
| "loss": 0.4525, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.5847389558232932, |
| "grad_norm": 0.3058357640519284, |
| "learning_rate": 4.475878499106611e-05, |
| "loss": 0.4635, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.5863453815261044, |
| "grad_norm": 0.28012373506975213, |
| "learning_rate": 4.4729005360333533e-05, |
| "loss": 0.4578, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.5879518072289157, |
| "grad_norm": 0.2659693307554147, |
| "learning_rate": 4.469922572960096e-05, |
| "loss": 0.443, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.5895582329317269, |
| "grad_norm": 0.2631611308004764, |
| "learning_rate": 4.466944609886838e-05, |
| "loss": 0.4509, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.5911646586345382, |
| "grad_norm": 0.26249207008050757, |
| "learning_rate": 4.46396664681358e-05, |
| "loss": 0.4595, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.5927710843373494, |
| "grad_norm": 0.2765644480893888, |
| "learning_rate": 4.460988683740322e-05, |
| "loss": 0.4515, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.5943775100401606, |
| "grad_norm": 0.24009658771752188, |
| "learning_rate": 4.458010720667064e-05, |
| "loss": 0.446, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.5959839357429719, |
| "grad_norm": 0.27183227934106, |
| "learning_rate": 4.455032757593806e-05, |
| "loss": 0.4305, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.5975903614457831, |
| "grad_norm": 0.2517239356470154, |
| "learning_rate": 4.452054794520548e-05, |
| "loss": 0.4537, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.5991967871485944, |
| "grad_norm": 0.30072025208230435, |
| "learning_rate": 4.4490768314472905e-05, |
| "loss": 0.468, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.6008032128514056, |
| "grad_norm": 0.25818282402507275, |
| "learning_rate": 4.4460988683740323e-05, |
| "loss": 0.4406, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.6024096385542169, |
| "grad_norm": 0.2851206120266935, |
| "learning_rate": 4.443120905300774e-05, |
| "loss": 0.4749, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.6040160642570281, |
| "grad_norm": 0.3297131575662275, |
| "learning_rate": 4.440142942227517e-05, |
| "loss": 0.4398, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.6056224899598394, |
| "grad_norm": 0.2824428795313683, |
| "learning_rate": 4.437164979154259e-05, |
| "loss": 0.4612, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.6072289156626506, |
| "grad_norm": 0.33993463600355445, |
| "learning_rate": 4.4341870160810006e-05, |
| "loss": 0.437, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.6088353413654618, |
| "grad_norm": 0.307216587987155, |
| "learning_rate": 4.4312090530077425e-05, |
| "loss": 0.4501, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.6104417670682731, |
| "grad_norm": 0.34827036194744154, |
| "learning_rate": 4.428231089934485e-05, |
| "loss": 0.4679, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.6120481927710844, |
| "grad_norm": 0.3504294248069219, |
| "learning_rate": 4.4252531268612276e-05, |
| "loss": 0.443, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.6136546184738956, |
| "grad_norm": 0.33883468625080354, |
| "learning_rate": 4.4222751637879695e-05, |
| "loss": 0.4497, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.6152610441767068, |
| "grad_norm": 0.30376665359188365, |
| "learning_rate": 4.4192972007147114e-05, |
| "loss": 0.4533, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.6168674698795181, |
| "grad_norm": 0.3434907934945615, |
| "learning_rate": 4.416319237641454e-05, |
| "loss": 0.4524, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.6184738955823293, |
| "grad_norm": 0.2816956482632934, |
| "learning_rate": 4.413341274568196e-05, |
| "loss": 0.433, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.6200803212851406, |
| "grad_norm": 0.32605794938456106, |
| "learning_rate": 4.410363311494938e-05, |
| "loss": 0.4472, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.6216867469879518, |
| "grad_norm": 0.3125757813939004, |
| "learning_rate": 4.4073853484216796e-05, |
| "loss": 0.4484, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.623293172690763, |
| "grad_norm": 0.28211080101504216, |
| "learning_rate": 4.404407385348422e-05, |
| "loss": 0.4485, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.6248995983935743, |
| "grad_norm": 0.3567349794579873, |
| "learning_rate": 4.401429422275164e-05, |
| "loss": 0.4761, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.6265060240963856, |
| "grad_norm": 0.29512054372110924, |
| "learning_rate": 4.398451459201906e-05, |
| "loss": 0.4696, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.6281124497991968, |
| "grad_norm": 0.2783488849608133, |
| "learning_rate": 4.395473496128648e-05, |
| "loss": 0.4618, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.629718875502008, |
| "grad_norm": 0.31557267040914205, |
| "learning_rate": 4.3924955330553904e-05, |
| "loss": 0.4745, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.6313253012048192, |
| "grad_norm": 0.2770239046196387, |
| "learning_rate": 4.389517569982132e-05, |
| "loss": 0.4454, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.6329317269076306, |
| "grad_norm": 0.24316677828872768, |
| "learning_rate": 4.386539606908874e-05, |
| "loss": 0.4385, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.6345381526104418, |
| "grad_norm": 0.35557179231752045, |
| "learning_rate": 4.383561643835617e-05, |
| "loss": 0.4468, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.636144578313253, |
| "grad_norm": 0.24743033676911177, |
| "learning_rate": 4.380583680762359e-05, |
| "loss": 0.4559, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.6377510040160642, |
| "grad_norm": 0.32902571391845864, |
| "learning_rate": 4.377605717689101e-05, |
| "loss": 0.444, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.6393574297188755, |
| "grad_norm": 0.2922383486142178, |
| "learning_rate": 4.374627754615843e-05, |
| "loss": 0.4467, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.6409638554216868, |
| "grad_norm": 0.31920179525012243, |
| "learning_rate": 4.371649791542585e-05, |
| "loss": 0.4639, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.642570281124498, |
| "grad_norm": 0.2631355475269553, |
| "learning_rate": 4.3686718284693275e-05, |
| "loss": 0.4428, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.6441767068273092, |
| "grad_norm": 0.28345993830303334, |
| "learning_rate": 4.3656938653960694e-05, |
| "loss": 0.4369, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.6457831325301204, |
| "grad_norm": 0.26637720130752807, |
| "learning_rate": 4.362715902322811e-05, |
| "loss": 0.4623, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.6473895582329318, |
| "grad_norm": 0.3203791905662697, |
| "learning_rate": 4.359737939249554e-05, |
| "loss": 0.4289, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.648995983935743, |
| "grad_norm": 0.22842574861228213, |
| "learning_rate": 4.356759976176296e-05, |
| "loss": 0.4277, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.6506024096385542, |
| "grad_norm": 0.3067343141644802, |
| "learning_rate": 4.3537820131030376e-05, |
| "loss": 0.4433, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.6522088353413654, |
| "grad_norm": 0.2812065783428807, |
| "learning_rate": 4.3508040500297795e-05, |
| "loss": 0.4322, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.6538152610441768, |
| "grad_norm": 0.3040061933492447, |
| "learning_rate": 4.347826086956522e-05, |
| "loss": 0.437, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.655421686746988, |
| "grad_norm": 0.2987941211140326, |
| "learning_rate": 4.344848123883264e-05, |
| "loss": 0.4555, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.6570281124497992, |
| "grad_norm": 0.2659280792695334, |
| "learning_rate": 4.341870160810006e-05, |
| "loss": 0.431, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.6586345381526104, |
| "grad_norm": 0.28023761684807197, |
| "learning_rate": 4.3388921977367484e-05, |
| "loss": 0.4706, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.6602409638554216, |
| "grad_norm": 0.2963346211131788, |
| "learning_rate": 4.33591423466349e-05, |
| "loss": 0.4563, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.661847389558233, |
| "grad_norm": 0.23247955721717886, |
| "learning_rate": 4.332936271590233e-05, |
| "loss": 0.4297, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.6634538152610442, |
| "grad_norm": 0.29491691232014927, |
| "learning_rate": 4.329958308516975e-05, |
| "loss": 0.4274, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.6650602409638554, |
| "grad_norm": 0.25617212995433053, |
| "learning_rate": 4.3269803454437166e-05, |
| "loss": 0.4564, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 0.23564356472993905, |
| "learning_rate": 4.324002382370459e-05, |
| "loss": 0.4276, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.668273092369478, |
| "grad_norm": 0.29822994568037164, |
| "learning_rate": 4.321024419297201e-05, |
| "loss": 0.4456, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.6698795180722892, |
| "grad_norm": 0.27049585970987766, |
| "learning_rate": 4.318046456223943e-05, |
| "loss": 0.4378, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.6714859437751004, |
| "grad_norm": 0.24936723553806212, |
| "learning_rate": 4.3150684931506855e-05, |
| "loss": 0.4616, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.6730923694779116, |
| "grad_norm": 0.2711234211903887, |
| "learning_rate": 4.3120905300774274e-05, |
| "loss": 0.4564, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.6746987951807228, |
| "grad_norm": 0.2545386509478158, |
| "learning_rate": 4.309112567004169e-05, |
| "loss": 0.4395, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.6763052208835342, |
| "grad_norm": 0.2584633324804562, |
| "learning_rate": 4.306134603930911e-05, |
| "loss": 0.4498, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.6779116465863454, |
| "grad_norm": 0.2719252003406635, |
| "learning_rate": 4.303156640857654e-05, |
| "loss": 0.4382, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.6795180722891566, |
| "grad_norm": 0.24135698151855509, |
| "learning_rate": 4.3001786777843956e-05, |
| "loss": 0.4347, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.6811244979919678, |
| "grad_norm": 0.268141549049486, |
| "learning_rate": 4.2972007147111375e-05, |
| "loss": 0.4357, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.6827309236947792, |
| "grad_norm": 0.2693180226887475, |
| "learning_rate": 4.2942227516378794e-05, |
| "loss": 0.4683, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.6843373493975904, |
| "grad_norm": 0.27574435135497044, |
| "learning_rate": 4.291244788564622e-05, |
| "loss": 0.4281, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.6859437751004016, |
| "grad_norm": 0.2655247693683116, |
| "learning_rate": 4.2882668254913645e-05, |
| "loss": 0.4408, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.6875502008032128, |
| "grad_norm": 0.23605713593282013, |
| "learning_rate": 4.2852888624181064e-05, |
| "loss": 0.4264, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.689156626506024, |
| "grad_norm": 0.2668756850543248, |
| "learning_rate": 4.282310899344848e-05, |
| "loss": 0.4386, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.6907630522088354, |
| "grad_norm": 0.28508990279511975, |
| "learning_rate": 4.279332936271591e-05, |
| "loss": 0.4608, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.6923694779116466, |
| "grad_norm": 0.2673675554092482, |
| "learning_rate": 4.276354973198333e-05, |
| "loss": 0.452, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.6939759036144578, |
| "grad_norm": 0.25167957382435874, |
| "learning_rate": 4.2733770101250746e-05, |
| "loss": 0.4448, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.695582329317269, |
| "grad_norm": 0.2748905077940865, |
| "learning_rate": 4.2703990470518165e-05, |
| "loss": 0.4328, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.6971887550200804, |
| "grad_norm": 0.25694191753243817, |
| "learning_rate": 4.267421083978559e-05, |
| "loss": 0.4433, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.6987951807228916, |
| "grad_norm": 0.2953083002140047, |
| "learning_rate": 4.264443120905301e-05, |
| "loss": 0.4346, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.7004016064257028, |
| "grad_norm": 0.31346385697940604, |
| "learning_rate": 4.261465157832043e-05, |
| "loss": 0.4585, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.702008032128514, |
| "grad_norm": 0.2943130181110173, |
| "learning_rate": 4.2584871947587854e-05, |
| "loss": 0.4301, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.7036144578313253, |
| "grad_norm": 0.2970743817353812, |
| "learning_rate": 4.255509231685527e-05, |
| "loss": 0.4532, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.7052208835341366, |
| "grad_norm": 0.2831138548453156, |
| "learning_rate": 4.252531268612269e-05, |
| "loss": 0.4466, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.7068273092369478, |
| "grad_norm": 0.26374093187932474, |
| "learning_rate": 4.249553305539011e-05, |
| "loss": 0.4274, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.708433734939759, |
| "grad_norm": 0.24426239862559207, |
| "learning_rate": 4.2465753424657536e-05, |
| "loss": 0.4424, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.7100401606425703, |
| "grad_norm": 0.2641064913637875, |
| "learning_rate": 4.2435973793924955e-05, |
| "loss": 0.4408, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.7116465863453815, |
| "grad_norm": 0.2235304256466447, |
| "learning_rate": 4.240619416319238e-05, |
| "loss": 0.4275, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.7132530120481928, |
| "grad_norm": 0.24784504525361167, |
| "learning_rate": 4.23764145324598e-05, |
| "loss": 0.4381, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.714859437751004, |
| "grad_norm": 0.2370545453905028, |
| "learning_rate": 4.2346634901727225e-05, |
| "loss": 0.4468, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.7164658634538152, |
| "grad_norm": 0.2562860337291831, |
| "learning_rate": 4.2316855270994644e-05, |
| "loss": 0.4705, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.7180722891566265, |
| "grad_norm": 0.23651519929818382, |
| "learning_rate": 4.228707564026206e-05, |
| "loss": 0.4372, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.7196787148594378, |
| "grad_norm": 0.28608450172484184, |
| "learning_rate": 4.225729600952948e-05, |
| "loss": 0.4408, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.721285140562249, |
| "grad_norm": 0.2378382301528455, |
| "learning_rate": 4.222751637879691e-05, |
| "loss": 0.4695, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.7228915662650602, |
| "grad_norm": 0.30330808848040486, |
| "learning_rate": 4.2197736748064326e-05, |
| "loss": 0.4597, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.7244979919678715, |
| "grad_norm": 0.26876726977866233, |
| "learning_rate": 4.2167957117331745e-05, |
| "loss": 0.4658, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.7261044176706827, |
| "grad_norm": 0.2844637152748802, |
| "learning_rate": 4.213817748659917e-05, |
| "loss": 0.4413, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.727710843373494, |
| "grad_norm": 0.2615741396788308, |
| "learning_rate": 4.210839785586659e-05, |
| "loss": 0.4299, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.7293172690763052, |
| "grad_norm": 0.27257653372835033, |
| "learning_rate": 4.207861822513401e-05, |
| "loss": 0.4481, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.7309236947791165, |
| "grad_norm": 0.25639819683469073, |
| "learning_rate": 4.204883859440143e-05, |
| "loss": 0.4324, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.7325301204819277, |
| "grad_norm": 0.23672287570546763, |
| "learning_rate": 4.201905896366885e-05, |
| "loss": 0.4341, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.734136546184739, |
| "grad_norm": 0.23858902027069817, |
| "learning_rate": 4.198927933293627e-05, |
| "loss": 0.4454, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.7357429718875502, |
| "grad_norm": 0.26312986415658596, |
| "learning_rate": 4.19594997022037e-05, |
| "loss": 0.4503, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.7373493975903614, |
| "grad_norm": 0.22359659568823306, |
| "learning_rate": 4.1929720071471116e-05, |
| "loss": 0.4464, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.7389558232931727, |
| "grad_norm": 0.2771729748099005, |
| "learning_rate": 4.189994044073854e-05, |
| "loss": 0.4714, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.7405622489959839, |
| "grad_norm": 0.21993164913808572, |
| "learning_rate": 4.187016081000596e-05, |
| "loss": 0.4325, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.7421686746987952, |
| "grad_norm": 0.2889751049620364, |
| "learning_rate": 4.184038117927338e-05, |
| "loss": 0.4449, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.7437751004016064, |
| "grad_norm": 0.2949855484027405, |
| "learning_rate": 4.18106015485408e-05, |
| "loss": 0.4262, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.7453815261044177, |
| "grad_norm": 0.26377183894449047, |
| "learning_rate": 4.1780821917808224e-05, |
| "loss": 0.4545, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.7469879518072289, |
| "grad_norm": 0.30726865720505625, |
| "learning_rate": 4.175104228707564e-05, |
| "loss": 0.4382, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.7485943775100402, |
| "grad_norm": 0.23122382644642359, |
| "learning_rate": 4.172126265634306e-05, |
| "loss": 0.4406, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.7502008032128514, |
| "grad_norm": 0.276949750828653, |
| "learning_rate": 4.169148302561048e-05, |
| "loss": 0.4418, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.7518072289156627, |
| "grad_norm": 0.27713071319003524, |
| "learning_rate": 4.1661703394877906e-05, |
| "loss": 0.4537, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.7534136546184739, |
| "grad_norm": 0.2526256098750081, |
| "learning_rate": 4.1631923764145325e-05, |
| "loss": 0.4228, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.7550200803212851, |
| "grad_norm": 0.2638363919651469, |
| "learning_rate": 4.1602144133412744e-05, |
| "loss": 0.4285, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.7566265060240964, |
| "grad_norm": 0.258408639046892, |
| "learning_rate": 4.157236450268017e-05, |
| "loss": 0.4783, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.7582329317269076, |
| "grad_norm": 0.27786588034932264, |
| "learning_rate": 4.154258487194759e-05, |
| "loss": 0.4233, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.7598393574297189, |
| "grad_norm": 0.2755128011878773, |
| "learning_rate": 4.1512805241215014e-05, |
| "loss": 0.4381, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.7614457831325301, |
| "grad_norm": 0.2413178291750358, |
| "learning_rate": 4.148302561048243e-05, |
| "loss": 0.4426, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.7630522088353414, |
| "grad_norm": 0.2625618384300778, |
| "learning_rate": 4.145324597974985e-05, |
| "loss": 0.4254, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.7646586345381526, |
| "grad_norm": 0.24765395667571197, |
| "learning_rate": 4.142346634901728e-05, |
| "loss": 0.4085, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.7662650602409639, |
| "grad_norm": 0.24032530369165062, |
| "learning_rate": 4.1393686718284696e-05, |
| "loss": 0.4362, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.7678714859437751, |
| "grad_norm": 0.29944650953941004, |
| "learning_rate": 4.1363907087552115e-05, |
| "loss": 0.4418, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.7694779116465863, |
| "grad_norm": 0.23902526885476016, |
| "learning_rate": 4.133412745681954e-05, |
| "loss": 0.4428, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.7710843373493976, |
| "grad_norm": 0.2765681820302345, |
| "learning_rate": 4.130434782608696e-05, |
| "loss": 0.4303, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.7726907630522089, |
| "grad_norm": 0.2842130912447409, |
| "learning_rate": 4.127456819535438e-05, |
| "loss": 0.4475, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.7742971887550201, |
| "grad_norm": 0.2438936958185551, |
| "learning_rate": 4.12447885646218e-05, |
| "loss": 0.4347, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.7759036144578313, |
| "grad_norm": 0.3168618858927945, |
| "learning_rate": 4.121500893388922e-05, |
| "loss": 0.4269, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.7775100401606426, |
| "grad_norm": 0.2758789903313452, |
| "learning_rate": 4.118522930315664e-05, |
| "loss": 0.4459, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.7791164658634538, |
| "grad_norm": 0.2493181686533524, |
| "learning_rate": 4.115544967242406e-05, |
| "loss": 0.4148, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.7807228915662651, |
| "grad_norm": 0.308879700403648, |
| "learning_rate": 4.1125670041691486e-05, |
| "loss": 0.4234, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.7823293172690763, |
| "grad_norm": 0.25111177333337475, |
| "learning_rate": 4.1095890410958905e-05, |
| "loss": 0.4326, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.7839357429718875, |
| "grad_norm": 0.27869616318648066, |
| "learning_rate": 4.1066110780226324e-05, |
| "loss": 0.4636, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.7855421686746988, |
| "grad_norm": 0.26843185289016686, |
| "learning_rate": 4.103633114949375e-05, |
| "loss": 0.4489, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.7871485943775101, |
| "grad_norm": 0.2515875459811497, |
| "learning_rate": 4.100655151876117e-05, |
| "loss": 0.4365, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.7887550200803213, |
| "grad_norm": 0.3043990948350012, |
| "learning_rate": 4.0976771888028594e-05, |
| "loss": 0.4461, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.7903614457831325, |
| "grad_norm": 0.2574069687114595, |
| "learning_rate": 4.094699225729601e-05, |
| "loss": 0.457, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.7919678714859437, |
| "grad_norm": 0.27116949616970154, |
| "learning_rate": 4.091721262656343e-05, |
| "loss": 0.4409, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.793574297188755, |
| "grad_norm": 0.25968831554328287, |
| "learning_rate": 4.088743299583086e-05, |
| "loss": 0.4468, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.7951807228915663, |
| "grad_norm": 0.26335068526437494, |
| "learning_rate": 4.0857653365098276e-05, |
| "loss": 0.4327, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.7967871485943775, |
| "grad_norm": 0.24080104395558735, |
| "learning_rate": 4.0827873734365695e-05, |
| "loss": 0.4634, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.7983935742971887, |
| "grad_norm": 0.26294966531248926, |
| "learning_rate": 4.0798094103633114e-05, |
| "loss": 0.4391, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.25051730546706547, |
| "learning_rate": 4.076831447290054e-05, |
| "loss": 0.4325, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.8016064257028113, |
| "grad_norm": 0.2427733402448281, |
| "learning_rate": 4.073853484216796e-05, |
| "loss": 0.4374, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.8032128514056225, |
| "grad_norm": 0.24425221729505334, |
| "learning_rate": 4.070875521143538e-05, |
| "loss": 0.4473, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.8048192771084337, |
| "grad_norm": 0.2683791345440276, |
| "learning_rate": 4.0678975580702796e-05, |
| "loss": 0.4337, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.8064257028112449, |
| "grad_norm": 0.24711519506263632, |
| "learning_rate": 4.064919594997022e-05, |
| "loss": 0.4282, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.8080321285140563, |
| "grad_norm": 0.28860665208927944, |
| "learning_rate": 4.061941631923764e-05, |
| "loss": 0.4279, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.8096385542168675, |
| "grad_norm": 0.26118713217431333, |
| "learning_rate": 4.0589636688505067e-05, |
| "loss": 0.4426, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.8112449799196787, |
| "grad_norm": 0.2730270735024781, |
| "learning_rate": 4.0559857057772485e-05, |
| "loss": 0.4335, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.8128514056224899, |
| "grad_norm": 0.26209320634654937, |
| "learning_rate": 4.053007742703991e-05, |
| "loss": 0.4197, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.8144578313253013, |
| "grad_norm": 0.2805274090853642, |
| "learning_rate": 4.050029779630733e-05, |
| "loss": 0.4378, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.8160642570281125, |
| "grad_norm": 0.2667961879265187, |
| "learning_rate": 4.047051816557475e-05, |
| "loss": 0.424, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.8176706827309237, |
| "grad_norm": 0.25928083584015393, |
| "learning_rate": 4.044073853484217e-05, |
| "loss": 0.4422, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.8192771084337349, |
| "grad_norm": 0.2901803923450964, |
| "learning_rate": 4.041095890410959e-05, |
| "loss": 0.4414, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.8208835341365461, |
| "grad_norm": 0.3193674709588745, |
| "learning_rate": 4.038117927337701e-05, |
| "loss": 0.4534, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.8224899598393575, |
| "grad_norm": 0.24227403826420443, |
| "learning_rate": 4.035139964264443e-05, |
| "loss": 0.4587, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.8240963855421687, |
| "grad_norm": 0.2964629851317735, |
| "learning_rate": 4.0321620011911857e-05, |
| "loss": 0.4323, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.8257028112449799, |
| "grad_norm": 0.23139253151927322, |
| "learning_rate": 4.0291840381179275e-05, |
| "loss": 0.4428, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.8273092369477911, |
| "grad_norm": 0.2766551845263243, |
| "learning_rate": 4.0262060750446694e-05, |
| "loss": 0.4396, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.8289156626506025, |
| "grad_norm": 0.2558099712232151, |
| "learning_rate": 4.023228111971411e-05, |
| "loss": 0.4531, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.8305220883534137, |
| "grad_norm": 0.248132214894419, |
| "learning_rate": 4.020250148898154e-05, |
| "loss": 0.4343, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.8321285140562249, |
| "grad_norm": 0.218481604590402, |
| "learning_rate": 4.017272185824896e-05, |
| "loss": 0.4422, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.8337349397590361, |
| "grad_norm": 0.24735349079682337, |
| "learning_rate": 4.0142942227516376e-05, |
| "loss": 0.448, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.8353413654618473, |
| "grad_norm": 0.24763740642710605, |
| "learning_rate": 4.01131625967838e-05, |
| "loss": 0.4681, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.8369477911646587, |
| "grad_norm": 0.2318003082200106, |
| "learning_rate": 4.008338296605123e-05, |
| "loss": 0.4266, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.8385542168674699, |
| "grad_norm": 0.2938561150927858, |
| "learning_rate": 4.005360333531865e-05, |
| "loss": 0.4539, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.8401606425702811, |
| "grad_norm": 0.21988408888482563, |
| "learning_rate": 4.0023823704586065e-05, |
| "loss": 0.4212, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.8417670682730923, |
| "grad_norm": 0.26776989445692106, |
| "learning_rate": 3.9994044073853484e-05, |
| "loss": 0.419, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.8433734939759037, |
| "grad_norm": 0.27196304375993613, |
| "learning_rate": 3.996426444312091e-05, |
| "loss": 0.4393, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.8449799196787149, |
| "grad_norm": 0.23459828989262607, |
| "learning_rate": 3.993448481238833e-05, |
| "loss": 0.4506, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.8465863453815261, |
| "grad_norm": 0.2867482982512224, |
| "learning_rate": 3.990470518165575e-05, |
| "loss": 0.4156, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.8481927710843373, |
| "grad_norm": 0.2379855499866802, |
| "learning_rate": 3.987492555092317e-05, |
| "loss": 0.445, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.8497991967871485, |
| "grad_norm": 0.26332013956044786, |
| "learning_rate": 3.984514592019059e-05, |
| "loss": 0.4326, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.8514056224899599, |
| "grad_norm": 0.26813329833183946, |
| "learning_rate": 3.981536628945801e-05, |
| "loss": 0.43, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.8530120481927711, |
| "grad_norm": 0.2389778853354646, |
| "learning_rate": 3.978558665872543e-05, |
| "loss": 0.422, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.8546184738955823, |
| "grad_norm": 0.25166138672793514, |
| "learning_rate": 3.9755807027992856e-05, |
| "loss": 0.4398, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.8562248995983935, |
| "grad_norm": 0.30458405837769154, |
| "learning_rate": 3.9726027397260274e-05, |
| "loss": 0.4363, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.8578313253012049, |
| "grad_norm": 0.2561268269052372, |
| "learning_rate": 3.969624776652769e-05, |
| "loss": 0.4318, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.8594377510040161, |
| "grad_norm": 0.31382863625958424, |
| "learning_rate": 3.966646813579512e-05, |
| "loss": 0.4519, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.8610441767068273, |
| "grad_norm": 0.27399140852336135, |
| "learning_rate": 3.9636688505062545e-05, |
| "loss": 0.4493, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.8626506024096385, |
| "grad_norm": 0.2708572339865821, |
| "learning_rate": 3.960690887432996e-05, |
| "loss": 0.4157, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.8642570281124498, |
| "grad_norm": 0.3013531430351654, |
| "learning_rate": 3.957712924359738e-05, |
| "loss": 0.428, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.8658634538152611, |
| "grad_norm": 0.2154447354851553, |
| "learning_rate": 3.95473496128648e-05, |
| "loss": 0.4423, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.8674698795180723, |
| "grad_norm": 0.27209553350557136, |
| "learning_rate": 3.951756998213223e-05, |
| "loss": 0.4493, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.8690763052208835, |
| "grad_norm": 0.25010423326607467, |
| "learning_rate": 3.9487790351399646e-05, |
| "loss": 0.4415, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.8706827309236947, |
| "grad_norm": 0.3204511427575982, |
| "learning_rate": 3.9458010720667064e-05, |
| "loss": 0.4375, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.8722891566265061, |
| "grad_norm": 0.24205733838152174, |
| "learning_rate": 3.942823108993448e-05, |
| "loss": 0.4444, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.8738955823293173, |
| "grad_norm": 0.2294213191593051, |
| "learning_rate": 3.939845145920191e-05, |
| "loss": 0.4267, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.8755020080321285, |
| "grad_norm": 0.2257426838434749, |
| "learning_rate": 3.936867182846933e-05, |
| "loss": 0.4221, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.8771084337349397, |
| "grad_norm": 0.2706901866438114, |
| "learning_rate": 3.933889219773675e-05, |
| "loss": 0.4269, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.878714859437751, |
| "grad_norm": 0.2225358386339718, |
| "learning_rate": 3.930911256700417e-05, |
| "loss": 0.4252, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.8803212851405623, |
| "grad_norm": 0.237633405654431, |
| "learning_rate": 3.927933293627159e-05, |
| "loss": 0.4472, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.8819277108433735, |
| "grad_norm": 0.25486484276588334, |
| "learning_rate": 3.924955330553901e-05, |
| "loss": 0.4509, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.8835341365461847, |
| "grad_norm": 0.2959254315333727, |
| "learning_rate": 3.9219773674806436e-05, |
| "loss": 0.4357, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.885140562248996, |
| "grad_norm": 0.2517468143857465, |
| "learning_rate": 3.9189994044073855e-05, |
| "loss": 0.4346, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.8867469879518072, |
| "grad_norm": 0.24459158559461538, |
| "learning_rate": 3.916021441334128e-05, |
| "loss": 0.4457, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.8883534136546185, |
| "grad_norm": 0.29549737944364596, |
| "learning_rate": 3.91304347826087e-05, |
| "loss": 0.4382, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.8899598393574297, |
| "grad_norm": 0.26996542381353816, |
| "learning_rate": 3.910065515187612e-05, |
| "loss": 0.459, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.891566265060241, |
| "grad_norm": 0.23705682208176873, |
| "learning_rate": 3.9070875521143543e-05, |
| "loss": 0.4128, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.8931726907630522, |
| "grad_norm": 0.31888298370501633, |
| "learning_rate": 3.904109589041096e-05, |
| "loss": 0.4352, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.8947791164658635, |
| "grad_norm": 0.25562028810153453, |
| "learning_rate": 3.901131625967838e-05, |
| "loss": 0.4206, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.8963855421686747, |
| "grad_norm": 0.2990297387911476, |
| "learning_rate": 3.89815366289458e-05, |
| "loss": 0.4325, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.8979919678714859, |
| "grad_norm": 0.28378737269849236, |
| "learning_rate": 3.8951756998213226e-05, |
| "loss": 0.4305, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.8995983935742972, |
| "grad_norm": 0.24043681590576332, |
| "learning_rate": 3.8921977367480645e-05, |
| "loss": 0.4289, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.9012048192771084, |
| "grad_norm": 0.27719811894493096, |
| "learning_rate": 3.8892197736748063e-05, |
| "loss": 0.427, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.9028112449799197, |
| "grad_norm": 0.2419776006882972, |
| "learning_rate": 3.886241810601549e-05, |
| "loss": 0.4124, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.9044176706827309, |
| "grad_norm": 0.24970037471349055, |
| "learning_rate": 3.883263847528291e-05, |
| "loss": 0.4319, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.9060240963855422, |
| "grad_norm": 0.2906891588270049, |
| "learning_rate": 3.880285884455033e-05, |
| "loss": 0.456, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.9076305220883534, |
| "grad_norm": 0.23010103423549969, |
| "learning_rate": 3.8773079213817746e-05, |
| "loss": 0.4387, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.9092369477911647, |
| "grad_norm": 0.25700570128801525, |
| "learning_rate": 3.874329958308517e-05, |
| "loss": 0.4476, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.9108433734939759, |
| "grad_norm": 0.24990675488537026, |
| "learning_rate": 3.87135199523526e-05, |
| "loss": 0.4552, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.9124497991967871, |
| "grad_norm": 0.2307282093649637, |
| "learning_rate": 3.8683740321620016e-05, |
| "loss": 0.4311, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.9140562248995984, |
| "grad_norm": 0.27738696314612776, |
| "learning_rate": 3.8653960690887435e-05, |
| "loss": 0.4444, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.9156626506024096, |
| "grad_norm": 0.2552012268400254, |
| "learning_rate": 3.862418106015486e-05, |
| "loss": 0.4373, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.9172690763052209, |
| "grad_norm": 0.24376104605564367, |
| "learning_rate": 3.859440142942228e-05, |
| "loss": 0.4437, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.9188755020080321, |
| "grad_norm": 0.24493252905116827, |
| "learning_rate": 3.85646217986897e-05, |
| "loss": 0.4288, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.9204819277108434, |
| "grad_norm": 0.26709579469582134, |
| "learning_rate": 3.853484216795712e-05, |
| "loss": 0.4506, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.9220883534136546, |
| "grad_norm": 0.22939166264690128, |
| "learning_rate": 3.850506253722454e-05, |
| "loss": 0.4201, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.9236947791164659, |
| "grad_norm": 0.2760775596448828, |
| "learning_rate": 3.847528290649196e-05, |
| "loss": 0.4302, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.9253012048192771, |
| "grad_norm": 0.2341167029172588, |
| "learning_rate": 3.844550327575938e-05, |
| "loss": 0.4206, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.9269076305220884, |
| "grad_norm": 0.28845596603073687, |
| "learning_rate": 3.84157236450268e-05, |
| "loss": 0.4303, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.9285140562248996, |
| "grad_norm": 0.26016271659068463, |
| "learning_rate": 3.8385944014294225e-05, |
| "loss": 0.4277, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.9301204819277108, |
| "grad_norm": 0.2573057822086778, |
| "learning_rate": 3.8356164383561644e-05, |
| "loss": 0.4343, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.9317269076305221, |
| "grad_norm": 0.2600638208053094, |
| "learning_rate": 3.832638475282906e-05, |
| "loss": 0.4426, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.9333333333333333, |
| "grad_norm": 0.23909074619126763, |
| "learning_rate": 3.829660512209649e-05, |
| "loss": 0.438, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.9349397590361446, |
| "grad_norm": 0.2779421249279328, |
| "learning_rate": 3.8266825491363914e-05, |
| "loss": 0.4413, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.9365461847389558, |
| "grad_norm": 0.23613930700473348, |
| "learning_rate": 3.823704586063133e-05, |
| "loss": 0.4524, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.9381526104417671, |
| "grad_norm": 0.24231621353205904, |
| "learning_rate": 3.820726622989875e-05, |
| "loss": 0.4172, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.9397590361445783, |
| "grad_norm": 0.27548795148960975, |
| "learning_rate": 3.817748659916617e-05, |
| "loss": 0.4234, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.9413654618473896, |
| "grad_norm": 0.25527044160278795, |
| "learning_rate": 3.8147706968433596e-05, |
| "loss": 0.4382, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.9429718875502008, |
| "grad_norm": 0.2349394226856958, |
| "learning_rate": 3.8117927337701015e-05, |
| "loss": 0.4349, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.944578313253012, |
| "grad_norm": 0.22907020884629825, |
| "learning_rate": 3.8088147706968434e-05, |
| "loss": 0.4228, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.9461847389558233, |
| "grad_norm": 0.28681246806321475, |
| "learning_rate": 3.805836807623586e-05, |
| "loss": 0.4303, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.9477911646586346, |
| "grad_norm": 0.23123430367180658, |
| "learning_rate": 3.802858844550328e-05, |
| "loss": 0.451, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.9493975903614458, |
| "grad_norm": 0.25837739673979626, |
| "learning_rate": 3.79988088147707e-05, |
| "loss": 0.4246, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.951004016064257, |
| "grad_norm": 0.26932761173674435, |
| "learning_rate": 3.7969029184038116e-05, |
| "loss": 0.4302, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.9526104417670683, |
| "grad_norm": 0.2689813929288827, |
| "learning_rate": 3.793924955330554e-05, |
| "loss": 0.4196, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.9542168674698795, |
| "grad_norm": 0.24482217188234856, |
| "learning_rate": 3.790946992257296e-05, |
| "loss": 0.4383, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.9558232931726908, |
| "grad_norm": 0.29914093028504873, |
| "learning_rate": 3.787969029184038e-05, |
| "loss": 0.4522, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.957429718875502, |
| "grad_norm": 0.2359952335036009, |
| "learning_rate": 3.78499106611078e-05, |
| "loss": 0.4465, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.9590361445783132, |
| "grad_norm": 0.2532581617564896, |
| "learning_rate": 3.782013103037523e-05, |
| "loss": 0.4335, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.9606425702811245, |
| "grad_norm": 0.2514263045025721, |
| "learning_rate": 3.779035139964265e-05, |
| "loss": 0.4325, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.9622489959839358, |
| "grad_norm": 0.23570835352148856, |
| "learning_rate": 3.776057176891007e-05, |
| "loss": 0.4314, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.963855421686747, |
| "grad_norm": 0.23976696550069423, |
| "learning_rate": 3.773079213817749e-05, |
| "loss": 0.4427, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.9654618473895582, |
| "grad_norm": 0.22055205479897635, |
| "learning_rate": 3.770101250744491e-05, |
| "loss": 0.4221, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.9670682730923694, |
| "grad_norm": 0.24343117535869208, |
| "learning_rate": 3.767123287671233e-05, |
| "loss": 0.4421, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.9686746987951808, |
| "grad_norm": 0.24390631127271806, |
| "learning_rate": 3.764145324597975e-05, |
| "loss": 0.4313, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.970281124497992, |
| "grad_norm": 0.23440381138018293, |
| "learning_rate": 3.7611673615247176e-05, |
| "loss": 0.4335, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.9718875502008032, |
| "grad_norm": 0.23653143643840432, |
| "learning_rate": 3.7581893984514595e-05, |
| "loss": 0.4501, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.9734939759036144, |
| "grad_norm": 0.25580575112258935, |
| "learning_rate": 3.7552114353782014e-05, |
| "loss": 0.4318, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.9751004016064257, |
| "grad_norm": 0.26355197800261765, |
| "learning_rate": 3.752233472304943e-05, |
| "loss": 0.4197, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.976706827309237, |
| "grad_norm": 0.24291180092148457, |
| "learning_rate": 3.749255509231686e-05, |
| "loss": 0.4234, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.9783132530120482, |
| "grad_norm": 0.2847500740226297, |
| "learning_rate": 3.746277546158428e-05, |
| "loss": 0.4443, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.9799196787148594, |
| "grad_norm": 0.2610758749435212, |
| "learning_rate": 3.7432995830851696e-05, |
| "loss": 0.4289, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.9815261044176706, |
| "grad_norm": 0.24968845981927007, |
| "learning_rate": 3.7403216200119115e-05, |
| "loss": 0.4276, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.983132530120482, |
| "grad_norm": 0.3323334996216219, |
| "learning_rate": 3.737343656938655e-05, |
| "loss": 0.4574, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.9847389558232932, |
| "grad_norm": 0.26279163924567633, |
| "learning_rate": 3.7343656938653966e-05, |
| "loss": 0.4336, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.9863453815261044, |
| "grad_norm": 0.24348461047320755, |
| "learning_rate": 3.7313877307921385e-05, |
| "loss": 0.434, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.9879518072289156, |
| "grad_norm": 0.24839350837891416, |
| "learning_rate": 3.7284097677188804e-05, |
| "loss": 0.4136, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.989558232931727, |
| "grad_norm": 0.26281972473580856, |
| "learning_rate": 3.725431804645623e-05, |
| "loss": 0.4431, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.9911646586345382, |
| "grad_norm": 0.2313225564914852, |
| "learning_rate": 3.722453841572365e-05, |
| "loss": 0.4277, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.9927710843373494, |
| "grad_norm": 0.2615224862640514, |
| "learning_rate": 3.719475878499107e-05, |
| "loss": 0.4286, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.9943775100401606, |
| "grad_norm": 0.2520240607539348, |
| "learning_rate": 3.7164979154258486e-05, |
| "loss": 0.4286, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.9959839357429718, |
| "grad_norm": 0.25262720498315444, |
| "learning_rate": 3.713519952352591e-05, |
| "loss": 0.4311, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.9975903614457832, |
| "grad_norm": 0.22632961503761864, |
| "learning_rate": 3.710541989279333e-05, |
| "loss": 0.4415, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.9991967871485944, |
| "grad_norm": 0.2371805967007009, |
| "learning_rate": 3.707564026206075e-05, |
| "loss": 0.4015, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.2371805967007009, |
| "learning_rate": 3.7045860631328175e-05, |
| "loss": 0.4294, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.0016064257028112, |
| "grad_norm": 0.37932341441984047, |
| "learning_rate": 3.7016081000595594e-05, |
| "loss": 0.3495, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.0032128514056224, |
| "grad_norm": 0.2759554804160266, |
| "learning_rate": 3.698630136986301e-05, |
| "loss": 0.3745, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.0048192771084337, |
| "grad_norm": 0.2546676949628782, |
| "learning_rate": 3.695652173913043e-05, |
| "loss": 0.3681, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.0064257028112449, |
| "grad_norm": 0.2935521485778336, |
| "learning_rate": 3.692674210839786e-05, |
| "loss": 0.3734, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.0080321285140563, |
| "grad_norm": 0.29990889534005793, |
| "learning_rate": 3.689696247766528e-05, |
| "loss": 0.3801, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.0096385542168675, |
| "grad_norm": 0.26748247562403804, |
| "learning_rate": 3.68671828469327e-05, |
| "loss": 0.3743, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.0112449799196788, |
| "grad_norm": 0.26734512122421833, |
| "learning_rate": 3.683740321620012e-05, |
| "loss": 0.3478, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.01285140562249, |
| "grad_norm": 0.2847423153327123, |
| "learning_rate": 3.6807623585467546e-05, |
| "loss": 0.3699, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.0144578313253012, |
| "grad_norm": 0.2650366814150442, |
| "learning_rate": 3.6777843954734965e-05, |
| "loss": 0.3488, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.0160642570281124, |
| "grad_norm": 0.2596723308109665, |
| "learning_rate": 3.6748064324002384e-05, |
| "loss": 0.3839, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.0176706827309236, |
| "grad_norm": 0.2775391732767592, |
| "learning_rate": 3.67182846932698e-05, |
| "loss": 0.3634, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.0192771084337349, |
| "grad_norm": 0.3039539665914044, |
| "learning_rate": 3.668850506253723e-05, |
| "loss": 0.3689, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.020883534136546, |
| "grad_norm": 0.28485132467442625, |
| "learning_rate": 3.665872543180465e-05, |
| "loss": 0.3769, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.0224899598393575, |
| "grad_norm": 0.2890348401840446, |
| "learning_rate": 3.6628945801072066e-05, |
| "loss": 0.3741, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.0240963855421688, |
| "grad_norm": 0.2865555236023417, |
| "learning_rate": 3.659916617033949e-05, |
| "loss": 0.3675, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.02570281124498, |
| "grad_norm": 0.2675353012405362, |
| "learning_rate": 3.656938653960691e-05, |
| "loss": 0.3748, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.0273092369477912, |
| "grad_norm": 0.2596693456604531, |
| "learning_rate": 3.653960690887433e-05, |
| "loss": 0.3594, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.0289156626506024, |
| "grad_norm": 0.270414602391874, |
| "learning_rate": 3.650982727814175e-05, |
| "loss": 0.3633, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.0305220883534136, |
| "grad_norm": 0.22619014147747027, |
| "learning_rate": 3.6480047647409174e-05, |
| "loss": 0.3776, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.0321285140562249, |
| "grad_norm": 0.29226927393529, |
| "learning_rate": 3.64502680166766e-05, |
| "loss": 0.3646, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.033734939759036, |
| "grad_norm": 0.2395639919856441, |
| "learning_rate": 3.642048838594402e-05, |
| "loss": 0.3654, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.0353413654618473, |
| "grad_norm": 0.25348584398956553, |
| "learning_rate": 3.639070875521144e-05, |
| "loss": 0.3924, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.0369477911646587, |
| "grad_norm": 0.2672275246998648, |
| "learning_rate": 3.636092912447886e-05, |
| "loss": 0.3538, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.03855421686747, |
| "grad_norm": 0.2346022042725061, |
| "learning_rate": 3.633114949374628e-05, |
| "loss": 0.367, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.0401606425702812, |
| "grad_norm": 0.2525831094063313, |
| "learning_rate": 3.63013698630137e-05, |
| "loss": 0.3665, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.0417670682730924, |
| "grad_norm": 0.23157749157994945, |
| "learning_rate": 3.627159023228112e-05, |
| "loss": 0.3746, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.0433734939759036, |
| "grad_norm": 0.23185354633237557, |
| "learning_rate": 3.6241810601548545e-05, |
| "loss": 0.3868, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.0449799196787148, |
| "grad_norm": 0.2432844999424583, |
| "learning_rate": 3.6212030970815964e-05, |
| "loss": 0.3719, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.046586345381526, |
| "grad_norm": 0.2192600088467529, |
| "learning_rate": 3.618225134008338e-05, |
| "loss": 0.3644, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.0481927710843373, |
| "grad_norm": 0.25117602242785403, |
| "learning_rate": 3.61524717093508e-05, |
| "loss": 0.3516, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.0497991967871485, |
| "grad_norm": 0.24915531821305087, |
| "learning_rate": 3.612269207861823e-05, |
| "loss": 0.3514, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.05140562248996, |
| "grad_norm": 0.22692662484237394, |
| "learning_rate": 3.6092912447885646e-05, |
| "loss": 0.3804, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.0530120481927712, |
| "grad_norm": 0.25324103379881957, |
| "learning_rate": 3.6063132817153065e-05, |
| "loss": 0.3615, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.0546184738955824, |
| "grad_norm": 0.2429896691080188, |
| "learning_rate": 3.603335318642049e-05, |
| "loss": 0.3781, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.0562248995983936, |
| "grad_norm": 0.23497370537491616, |
| "learning_rate": 3.6003573555687916e-05, |
| "loss": 0.3857, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.0578313253012048, |
| "grad_norm": 0.24625075829341786, |
| "learning_rate": 3.5973793924955335e-05, |
| "loss": 0.3752, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.059437751004016, |
| "grad_norm": 0.25709099687928116, |
| "learning_rate": 3.5944014294222754e-05, |
| "loss": 0.3596, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.0610441767068273, |
| "grad_norm": 0.23056836241774717, |
| "learning_rate": 3.591423466349017e-05, |
| "loss": 0.3483, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.0626506024096385, |
| "grad_norm": 0.24580094444062736, |
| "learning_rate": 3.58844550327576e-05, |
| "loss": 0.3603, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.0642570281124497, |
| "grad_norm": 0.27979904138127654, |
| "learning_rate": 3.585467540202502e-05, |
| "loss": 0.3821, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.0658634538152612, |
| "grad_norm": 0.221287404622989, |
| "learning_rate": 3.5824895771292436e-05, |
| "loss": 0.3512, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.0674698795180724, |
| "grad_norm": 0.26457549896181337, |
| "learning_rate": 3.579511614055986e-05, |
| "loss": 0.3723, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.0690763052208836, |
| "grad_norm": 0.22452705568993894, |
| "learning_rate": 3.576533650982728e-05, |
| "loss": 0.3708, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.0706827309236948, |
| "grad_norm": 0.2360253339758698, |
| "learning_rate": 3.57355568790947e-05, |
| "loss": 0.3539, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.072289156626506, |
| "grad_norm": 0.23874769004827587, |
| "learning_rate": 3.570577724836212e-05, |
| "loss": 0.3567, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.0738955823293173, |
| "grad_norm": 0.2477102370108865, |
| "learning_rate": 3.5675997617629544e-05, |
| "loss": 0.3655, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.0755020080321285, |
| "grad_norm": 0.20742644891736414, |
| "learning_rate": 3.564621798689696e-05, |
| "loss": 0.3618, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.0771084337349397, |
| "grad_norm": 0.25364731028659276, |
| "learning_rate": 3.561643835616438e-05, |
| "loss": 0.3633, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.078714859437751, |
| "grad_norm": 0.21206121186535307, |
| "learning_rate": 3.55866587254318e-05, |
| "loss": 0.3829, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.0803212851405624, |
| "grad_norm": 0.23809685517326412, |
| "learning_rate": 3.5556879094699226e-05, |
| "loss": 0.3771, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.0819277108433736, |
| "grad_norm": 0.25148288245072126, |
| "learning_rate": 3.552709946396665e-05, |
| "loss": 0.3573, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.0835341365461848, |
| "grad_norm": 0.24148870358153765, |
| "learning_rate": 3.549731983323407e-05, |
| "loss": 0.3677, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.085140562248996, |
| "grad_norm": 0.23469672202622077, |
| "learning_rate": 3.546754020250149e-05, |
| "loss": 0.3621, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.0867469879518072, |
| "grad_norm": 0.2184344550196849, |
| "learning_rate": 3.5437760571768915e-05, |
| "loss": 0.3489, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.0883534136546185, |
| "grad_norm": 0.24328481848849765, |
| "learning_rate": 3.5407980941036334e-05, |
| "loss": 0.3684, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.0899598393574297, |
| "grad_norm": 0.24479406299704576, |
| "learning_rate": 3.537820131030375e-05, |
| "loss": 0.3907, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.091566265060241, |
| "grad_norm": 0.22146902705233215, |
| "learning_rate": 3.534842167957118e-05, |
| "loss": 0.3802, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.0931726907630521, |
| "grad_norm": 0.22443432974493835, |
| "learning_rate": 3.53186420488386e-05, |
| "loss": 0.3693, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.0947791164658636, |
| "grad_norm": 0.2142069300586743, |
| "learning_rate": 3.5288862418106016e-05, |
| "loss": 0.3597, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.0963855421686748, |
| "grad_norm": 0.21276658389057349, |
| "learning_rate": 3.5259082787373435e-05, |
| "loss": 0.3791, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.097991967871486, |
| "grad_norm": 0.22001139189062285, |
| "learning_rate": 3.522930315664086e-05, |
| "loss": 0.359, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.0995983935742972, |
| "grad_norm": 0.27030583552920734, |
| "learning_rate": 3.519952352590828e-05, |
| "loss": 0.3639, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.1012048192771084, |
| "grad_norm": 0.21645081863138144, |
| "learning_rate": 3.51697438951757e-05, |
| "loss": 0.3845, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.1028112449799197, |
| "grad_norm": 0.23213039417581863, |
| "learning_rate": 3.513996426444312e-05, |
| "loss": 0.3695, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.104417670682731, |
| "grad_norm": 0.2473426640084354, |
| "learning_rate": 3.511018463371054e-05, |
| "loss": 0.353, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.106024096385542, |
| "grad_norm": 0.2389357656774047, |
| "learning_rate": 3.508040500297797e-05, |
| "loss": 0.3749, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.1076305220883533, |
| "grad_norm": 0.2552532077149984, |
| "learning_rate": 3.505062537224539e-05, |
| "loss": 0.3653, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.1092369477911648, |
| "grad_norm": 0.25188268389731727, |
| "learning_rate": 3.5020845741512806e-05, |
| "loss": 0.3715, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.110843373493976, |
| "grad_norm": 0.25344476363181, |
| "learning_rate": 3.499106611078023e-05, |
| "loss": 0.3752, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.1124497991967872, |
| "grad_norm": 0.22356543631446663, |
| "learning_rate": 3.496128648004765e-05, |
| "loss": 0.3608, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.1140562248995984, |
| "grad_norm": 0.22701786012495878, |
| "learning_rate": 3.493150684931507e-05, |
| "loss": 0.3657, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.1156626506024097, |
| "grad_norm": 0.25341083733534187, |
| "learning_rate": 3.490172721858249e-05, |
| "loss": 0.3767, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.1172690763052209, |
| "grad_norm": 0.23595910429128436, |
| "learning_rate": 3.4871947587849914e-05, |
| "loss": 0.3487, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.118875502008032, |
| "grad_norm": 0.23068513649779648, |
| "learning_rate": 3.484216795711733e-05, |
| "loss": 0.3622, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.1204819277108433, |
| "grad_norm": 0.23256189957817827, |
| "learning_rate": 3.481238832638475e-05, |
| "loss": 0.3406, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.1220883534136545, |
| "grad_norm": 0.25966731002559584, |
| "learning_rate": 3.478260869565218e-05, |
| "loss": 0.3446, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.123694779116466, |
| "grad_norm": 0.20834092240579738, |
| "learning_rate": 3.4752829064919597e-05, |
| "loss": 0.3594, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.1253012048192772, |
| "grad_norm": 0.2464905111090441, |
| "learning_rate": 3.4723049434187015e-05, |
| "loss": 0.3509, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.1269076305220884, |
| "grad_norm": 0.25837388163460817, |
| "learning_rate": 3.4693269803454434e-05, |
| "loss": 0.3846, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.1285140562248996, |
| "grad_norm": 0.24333941968302056, |
| "learning_rate": 3.466349017272186e-05, |
| "loss": 0.3896, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.1301204819277109, |
| "grad_norm": 0.26919093911243963, |
| "learning_rate": 3.463371054198928e-05, |
| "loss": 0.3811, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.131726907630522, |
| "grad_norm": 0.2791697185654261, |
| "learning_rate": 3.4603930911256704e-05, |
| "loss": 0.3628, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.1333333333333333, |
| "grad_norm": 0.2904893441742124, |
| "learning_rate": 3.457415128052412e-05, |
| "loss": 0.3643, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.1349397590361445, |
| "grad_norm": 0.25399716724167515, |
| "learning_rate": 3.454437164979155e-05, |
| "loss": 0.3606, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.1365461847389557, |
| "grad_norm": 0.27569474381536324, |
| "learning_rate": 3.451459201905897e-05, |
| "loss": 0.3774, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.1381526104417672, |
| "grad_norm": 0.29988319701980504, |
| "learning_rate": 3.4484812388326387e-05, |
| "loss": 0.3536, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.1397590361445784, |
| "grad_norm": 0.23099364928570554, |
| "learning_rate": 3.4455032757593805e-05, |
| "loss": 0.3611, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.1413654618473896, |
| "grad_norm": 0.2805499124103727, |
| "learning_rate": 3.442525312686123e-05, |
| "loss": 0.3572, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.1429718875502008, |
| "grad_norm": 0.26449450906138705, |
| "learning_rate": 3.439547349612865e-05, |
| "loss": 0.3439, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.144578313253012, |
| "grad_norm": 0.288733039004294, |
| "learning_rate": 3.436569386539607e-05, |
| "loss": 0.3709, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.1461847389558233, |
| "grad_norm": 0.3036364491451544, |
| "learning_rate": 3.433591423466349e-05, |
| "loss": 0.367, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.1477911646586345, |
| "grad_norm": 0.29675889950840706, |
| "learning_rate": 3.430613460393091e-05, |
| "loss": 0.3775, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.1493975903614457, |
| "grad_norm": 0.3259907065820817, |
| "learning_rate": 3.427635497319833e-05, |
| "loss": 0.3543, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.151004016064257, |
| "grad_norm": 0.28452983500030143, |
| "learning_rate": 3.424657534246575e-05, |
| "loss": 0.3758, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.1526104417670684, |
| "grad_norm": 0.2965433380474179, |
| "learning_rate": 3.421679571173318e-05, |
| "loss": 0.3595, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.1542168674698796, |
| "grad_norm": 0.32133186307611517, |
| "learning_rate": 3.4187016081000595e-05, |
| "loss": 0.3589, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.1558232931726908, |
| "grad_norm": 0.24143132234827158, |
| "learning_rate": 3.415723645026802e-05, |
| "loss": 0.3657, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.157429718875502, |
| "grad_norm": 0.28034999472823113, |
| "learning_rate": 3.412745681953544e-05, |
| "loss": 0.3732, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.1590361445783133, |
| "grad_norm": 0.3567620337310111, |
| "learning_rate": 3.4097677188802866e-05, |
| "loss": 0.3765, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.1606425702811245, |
| "grad_norm": 0.2734839295396049, |
| "learning_rate": 3.4067897558070284e-05, |
| "loss": 0.349, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.1622489959839357, |
| "grad_norm": 0.302098968412399, |
| "learning_rate": 3.40381179273377e-05, |
| "loss": 0.3703, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.163855421686747, |
| "grad_norm": 0.2770751634090622, |
| "learning_rate": 3.400833829660512e-05, |
| "loss": 0.3525, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.1654618473895582, |
| "grad_norm": 0.2338401666046532, |
| "learning_rate": 3.397855866587255e-05, |
| "loss": 0.3652, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.1670682730923696, |
| "grad_norm": 0.2881681866550499, |
| "learning_rate": 3.394877903513997e-05, |
| "loss": 0.3586, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.1686746987951806, |
| "grad_norm": 0.22750968897316734, |
| "learning_rate": 3.3918999404407386e-05, |
| "loss": 0.3681, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.170281124497992, |
| "grad_norm": 0.3040420516406043, |
| "learning_rate": 3.3889219773674804e-05, |
| "loss": 0.3672, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.1718875502008033, |
| "grad_norm": 0.2330663790560336, |
| "learning_rate": 3.385944014294223e-05, |
| "loss": 0.358, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.1734939759036145, |
| "grad_norm": 0.2636617763473861, |
| "learning_rate": 3.382966051220965e-05, |
| "loss": 0.3557, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.1751004016064257, |
| "grad_norm": 0.2712954225906756, |
| "learning_rate": 3.379988088147707e-05, |
| "loss": 0.3658, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.176706827309237, |
| "grad_norm": 0.25644409732677503, |
| "learning_rate": 3.377010125074449e-05, |
| "loss": 0.3772, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.1783132530120481, |
| "grad_norm": 0.2736176276124936, |
| "learning_rate": 3.374032162001191e-05, |
| "loss": 0.3523, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.1799196787148594, |
| "grad_norm": 0.24856888153925333, |
| "learning_rate": 3.371054198927934e-05, |
| "loss": 0.3644, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.1815261044176706, |
| "grad_norm": 0.23373015060576086, |
| "learning_rate": 3.368076235854676e-05, |
| "loss": 0.3685, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.1831325301204818, |
| "grad_norm": 0.23172878048190607, |
| "learning_rate": 3.3650982727814176e-05, |
| "loss": 0.3765, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.1847389558232932, |
| "grad_norm": 0.25808246859075906, |
| "learning_rate": 3.36212030970816e-05, |
| "loss": 0.379, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.1863453815261045, |
| "grad_norm": 0.2166593271905805, |
| "learning_rate": 3.359142346634902e-05, |
| "loss": 0.3593, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.1879518072289157, |
| "grad_norm": 0.21395631375601631, |
| "learning_rate": 3.356164383561644e-05, |
| "loss": 0.3452, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.189558232931727, |
| "grad_norm": 0.22667954531445253, |
| "learning_rate": 3.3531864204883865e-05, |
| "loss": 0.3602, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.1911646586345381, |
| "grad_norm": 0.21873569788689487, |
| "learning_rate": 3.3502084574151283e-05, |
| "loss": 0.3334, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.1927710843373494, |
| "grad_norm": 0.1920911060477098, |
| "learning_rate": 3.34723049434187e-05, |
| "loss": 0.3633, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.1943775100401606, |
| "grad_norm": 0.2574898749410916, |
| "learning_rate": 3.344252531268612e-05, |
| "loss": 0.3503, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.1959839357429718, |
| "grad_norm": 0.22614786106563914, |
| "learning_rate": 3.341274568195355e-05, |
| "loss": 0.3623, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.197590361445783, |
| "grad_norm": 0.26822964876930067, |
| "learning_rate": 3.3382966051220966e-05, |
| "loss": 0.3735, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.1991967871485945, |
| "grad_norm": 0.22224303726380826, |
| "learning_rate": 3.3353186420488385e-05, |
| "loss": 0.372, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.2008032128514057, |
| "grad_norm": 0.22078850305949052, |
| "learning_rate": 3.33234067897558e-05, |
| "loss": 0.3676, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.202409638554217, |
| "grad_norm": 0.26633556835778005, |
| "learning_rate": 3.329362715902323e-05, |
| "loss": 0.371, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.2040160642570281, |
| "grad_norm": 0.2788503075716381, |
| "learning_rate": 3.326384752829065e-05, |
| "loss": 0.3653, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.2056224899598393, |
| "grad_norm": 0.23849352579520705, |
| "learning_rate": 3.3234067897558073e-05, |
| "loss": 0.3663, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.2072289156626506, |
| "grad_norm": 0.24013045056887264, |
| "learning_rate": 3.320428826682549e-05, |
| "loss": 0.353, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.2088353413654618, |
| "grad_norm": 0.20855339337194004, |
| "learning_rate": 3.317450863609292e-05, |
| "loss": 0.3774, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.210441767068273, |
| "grad_norm": 0.2424426225359671, |
| "learning_rate": 3.314472900536034e-05, |
| "loss": 0.3663, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.2120481927710842, |
| "grad_norm": 0.21880338853845513, |
| "learning_rate": 3.3114949374627756e-05, |
| "loss": 0.3702, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.2136546184738957, |
| "grad_norm": 0.2153739479920725, |
| "learning_rate": 3.308516974389518e-05, |
| "loss": 0.3497, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.2152610441767069, |
| "grad_norm": 0.23494454996837524, |
| "learning_rate": 3.30553901131626e-05, |
| "loss": 0.3559, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.216867469879518, |
| "grad_norm": 0.21465927488059922, |
| "learning_rate": 3.302561048243002e-05, |
| "loss": 0.3676, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.2184738955823293, |
| "grad_norm": 0.29985885980431837, |
| "learning_rate": 3.299583085169744e-05, |
| "loss": 0.3597, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.2200803212851405, |
| "grad_norm": 0.22961190904343967, |
| "learning_rate": 3.2966051220964864e-05, |
| "loss": 0.3739, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.2216867469879518, |
| "grad_norm": 0.24411542482113738, |
| "learning_rate": 3.293627159023228e-05, |
| "loss": 0.3695, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.223293172690763, |
| "grad_norm": 0.22666726531935338, |
| "learning_rate": 3.29064919594997e-05, |
| "loss": 0.3663, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.2248995983935742, |
| "grad_norm": 0.24629498678132544, |
| "learning_rate": 3.287671232876712e-05, |
| "loss": 0.3586, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.2265060240963854, |
| "grad_norm": 0.21406505000866682, |
| "learning_rate": 3.2846932698034546e-05, |
| "loss": 0.3776, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.2281124497991969, |
| "grad_norm": 0.24598567814668207, |
| "learning_rate": 3.2817153067301965e-05, |
| "loss": 0.3755, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.229718875502008, |
| "grad_norm": 0.22021354268684526, |
| "learning_rate": 3.278737343656939e-05, |
| "loss": 0.3956, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.2313253012048193, |
| "grad_norm": 0.2179847387106146, |
| "learning_rate": 3.275759380583681e-05, |
| "loss": 0.3699, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.2329317269076305, |
| "grad_norm": 0.25186296667067626, |
| "learning_rate": 3.2727814175104235e-05, |
| "loss": 0.3541, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.2345381526104418, |
| "grad_norm": 0.21321707223657882, |
| "learning_rate": 3.2698034544371654e-05, |
| "loss": 0.3511, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.236144578313253, |
| "grad_norm": 0.21028796507808997, |
| "learning_rate": 3.266825491363907e-05, |
| "loss": 0.3713, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.2377510040160642, |
| "grad_norm": 0.22137129389730711, |
| "learning_rate": 3.263847528290649e-05, |
| "loss": 0.3615, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.2393574297188754, |
| "grad_norm": 0.21424357254598164, |
| "learning_rate": 3.260869565217392e-05, |
| "loss": 0.3646, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.2409638554216866, |
| "grad_norm": 0.21445314064635118, |
| "learning_rate": 3.2578916021441336e-05, |
| "loss": 0.3725, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.242570281124498, |
| "grad_norm": 0.22051212791632466, |
| "learning_rate": 3.2549136390708755e-05, |
| "loss": 0.3661, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.2441767068273093, |
| "grad_norm": 0.2344032506467021, |
| "learning_rate": 3.251935675997618e-05, |
| "loss": 0.3542, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.2457831325301205, |
| "grad_norm": 0.227403160247532, |
| "learning_rate": 3.24895771292436e-05, |
| "loss": 0.3582, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.2473895582329317, |
| "grad_norm": 0.21636326335669, |
| "learning_rate": 3.245979749851102e-05, |
| "loss": 0.3505, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.248995983935743, |
| "grad_norm": 0.2281562730752796, |
| "learning_rate": 3.243001786777844e-05, |
| "loss": 0.364, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.2506024096385542, |
| "grad_norm": 0.2292249330966563, |
| "learning_rate": 3.240023823704586e-05, |
| "loss": 0.3722, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.2522088353413654, |
| "grad_norm": 0.24790847991703668, |
| "learning_rate": 3.237045860631328e-05, |
| "loss": 0.37, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.2538152610441768, |
| "grad_norm": 0.23734414857881575, |
| "learning_rate": 3.23406789755807e-05, |
| "loss": 0.3851, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.2554216867469878, |
| "grad_norm": 0.23966985858047482, |
| "learning_rate": 3.2310899344848126e-05, |
| "loss": 0.3513, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.2570281124497993, |
| "grad_norm": 0.26861252599311647, |
| "learning_rate": 3.228111971411555e-05, |
| "loss": 0.3742, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.2586345381526105, |
| "grad_norm": 0.2405381223420924, |
| "learning_rate": 3.225134008338297e-05, |
| "loss": 0.3675, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.2602409638554217, |
| "grad_norm": 0.25133074956933993, |
| "learning_rate": 3.222156045265039e-05, |
| "loss": 0.3476, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.261847389558233, |
| "grad_norm": 0.22002417112468556, |
| "learning_rate": 3.219178082191781e-05, |
| "loss": 0.3613, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.2634538152610442, |
| "grad_norm": 0.23643945898649082, |
| "learning_rate": 3.2162001191185234e-05, |
| "loss": 0.3805, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.2650602409638554, |
| "grad_norm": 0.2340836057538758, |
| "learning_rate": 3.213222156045265e-05, |
| "loss": 0.3598, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.2666666666666666, |
| "grad_norm": 0.24621197268298853, |
| "learning_rate": 3.210244192972007e-05, |
| "loss": 0.3667, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.268273092369478, |
| "grad_norm": 0.21790940067218326, |
| "learning_rate": 3.207266229898749e-05, |
| "loss": 0.3901, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.269879518072289, |
| "grad_norm": 0.27818463747836164, |
| "learning_rate": 3.2042882668254916e-05, |
| "loss": 0.36, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.2714859437751005, |
| "grad_norm": 0.23485828790810853, |
| "learning_rate": 3.2013103037522335e-05, |
| "loss": 0.379, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.2730923694779117, |
| "grad_norm": 0.2428969954061332, |
| "learning_rate": 3.1983323406789754e-05, |
| "loss": 0.3839, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.274698795180723, |
| "grad_norm": 0.25201173026955526, |
| "learning_rate": 3.195354377605718e-05, |
| "loss": 0.3578, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.2763052208835342, |
| "grad_norm": 0.2543167963872766, |
| "learning_rate": 3.19237641453246e-05, |
| "loss": 0.3735, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.2779116465863454, |
| "grad_norm": 0.21114445186703706, |
| "learning_rate": 3.189398451459202e-05, |
| "loss": 0.357, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.2795180722891566, |
| "grad_norm": 0.2734430777164395, |
| "learning_rate": 3.186420488385944e-05, |
| "loss": 0.3593, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.2811244979919678, |
| "grad_norm": 0.21392607525142876, |
| "learning_rate": 3.183442525312687e-05, |
| "loss": 0.3496, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.282730923694779, |
| "grad_norm": 0.21427758438676245, |
| "learning_rate": 3.180464562239429e-05, |
| "loss": 0.3695, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.2843373493975903, |
| "grad_norm": 0.23448772916727945, |
| "learning_rate": 3.1774865991661706e-05, |
| "loss": 0.3715, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.2859437751004017, |
| "grad_norm": 0.23891654697924247, |
| "learning_rate": 3.1745086360929125e-05, |
| "loss": 0.3794, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.287550200803213, |
| "grad_norm": 0.25040550089891145, |
| "learning_rate": 3.171530673019655e-05, |
| "loss": 0.3802, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.2891566265060241, |
| "grad_norm": 0.22959015673231148, |
| "learning_rate": 3.168552709946397e-05, |
| "loss": 0.3855, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.2907630522088354, |
| "grad_norm": 0.21504738444833543, |
| "learning_rate": 3.165574746873139e-05, |
| "loss": 0.3706, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.2923694779116466, |
| "grad_norm": 0.22916959554798327, |
| "learning_rate": 3.162596783799881e-05, |
| "loss": 0.3734, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.2939759036144578, |
| "grad_norm": 0.2261734860287735, |
| "learning_rate": 3.159618820726623e-05, |
| "loss": 0.3796, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.295582329317269, |
| "grad_norm": 0.22653415939680627, |
| "learning_rate": 3.156640857653365e-05, |
| "loss": 0.3686, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.2971887550200802, |
| "grad_norm": 0.23392504669301062, |
| "learning_rate": 3.153662894580107e-05, |
| "loss": 0.3802, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.2987951807228915, |
| "grad_norm": 0.22088590455041548, |
| "learning_rate": 3.1506849315068496e-05, |
| "loss": 0.367, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.300401606425703, |
| "grad_norm": 0.23792429321888736, |
| "learning_rate": 3.1477069684335915e-05, |
| "loss": 0.3563, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.3020080321285141, |
| "grad_norm": 0.20161084796105802, |
| "learning_rate": 3.1447290053603334e-05, |
| "loss": 0.3676, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.3036144578313253, |
| "grad_norm": 0.2277725701014789, |
| "learning_rate": 3.141751042287076e-05, |
| "loss": 0.3529, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.3052208835341366, |
| "grad_norm": 0.23045297881934104, |
| "learning_rate": 3.138773079213818e-05, |
| "loss": 0.3847, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.3068273092369478, |
| "grad_norm": 0.22210517944789068, |
| "learning_rate": 3.1357951161405604e-05, |
| "loss": 0.3586, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.308433734939759, |
| "grad_norm": 0.2072831680450955, |
| "learning_rate": 3.132817153067302e-05, |
| "loss": 0.3573, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.3100401606425702, |
| "grad_norm": 0.2398466310783302, |
| "learning_rate": 3.129839189994044e-05, |
| "loss": 0.3615, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.3116465863453814, |
| "grad_norm": 0.19600369300484732, |
| "learning_rate": 3.126861226920787e-05, |
| "loss": 0.3497, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.3132530120481927, |
| "grad_norm": 0.2189372363986866, |
| "learning_rate": 3.1238832638475286e-05, |
| "loss": 0.3571, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.3148594377510041, |
| "grad_norm": 0.24132969320550957, |
| "learning_rate": 3.1209053007742705e-05, |
| "loss": 0.3717, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.3164658634538153, |
| "grad_norm": 0.2220779044504832, |
| "learning_rate": 3.1179273377010124e-05, |
| "loss": 0.3668, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.3180722891566266, |
| "grad_norm": 0.21951027884242172, |
| "learning_rate": 3.114949374627755e-05, |
| "loss": 0.3666, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.3196787148594378, |
| "grad_norm": 0.23139773130369037, |
| "learning_rate": 3.111971411554497e-05, |
| "loss": 0.3819, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.321285140562249, |
| "grad_norm": 0.2436895797315047, |
| "learning_rate": 3.108993448481239e-05, |
| "loss": 0.3756, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.3228915662650602, |
| "grad_norm": 0.24398206222736174, |
| "learning_rate": 3.1060154854079806e-05, |
| "loss": 0.3784, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.3244979919678714, |
| "grad_norm": 0.22574883293170708, |
| "learning_rate": 3.103037522334723e-05, |
| "loss": 0.351, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.3261044176706827, |
| "grad_norm": 0.23562300733545966, |
| "learning_rate": 3.100059559261465e-05, |
| "loss": 0.3525, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.3277108433734939, |
| "grad_norm": 0.21956446777264724, |
| "learning_rate": 3.097081596188207e-05, |
| "loss": 0.365, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.3293172690763053, |
| "grad_norm": 0.22065986302717655, |
| "learning_rate": 3.0941036331149495e-05, |
| "loss": 0.3596, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.3309236947791165, |
| "grad_norm": 0.21384764922460403, |
| "learning_rate": 3.091125670041692e-05, |
| "loss": 0.3425, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.3325301204819278, |
| "grad_norm": 0.21762415293360973, |
| "learning_rate": 3.088147706968434e-05, |
| "loss": 0.3682, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.334136546184739, |
| "grad_norm": 0.221693082369684, |
| "learning_rate": 3.085169743895176e-05, |
| "loss": 0.3386, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.3357429718875502, |
| "grad_norm": 0.22070044364874203, |
| "learning_rate": 3.082191780821918e-05, |
| "loss": 0.3803, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.3373493975903614, |
| "grad_norm": 0.24697590279902848, |
| "learning_rate": 3.07921381774866e-05, |
| "loss": 0.3609, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.3389558232931726, |
| "grad_norm": 0.20975548827859883, |
| "learning_rate": 3.076235854675402e-05, |
| "loss": 0.3568, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.3405622489959839, |
| "grad_norm": 0.24626094973921908, |
| "learning_rate": 3.073257891602144e-05, |
| "loss": 0.3821, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.342168674698795, |
| "grad_norm": 0.21878005364172165, |
| "learning_rate": 3.0702799285288866e-05, |
| "loss": 0.3541, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.3437751004016065, |
| "grad_norm": 0.2091599497342082, |
| "learning_rate": 3.0673019654556285e-05, |
| "loss": 0.3777, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.3453815261044177, |
| "grad_norm": 0.23698156981964014, |
| "learning_rate": 3.0643240023823704e-05, |
| "loss": 0.3577, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.346987951807229, |
| "grad_norm": 0.19976369485296863, |
| "learning_rate": 3.061346039309112e-05, |
| "loss": 0.3492, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.3485943775100402, |
| "grad_norm": 0.25041720546760476, |
| "learning_rate": 3.058368076235855e-05, |
| "loss": 0.3788, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.3502008032128514, |
| "grad_norm": 0.22628458952334474, |
| "learning_rate": 3.055390113162597e-05, |
| "loss": 0.3777, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.3518072289156626, |
| "grad_norm": 0.22779377389101385, |
| "learning_rate": 3.0524121500893386e-05, |
| "loss": 0.3465, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.3534136546184738, |
| "grad_norm": 0.2028780371997436, |
| "learning_rate": 3.0494341870160815e-05, |
| "loss": 0.3584, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.355020080321285, |
| "grad_norm": 0.2258560159139495, |
| "learning_rate": 3.0464562239428234e-05, |
| "loss": 0.3809, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.3566265060240963, |
| "grad_norm": 0.22782391594390167, |
| "learning_rate": 3.0434782608695656e-05, |
| "loss": 0.3741, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.3582329317269077, |
| "grad_norm": 0.21440199029792373, |
| "learning_rate": 3.0405002977963075e-05, |
| "loss": 0.3584, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.359839357429719, |
| "grad_norm": 0.21844067462947783, |
| "learning_rate": 3.0375223347230497e-05, |
| "loss": 0.3921, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.3614457831325302, |
| "grad_norm": 0.23535442333385953, |
| "learning_rate": 3.0345443716497916e-05, |
| "loss": 0.3811, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.3630522088353414, |
| "grad_norm": 0.2428521987029711, |
| "learning_rate": 3.031566408576534e-05, |
| "loss": 0.3566, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.3646586345381526, |
| "grad_norm": 0.2354500934566656, |
| "learning_rate": 3.028588445503276e-05, |
| "loss": 0.3474, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.3662650602409638, |
| "grad_norm": 0.2380376127583646, |
| "learning_rate": 3.025610482430018e-05, |
| "loss": 0.3645, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.367871485943775, |
| "grad_norm": 0.24030027929429704, |
| "learning_rate": 3.0226325193567602e-05, |
| "loss": 0.3644, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.3694779116465863, |
| "grad_norm": 0.22628103813644257, |
| "learning_rate": 3.019654556283502e-05, |
| "loss": 0.3616, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.3710843373493975, |
| "grad_norm": 0.2481381906601932, |
| "learning_rate": 3.0166765932102443e-05, |
| "loss": 0.3686, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.372690763052209, |
| "grad_norm": 0.22932123919593422, |
| "learning_rate": 3.0136986301369862e-05, |
| "loss": 0.3513, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.3742971887550202, |
| "grad_norm": 0.232607475833928, |
| "learning_rate": 3.0107206670637284e-05, |
| "loss": 0.356, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.3759036144578314, |
| "grad_norm": 0.23919609411368228, |
| "learning_rate": 3.0077427039904703e-05, |
| "loss": 0.3628, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.3775100401606426, |
| "grad_norm": 0.24735662052608867, |
| "learning_rate": 3.0047647409172125e-05, |
| "loss": 0.38, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.3791164658634538, |
| "grad_norm": 0.24312498440342986, |
| "learning_rate": 3.001786777843955e-05, |
| "loss": 0.3824, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.380722891566265, |
| "grad_norm": 0.24614164137683403, |
| "learning_rate": 2.9988088147706973e-05, |
| "loss": 0.3669, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.3823293172690763, |
| "grad_norm": 0.23007440185768782, |
| "learning_rate": 2.9958308516974392e-05, |
| "loss": 0.3823, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.3839357429718875, |
| "grad_norm": 0.23503403725119434, |
| "learning_rate": 2.9928528886241814e-05, |
| "loss": 0.3547, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.3855421686746987, |
| "grad_norm": 0.26213313448474795, |
| "learning_rate": 2.9898749255509233e-05, |
| "loss": 0.3902, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.3871485943775101, |
| "grad_norm": 0.27248878472797816, |
| "learning_rate": 2.9868969624776655e-05, |
| "loss": 0.3638, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.3887550200803214, |
| "grad_norm": 0.21904082404496375, |
| "learning_rate": 2.9839189994044074e-05, |
| "loss": 0.3781, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.3903614457831326, |
| "grad_norm": 0.23427228859116567, |
| "learning_rate": 2.9809410363311496e-05, |
| "loss": 0.3681, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.3919678714859438, |
| "grad_norm": 0.25377813900510793, |
| "learning_rate": 2.977963073257892e-05, |
| "loss": 0.3519, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.393574297188755, |
| "grad_norm": 0.20959477667017692, |
| "learning_rate": 2.9749851101846337e-05, |
| "loss": 0.3822, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.3951807228915662, |
| "grad_norm": 0.22810492747487612, |
| "learning_rate": 2.972007147111376e-05, |
| "loss": 0.3637, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.3967871485943775, |
| "grad_norm": 0.2474576219838295, |
| "learning_rate": 2.969029184038118e-05, |
| "loss": 0.381, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.3983935742971887, |
| "grad_norm": 0.21986525317161276, |
| "learning_rate": 2.96605122096486e-05, |
| "loss": 0.363, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.4, |
| "grad_norm": 0.21908538162021823, |
| "learning_rate": 2.963073257891602e-05, |
| "loss": 0.3608, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.4016064257028114, |
| "grad_norm": 0.23107734597072727, |
| "learning_rate": 2.9600952948183442e-05, |
| "loss": 0.3856, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.4032128514056224, |
| "grad_norm": 0.2162366677993556, |
| "learning_rate": 2.9571173317450868e-05, |
| "loss": 0.3601, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.4048192771084338, |
| "grad_norm": 0.23032507693748092, |
| "learning_rate": 2.954139368671829e-05, |
| "loss": 0.3668, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.406425702811245, |
| "grad_norm": 0.23005753209891203, |
| "learning_rate": 2.951161405598571e-05, |
| "loss": 0.3871, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.4080321285140562, |
| "grad_norm": 0.23665682010297576, |
| "learning_rate": 2.948183442525313e-05, |
| "loss": 0.3671, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.4096385542168675, |
| "grad_norm": 0.2123172712814544, |
| "learning_rate": 2.945205479452055e-05, |
| "loss": 0.3657, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.4112449799196787, |
| "grad_norm": 0.23159086396276327, |
| "learning_rate": 2.9422275163787972e-05, |
| "loss": 0.3569, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.41285140562249, |
| "grad_norm": 0.20610842351742073, |
| "learning_rate": 2.939249553305539e-05, |
| "loss": 0.3667, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.4144578313253011, |
| "grad_norm": 0.2095943517768558, |
| "learning_rate": 2.9362715902322813e-05, |
| "loss": 0.3594, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.4160642570281126, |
| "grad_norm": 0.20383729710674806, |
| "learning_rate": 2.9332936271590232e-05, |
| "loss": 0.3812, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.4176706827309236, |
| "grad_norm": 0.2148605870055888, |
| "learning_rate": 2.9303156640857654e-05, |
| "loss": 0.3574, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.419277108433735, |
| "grad_norm": 0.20538045682489592, |
| "learning_rate": 2.9273377010125076e-05, |
| "loss": 0.3593, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.4208835341365462, |
| "grad_norm": 0.22659131989469658, |
| "learning_rate": 2.9243597379392495e-05, |
| "loss": 0.3691, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.4224899598393574, |
| "grad_norm": 0.2369038481320581, |
| "learning_rate": 2.9213817748659918e-05, |
| "loss": 0.3549, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.4240963855421687, |
| "grad_norm": 0.21487405668840318, |
| "learning_rate": 2.9184038117927336e-05, |
| "loss": 0.3672, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.4257028112449799, |
| "grad_norm": 0.21882296224297848, |
| "learning_rate": 2.915425848719476e-05, |
| "loss": 0.3819, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.427309236947791, |
| "grad_norm": 0.2122041283250008, |
| "learning_rate": 2.9124478856462184e-05, |
| "loss": 0.3684, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.4289156626506023, |
| "grad_norm": 0.2100792740049444, |
| "learning_rate": 2.9094699225729603e-05, |
| "loss": 0.3691, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.4305220883534138, |
| "grad_norm": 0.21515277117579934, |
| "learning_rate": 2.9064919594997025e-05, |
| "loss": 0.3558, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.4321285140562248, |
| "grad_norm": 0.19445546811223982, |
| "learning_rate": 2.9035139964264448e-05, |
| "loss": 0.3556, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.4337349397590362, |
| "grad_norm": 0.22756377734983396, |
| "learning_rate": 2.9005360333531867e-05, |
| "loss": 0.3652, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.4353413654618474, |
| "grad_norm": 0.2149240696923472, |
| "learning_rate": 2.897558070279929e-05, |
| "loss": 0.3656, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.4369477911646586, |
| "grad_norm": 0.20526738516022955, |
| "learning_rate": 2.8945801072066708e-05, |
| "loss": 0.3529, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.4385542168674699, |
| "grad_norm": 0.2429605718067559, |
| "learning_rate": 2.891602144133413e-05, |
| "loss": 0.3689, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.440160642570281, |
| "grad_norm": 0.2149812497343841, |
| "learning_rate": 2.888624181060155e-05, |
| "loss": 0.3979, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.4417670682730923, |
| "grad_norm": 0.23192601126786705, |
| "learning_rate": 2.885646217986897e-05, |
| "loss": 0.3912, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.4433734939759035, |
| "grad_norm": 0.22698204921354354, |
| "learning_rate": 2.882668254913639e-05, |
| "loss": 0.3546, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.444979919678715, |
| "grad_norm": 0.22205606795192698, |
| "learning_rate": 2.8796902918403812e-05, |
| "loss": 0.3692, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.446586345381526, |
| "grad_norm": 0.2111212056897702, |
| "learning_rate": 2.8767123287671234e-05, |
| "loss": 0.3787, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.4481927710843374, |
| "grad_norm": 0.23052767402474444, |
| "learning_rate": 2.8737343656938653e-05, |
| "loss": 0.372, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.4497991967871486, |
| "grad_norm": 0.2424302535235894, |
| "learning_rate": 2.8707564026206075e-05, |
| "loss": 0.3748, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.4514056224899599, |
| "grad_norm": 0.20197438070774545, |
| "learning_rate": 2.8677784395473494e-05, |
| "loss": 0.3635, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.453012048192771, |
| "grad_norm": 0.2084681253188935, |
| "learning_rate": 2.864800476474092e-05, |
| "loss": 0.3925, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.4546184738955823, |
| "grad_norm": 0.2072242386279762, |
| "learning_rate": 2.8618225134008342e-05, |
| "loss": 0.3672, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.4562248995983935, |
| "grad_norm": 0.21284756895769136, |
| "learning_rate": 2.858844550327576e-05, |
| "loss": 0.3685, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.4578313253012047, |
| "grad_norm": 0.20227667670803087, |
| "learning_rate": 2.8558665872543183e-05, |
| "loss": 0.3597, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.4594377510040162, |
| "grad_norm": 0.21127860676616467, |
| "learning_rate": 2.8528886241810606e-05, |
| "loss": 0.3488, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.4610441767068272, |
| "grad_norm": 0.19786787955222504, |
| "learning_rate": 2.8499106611078024e-05, |
| "loss": 0.3725, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.4626506024096386, |
| "grad_norm": 0.21027102248989119, |
| "learning_rate": 2.8469326980345447e-05, |
| "loss": 0.3727, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.4642570281124498, |
| "grad_norm": 0.2219598070879816, |
| "learning_rate": 2.8439547349612866e-05, |
| "loss": 0.385, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.465863453815261, |
| "grad_norm": 0.21792884213216784, |
| "learning_rate": 2.8409767718880288e-05, |
| "loss": 0.3659, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.4674698795180723, |
| "grad_norm": 0.23692472629136208, |
| "learning_rate": 2.8379988088147707e-05, |
| "loss": 0.3716, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.4690763052208835, |
| "grad_norm": 0.22574261052333702, |
| "learning_rate": 2.835020845741513e-05, |
| "loss": 0.3884, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.4706827309236947, |
| "grad_norm": 0.26972786984809455, |
| "learning_rate": 2.8320428826682548e-05, |
| "loss": 0.3477, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.472289156626506, |
| "grad_norm": 0.20622807458145592, |
| "learning_rate": 2.829064919594997e-05, |
| "loss": 0.373, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.4738955823293174, |
| "grad_norm": 0.22586468226366713, |
| "learning_rate": 2.826086956521739e-05, |
| "loss": 0.3612, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.4755020080321284, |
| "grad_norm": 0.22600368188790596, |
| "learning_rate": 2.823108993448481e-05, |
| "loss": 0.3624, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.4771084337349398, |
| "grad_norm": 0.20939787587771433, |
| "learning_rate": 2.8201310303752237e-05, |
| "loss": 0.3628, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.478714859437751, |
| "grad_norm": 0.21476583410976668, |
| "learning_rate": 2.817153067301966e-05, |
| "loss": 0.3607, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.4803212851405623, |
| "grad_norm": 0.21734844652117358, |
| "learning_rate": 2.8141751042287078e-05, |
| "loss": 0.3635, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.4819277108433735, |
| "grad_norm": 0.2144356999561757, |
| "learning_rate": 2.81119714115545e-05, |
| "loss": 0.3555, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.4835341365461847, |
| "grad_norm": 0.234862318388648, |
| "learning_rate": 2.808219178082192e-05, |
| "loss": 0.3663, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.485140562248996, |
| "grad_norm": 0.23031757949966822, |
| "learning_rate": 2.805241215008934e-05, |
| "loss": 0.3713, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.4867469879518072, |
| "grad_norm": 0.2157553880030866, |
| "learning_rate": 2.8022632519356763e-05, |
| "loss": 0.3756, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.4883534136546186, |
| "grad_norm": 0.2276004175355454, |
| "learning_rate": 2.7992852888624182e-05, |
| "loss": 0.3695, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.4899598393574296, |
| "grad_norm": 0.218014308267801, |
| "learning_rate": 2.7963073257891605e-05, |
| "loss": 0.343, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.491566265060241, |
| "grad_norm": 0.21269986535144367, |
| "learning_rate": 2.7933293627159023e-05, |
| "loss": 0.3783, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.4931726907630523, |
| "grad_norm": 0.20789162882212137, |
| "learning_rate": 2.7903513996426446e-05, |
| "loss": 0.3755, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.4947791164658635, |
| "grad_norm": 0.2166785940860413, |
| "learning_rate": 2.7873734365693864e-05, |
| "loss": 0.351, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.4963855421686747, |
| "grad_norm": 0.21274777459162647, |
| "learning_rate": 2.7843954734961287e-05, |
| "loss": 0.3676, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.497991967871486, |
| "grad_norm": 0.2219183771458503, |
| "learning_rate": 2.7814175104228706e-05, |
| "loss": 0.377, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.4995983935742971, |
| "grad_norm": 0.22762225908211084, |
| "learning_rate": 2.7784395473496128e-05, |
| "loss": 0.3715, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.5012048192771084, |
| "grad_norm": 0.2388083493092833, |
| "learning_rate": 2.7754615842763547e-05, |
| "loss": 0.3473, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.5028112449799198, |
| "grad_norm": 0.20429686311494033, |
| "learning_rate": 2.7724836212030976e-05, |
| "loss": 0.3548, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.5044176706827308, |
| "grad_norm": 0.23481779789206692, |
| "learning_rate": 2.7695056581298395e-05, |
| "loss": 0.3702, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.5060240963855422, |
| "grad_norm": 0.25347931950970687, |
| "learning_rate": 2.7665276950565817e-05, |
| "loss": 0.3549, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.5076305220883535, |
| "grad_norm": 0.2530466881443676, |
| "learning_rate": 2.7635497319833236e-05, |
| "loss": 0.3652, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.5092369477911647, |
| "grad_norm": 0.2099481634380821, |
| "learning_rate": 2.7605717689100658e-05, |
| "loss": 0.3698, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.510843373493976, |
| "grad_norm": 0.22383646269194873, |
| "learning_rate": 2.7575938058368077e-05, |
| "loss": 0.3473, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.5124497991967871, |
| "grad_norm": 0.23510008704046406, |
| "learning_rate": 2.75461584276355e-05, |
| "loss": 0.3642, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.5140562248995983, |
| "grad_norm": 0.2365960136406408, |
| "learning_rate": 2.751637879690292e-05, |
| "loss": 0.3587, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.5156626506024096, |
| "grad_norm": 0.20194874258388565, |
| "learning_rate": 2.748659916617034e-05, |
| "loss": 0.3421, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.517269076305221, |
| "grad_norm": 0.21419410413526951, |
| "learning_rate": 2.7456819535437762e-05, |
| "loss": 0.3642, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.518875502008032, |
| "grad_norm": 0.23503233240382296, |
| "learning_rate": 2.742703990470518e-05, |
| "loss": 0.3639, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.5204819277108435, |
| "grad_norm": 0.19989894199559718, |
| "learning_rate": 2.7397260273972603e-05, |
| "loss": 0.3547, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.5220883534136547, |
| "grad_norm": 0.22996814320536904, |
| "learning_rate": 2.7367480643240022e-05, |
| "loss": 0.393, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.523694779116466, |
| "grad_norm": 0.23563323680885603, |
| "learning_rate": 2.7337701012507445e-05, |
| "loss": 0.3789, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.5253012048192771, |
| "grad_norm": 0.22544780319410296, |
| "learning_rate": 2.7307921381774863e-05, |
| "loss": 0.346, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.5269076305220883, |
| "grad_norm": 0.21358695063820807, |
| "learning_rate": 2.7278141751042292e-05, |
| "loss": 0.3591, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.5285140562248996, |
| "grad_norm": 0.23704356976175486, |
| "learning_rate": 2.724836212030971e-05, |
| "loss": 0.3839, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.5301204819277108, |
| "grad_norm": 0.27475630912830123, |
| "learning_rate": 2.7218582489577134e-05, |
| "loss": 0.3553, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.5317269076305222, |
| "grad_norm": 0.2015377422723893, |
| "learning_rate": 2.7188802858844552e-05, |
| "loss": 0.3523, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.5333333333333332, |
| "grad_norm": 0.21028984207219548, |
| "learning_rate": 2.7159023228111975e-05, |
| "loss": 0.3636, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.5349397590361447, |
| "grad_norm": 0.2761749899383388, |
| "learning_rate": 2.7129243597379394e-05, |
| "loss": 0.3714, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.5365461847389559, |
| "grad_norm": 0.24211596663596374, |
| "learning_rate": 2.7099463966646816e-05, |
| "loss": 0.3663, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.538152610441767, |
| "grad_norm": 0.22792483992791712, |
| "learning_rate": 2.7069684335914235e-05, |
| "loss": 0.364, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.5397590361445783, |
| "grad_norm": 0.24171640195202226, |
| "learning_rate": 2.7039904705181657e-05, |
| "loss": 0.3732, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.5413654618473895, |
| "grad_norm": 0.24911482394201456, |
| "learning_rate": 2.701012507444908e-05, |
| "loss": 0.355, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.5429718875502008, |
| "grad_norm": 0.2319688639910931, |
| "learning_rate": 2.6980345443716498e-05, |
| "loss": 0.365, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.544578313253012, |
| "grad_norm": 0.23455520769533375, |
| "learning_rate": 2.695056581298392e-05, |
| "loss": 0.3874, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.5461847389558234, |
| "grad_norm": 0.23995257002178408, |
| "learning_rate": 2.692078618225134e-05, |
| "loss": 0.3595, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.5477911646586344, |
| "grad_norm": 0.23535956707436564, |
| "learning_rate": 2.689100655151876e-05, |
| "loss": 0.3456, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.5493975903614459, |
| "grad_norm": 0.21283048878661445, |
| "learning_rate": 2.686122692078618e-05, |
| "loss": 0.3794, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.5510040160642569, |
| "grad_norm": 0.27485947364881846, |
| "learning_rate": 2.6831447290053606e-05, |
| "loss": 0.3886, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.5526104417670683, |
| "grad_norm": 0.25782677358973055, |
| "learning_rate": 2.6801667659321028e-05, |
| "loss": 0.3842, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.5542168674698795, |
| "grad_norm": 0.22391534287768072, |
| "learning_rate": 2.677188802858845e-05, |
| "loss": 0.364, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.5558232931726907, |
| "grad_norm": 0.20344190008680924, |
| "learning_rate": 2.674210839785587e-05, |
| "loss": 0.3648, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.557429718875502, |
| "grad_norm": 0.2632931728366601, |
| "learning_rate": 2.671232876712329e-05, |
| "loss": 0.345, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.5590361445783132, |
| "grad_norm": 0.22477219142520344, |
| "learning_rate": 2.668254913639071e-05, |
| "loss": 0.3616, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.5606425702811246, |
| "grad_norm": 0.20847250954768615, |
| "learning_rate": 2.6652769505658133e-05, |
| "loss": 0.3869, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.5622489959839356, |
| "grad_norm": 0.2105481288432062, |
| "learning_rate": 2.662298987492555e-05, |
| "loss": 0.3641, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.563855421686747, |
| "grad_norm": 0.22889309732013546, |
| "learning_rate": 2.6593210244192974e-05, |
| "loss": 0.3535, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.565461847389558, |
| "grad_norm": 0.20431010393481647, |
| "learning_rate": 2.6563430613460393e-05, |
| "loss": 0.3637, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.5670682730923695, |
| "grad_norm": 0.21790437848384872, |
| "learning_rate": 2.6533650982727815e-05, |
| "loss": 0.3682, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.5686746987951807, |
| "grad_norm": 0.20127235332587692, |
| "learning_rate": 2.6503871351995234e-05, |
| "loss": 0.3668, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.570281124497992, |
| "grad_norm": 0.209472785322983, |
| "learning_rate": 2.6474091721262656e-05, |
| "loss": 0.3483, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.5718875502008032, |
| "grad_norm": 0.2076162814357429, |
| "learning_rate": 2.6444312090530078e-05, |
| "loss": 0.3795, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.5734939759036144, |
| "grad_norm": 0.21586409436702833, |
| "learning_rate": 2.6414532459797497e-05, |
| "loss": 0.3928, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.5751004016064258, |
| "grad_norm": 0.22452727262006167, |
| "learning_rate": 2.638475282906492e-05, |
| "loss": 0.3734, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.5767068273092368, |
| "grad_norm": 0.21841183925586227, |
| "learning_rate": 2.6354973198332345e-05, |
| "loss": 0.349, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.5783132530120483, |
| "grad_norm": 0.21497213489470451, |
| "learning_rate": 2.6325193567599764e-05, |
| "loss": 0.3579, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.5799196787148593, |
| "grad_norm": 0.21853135828760742, |
| "learning_rate": 2.6295413936867186e-05, |
| "loss": 0.3757, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.5815261044176707, |
| "grad_norm": 0.22156628370310655, |
| "learning_rate": 2.6265634306134608e-05, |
| "loss": 0.3758, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.583132530120482, |
| "grad_norm": 0.25664803260355046, |
| "learning_rate": 2.6235854675402027e-05, |
| "loss": 0.3524, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.5847389558232932, |
| "grad_norm": 0.21833937858097816, |
| "learning_rate": 2.620607504466945e-05, |
| "loss": 0.3668, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.5863453815261044, |
| "grad_norm": 0.2174828652080452, |
| "learning_rate": 2.6176295413936868e-05, |
| "loss": 0.3613, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.5879518072289156, |
| "grad_norm": 0.2353983024978749, |
| "learning_rate": 2.614651578320429e-05, |
| "loss": 0.3589, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.589558232931727, |
| "grad_norm": 0.2174023484648924, |
| "learning_rate": 2.611673615247171e-05, |
| "loss": 0.3724, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.591164658634538, |
| "grad_norm": 0.23618451910653723, |
| "learning_rate": 2.608695652173913e-05, |
| "loss": 0.3671, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.5927710843373495, |
| "grad_norm": 0.20739430564186828, |
| "learning_rate": 2.605717689100655e-05, |
| "loss": 0.3625, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.5943775100401605, |
| "grad_norm": 0.21228651394370981, |
| "learning_rate": 2.6027397260273973e-05, |
| "loss": 0.35, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.595983935742972, |
| "grad_norm": 0.2085036082140482, |
| "learning_rate": 2.599761762954139e-05, |
| "loss": 0.371, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.5975903614457831, |
| "grad_norm": 0.23282178824648542, |
| "learning_rate": 2.5967837998808814e-05, |
| "loss": 0.3612, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.5991967871485944, |
| "grad_norm": 0.2290510109947705, |
| "learning_rate": 2.5938058368076236e-05, |
| "loss": 0.3748, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.6008032128514056, |
| "grad_norm": 0.22200176495038842, |
| "learning_rate": 2.590827873734366e-05, |
| "loss": 0.3825, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.6024096385542168, |
| "grad_norm": 0.22439874377419233, |
| "learning_rate": 2.587849910661108e-05, |
| "loss": 0.3539, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.6040160642570283, |
| "grad_norm": 0.23407001058299182, |
| "learning_rate": 2.5848719475878503e-05, |
| "loss": 0.3781, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.6056224899598392, |
| "grad_norm": 0.21886484223211836, |
| "learning_rate": 2.581893984514592e-05, |
| "loss": 0.3739, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.6072289156626507, |
| "grad_norm": 0.2530388270263431, |
| "learning_rate": 2.5789160214413344e-05, |
| "loss": 0.3684, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.6088353413654617, |
| "grad_norm": 0.2036275976702559, |
| "learning_rate": 2.5759380583680766e-05, |
| "loss": 0.3607, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.6104417670682731, |
| "grad_norm": 0.23121711876497358, |
| "learning_rate": 2.5729600952948185e-05, |
| "loss": 0.351, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.6120481927710844, |
| "grad_norm": 0.21462527213179255, |
| "learning_rate": 2.5699821322215607e-05, |
| "loss": 0.3575, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.6136546184738956, |
| "grad_norm": 0.20052158615851726, |
| "learning_rate": 2.5670041691483026e-05, |
| "loss": 0.3754, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.6152610441767068, |
| "grad_norm": 0.22616020302746706, |
| "learning_rate": 2.5640262060750448e-05, |
| "loss": 0.3677, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.616867469879518, |
| "grad_norm": 0.2226715559894779, |
| "learning_rate": 2.5610482430017867e-05, |
| "loss": 0.3617, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.6184738955823295, |
| "grad_norm": 0.20836554253125414, |
| "learning_rate": 2.558070279928529e-05, |
| "loss": 0.3724, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.6200803212851405, |
| "grad_norm": 0.22463188594728506, |
| "learning_rate": 2.5550923168552708e-05, |
| "loss": 0.3634, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.621686746987952, |
| "grad_norm": 0.21187153182745774, |
| "learning_rate": 2.552114353782013e-05, |
| "loss": 0.3709, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.623293172690763, |
| "grad_norm": 0.20305345286193022, |
| "learning_rate": 2.549136390708755e-05, |
| "loss": 0.3547, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.6248995983935743, |
| "grad_norm": 0.18531268063010506, |
| "learning_rate": 2.546158427635497e-05, |
| "loss": 0.3707, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.6265060240963856, |
| "grad_norm": 0.2041856506255866, |
| "learning_rate": 2.5431804645622397e-05, |
| "loss": 0.3715, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.6281124497991968, |
| "grad_norm": 0.2134984077840653, |
| "learning_rate": 2.540202501488982e-05, |
| "loss": 0.3497, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.629718875502008, |
| "grad_norm": 0.1952331371500388, |
| "learning_rate": 2.537224538415724e-05, |
| "loss": 0.3727, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.6313253012048192, |
| "grad_norm": 0.22469385469458966, |
| "learning_rate": 2.534246575342466e-05, |
| "loss": 0.3511, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.6329317269076307, |
| "grad_norm": 0.19794746408070624, |
| "learning_rate": 2.531268612269208e-05, |
| "loss": 0.3779, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.6345381526104417, |
| "grad_norm": 0.19967397274850174, |
| "learning_rate": 2.5282906491959502e-05, |
| "loss": 0.3747, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.636144578313253, |
| "grad_norm": 0.22538749173025088, |
| "learning_rate": 2.5253126861226924e-05, |
| "loss": 0.352, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.637751004016064, |
| "grad_norm": 0.22398508134124584, |
| "learning_rate": 2.5223347230494343e-05, |
| "loss": 0.3625, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.6393574297188755, |
| "grad_norm": 0.1883127548632391, |
| "learning_rate": 2.5193567599761765e-05, |
| "loss": 0.3322, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.6409638554216868, |
| "grad_norm": 0.20492467820901458, |
| "learning_rate": 2.5163787969029184e-05, |
| "loss": 0.3416, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.642570281124498, |
| "grad_norm": 0.19950205181583336, |
| "learning_rate": 2.5134008338296606e-05, |
| "loss": 0.356, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.6441767068273092, |
| "grad_norm": 0.20706482851814978, |
| "learning_rate": 2.5104228707564025e-05, |
| "loss": 0.3681, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.6457831325301204, |
| "grad_norm": 0.1960488866576575, |
| "learning_rate": 2.5074449076831447e-05, |
| "loss": 0.3611, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.6473895582329319, |
| "grad_norm": 0.21928337797665387, |
| "learning_rate": 2.5044669446098866e-05, |
| "loss": 0.354, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.6489959839357429, |
| "grad_norm": 0.2049749443964654, |
| "learning_rate": 2.501488981536629e-05, |
| "loss": 0.3483, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.6506024096385543, |
| "grad_norm": 0.21975575711160597, |
| "learning_rate": 2.498511018463371e-05, |
| "loss": 0.3617, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.6522088353413653, |
| "grad_norm": 0.25435722039840947, |
| "learning_rate": 2.4955330553901133e-05, |
| "loss": 0.3595, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.6538152610441768, |
| "grad_norm": 0.21053271933640544, |
| "learning_rate": 2.4925550923168552e-05, |
| "loss": 0.3538, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.655421686746988, |
| "grad_norm": 0.21092960698173877, |
| "learning_rate": 2.4895771292435974e-05, |
| "loss": 0.3708, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.6570281124497992, |
| "grad_norm": 0.25037893658218124, |
| "learning_rate": 2.4865991661703396e-05, |
| "loss": 0.3572, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.6586345381526104, |
| "grad_norm": 0.21614435230428244, |
| "learning_rate": 2.483621203097082e-05, |
| "loss": 0.3616, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.6602409638554216, |
| "grad_norm": 0.21762495218791192, |
| "learning_rate": 2.4806432400238237e-05, |
| "loss": 0.3541, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.661847389558233, |
| "grad_norm": 0.21227742351106402, |
| "learning_rate": 2.477665276950566e-05, |
| "loss": 0.3489, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.663453815261044, |
| "grad_norm": 0.22051322792778752, |
| "learning_rate": 2.474687313877308e-05, |
| "loss": 0.3788, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.6650602409638555, |
| "grad_norm": 0.1967781801087776, |
| "learning_rate": 2.47170935080405e-05, |
| "loss": 0.345, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.6666666666666665, |
| "grad_norm": 0.21567566845974007, |
| "learning_rate": 2.4687313877307923e-05, |
| "loss": 0.3706, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.668273092369478, |
| "grad_norm": 0.2056270298713853, |
| "learning_rate": 2.4657534246575342e-05, |
| "loss": 0.3504, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.6698795180722892, |
| "grad_norm": 0.21004166292669, |
| "learning_rate": 2.4627754615842767e-05, |
| "loss": 0.3633, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.6714859437751004, |
| "grad_norm": 0.21043296463452388, |
| "learning_rate": 2.4597974985110186e-05, |
| "loss": 0.3668, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.6730923694779116, |
| "grad_norm": 0.21326051661342343, |
| "learning_rate": 2.456819535437761e-05, |
| "loss": 0.3598, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.6746987951807228, |
| "grad_norm": 0.21795643765282252, |
| "learning_rate": 2.4538415723645027e-05, |
| "loss": 0.3798, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.6763052208835343, |
| "grad_norm": 0.18662735508569567, |
| "learning_rate": 2.450863609291245e-05, |
| "loss": 0.344, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.6779116465863453, |
| "grad_norm": 0.18983173307642734, |
| "learning_rate": 2.447885646217987e-05, |
| "loss": 0.3537, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.6795180722891567, |
| "grad_norm": 0.20726848993601318, |
| "learning_rate": 2.444907683144729e-05, |
| "loss": 0.3826, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.6811244979919677, |
| "grad_norm": 0.20680344779356397, |
| "learning_rate": 2.4419297200714713e-05, |
| "loss": 0.3468, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.6827309236947792, |
| "grad_norm": 0.21615630603434186, |
| "learning_rate": 2.4389517569982135e-05, |
| "loss": 0.3592, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.6843373493975904, |
| "grad_norm": 0.1952368775804799, |
| "learning_rate": 2.4359737939249554e-05, |
| "loss": 0.3604, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.6859437751004016, |
| "grad_norm": 0.2100215577635232, |
| "learning_rate": 2.4329958308516976e-05, |
| "loss": 0.3589, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.6875502008032128, |
| "grad_norm": 0.21355439868810364, |
| "learning_rate": 2.4300178677784395e-05, |
| "loss": 0.3501, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.689156626506024, |
| "grad_norm": 0.23457242221702107, |
| "learning_rate": 2.4270399047051817e-05, |
| "loss": 0.3713, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.6907630522088355, |
| "grad_norm": 0.1965153502918042, |
| "learning_rate": 2.4240619416319236e-05, |
| "loss": 0.3668, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.6923694779116465, |
| "grad_norm": 0.22811037493361058, |
| "learning_rate": 2.421083978558666e-05, |
| "loss": 0.3588, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.693975903614458, |
| "grad_norm": 0.19865555229011633, |
| "learning_rate": 2.418106015485408e-05, |
| "loss": 0.3601, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.695582329317269, |
| "grad_norm": 0.22658627459573066, |
| "learning_rate": 2.4151280524121503e-05, |
| "loss": 0.3523, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.6971887550200804, |
| "grad_norm": 0.23275848437999672, |
| "learning_rate": 2.4121500893388925e-05, |
| "loss": 0.3554, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.6987951807228916, |
| "grad_norm": 0.22113612954878686, |
| "learning_rate": 2.4091721262656344e-05, |
| "loss": 0.3725, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.7004016064257028, |
| "grad_norm": 0.224473754437892, |
| "learning_rate": 2.4061941631923766e-05, |
| "loss": 0.3776, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.702008032128514, |
| "grad_norm": 0.2295222836812493, |
| "learning_rate": 2.4032162001191185e-05, |
| "loss": 0.3581, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.7036144578313253, |
| "grad_norm": 0.24314433625472318, |
| "learning_rate": 2.4002382370458608e-05, |
| "loss": 0.3568, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.7052208835341367, |
| "grad_norm": 0.34175260472943086, |
| "learning_rate": 2.3972602739726026e-05, |
| "loss": 0.3416, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.7068273092369477, |
| "grad_norm": 0.2514460521674827, |
| "learning_rate": 2.3942823108993452e-05, |
| "loss": 0.3724, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.7084337349397591, |
| "grad_norm": 0.2276723687182961, |
| "learning_rate": 2.391304347826087e-05, |
| "loss": 0.3665, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.7100401606425701, |
| "grad_norm": 0.21923249265172579, |
| "learning_rate": 2.3883263847528293e-05, |
| "loss": 0.356, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.7116465863453816, |
| "grad_norm": 0.20314212509455468, |
| "learning_rate": 2.3853484216795712e-05, |
| "loss": 0.3619, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.7132530120481928, |
| "grad_norm": 0.22203074761564148, |
| "learning_rate": 2.3823704586063134e-05, |
| "loss": 0.3657, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.714859437751004, |
| "grad_norm": 0.20207033700363516, |
| "learning_rate": 2.3793924955330553e-05, |
| "loss": 0.3589, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.7164658634538152, |
| "grad_norm": 0.2209021600211984, |
| "learning_rate": 2.3764145324597975e-05, |
| "loss": 0.369, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.7180722891566265, |
| "grad_norm": 0.20768196432692904, |
| "learning_rate": 2.3734365693865398e-05, |
| "loss": 0.3643, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.719678714859438, |
| "grad_norm": 0.2098995668547178, |
| "learning_rate": 2.370458606313282e-05, |
| "loss": 0.3578, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.721285140562249, |
| "grad_norm": 0.20500363490217155, |
| "learning_rate": 2.367480643240024e-05, |
| "loss": 0.3455, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.7228915662650603, |
| "grad_norm": 0.20815589579222898, |
| "learning_rate": 2.364502680166766e-05, |
| "loss": 0.3551, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.7244979919678713, |
| "grad_norm": 0.218238069485204, |
| "learning_rate": 2.361524717093508e-05, |
| "loss": 0.3572, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.7261044176706828, |
| "grad_norm": 0.21261831858723657, |
| "learning_rate": 2.3585467540202502e-05, |
| "loss": 0.3611, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.727710843373494, |
| "grad_norm": 0.218199623953151, |
| "learning_rate": 2.3555687909469924e-05, |
| "loss": 0.3732, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.7293172690763052, |
| "grad_norm": 0.21077091020641125, |
| "learning_rate": 2.3525908278737343e-05, |
| "loss": 0.3589, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.7309236947791165, |
| "grad_norm": 0.20811140385555477, |
| "learning_rate": 2.349612864800477e-05, |
| "loss": 0.3499, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.7325301204819277, |
| "grad_norm": 0.224004592451593, |
| "learning_rate": 2.3466349017272188e-05, |
| "loss": 0.3656, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.7341365461847391, |
| "grad_norm": 0.20937722059078773, |
| "learning_rate": 2.343656938653961e-05, |
| "loss": 0.3875, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.7357429718875501, |
| "grad_norm": 0.19918778480602867, |
| "learning_rate": 2.340678975580703e-05, |
| "loss": 0.3566, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.7373493975903616, |
| "grad_norm": 0.21098302310101957, |
| "learning_rate": 2.337701012507445e-05, |
| "loss": 0.3716, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.7389558232931726, |
| "grad_norm": 0.2093261199980322, |
| "learning_rate": 2.334723049434187e-05, |
| "loss": 0.3576, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.740562248995984, |
| "grad_norm": 0.1970208286024692, |
| "learning_rate": 2.3317450863609292e-05, |
| "loss": 0.3714, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.7421686746987952, |
| "grad_norm": 0.2016557441886884, |
| "learning_rate": 2.328767123287671e-05, |
| "loss": 0.3655, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.7437751004016064, |
| "grad_norm": 0.21169338027090684, |
| "learning_rate": 2.3257891602144137e-05, |
| "loss": 0.3483, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.7453815261044177, |
| "grad_norm": 0.20200737567041832, |
| "learning_rate": 2.3228111971411555e-05, |
| "loss": 0.3745, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.7469879518072289, |
| "grad_norm": 0.22578155521015078, |
| "learning_rate": 2.3198332340678978e-05, |
| "loss": 0.3745, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.7485943775100403, |
| "grad_norm": 0.19093185238889984, |
| "learning_rate": 2.3168552709946397e-05, |
| "loss": 0.3627, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.7502008032128513, |
| "grad_norm": 0.25205916180047044, |
| "learning_rate": 2.313877307921382e-05, |
| "loss": 0.3631, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.7518072289156628, |
| "grad_norm": 0.20141539644570866, |
| "learning_rate": 2.3108993448481238e-05, |
| "loss": 0.3684, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.7534136546184738, |
| "grad_norm": 0.21474090300526755, |
| "learning_rate": 2.307921381774866e-05, |
| "loss": 0.3624, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.7550200803212852, |
| "grad_norm": 0.2054655600297508, |
| "learning_rate": 2.3049434187016082e-05, |
| "loss": 0.3553, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.7566265060240964, |
| "grad_norm": 0.20955065103071374, |
| "learning_rate": 2.3019654556283504e-05, |
| "loss": 0.3633, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.7582329317269076, |
| "grad_norm": 0.19589875551410302, |
| "learning_rate": 2.2989874925550923e-05, |
| "loss": 0.3289, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.7598393574297189, |
| "grad_norm": 0.2361099076001025, |
| "learning_rate": 2.2960095294818345e-05, |
| "loss": 0.353, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.76144578313253, |
| "grad_norm": 0.21498956022370994, |
| "learning_rate": 2.2930315664085768e-05, |
| "loss": 0.3828, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.7630522088353415, |
| "grad_norm": 0.21650382135153187, |
| "learning_rate": 2.2900536033353187e-05, |
| "loss": 0.368, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.7646586345381525, |
| "grad_norm": 0.20893736406148616, |
| "learning_rate": 2.287075640262061e-05, |
| "loss": 0.3539, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.766265060240964, |
| "grad_norm": 0.23082011015119872, |
| "learning_rate": 2.2840976771888028e-05, |
| "loss": 0.3494, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.767871485943775, |
| "grad_norm": 0.19694576672664207, |
| "learning_rate": 2.2811197141155453e-05, |
| "loss": 0.3629, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.7694779116465864, |
| "grad_norm": 0.20193978071730798, |
| "learning_rate": 2.2781417510422872e-05, |
| "loss": 0.3859, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.7710843373493976, |
| "grad_norm": 0.23092561419314273, |
| "learning_rate": 2.2751637879690294e-05, |
| "loss": 0.3516, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.7726907630522089, |
| "grad_norm": 0.20832654839220274, |
| "learning_rate": 2.2721858248957713e-05, |
| "loss": 0.3478, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.77429718875502, |
| "grad_norm": 0.22535285467644003, |
| "learning_rate": 2.2692078618225136e-05, |
| "loss": 0.3649, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.7759036144578313, |
| "grad_norm": 0.20726821679440688, |
| "learning_rate": 2.2662298987492554e-05, |
| "loss": 0.3571, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.7775100401606427, |
| "grad_norm": 0.22444597601238636, |
| "learning_rate": 2.2632519356759977e-05, |
| "loss": 0.3645, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.7791164658634537, |
| "grad_norm": 0.2009713091668314, |
| "learning_rate": 2.2602739726027396e-05, |
| "loss": 0.3519, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.7807228915662652, |
| "grad_norm": 0.21711474490245392, |
| "learning_rate": 2.257296009529482e-05, |
| "loss": 0.3583, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.7823293172690762, |
| "grad_norm": 0.22516044847286923, |
| "learning_rate": 2.254318046456224e-05, |
| "loss": 0.3799, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.7839357429718876, |
| "grad_norm": 0.2073918693034101, |
| "learning_rate": 2.2513400833829662e-05, |
| "loss": 0.363, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.7855421686746988, |
| "grad_norm": 0.22634597043012206, |
| "learning_rate": 2.248362120309708e-05, |
| "loss": 0.3535, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.78714859437751, |
| "grad_norm": 0.20760649450417196, |
| "learning_rate": 2.2453841572364503e-05, |
| "loss": 0.3762, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.7887550200803213, |
| "grad_norm": 0.24727436296116567, |
| "learning_rate": 2.2424061941631926e-05, |
| "loss": 0.3698, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.7903614457831325, |
| "grad_norm": 0.19266851010173805, |
| "learning_rate": 2.2394282310899344e-05, |
| "loss": 0.3444, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.7919678714859437, |
| "grad_norm": 0.2211464369383969, |
| "learning_rate": 2.2364502680166767e-05, |
| "loss": 0.3663, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.793574297188755, |
| "grad_norm": 0.20631058168368754, |
| "learning_rate": 2.233472304943419e-05, |
| "loss": 0.3519, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.7951807228915664, |
| "grad_norm": 0.19331297422580224, |
| "learning_rate": 2.230494341870161e-05, |
| "loss": 0.3652, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.7967871485943774, |
| "grad_norm": 0.2200641847902077, |
| "learning_rate": 2.227516378796903e-05, |
| "loss": 0.3621, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.7983935742971888, |
| "grad_norm": 0.20279217114814665, |
| "learning_rate": 2.2245384157236452e-05, |
| "loss": 0.3801, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.21870788059156782, |
| "learning_rate": 2.221560452650387e-05, |
| "loss": 0.3457, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.8016064257028113, |
| "grad_norm": 0.21189789954147503, |
| "learning_rate": 2.2185824895771293e-05, |
| "loss": 0.3564, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.8032128514056225, |
| "grad_norm": 0.20086799106080055, |
| "learning_rate": 2.2156045265038712e-05, |
| "loss": 0.3733, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.8048192771084337, |
| "grad_norm": 0.1986048299225642, |
| "learning_rate": 2.2126265634306138e-05, |
| "loss": 0.3671, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.806425702811245, |
| "grad_norm": 0.2083000604689775, |
| "learning_rate": 2.2096486003573557e-05, |
| "loss": 0.3455, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.8080321285140561, |
| "grad_norm": 0.19057526404306752, |
| "learning_rate": 2.206670637284098e-05, |
| "loss": 0.3665, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.8096385542168676, |
| "grad_norm": 0.21925743385948493, |
| "learning_rate": 2.2036926742108398e-05, |
| "loss": 0.3673, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.8112449799196786, |
| "grad_norm": 0.2068789882250303, |
| "learning_rate": 2.200714711137582e-05, |
| "loss": 0.3459, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.81285140562249, |
| "grad_norm": 0.19327520335944015, |
| "learning_rate": 2.197736748064324e-05, |
| "loss": 0.3659, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.8144578313253013, |
| "grad_norm": 0.2078167143831951, |
| "learning_rate": 2.194758784991066e-05, |
| "loss": 0.3727, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.8160642570281125, |
| "grad_norm": 0.18858502029684826, |
| "learning_rate": 2.1917808219178083e-05, |
| "loss": 0.3556, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.8176706827309237, |
| "grad_norm": 0.21278063382666992, |
| "learning_rate": 2.1888028588445506e-05, |
| "loss": 0.3568, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.819277108433735, |
| "grad_norm": 0.2180291628817106, |
| "learning_rate": 2.1858248957712925e-05, |
| "loss": 0.3643, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.8208835341365461, |
| "grad_norm": 0.18948521028304013, |
| "learning_rate": 2.1828469326980347e-05, |
| "loss": 0.3528, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.8224899598393574, |
| "grad_norm": 0.20853568994549157, |
| "learning_rate": 2.179868969624777e-05, |
| "loss": 0.3467, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.8240963855421688, |
| "grad_norm": 0.19566677865636536, |
| "learning_rate": 2.1768910065515188e-05, |
| "loss": 0.3732, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.8257028112449798, |
| "grad_norm": 0.19445461615137893, |
| "learning_rate": 2.173913043478261e-05, |
| "loss": 0.3488, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.8273092369477912, |
| "grad_norm": 0.19624375302208613, |
| "learning_rate": 2.170935080405003e-05, |
| "loss": 0.3624, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.8289156626506025, |
| "grad_norm": 0.2040335887516193, |
| "learning_rate": 2.167957117331745e-05, |
| "loss": 0.3729, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.8305220883534137, |
| "grad_norm": 0.20102650556518836, |
| "learning_rate": 2.1649791542584874e-05, |
| "loss": 0.3466, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.832128514056225, |
| "grad_norm": 0.21005309955955231, |
| "learning_rate": 2.1620011911852296e-05, |
| "loss": 0.3632, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.8337349397590361, |
| "grad_norm": 0.20305833408054538, |
| "learning_rate": 2.1590232281119715e-05, |
| "loss": 0.3534, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.8353413654618473, |
| "grad_norm": 0.20124327635765527, |
| "learning_rate": 2.1560452650387137e-05, |
| "loss": 0.3466, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.8369477911646586, |
| "grad_norm": 0.1832866917039283, |
| "learning_rate": 2.1530673019654556e-05, |
| "loss": 0.3716, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.83855421686747, |
| "grad_norm": 0.20990146281914185, |
| "learning_rate": 2.1500893388921978e-05, |
| "loss": 0.3616, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.840160642570281, |
| "grad_norm": 0.20672268930674145, |
| "learning_rate": 2.1471113758189397e-05, |
| "loss": 0.3608, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.8417670682730924, |
| "grad_norm": 0.2107260397722923, |
| "learning_rate": 2.1441334127456822e-05, |
| "loss": 0.3706, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.8433734939759037, |
| "grad_norm": 0.19672011468893483, |
| "learning_rate": 2.141155449672424e-05, |
| "loss": 0.3596, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.8449799196787149, |
| "grad_norm": 0.1926451723956918, |
| "learning_rate": 2.1381774865991664e-05, |
| "loss": 0.3772, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.846586345381526, |
| "grad_norm": 0.21932150933370484, |
| "learning_rate": 2.1351995235259082e-05, |
| "loss": 0.3532, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.8481927710843373, |
| "grad_norm": 0.23147096635393158, |
| "learning_rate": 2.1322215604526505e-05, |
| "loss": 0.3664, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.8497991967871485, |
| "grad_norm": 0.19934971134196694, |
| "learning_rate": 2.1292435973793927e-05, |
| "loss": 0.3711, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.8514056224899598, |
| "grad_norm": 0.2272191026129902, |
| "learning_rate": 2.1262656343061346e-05, |
| "loss": 0.3504, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.8530120481927712, |
| "grad_norm": 0.21777434254774575, |
| "learning_rate": 2.1232876712328768e-05, |
| "loss": 0.3498, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.8546184738955822, |
| "grad_norm": 0.2124750716341292, |
| "learning_rate": 2.120309708159619e-05, |
| "loss": 0.3522, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.8562248995983937, |
| "grad_norm": 0.22228255865646168, |
| "learning_rate": 2.1173317450863613e-05, |
| "loss": 0.3578, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.8578313253012049, |
| "grad_norm": 0.22231705030783963, |
| "learning_rate": 2.114353782013103e-05, |
| "loss": 0.3394, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.859437751004016, |
| "grad_norm": 0.22285029234412623, |
| "learning_rate": 2.1113758189398454e-05, |
| "loss": 0.3584, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.8610441767068273, |
| "grad_norm": 0.20482179387652538, |
| "learning_rate": 2.1083978558665873e-05, |
| "loss": 0.3525, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.8626506024096385, |
| "grad_norm": 0.24264556011157912, |
| "learning_rate": 2.1054198927933295e-05, |
| "loss": 0.3688, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.8642570281124498, |
| "grad_norm": 0.1957672647540786, |
| "learning_rate": 2.1024419297200714e-05, |
| "loss": 0.3363, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.865863453815261, |
| "grad_norm": 0.2108725681692963, |
| "learning_rate": 2.0994639666468136e-05, |
| "loss": 0.3664, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.8674698795180724, |
| "grad_norm": 0.21125049983033603, |
| "learning_rate": 2.0964860035735558e-05, |
| "loss": 0.3728, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.8690763052208834, |
| "grad_norm": 0.21069255508441262, |
| "learning_rate": 2.093508040500298e-05, |
| "loss": 0.3624, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.8706827309236949, |
| "grad_norm": 0.20365274655393906, |
| "learning_rate": 2.09053007742704e-05, |
| "loss": 0.3452, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.872289156626506, |
| "grad_norm": 0.2195954631278454, |
| "learning_rate": 2.087552114353782e-05, |
| "loss": 0.3615, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.8738955823293173, |
| "grad_norm": 0.21517553233692346, |
| "learning_rate": 2.084574151280524e-05, |
| "loss": 0.3608, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.8755020080321285, |
| "grad_norm": 0.20756181473622315, |
| "learning_rate": 2.0815961882072663e-05, |
| "loss": 0.3512, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.8771084337349397, |
| "grad_norm": 0.22271333786027447, |
| "learning_rate": 2.0786182251340085e-05, |
| "loss": 0.3497, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.878714859437751, |
| "grad_norm": 0.21853129021119494, |
| "learning_rate": 2.0756402620607507e-05, |
| "loss": 0.3574, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.8803212851405622, |
| "grad_norm": 0.21585643665601728, |
| "learning_rate": 2.0726622989874926e-05, |
| "loss": 0.3594, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.8819277108433736, |
| "grad_norm": 0.20331060293257677, |
| "learning_rate": 2.0696843359142348e-05, |
| "loss": 0.3666, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.8835341365461846, |
| "grad_norm": 0.2565749022088396, |
| "learning_rate": 2.066706372840977e-05, |
| "loss": 0.3526, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.885140562248996, |
| "grad_norm": 0.20758070527927017, |
| "learning_rate": 2.063728409767719e-05, |
| "loss": 0.3827, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.886746987951807, |
| "grad_norm": 0.22210523230297302, |
| "learning_rate": 2.060750446694461e-05, |
| "loss": 0.3588, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.8883534136546185, |
| "grad_norm": 0.21091506966574514, |
| "learning_rate": 2.057772483621203e-05, |
| "loss": 0.3531, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.8899598393574297, |
| "grad_norm": 0.20149717266780776, |
| "learning_rate": 2.0547945205479453e-05, |
| "loss": 0.3576, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.891566265060241, |
| "grad_norm": 0.22621131534500763, |
| "learning_rate": 2.0518165574746875e-05, |
| "loss": 0.3751, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.8931726907630522, |
| "grad_norm": 0.20613955089295916, |
| "learning_rate": 2.0488385944014297e-05, |
| "loss": 0.3589, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.8947791164658634, |
| "grad_norm": 0.19083016481113835, |
| "learning_rate": 2.0458606313281716e-05, |
| "loss": 0.3426, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.8963855421686748, |
| "grad_norm": 0.22126022453936145, |
| "learning_rate": 2.0428826682549138e-05, |
| "loss": 0.3505, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.8979919678714858, |
| "grad_norm": 0.20323408257329473, |
| "learning_rate": 2.0399047051816557e-05, |
| "loss": 0.3469, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.8995983935742973, |
| "grad_norm": 0.20039417903514725, |
| "learning_rate": 2.036926742108398e-05, |
| "loss": 0.3618, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.9012048192771083, |
| "grad_norm": 0.21296230999792612, |
| "learning_rate": 2.0339487790351398e-05, |
| "loss": 0.3583, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.9028112449799197, |
| "grad_norm": 0.20265904210834976, |
| "learning_rate": 2.030970815961882e-05, |
| "loss": 0.3613, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.904417670682731, |
| "grad_norm": 0.21826476667287842, |
| "learning_rate": 2.0279928528886243e-05, |
| "loss": 0.3663, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.9060240963855422, |
| "grad_norm": 0.22207635068754514, |
| "learning_rate": 2.0250148898153665e-05, |
| "loss": 0.3482, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.9076305220883534, |
| "grad_norm": 0.22381259846748205, |
| "learning_rate": 2.0220369267421084e-05, |
| "loss": 0.3667, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.9092369477911646, |
| "grad_norm": 0.19895299553105997, |
| "learning_rate": 2.0190589636688506e-05, |
| "loss": 0.3443, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.910843373493976, |
| "grad_norm": 0.21122074307230462, |
| "learning_rate": 2.0160810005955928e-05, |
| "loss": 0.366, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.912449799196787, |
| "grad_norm": 0.21887960654596514, |
| "learning_rate": 2.0131030375223347e-05, |
| "loss": 0.3553, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.9140562248995985, |
| "grad_norm": 0.21709717023702313, |
| "learning_rate": 2.010125074449077e-05, |
| "loss": 0.3556, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.9156626506024095, |
| "grad_norm": 0.2170193651130133, |
| "learning_rate": 2.0071471113758188e-05, |
| "loss": 0.3674, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.917269076305221, |
| "grad_norm": 0.2029081551011821, |
| "learning_rate": 2.0041691483025614e-05, |
| "loss": 0.3549, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.9188755020080321, |
| "grad_norm": 0.19745275159480208, |
| "learning_rate": 2.0011911852293033e-05, |
| "loss": 0.359, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.9204819277108434, |
| "grad_norm": 0.19719600657200462, |
| "learning_rate": 1.9982132221560455e-05, |
| "loss": 0.3481, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.9220883534136546, |
| "grad_norm": 0.20721127133510045, |
| "learning_rate": 1.9952352590827874e-05, |
| "loss": 0.3832, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.9236947791164658, |
| "grad_norm": 0.1956761467782788, |
| "learning_rate": 1.9922572960095296e-05, |
| "loss": 0.3506, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.9253012048192772, |
| "grad_norm": 0.19780280846955045, |
| "learning_rate": 1.9892793329362715e-05, |
| "loss": 0.3578, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.9269076305220882, |
| "grad_norm": 0.20566585919858663, |
| "learning_rate": 1.9863013698630137e-05, |
| "loss": 0.364, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.9285140562248997, |
| "grad_norm": 0.1991383339452239, |
| "learning_rate": 1.983323406789756e-05, |
| "loss": 0.3541, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.9301204819277107, |
| "grad_norm": 0.2155869647709782, |
| "learning_rate": 1.980345443716498e-05, |
| "loss": 0.3441, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.9317269076305221, |
| "grad_norm": 0.2047880536730749, |
| "learning_rate": 1.97736748064324e-05, |
| "loss": 0.3601, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.9333333333333333, |
| "grad_norm": 0.20606661706770807, |
| "learning_rate": 1.9743895175699823e-05, |
| "loss": 0.3643, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.9349397590361446, |
| "grad_norm": 0.19908190373445847, |
| "learning_rate": 1.971411554496724e-05, |
| "loss": 0.3736, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.9365461847389558, |
| "grad_norm": 0.2308674494926228, |
| "learning_rate": 1.9684335914234664e-05, |
| "loss": 0.3631, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.938152610441767, |
| "grad_norm": 0.2066133059057739, |
| "learning_rate": 1.9654556283502086e-05, |
| "loss": 0.356, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.9397590361445785, |
| "grad_norm": 0.20384597819015435, |
| "learning_rate": 1.9624776652769505e-05, |
| "loss": 0.3571, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.9413654618473895, |
| "grad_norm": 0.2347153369190148, |
| "learning_rate": 1.9594997022036927e-05, |
| "loss": 0.35, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.942971887550201, |
| "grad_norm": 0.20581681587401848, |
| "learning_rate": 1.956521739130435e-05, |
| "loss": 0.346, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.944578313253012, |
| "grad_norm": 0.21019457597339639, |
| "learning_rate": 1.9535437760571772e-05, |
| "loss": 0.3546, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.9461847389558233, |
| "grad_norm": 0.20152245518746795, |
| "learning_rate": 1.950565812983919e-05, |
| "loss": 0.3314, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.9477911646586346, |
| "grad_norm": 0.22313459443419506, |
| "learning_rate": 1.9475878499106613e-05, |
| "loss": 0.3433, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.9493975903614458, |
| "grad_norm": 0.20031556726002392, |
| "learning_rate": 1.9446098868374032e-05, |
| "loss": 0.3699, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.951004016064257, |
| "grad_norm": 0.21068240359744894, |
| "learning_rate": 1.9416319237641454e-05, |
| "loss": 0.3717, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.9526104417670682, |
| "grad_norm": 0.18049575736825463, |
| "learning_rate": 1.9386539606908873e-05, |
| "loss": 0.3355, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.9542168674698797, |
| "grad_norm": 0.19194566457192386, |
| "learning_rate": 1.93567599761763e-05, |
| "loss": 0.3523, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.9558232931726907, |
| "grad_norm": 0.21603061394583895, |
| "learning_rate": 1.9326980345443717e-05, |
| "loss": 0.3438, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.957429718875502, |
| "grad_norm": 0.20014961841231446, |
| "learning_rate": 1.929720071471114e-05, |
| "loss": 0.3375, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.959036144578313, |
| "grad_norm": 0.17999062215530368, |
| "learning_rate": 1.926742108397856e-05, |
| "loss": 0.3636, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.9606425702811245, |
| "grad_norm": 0.20451266412122704, |
| "learning_rate": 1.923764145324598e-05, |
| "loss": 0.3569, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.9622489959839358, |
| "grad_norm": 0.204885341355471, |
| "learning_rate": 1.92078618225134e-05, |
| "loss": 0.3664, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.963855421686747, |
| "grad_norm": 0.20325761883578033, |
| "learning_rate": 1.9178082191780822e-05, |
| "loss": 0.3653, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.9654618473895582, |
| "grad_norm": 0.19011063209861206, |
| "learning_rate": 1.9148302561048244e-05, |
| "loss": 0.3452, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.9670682730923694, |
| "grad_norm": 0.19018854040935476, |
| "learning_rate": 1.9118522930315666e-05, |
| "loss": 0.3619, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.9686746987951809, |
| "grad_norm": 0.20318943026957367, |
| "learning_rate": 1.9088743299583085e-05, |
| "loss": 0.3503, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.9702811244979919, |
| "grad_norm": 0.2036147004934244, |
| "learning_rate": 1.9058963668850507e-05, |
| "loss": 0.3768, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.9718875502008033, |
| "grad_norm": 0.2031461574425529, |
| "learning_rate": 1.902918403811793e-05, |
| "loss": 0.3607, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.9734939759036143, |
| "grad_norm": 0.20194319934341554, |
| "learning_rate": 1.899940440738535e-05, |
| "loss": 0.3552, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.9751004016064257, |
| "grad_norm": 0.2159941288822771, |
| "learning_rate": 1.896962477665277e-05, |
| "loss": 0.3401, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.976706827309237, |
| "grad_norm": 0.19857621324762853, |
| "learning_rate": 1.893984514592019e-05, |
| "loss": 0.3759, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.9783132530120482, |
| "grad_norm": 0.21210683481011638, |
| "learning_rate": 1.8910065515187615e-05, |
| "loss": 0.3661, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.9799196787148594, |
| "grad_norm": 0.21464726089214603, |
| "learning_rate": 1.8880285884455034e-05, |
| "loss": 0.3749, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.9815261044176706, |
| "grad_norm": 0.19853232290078882, |
| "learning_rate": 1.8850506253722456e-05, |
| "loss": 0.334, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.983132530120482, |
| "grad_norm": 0.20279986647636403, |
| "learning_rate": 1.8820726622989875e-05, |
| "loss": 0.3449, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.984738955823293, |
| "grad_norm": 0.2068529080085828, |
| "learning_rate": 1.8790946992257297e-05, |
| "loss": 0.3469, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.9863453815261045, |
| "grad_norm": 0.19233002925201434, |
| "learning_rate": 1.8761167361524716e-05, |
| "loss": 0.3386, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.9879518072289155, |
| "grad_norm": 0.20185567313273858, |
| "learning_rate": 1.873138773079214e-05, |
| "loss": 0.3432, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.989558232931727, |
| "grad_norm": 0.21079326889696912, |
| "learning_rate": 1.8701608100059557e-05, |
| "loss": 0.3734, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.9911646586345382, |
| "grad_norm": 0.23227970805229742, |
| "learning_rate": 1.8671828469326983e-05, |
| "loss": 0.3526, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.9927710843373494, |
| "grad_norm": 0.18597898236125074, |
| "learning_rate": 1.8642048838594402e-05, |
| "loss": 0.3483, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.9943775100401606, |
| "grad_norm": 0.21314270191104023, |
| "learning_rate": 1.8612269207861824e-05, |
| "loss": 0.3784, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.9959839357429718, |
| "grad_norm": 0.23543703444673417, |
| "learning_rate": 1.8582489577129243e-05, |
| "loss": 0.3406, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.9975903614457833, |
| "grad_norm": 0.17715702141617948, |
| "learning_rate": 1.8552709946396665e-05, |
| "loss": 0.3423, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.9991967871485943, |
| "grad_norm": 0.19746388647741975, |
| "learning_rate": 1.8522930315664087e-05, |
| "loss": 0.3691, |
| "step": 1245 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.3174062639755838, |
| "learning_rate": 1.8493150684931506e-05, |
| "loss": 0.2983, |
| "step": 1246 |
| }, |
| { |
| "epoch": 2.0016064257028114, |
| "grad_norm": 0.23522152993607706, |
| "learning_rate": 1.846337105419893e-05, |
| "loss": 0.2727, |
| "step": 1247 |
| }, |
| { |
| "epoch": 2.0032128514056224, |
| "grad_norm": 0.20737054864646431, |
| "learning_rate": 1.843359142346635e-05, |
| "loss": 0.2767, |
| "step": 1248 |
| }, |
| { |
| "epoch": 2.004819277108434, |
| "grad_norm": 0.2400288684257797, |
| "learning_rate": 1.8403811792733773e-05, |
| "loss": 0.2795, |
| "step": 1249 |
| }, |
| { |
| "epoch": 2.006425702811245, |
| "grad_norm": 0.2630539207789394, |
| "learning_rate": 1.8374032162001192e-05, |
| "loss": 0.2916, |
| "step": 1250 |
| }, |
| { |
| "epoch": 2.0080321285140563, |
| "grad_norm": 0.2201641599575612, |
| "learning_rate": 1.8344252531268614e-05, |
| "loss": 0.2813, |
| "step": 1251 |
| }, |
| { |
| "epoch": 2.0096385542168673, |
| "grad_norm": 0.220685005990676, |
| "learning_rate": 1.8314472900536033e-05, |
| "loss": 0.2824, |
| "step": 1252 |
| }, |
| { |
| "epoch": 2.0112449799196788, |
| "grad_norm": 0.26390181703132115, |
| "learning_rate": 1.8284693269803455e-05, |
| "loss": 0.2861, |
| "step": 1253 |
| }, |
| { |
| "epoch": 2.0128514056224898, |
| "grad_norm": 0.23610994889531656, |
| "learning_rate": 1.8254913639070874e-05, |
| "loss": 0.301, |
| "step": 1254 |
| }, |
| { |
| "epoch": 2.014457831325301, |
| "grad_norm": 0.2383546717776889, |
| "learning_rate": 1.82251340083383e-05, |
| "loss": 0.2908, |
| "step": 1255 |
| }, |
| { |
| "epoch": 2.0160642570281126, |
| "grad_norm": 1.7848307825699223, |
| "learning_rate": 1.819535437760572e-05, |
| "loss": 0.3335, |
| "step": 1256 |
| }, |
| { |
| "epoch": 2.0176706827309236, |
| "grad_norm": 0.23696784503136437, |
| "learning_rate": 1.816557474687314e-05, |
| "loss": 0.2669, |
| "step": 1257 |
| }, |
| { |
| "epoch": 2.019277108433735, |
| "grad_norm": 0.21888481866636475, |
| "learning_rate": 1.813579511614056e-05, |
| "loss": 0.2909, |
| "step": 1258 |
| }, |
| { |
| "epoch": 2.020883534136546, |
| "grad_norm": 0.20332382254985962, |
| "learning_rate": 1.8106015485407982e-05, |
| "loss": 0.2782, |
| "step": 1259 |
| }, |
| { |
| "epoch": 2.0224899598393575, |
| "grad_norm": 0.20809561245158395, |
| "learning_rate": 1.80762358546754e-05, |
| "loss": 0.2951, |
| "step": 1260 |
| }, |
| { |
| "epoch": 2.0240963855421685, |
| "grad_norm": 0.21814457062033693, |
| "learning_rate": 1.8046456223942823e-05, |
| "loss": 0.2784, |
| "step": 1261 |
| }, |
| { |
| "epoch": 2.02570281124498, |
| "grad_norm": 0.21872054164235094, |
| "learning_rate": 1.8016676593210245e-05, |
| "loss": 0.2836, |
| "step": 1262 |
| }, |
| { |
| "epoch": 2.027309236947791, |
| "grad_norm": 0.19842979874754635, |
| "learning_rate": 1.7986896962477668e-05, |
| "loss": 0.2803, |
| "step": 1263 |
| }, |
| { |
| "epoch": 2.0289156626506024, |
| "grad_norm": 0.21881186415450113, |
| "learning_rate": 1.7957117331745086e-05, |
| "loss": 0.2881, |
| "step": 1264 |
| }, |
| { |
| "epoch": 2.030522088353414, |
| "grad_norm": 0.22443328960794542, |
| "learning_rate": 1.792733770101251e-05, |
| "loss": 0.2941, |
| "step": 1265 |
| }, |
| { |
| "epoch": 2.032128514056225, |
| "grad_norm": 0.21587260600025693, |
| "learning_rate": 1.789755807027993e-05, |
| "loss": 0.2757, |
| "step": 1266 |
| }, |
| { |
| "epoch": 2.0337349397590363, |
| "grad_norm": 0.20115770456663162, |
| "learning_rate": 1.786777843954735e-05, |
| "loss": 0.2787, |
| "step": 1267 |
| }, |
| { |
| "epoch": 2.0353413654618473, |
| "grad_norm": 0.2287670251960821, |
| "learning_rate": 1.7837998808814772e-05, |
| "loss": 0.2681, |
| "step": 1268 |
| }, |
| { |
| "epoch": 2.0369477911646587, |
| "grad_norm": 0.22109238751154517, |
| "learning_rate": 1.780821917808219e-05, |
| "loss": 0.2833, |
| "step": 1269 |
| }, |
| { |
| "epoch": 2.0385542168674697, |
| "grad_norm": 0.20007219850099878, |
| "learning_rate": 1.7778439547349613e-05, |
| "loss": 0.2862, |
| "step": 1270 |
| }, |
| { |
| "epoch": 2.040160642570281, |
| "grad_norm": 0.2161847201043952, |
| "learning_rate": 1.7748659916617035e-05, |
| "loss": 0.2616, |
| "step": 1271 |
| }, |
| { |
| "epoch": 2.041767068273092, |
| "grad_norm": 0.19586347077594904, |
| "learning_rate": 1.7718880285884458e-05, |
| "loss": 0.2744, |
| "step": 1272 |
| }, |
| { |
| "epoch": 2.0433734939759036, |
| "grad_norm": 0.1977339318614, |
| "learning_rate": 1.7689100655151877e-05, |
| "loss": 0.2709, |
| "step": 1273 |
| }, |
| { |
| "epoch": 2.044979919678715, |
| "grad_norm": 0.21405570213223185, |
| "learning_rate": 1.76593210244193e-05, |
| "loss": 0.2684, |
| "step": 1274 |
| }, |
| { |
| "epoch": 2.046586345381526, |
| "grad_norm": 0.19886670005088752, |
| "learning_rate": 1.7629541393686718e-05, |
| "loss": 0.2812, |
| "step": 1275 |
| }, |
| { |
| "epoch": 2.0481927710843375, |
| "grad_norm": 0.20121731499762072, |
| "learning_rate": 1.759976176295414e-05, |
| "loss": 0.2583, |
| "step": 1276 |
| }, |
| { |
| "epoch": 2.0497991967871485, |
| "grad_norm": 0.22436613750052545, |
| "learning_rate": 1.756998213222156e-05, |
| "loss": 0.292, |
| "step": 1277 |
| }, |
| { |
| "epoch": 2.05140562248996, |
| "grad_norm": 0.2104802982109658, |
| "learning_rate": 1.7540202501488984e-05, |
| "loss": 0.2886, |
| "step": 1278 |
| }, |
| { |
| "epoch": 2.053012048192771, |
| "grad_norm": 0.1856838445294004, |
| "learning_rate": 1.7510422870756403e-05, |
| "loss": 0.2853, |
| "step": 1279 |
| }, |
| { |
| "epoch": 2.0546184738955824, |
| "grad_norm": 0.23394464779780705, |
| "learning_rate": 1.7480643240023825e-05, |
| "loss": 0.2841, |
| "step": 1280 |
| }, |
| { |
| "epoch": 2.0562248995983934, |
| "grad_norm": 0.21119273123023982, |
| "learning_rate": 1.7450863609291244e-05, |
| "loss": 0.2775, |
| "step": 1281 |
| }, |
| { |
| "epoch": 2.057831325301205, |
| "grad_norm": 0.19989610493561433, |
| "learning_rate": 1.7421083978558667e-05, |
| "loss": 0.2734, |
| "step": 1282 |
| }, |
| { |
| "epoch": 2.0594377510040163, |
| "grad_norm": 0.2049461925085804, |
| "learning_rate": 1.739130434782609e-05, |
| "loss": 0.2793, |
| "step": 1283 |
| }, |
| { |
| "epoch": 2.0610441767068273, |
| "grad_norm": 0.21027456687662827, |
| "learning_rate": 1.7361524717093508e-05, |
| "loss": 0.284, |
| "step": 1284 |
| }, |
| { |
| "epoch": 2.0626506024096387, |
| "grad_norm": 0.20592602304519445, |
| "learning_rate": 1.733174508636093e-05, |
| "loss": 0.2876, |
| "step": 1285 |
| }, |
| { |
| "epoch": 2.0642570281124497, |
| "grad_norm": 0.20045695701272662, |
| "learning_rate": 1.7301965455628352e-05, |
| "loss": 0.2821, |
| "step": 1286 |
| }, |
| { |
| "epoch": 2.065863453815261, |
| "grad_norm": 0.2030737729176968, |
| "learning_rate": 1.7272185824895774e-05, |
| "loss": 0.2751, |
| "step": 1287 |
| }, |
| { |
| "epoch": 2.067469879518072, |
| "grad_norm": 0.20606854023821006, |
| "learning_rate": 1.7242406194163193e-05, |
| "loss": 0.2786, |
| "step": 1288 |
| }, |
| { |
| "epoch": 2.0690763052208836, |
| "grad_norm": 0.19050518539243924, |
| "learning_rate": 1.7212626563430616e-05, |
| "loss": 0.2806, |
| "step": 1289 |
| }, |
| { |
| "epoch": 2.0706827309236946, |
| "grad_norm": 0.2016324872075261, |
| "learning_rate": 1.7182846932698034e-05, |
| "loss": 0.2912, |
| "step": 1290 |
| }, |
| { |
| "epoch": 2.072289156626506, |
| "grad_norm": 0.19937464734252766, |
| "learning_rate": 1.7153067301965457e-05, |
| "loss": 0.292, |
| "step": 1291 |
| }, |
| { |
| "epoch": 2.0738955823293175, |
| "grad_norm": 0.2030184865559162, |
| "learning_rate": 1.7123287671232875e-05, |
| "loss": 0.2782, |
| "step": 1292 |
| }, |
| { |
| "epoch": 2.0755020080321285, |
| "grad_norm": 0.19153969529826628, |
| "learning_rate": 1.7093508040500298e-05, |
| "loss": 0.281, |
| "step": 1293 |
| }, |
| { |
| "epoch": 2.07710843373494, |
| "grad_norm": 0.20342913607352534, |
| "learning_rate": 1.706372840976772e-05, |
| "loss": 0.2819, |
| "step": 1294 |
| }, |
| { |
| "epoch": 2.078714859437751, |
| "grad_norm": 0.19249096532827192, |
| "learning_rate": 1.7033948779035142e-05, |
| "loss": 0.2815, |
| "step": 1295 |
| }, |
| { |
| "epoch": 2.0803212851405624, |
| "grad_norm": 0.19416016554818794, |
| "learning_rate": 1.700416914830256e-05, |
| "loss": 0.2837, |
| "step": 1296 |
| }, |
| { |
| "epoch": 2.0819277108433734, |
| "grad_norm": 0.20169308055930382, |
| "learning_rate": 1.6974389517569983e-05, |
| "loss": 0.2735, |
| "step": 1297 |
| }, |
| { |
| "epoch": 2.083534136546185, |
| "grad_norm": 0.20000708163336223, |
| "learning_rate": 1.6944609886837402e-05, |
| "loss": 0.2771, |
| "step": 1298 |
| }, |
| { |
| "epoch": 2.085140562248996, |
| "grad_norm": 0.22245446601955285, |
| "learning_rate": 1.6914830256104824e-05, |
| "loss": 0.2744, |
| "step": 1299 |
| }, |
| { |
| "epoch": 2.0867469879518072, |
| "grad_norm": 0.20367878601775039, |
| "learning_rate": 1.6885050625372247e-05, |
| "loss": 0.2703, |
| "step": 1300 |
| }, |
| { |
| "epoch": 2.0883534136546187, |
| "grad_norm": 0.21379513521178495, |
| "learning_rate": 1.685527099463967e-05, |
| "loss": 0.2854, |
| "step": 1301 |
| }, |
| { |
| "epoch": 2.0899598393574297, |
| "grad_norm": 0.2124703802627289, |
| "learning_rate": 1.6825491363907088e-05, |
| "loss": 0.2791, |
| "step": 1302 |
| }, |
| { |
| "epoch": 2.091566265060241, |
| "grad_norm": 0.19744090340380965, |
| "learning_rate": 1.679571173317451e-05, |
| "loss": 0.2831, |
| "step": 1303 |
| }, |
| { |
| "epoch": 2.093172690763052, |
| "grad_norm": 0.2030029634127148, |
| "learning_rate": 1.6765932102441932e-05, |
| "loss": 0.2677, |
| "step": 1304 |
| }, |
| { |
| "epoch": 2.0947791164658636, |
| "grad_norm": 0.2180252902147147, |
| "learning_rate": 1.673615247170935e-05, |
| "loss": 0.2821, |
| "step": 1305 |
| }, |
| { |
| "epoch": 2.0963855421686746, |
| "grad_norm": 0.19691447044091656, |
| "learning_rate": 1.6706372840976773e-05, |
| "loss": 0.2864, |
| "step": 1306 |
| }, |
| { |
| "epoch": 2.097991967871486, |
| "grad_norm": 0.19265504935281613, |
| "learning_rate": 1.6676593210244192e-05, |
| "loss": 0.2927, |
| "step": 1307 |
| }, |
| { |
| "epoch": 2.099598393574297, |
| "grad_norm": 0.19508824514066675, |
| "learning_rate": 1.6646813579511615e-05, |
| "loss": 0.283, |
| "step": 1308 |
| }, |
| { |
| "epoch": 2.1012048192771084, |
| "grad_norm": 0.20194415821250655, |
| "learning_rate": 1.6617033948779037e-05, |
| "loss": 0.2768, |
| "step": 1309 |
| }, |
| { |
| "epoch": 2.10281124497992, |
| "grad_norm": 0.19516202353557202, |
| "learning_rate": 1.658725431804646e-05, |
| "loss": 0.2684, |
| "step": 1310 |
| }, |
| { |
| "epoch": 2.104417670682731, |
| "grad_norm": 0.19986830870005645, |
| "learning_rate": 1.6557474687313878e-05, |
| "loss": 0.2767, |
| "step": 1311 |
| }, |
| { |
| "epoch": 2.1060240963855423, |
| "grad_norm": 0.2055813489906199, |
| "learning_rate": 1.65276950565813e-05, |
| "loss": 0.2884, |
| "step": 1312 |
| }, |
| { |
| "epoch": 2.1076305220883533, |
| "grad_norm": 0.20050715479446987, |
| "learning_rate": 1.649791542584872e-05, |
| "loss": 0.2758, |
| "step": 1313 |
| }, |
| { |
| "epoch": 2.1092369477911648, |
| "grad_norm": 0.19236285983507476, |
| "learning_rate": 1.646813579511614e-05, |
| "loss": 0.2719, |
| "step": 1314 |
| }, |
| { |
| "epoch": 2.1108433734939758, |
| "grad_norm": 0.18775008327406353, |
| "learning_rate": 1.643835616438356e-05, |
| "loss": 0.2769, |
| "step": 1315 |
| }, |
| { |
| "epoch": 2.112449799196787, |
| "grad_norm": 0.21571708856539357, |
| "learning_rate": 1.6408576533650982e-05, |
| "loss": 0.2925, |
| "step": 1316 |
| }, |
| { |
| "epoch": 2.114056224899598, |
| "grad_norm": 0.1936950743747594, |
| "learning_rate": 1.6378796902918405e-05, |
| "loss": 0.28, |
| "step": 1317 |
| }, |
| { |
| "epoch": 2.1156626506024097, |
| "grad_norm": 0.2199167999394077, |
| "learning_rate": 1.6349017272185827e-05, |
| "loss": 0.293, |
| "step": 1318 |
| }, |
| { |
| "epoch": 2.117269076305221, |
| "grad_norm": 0.195144728617892, |
| "learning_rate": 1.6319237641453246e-05, |
| "loss": 0.2745, |
| "step": 1319 |
| }, |
| { |
| "epoch": 2.118875502008032, |
| "grad_norm": 0.2080366011261388, |
| "learning_rate": 1.6289458010720668e-05, |
| "loss": 0.2958, |
| "step": 1320 |
| }, |
| { |
| "epoch": 2.1204819277108435, |
| "grad_norm": 0.20834567278969648, |
| "learning_rate": 1.625967837998809e-05, |
| "loss": 0.2919, |
| "step": 1321 |
| }, |
| { |
| "epoch": 2.1220883534136545, |
| "grad_norm": 0.20330161112032538, |
| "learning_rate": 1.622989874925551e-05, |
| "loss": 0.295, |
| "step": 1322 |
| }, |
| { |
| "epoch": 2.123694779116466, |
| "grad_norm": 0.19423024126446417, |
| "learning_rate": 1.620011911852293e-05, |
| "loss": 0.2705, |
| "step": 1323 |
| }, |
| { |
| "epoch": 2.125301204819277, |
| "grad_norm": 0.2074284814583773, |
| "learning_rate": 1.617033948779035e-05, |
| "loss": 0.2942, |
| "step": 1324 |
| }, |
| { |
| "epoch": 2.1269076305220884, |
| "grad_norm": 0.2224622605312662, |
| "learning_rate": 1.6140559857057776e-05, |
| "loss": 0.2948, |
| "step": 1325 |
| }, |
| { |
| "epoch": 2.1285140562248994, |
| "grad_norm": 0.18843797510856755, |
| "learning_rate": 1.6110780226325195e-05, |
| "loss": 0.2855, |
| "step": 1326 |
| }, |
| { |
| "epoch": 2.130120481927711, |
| "grad_norm": 0.3675079769449738, |
| "learning_rate": 1.6081000595592617e-05, |
| "loss": 0.2731, |
| "step": 1327 |
| }, |
| { |
| "epoch": 2.1317269076305223, |
| "grad_norm": 0.22536574097578618, |
| "learning_rate": 1.6051220964860036e-05, |
| "loss": 0.2885, |
| "step": 1328 |
| }, |
| { |
| "epoch": 2.1333333333333333, |
| "grad_norm": 0.20765939969070893, |
| "learning_rate": 1.6021441334127458e-05, |
| "loss": 0.2737, |
| "step": 1329 |
| }, |
| { |
| "epoch": 2.1349397590361447, |
| "grad_norm": 0.1917959472732135, |
| "learning_rate": 1.5991661703394877e-05, |
| "loss": 0.2698, |
| "step": 1330 |
| }, |
| { |
| "epoch": 2.1365461847389557, |
| "grad_norm": 0.20833088178715453, |
| "learning_rate": 1.59618820726623e-05, |
| "loss": 0.2812, |
| "step": 1331 |
| }, |
| { |
| "epoch": 2.138152610441767, |
| "grad_norm": 0.2266944112068008, |
| "learning_rate": 1.593210244192972e-05, |
| "loss": 0.2925, |
| "step": 1332 |
| }, |
| { |
| "epoch": 2.139759036144578, |
| "grad_norm": 0.19873930251384897, |
| "learning_rate": 1.5902322811197144e-05, |
| "loss": 0.2832, |
| "step": 1333 |
| }, |
| { |
| "epoch": 2.1413654618473896, |
| "grad_norm": 0.211257087356807, |
| "learning_rate": 1.5872543180464562e-05, |
| "loss": 0.2927, |
| "step": 1334 |
| }, |
| { |
| "epoch": 2.1429718875502006, |
| "grad_norm": 0.22087096040863327, |
| "learning_rate": 1.5842763549731985e-05, |
| "loss": 0.2749, |
| "step": 1335 |
| }, |
| { |
| "epoch": 2.144578313253012, |
| "grad_norm": 0.2137852392339819, |
| "learning_rate": 1.5812983918999404e-05, |
| "loss": 0.2746, |
| "step": 1336 |
| }, |
| { |
| "epoch": 2.1461847389558235, |
| "grad_norm": 0.21811352320657051, |
| "learning_rate": 1.5783204288266826e-05, |
| "loss": 0.2873, |
| "step": 1337 |
| }, |
| { |
| "epoch": 2.1477911646586345, |
| "grad_norm": 0.20957211886331442, |
| "learning_rate": 1.5753424657534248e-05, |
| "loss": 0.276, |
| "step": 1338 |
| }, |
| { |
| "epoch": 2.149397590361446, |
| "grad_norm": 0.22560776369291363, |
| "learning_rate": 1.5723645026801667e-05, |
| "loss": 0.3123, |
| "step": 1339 |
| }, |
| { |
| "epoch": 2.151004016064257, |
| "grad_norm": 0.22339377515881612, |
| "learning_rate": 1.569386539606909e-05, |
| "loss": 0.285, |
| "step": 1340 |
| }, |
| { |
| "epoch": 2.1526104417670684, |
| "grad_norm": 0.20990067995837738, |
| "learning_rate": 1.566408576533651e-05, |
| "loss": 0.2857, |
| "step": 1341 |
| }, |
| { |
| "epoch": 2.1542168674698794, |
| "grad_norm": 0.19663492374905836, |
| "learning_rate": 1.5634306134603934e-05, |
| "loss": 0.2718, |
| "step": 1342 |
| }, |
| { |
| "epoch": 2.155823293172691, |
| "grad_norm": 0.21682519090898078, |
| "learning_rate": 1.5604526503871352e-05, |
| "loss": 0.2743, |
| "step": 1343 |
| }, |
| { |
| "epoch": 2.157429718875502, |
| "grad_norm": 0.2171805234137457, |
| "learning_rate": 1.5574746873138775e-05, |
| "loss": 0.2804, |
| "step": 1344 |
| }, |
| { |
| "epoch": 2.1590361445783133, |
| "grad_norm": 0.2140861366836785, |
| "learning_rate": 1.5544967242406194e-05, |
| "loss": 0.2808, |
| "step": 1345 |
| }, |
| { |
| "epoch": 2.1606425702811247, |
| "grad_norm": 0.21635984908185377, |
| "learning_rate": 1.5515187611673616e-05, |
| "loss": 0.2799, |
| "step": 1346 |
| }, |
| { |
| "epoch": 2.1622489959839357, |
| "grad_norm": 0.20798428133406957, |
| "learning_rate": 1.5485407980941035e-05, |
| "loss": 0.2671, |
| "step": 1347 |
| }, |
| { |
| "epoch": 2.163855421686747, |
| "grad_norm": 0.22593474257136462, |
| "learning_rate": 1.545562835020846e-05, |
| "loss": 0.2809, |
| "step": 1348 |
| }, |
| { |
| "epoch": 2.165461847389558, |
| "grad_norm": 0.22565128373678292, |
| "learning_rate": 1.542584871947588e-05, |
| "loss": 0.2898, |
| "step": 1349 |
| }, |
| { |
| "epoch": 2.1670682730923696, |
| "grad_norm": 0.21453394498328968, |
| "learning_rate": 1.53960690887433e-05, |
| "loss": 0.2669, |
| "step": 1350 |
| }, |
| { |
| "epoch": 2.1686746987951806, |
| "grad_norm": 0.21565741702247793, |
| "learning_rate": 1.536628945801072e-05, |
| "loss": 0.2888, |
| "step": 1351 |
| }, |
| { |
| "epoch": 2.170281124497992, |
| "grad_norm": 0.20920208759066525, |
| "learning_rate": 1.5336509827278143e-05, |
| "loss": 0.2793, |
| "step": 1352 |
| }, |
| { |
| "epoch": 2.171887550200803, |
| "grad_norm": 0.2139532549748069, |
| "learning_rate": 1.530673019654556e-05, |
| "loss": 0.2816, |
| "step": 1353 |
| }, |
| { |
| "epoch": 2.1734939759036145, |
| "grad_norm": 0.2170410666396211, |
| "learning_rate": 1.5276950565812984e-05, |
| "loss": 0.2877, |
| "step": 1354 |
| }, |
| { |
| "epoch": 2.175100401606426, |
| "grad_norm": 0.19884229667013362, |
| "learning_rate": 1.5247170935080408e-05, |
| "loss": 0.2842, |
| "step": 1355 |
| }, |
| { |
| "epoch": 2.176706827309237, |
| "grad_norm": 0.19604489046763632, |
| "learning_rate": 1.5217391304347828e-05, |
| "loss": 0.2833, |
| "step": 1356 |
| }, |
| { |
| "epoch": 2.1783132530120484, |
| "grad_norm": 0.2143224822362246, |
| "learning_rate": 1.5187611673615249e-05, |
| "loss": 0.2787, |
| "step": 1357 |
| }, |
| { |
| "epoch": 2.1799196787148594, |
| "grad_norm": 0.19359948352798684, |
| "learning_rate": 1.515783204288267e-05, |
| "loss": 0.2734, |
| "step": 1358 |
| }, |
| { |
| "epoch": 2.181526104417671, |
| "grad_norm": 0.19451280888655836, |
| "learning_rate": 1.512805241215009e-05, |
| "loss": 0.2826, |
| "step": 1359 |
| }, |
| { |
| "epoch": 2.183132530120482, |
| "grad_norm": 0.20343592326016344, |
| "learning_rate": 1.509827278141751e-05, |
| "loss": 0.2725, |
| "step": 1360 |
| }, |
| { |
| "epoch": 2.1847389558232932, |
| "grad_norm": 0.20372631119579326, |
| "learning_rate": 1.5068493150684931e-05, |
| "loss": 0.2804, |
| "step": 1361 |
| }, |
| { |
| "epoch": 2.1863453815261042, |
| "grad_norm": 0.19976055476333732, |
| "learning_rate": 1.5038713519952351e-05, |
| "loss": 0.2847, |
| "step": 1362 |
| }, |
| { |
| "epoch": 2.1879518072289157, |
| "grad_norm": 0.2126072933668119, |
| "learning_rate": 1.5008933889219775e-05, |
| "loss": 0.2801, |
| "step": 1363 |
| }, |
| { |
| "epoch": 2.189558232931727, |
| "grad_norm": 0.21783118111222857, |
| "learning_rate": 1.4979154258487196e-05, |
| "loss": 0.2762, |
| "step": 1364 |
| }, |
| { |
| "epoch": 2.191164658634538, |
| "grad_norm": 0.22424590850184806, |
| "learning_rate": 1.4949374627754617e-05, |
| "loss": 0.2695, |
| "step": 1365 |
| }, |
| { |
| "epoch": 2.1927710843373496, |
| "grad_norm": 0.20898807650299586, |
| "learning_rate": 1.4919594997022037e-05, |
| "loss": 0.2866, |
| "step": 1366 |
| }, |
| { |
| "epoch": 2.1943775100401606, |
| "grad_norm": 0.2024609480649088, |
| "learning_rate": 1.488981536628946e-05, |
| "loss": 0.2593, |
| "step": 1367 |
| }, |
| { |
| "epoch": 2.195983935742972, |
| "grad_norm": 0.2318390578922345, |
| "learning_rate": 1.486003573555688e-05, |
| "loss": 0.2852, |
| "step": 1368 |
| }, |
| { |
| "epoch": 2.197590361445783, |
| "grad_norm": 0.2234576266891764, |
| "learning_rate": 1.48302561048243e-05, |
| "loss": 0.2979, |
| "step": 1369 |
| }, |
| { |
| "epoch": 2.1991967871485945, |
| "grad_norm": 0.25087746453484566, |
| "learning_rate": 1.4800476474091721e-05, |
| "loss": 0.2952, |
| "step": 1370 |
| }, |
| { |
| "epoch": 2.2008032128514055, |
| "grad_norm": 0.20779299268249266, |
| "learning_rate": 1.4770696843359145e-05, |
| "loss": 0.291, |
| "step": 1371 |
| }, |
| { |
| "epoch": 2.202409638554217, |
| "grad_norm": 0.19503797552144309, |
| "learning_rate": 1.4740917212626565e-05, |
| "loss": 0.2889, |
| "step": 1372 |
| }, |
| { |
| "epoch": 2.2040160642570283, |
| "grad_norm": 0.22183786011491854, |
| "learning_rate": 1.4711137581893986e-05, |
| "loss": 0.2876, |
| "step": 1373 |
| }, |
| { |
| "epoch": 2.2056224899598393, |
| "grad_norm": 0.20644315848684305, |
| "learning_rate": 1.4681357951161407e-05, |
| "loss": 0.2823, |
| "step": 1374 |
| }, |
| { |
| "epoch": 2.207228915662651, |
| "grad_norm": 0.18929674842471658, |
| "learning_rate": 1.4651578320428827e-05, |
| "loss": 0.2783, |
| "step": 1375 |
| }, |
| { |
| "epoch": 2.208835341365462, |
| "grad_norm": 0.21308959207082992, |
| "learning_rate": 1.4621798689696248e-05, |
| "loss": 0.288, |
| "step": 1376 |
| }, |
| { |
| "epoch": 2.2104417670682732, |
| "grad_norm": 0.20666635961218438, |
| "learning_rate": 1.4592019058963668e-05, |
| "loss": 0.2738, |
| "step": 1377 |
| }, |
| { |
| "epoch": 2.212048192771084, |
| "grad_norm": 0.2304141465628837, |
| "learning_rate": 1.4562239428231092e-05, |
| "loss": 0.2898, |
| "step": 1378 |
| }, |
| { |
| "epoch": 2.2136546184738957, |
| "grad_norm": 0.20788763343300304, |
| "learning_rate": 1.4532459797498513e-05, |
| "loss": 0.2878, |
| "step": 1379 |
| }, |
| { |
| "epoch": 2.2152610441767067, |
| "grad_norm": 0.20588499147123118, |
| "learning_rate": 1.4502680166765933e-05, |
| "loss": 0.2657, |
| "step": 1380 |
| }, |
| { |
| "epoch": 2.216867469879518, |
| "grad_norm": 0.20418894178510408, |
| "learning_rate": 1.4472900536033354e-05, |
| "loss": 0.2779, |
| "step": 1381 |
| }, |
| { |
| "epoch": 2.2184738955823295, |
| "grad_norm": 0.20491189247655886, |
| "learning_rate": 1.4443120905300774e-05, |
| "loss": 0.2753, |
| "step": 1382 |
| }, |
| { |
| "epoch": 2.2200803212851405, |
| "grad_norm": 0.19753400988709927, |
| "learning_rate": 1.4413341274568195e-05, |
| "loss": 0.2829, |
| "step": 1383 |
| }, |
| { |
| "epoch": 2.221686746987952, |
| "grad_norm": 0.44309112407887496, |
| "learning_rate": 1.4383561643835617e-05, |
| "loss": 0.2795, |
| "step": 1384 |
| }, |
| { |
| "epoch": 2.223293172690763, |
| "grad_norm": 0.195193110652276, |
| "learning_rate": 1.4353782013103038e-05, |
| "loss": 0.2857, |
| "step": 1385 |
| }, |
| { |
| "epoch": 2.2248995983935744, |
| "grad_norm": 0.1821677494511547, |
| "learning_rate": 1.432400238237046e-05, |
| "loss": 0.2772, |
| "step": 1386 |
| }, |
| { |
| "epoch": 2.2265060240963854, |
| "grad_norm": 0.20054081495677123, |
| "learning_rate": 1.429422275163788e-05, |
| "loss": 0.2915, |
| "step": 1387 |
| }, |
| { |
| "epoch": 2.228112449799197, |
| "grad_norm": 0.1912911850679274, |
| "learning_rate": 1.4264443120905303e-05, |
| "loss": 0.2752, |
| "step": 1388 |
| }, |
| { |
| "epoch": 2.229718875502008, |
| "grad_norm": 0.203267085985269, |
| "learning_rate": 1.4234663490172723e-05, |
| "loss": 0.2828, |
| "step": 1389 |
| }, |
| { |
| "epoch": 2.2313253012048193, |
| "grad_norm": 0.1905952075992503, |
| "learning_rate": 1.4204883859440144e-05, |
| "loss": 0.286, |
| "step": 1390 |
| }, |
| { |
| "epoch": 2.2329317269076308, |
| "grad_norm": 0.195695539557944, |
| "learning_rate": 1.4175104228707564e-05, |
| "loss": 0.2835, |
| "step": 1391 |
| }, |
| { |
| "epoch": 2.2345381526104418, |
| "grad_norm": 0.18676759927959077, |
| "learning_rate": 1.4145324597974985e-05, |
| "loss": 0.2796, |
| "step": 1392 |
| }, |
| { |
| "epoch": 2.236144578313253, |
| "grad_norm": 0.20852779760290824, |
| "learning_rate": 1.4115544967242406e-05, |
| "loss": 0.2945, |
| "step": 1393 |
| }, |
| { |
| "epoch": 2.237751004016064, |
| "grad_norm": 0.1948314886127861, |
| "learning_rate": 1.408576533650983e-05, |
| "loss": 0.287, |
| "step": 1394 |
| }, |
| { |
| "epoch": 2.2393574297188756, |
| "grad_norm": 0.19534317098927167, |
| "learning_rate": 1.405598570577725e-05, |
| "loss": 0.2907, |
| "step": 1395 |
| }, |
| { |
| "epoch": 2.2409638554216866, |
| "grad_norm": 0.1851818001811194, |
| "learning_rate": 1.402620607504467e-05, |
| "loss": 0.2705, |
| "step": 1396 |
| }, |
| { |
| "epoch": 2.242570281124498, |
| "grad_norm": 0.19855454405040163, |
| "learning_rate": 1.3996426444312091e-05, |
| "loss": 0.2743, |
| "step": 1397 |
| }, |
| { |
| "epoch": 2.244176706827309, |
| "grad_norm": 0.18856697121755286, |
| "learning_rate": 1.3966646813579512e-05, |
| "loss": 0.2742, |
| "step": 1398 |
| }, |
| { |
| "epoch": 2.2457831325301205, |
| "grad_norm": 0.20949440819860526, |
| "learning_rate": 1.3936867182846932e-05, |
| "loss": 0.2983, |
| "step": 1399 |
| }, |
| { |
| "epoch": 2.247389558232932, |
| "grad_norm": 0.18799229527821282, |
| "learning_rate": 1.3907087552114353e-05, |
| "loss": 0.2777, |
| "step": 1400 |
| }, |
| { |
| "epoch": 2.248995983935743, |
| "grad_norm": 0.20817802926054224, |
| "learning_rate": 1.3877307921381773e-05, |
| "loss": 0.2748, |
| "step": 1401 |
| }, |
| { |
| "epoch": 2.2506024096385544, |
| "grad_norm": 0.19971412125106477, |
| "learning_rate": 1.3847528290649197e-05, |
| "loss": 0.2697, |
| "step": 1402 |
| }, |
| { |
| "epoch": 2.2522088353413654, |
| "grad_norm": 0.20260285826421598, |
| "learning_rate": 1.3817748659916618e-05, |
| "loss": 0.2824, |
| "step": 1403 |
| }, |
| { |
| "epoch": 2.253815261044177, |
| "grad_norm": 0.195358413178943, |
| "learning_rate": 1.3787969029184038e-05, |
| "loss": 0.2685, |
| "step": 1404 |
| }, |
| { |
| "epoch": 2.255421686746988, |
| "grad_norm": 0.20138950773431835, |
| "learning_rate": 1.375818939845146e-05, |
| "loss": 0.2883, |
| "step": 1405 |
| }, |
| { |
| "epoch": 2.2570281124497993, |
| "grad_norm": 0.20198637318846824, |
| "learning_rate": 1.3728409767718881e-05, |
| "loss": 0.2917, |
| "step": 1406 |
| }, |
| { |
| "epoch": 2.2586345381526103, |
| "grad_norm": 0.19903212810034507, |
| "learning_rate": 1.3698630136986302e-05, |
| "loss": 0.2727, |
| "step": 1407 |
| }, |
| { |
| "epoch": 2.2602409638554217, |
| "grad_norm": 0.2014873051634944, |
| "learning_rate": 1.3668850506253722e-05, |
| "loss": 0.27, |
| "step": 1408 |
| }, |
| { |
| "epoch": 2.261847389558233, |
| "grad_norm": 0.20301125119497848, |
| "learning_rate": 1.3639070875521146e-05, |
| "loss": 0.2888, |
| "step": 1409 |
| }, |
| { |
| "epoch": 2.263453815261044, |
| "grad_norm": 0.18947958537365203, |
| "learning_rate": 1.3609291244788567e-05, |
| "loss": 0.2851, |
| "step": 1410 |
| }, |
| { |
| "epoch": 2.2650602409638556, |
| "grad_norm": 0.19886368646745592, |
| "learning_rate": 1.3579511614055987e-05, |
| "loss": 0.2887, |
| "step": 1411 |
| }, |
| { |
| "epoch": 2.2666666666666666, |
| "grad_norm": 0.19329147766946875, |
| "learning_rate": 1.3549731983323408e-05, |
| "loss": 0.2923, |
| "step": 1412 |
| }, |
| { |
| "epoch": 2.268273092369478, |
| "grad_norm": 0.1888562779617201, |
| "learning_rate": 1.3519952352590828e-05, |
| "loss": 0.292, |
| "step": 1413 |
| }, |
| { |
| "epoch": 2.269879518072289, |
| "grad_norm": 0.19788625383031824, |
| "learning_rate": 1.3490172721858249e-05, |
| "loss": 0.286, |
| "step": 1414 |
| }, |
| { |
| "epoch": 2.2714859437751005, |
| "grad_norm": 0.21236176682280908, |
| "learning_rate": 1.346039309112567e-05, |
| "loss": 0.2908, |
| "step": 1415 |
| }, |
| { |
| "epoch": 2.2730923694779115, |
| "grad_norm": 0.1876516240958428, |
| "learning_rate": 1.343061346039309e-05, |
| "loss": 0.2804, |
| "step": 1416 |
| }, |
| { |
| "epoch": 2.274698795180723, |
| "grad_norm": 0.1831457191251782, |
| "learning_rate": 1.3400833829660514e-05, |
| "loss": 0.2762, |
| "step": 1417 |
| }, |
| { |
| "epoch": 2.2763052208835344, |
| "grad_norm": 0.20772669907762084, |
| "learning_rate": 1.3371054198927935e-05, |
| "loss": 0.2893, |
| "step": 1418 |
| }, |
| { |
| "epoch": 2.2779116465863454, |
| "grad_norm": 0.20745250121591247, |
| "learning_rate": 1.3341274568195355e-05, |
| "loss": 0.2813, |
| "step": 1419 |
| }, |
| { |
| "epoch": 2.279518072289157, |
| "grad_norm": 0.19787394808531827, |
| "learning_rate": 1.3311494937462776e-05, |
| "loss": 0.2786, |
| "step": 1420 |
| }, |
| { |
| "epoch": 2.281124497991968, |
| "grad_norm": 0.19693061759333913, |
| "learning_rate": 1.3281715306730196e-05, |
| "loss": 0.2731, |
| "step": 1421 |
| }, |
| { |
| "epoch": 2.2827309236947793, |
| "grad_norm": 0.200179553066319, |
| "learning_rate": 1.3251935675997617e-05, |
| "loss": 0.2712, |
| "step": 1422 |
| }, |
| { |
| "epoch": 2.2843373493975903, |
| "grad_norm": 0.20538459255470326, |
| "learning_rate": 1.3222156045265039e-05, |
| "loss": 0.2937, |
| "step": 1423 |
| }, |
| { |
| "epoch": 2.2859437751004017, |
| "grad_norm": 0.20540469526629412, |
| "learning_rate": 1.319237641453246e-05, |
| "loss": 0.2867, |
| "step": 1424 |
| }, |
| { |
| "epoch": 2.2875502008032127, |
| "grad_norm": 0.19424062293228397, |
| "learning_rate": 1.3162596783799882e-05, |
| "loss": 0.2848, |
| "step": 1425 |
| }, |
| { |
| "epoch": 2.289156626506024, |
| "grad_norm": 0.19772370771483927, |
| "learning_rate": 1.3132817153067304e-05, |
| "loss": 0.2761, |
| "step": 1426 |
| }, |
| { |
| "epoch": 2.2907630522088356, |
| "grad_norm": 0.19070855889734556, |
| "learning_rate": 1.3103037522334725e-05, |
| "loss": 0.287, |
| "step": 1427 |
| }, |
| { |
| "epoch": 2.2923694779116466, |
| "grad_norm": 0.19207092404570347, |
| "learning_rate": 1.3073257891602145e-05, |
| "loss": 0.2766, |
| "step": 1428 |
| }, |
| { |
| "epoch": 2.293975903614458, |
| "grad_norm": 0.18790104378715738, |
| "learning_rate": 1.3043478260869566e-05, |
| "loss": 0.2707, |
| "step": 1429 |
| }, |
| { |
| "epoch": 2.295582329317269, |
| "grad_norm": 0.1990292808311974, |
| "learning_rate": 1.3013698630136986e-05, |
| "loss": 0.2849, |
| "step": 1430 |
| }, |
| { |
| "epoch": 2.2971887550200805, |
| "grad_norm": 0.22742348746658536, |
| "learning_rate": 1.2983918999404407e-05, |
| "loss": 0.2966, |
| "step": 1431 |
| }, |
| { |
| "epoch": 2.2987951807228915, |
| "grad_norm": 0.19765174567815721, |
| "learning_rate": 1.295413936867183e-05, |
| "loss": 0.2851, |
| "step": 1432 |
| }, |
| { |
| "epoch": 2.300401606425703, |
| "grad_norm": 0.18088850697034875, |
| "learning_rate": 1.2924359737939251e-05, |
| "loss": 0.2657, |
| "step": 1433 |
| }, |
| { |
| "epoch": 2.302008032128514, |
| "grad_norm": 0.19373049223838326, |
| "learning_rate": 1.2894580107206672e-05, |
| "loss": 0.2839, |
| "step": 1434 |
| }, |
| { |
| "epoch": 2.3036144578313253, |
| "grad_norm": 0.20654452155770917, |
| "learning_rate": 1.2864800476474092e-05, |
| "loss": 0.2896, |
| "step": 1435 |
| }, |
| { |
| "epoch": 2.305220883534137, |
| "grad_norm": 0.20375004192296206, |
| "learning_rate": 1.2835020845741513e-05, |
| "loss": 0.2899, |
| "step": 1436 |
| }, |
| { |
| "epoch": 2.306827309236948, |
| "grad_norm": 0.2057218215028387, |
| "learning_rate": 1.2805241215008934e-05, |
| "loss": 0.2871, |
| "step": 1437 |
| }, |
| { |
| "epoch": 2.3084337349397592, |
| "grad_norm": 0.1955208741700446, |
| "learning_rate": 1.2775461584276354e-05, |
| "loss": 0.2726, |
| "step": 1438 |
| }, |
| { |
| "epoch": 2.3100401606425702, |
| "grad_norm": 0.21049631645666206, |
| "learning_rate": 1.2745681953543775e-05, |
| "loss": 0.2948, |
| "step": 1439 |
| }, |
| { |
| "epoch": 2.3116465863453817, |
| "grad_norm": 0.20512260944309865, |
| "learning_rate": 1.2715902322811199e-05, |
| "loss": 0.2776, |
| "step": 1440 |
| }, |
| { |
| "epoch": 2.3132530120481927, |
| "grad_norm": 0.18114258763003166, |
| "learning_rate": 1.268612269207862e-05, |
| "loss": 0.2732, |
| "step": 1441 |
| }, |
| { |
| "epoch": 2.314859437751004, |
| "grad_norm": 0.1959388399118363, |
| "learning_rate": 1.265634306134604e-05, |
| "loss": 0.2812, |
| "step": 1442 |
| }, |
| { |
| "epoch": 2.316465863453815, |
| "grad_norm": 0.18349205225454254, |
| "learning_rate": 1.2626563430613462e-05, |
| "loss": 0.2612, |
| "step": 1443 |
| }, |
| { |
| "epoch": 2.3180722891566266, |
| "grad_norm": 0.1849928114980666, |
| "learning_rate": 1.2596783799880883e-05, |
| "loss": 0.2763, |
| "step": 1444 |
| }, |
| { |
| "epoch": 2.319678714859438, |
| "grad_norm": 0.1839742763610793, |
| "learning_rate": 1.2567004169148303e-05, |
| "loss": 0.2741, |
| "step": 1445 |
| }, |
| { |
| "epoch": 2.321285140562249, |
| "grad_norm": 0.1936421692979358, |
| "learning_rate": 1.2537224538415724e-05, |
| "loss": 0.2878, |
| "step": 1446 |
| }, |
| { |
| "epoch": 2.32289156626506, |
| "grad_norm": 0.19257864798083466, |
| "learning_rate": 1.2507444907683144e-05, |
| "loss": 0.2827, |
| "step": 1447 |
| }, |
| { |
| "epoch": 2.3244979919678714, |
| "grad_norm": 0.1989288231161012, |
| "learning_rate": 1.2477665276950566e-05, |
| "loss": 0.2785, |
| "step": 1448 |
| }, |
| { |
| "epoch": 2.326104417670683, |
| "grad_norm": 0.19337325518819184, |
| "learning_rate": 1.2447885646217987e-05, |
| "loss": 0.2779, |
| "step": 1449 |
| }, |
| { |
| "epoch": 2.327710843373494, |
| "grad_norm": 0.18727054478458935, |
| "learning_rate": 1.241810601548541e-05, |
| "loss": 0.2787, |
| "step": 1450 |
| }, |
| { |
| "epoch": 2.3293172690763053, |
| "grad_norm": 0.1967745024997371, |
| "learning_rate": 1.238832638475283e-05, |
| "loss": 0.2886, |
| "step": 1451 |
| }, |
| { |
| "epoch": 2.3309236947791163, |
| "grad_norm": 0.20060793013447148, |
| "learning_rate": 1.235854675402025e-05, |
| "loss": 0.2695, |
| "step": 1452 |
| }, |
| { |
| "epoch": 2.3325301204819278, |
| "grad_norm": 0.2013438687767352, |
| "learning_rate": 1.2328767123287671e-05, |
| "loss": 0.2658, |
| "step": 1453 |
| }, |
| { |
| "epoch": 2.334136546184739, |
| "grad_norm": 0.18529352907025987, |
| "learning_rate": 1.2298987492555093e-05, |
| "loss": 0.2772, |
| "step": 1454 |
| }, |
| { |
| "epoch": 2.33574297188755, |
| "grad_norm": 0.19042540749718512, |
| "learning_rate": 1.2269207861822514e-05, |
| "loss": 0.2908, |
| "step": 1455 |
| }, |
| { |
| "epoch": 2.337349397590361, |
| "grad_norm": 0.18922713550520592, |
| "learning_rate": 1.2239428231089934e-05, |
| "loss": 0.2882, |
| "step": 1456 |
| }, |
| { |
| "epoch": 2.3389558232931726, |
| "grad_norm": 0.20939876918201103, |
| "learning_rate": 1.2209648600357356e-05, |
| "loss": 0.2794, |
| "step": 1457 |
| }, |
| { |
| "epoch": 2.340562248995984, |
| "grad_norm": 0.18838061426751868, |
| "learning_rate": 1.2179868969624777e-05, |
| "loss": 0.2883, |
| "step": 1458 |
| }, |
| { |
| "epoch": 2.342168674698795, |
| "grad_norm": 0.18809567266436786, |
| "learning_rate": 1.2150089338892198e-05, |
| "loss": 0.2868, |
| "step": 1459 |
| }, |
| { |
| "epoch": 2.3437751004016065, |
| "grad_norm": 0.18724157835249255, |
| "learning_rate": 1.2120309708159618e-05, |
| "loss": 0.3008, |
| "step": 1460 |
| }, |
| { |
| "epoch": 2.3453815261044175, |
| "grad_norm": 0.18753869589680505, |
| "learning_rate": 1.209053007742704e-05, |
| "loss": 0.2907, |
| "step": 1461 |
| }, |
| { |
| "epoch": 2.346987951807229, |
| "grad_norm": 0.18196888532868866, |
| "learning_rate": 1.2060750446694463e-05, |
| "loss": 0.2741, |
| "step": 1462 |
| }, |
| { |
| "epoch": 2.3485943775100404, |
| "grad_norm": 0.18421309904049457, |
| "learning_rate": 1.2030970815961883e-05, |
| "loss": 0.2814, |
| "step": 1463 |
| }, |
| { |
| "epoch": 2.3502008032128514, |
| "grad_norm": 0.18040962820252937, |
| "learning_rate": 1.2001191185229304e-05, |
| "loss": 0.2851, |
| "step": 1464 |
| }, |
| { |
| "epoch": 2.3518072289156624, |
| "grad_norm": 0.19206627034468482, |
| "learning_rate": 1.1971411554496726e-05, |
| "loss": 0.2771, |
| "step": 1465 |
| }, |
| { |
| "epoch": 2.353413654618474, |
| "grad_norm": 0.1892992889850213, |
| "learning_rate": 1.1941631923764147e-05, |
| "loss": 0.278, |
| "step": 1466 |
| }, |
| { |
| "epoch": 2.3550200803212853, |
| "grad_norm": 0.18386969475637321, |
| "learning_rate": 1.1911852293031567e-05, |
| "loss": 0.2849, |
| "step": 1467 |
| }, |
| { |
| "epoch": 2.3566265060240963, |
| "grad_norm": 0.18682928678552546, |
| "learning_rate": 1.1882072662298988e-05, |
| "loss": 0.2849, |
| "step": 1468 |
| }, |
| { |
| "epoch": 2.3582329317269077, |
| "grad_norm": 0.194393928066698, |
| "learning_rate": 1.185229303156641e-05, |
| "loss": 0.3004, |
| "step": 1469 |
| }, |
| { |
| "epoch": 2.3598393574297187, |
| "grad_norm": 0.18579247490094822, |
| "learning_rate": 1.182251340083383e-05, |
| "loss": 0.2809, |
| "step": 1470 |
| }, |
| { |
| "epoch": 2.36144578313253, |
| "grad_norm": 0.18682075099005277, |
| "learning_rate": 1.1792733770101251e-05, |
| "loss": 0.2924, |
| "step": 1471 |
| }, |
| { |
| "epoch": 2.363052208835341, |
| "grad_norm": 0.18642046974686882, |
| "learning_rate": 1.1762954139368672e-05, |
| "loss": 0.2806, |
| "step": 1472 |
| }, |
| { |
| "epoch": 2.3646586345381526, |
| "grad_norm": 0.18866799498275091, |
| "learning_rate": 1.1733174508636094e-05, |
| "loss": 0.2892, |
| "step": 1473 |
| }, |
| { |
| "epoch": 2.3662650602409636, |
| "grad_norm": 0.1890942207554411, |
| "learning_rate": 1.1703394877903514e-05, |
| "loss": 0.2942, |
| "step": 1474 |
| }, |
| { |
| "epoch": 2.367871485943775, |
| "grad_norm": 0.18048434103136618, |
| "learning_rate": 1.1673615247170935e-05, |
| "loss": 0.2833, |
| "step": 1475 |
| }, |
| { |
| "epoch": 2.3694779116465865, |
| "grad_norm": 0.19632406720450757, |
| "learning_rate": 1.1643835616438355e-05, |
| "loss": 0.2848, |
| "step": 1476 |
| }, |
| { |
| "epoch": 2.3710843373493975, |
| "grad_norm": 0.18719345956190125, |
| "learning_rate": 1.1614055985705778e-05, |
| "loss": 0.2874, |
| "step": 1477 |
| }, |
| { |
| "epoch": 2.372690763052209, |
| "grad_norm": 0.19841712109494833, |
| "learning_rate": 1.1584276354973198e-05, |
| "loss": 0.2839, |
| "step": 1478 |
| }, |
| { |
| "epoch": 2.37429718875502, |
| "grad_norm": 0.18642681858163637, |
| "learning_rate": 1.1554496724240619e-05, |
| "loss": 0.2792, |
| "step": 1479 |
| }, |
| { |
| "epoch": 2.3759036144578314, |
| "grad_norm": 0.1792405548472351, |
| "learning_rate": 1.1524717093508041e-05, |
| "loss": 0.2688, |
| "step": 1480 |
| }, |
| { |
| "epoch": 2.3775100401606424, |
| "grad_norm": 0.19477960567033956, |
| "learning_rate": 1.1494937462775462e-05, |
| "loss": 0.2808, |
| "step": 1481 |
| }, |
| { |
| "epoch": 2.379116465863454, |
| "grad_norm": 0.19812997269342178, |
| "learning_rate": 1.1465157832042884e-05, |
| "loss": 0.2983, |
| "step": 1482 |
| }, |
| { |
| "epoch": 2.380722891566265, |
| "grad_norm": 0.19755348401845638, |
| "learning_rate": 1.1435378201310304e-05, |
| "loss": 0.2949, |
| "step": 1483 |
| }, |
| { |
| "epoch": 2.3823293172690763, |
| "grad_norm": 0.19900905996115784, |
| "learning_rate": 1.1405598570577727e-05, |
| "loss": 0.2938, |
| "step": 1484 |
| }, |
| { |
| "epoch": 2.3839357429718877, |
| "grad_norm": 0.19677337928790387, |
| "learning_rate": 1.1375818939845147e-05, |
| "loss": 0.2967, |
| "step": 1485 |
| }, |
| { |
| "epoch": 2.3855421686746987, |
| "grad_norm": 0.19364024075681263, |
| "learning_rate": 1.1346039309112568e-05, |
| "loss": 0.2815, |
| "step": 1486 |
| }, |
| { |
| "epoch": 2.38714859437751, |
| "grad_norm": 0.19161486254413096, |
| "learning_rate": 1.1316259678379988e-05, |
| "loss": 0.2808, |
| "step": 1487 |
| }, |
| { |
| "epoch": 2.388755020080321, |
| "grad_norm": 0.20890719417104178, |
| "learning_rate": 1.128648004764741e-05, |
| "loss": 0.2819, |
| "step": 1488 |
| }, |
| { |
| "epoch": 2.3903614457831326, |
| "grad_norm": 0.18146426376706648, |
| "learning_rate": 1.1256700416914831e-05, |
| "loss": 0.2693, |
| "step": 1489 |
| }, |
| { |
| "epoch": 2.3919678714859436, |
| "grad_norm": 0.1921463250218022, |
| "learning_rate": 1.1226920786182252e-05, |
| "loss": 0.2933, |
| "step": 1490 |
| }, |
| { |
| "epoch": 2.393574297188755, |
| "grad_norm": 0.18715864298714197, |
| "learning_rate": 1.1197141155449672e-05, |
| "loss": 0.2667, |
| "step": 1491 |
| }, |
| { |
| "epoch": 2.395180722891566, |
| "grad_norm": 0.18764049048200124, |
| "learning_rate": 1.1167361524717094e-05, |
| "loss": 0.2841, |
| "step": 1492 |
| }, |
| { |
| "epoch": 2.3967871485943775, |
| "grad_norm": 0.18633386696731774, |
| "learning_rate": 1.1137581893984515e-05, |
| "loss": 0.283, |
| "step": 1493 |
| }, |
| { |
| "epoch": 2.398393574297189, |
| "grad_norm": 0.18102587136896592, |
| "learning_rate": 1.1107802263251936e-05, |
| "loss": 0.2775, |
| "step": 1494 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.18499734311638247, |
| "learning_rate": 1.1078022632519356e-05, |
| "loss": 0.2815, |
| "step": 1495 |
| }, |
| { |
| "epoch": 2.4016064257028114, |
| "grad_norm": 0.18726230604101202, |
| "learning_rate": 1.1048243001786778e-05, |
| "loss": 0.2755, |
| "step": 1496 |
| }, |
| { |
| "epoch": 2.4032128514056224, |
| "grad_norm": 0.19396755026779727, |
| "learning_rate": 1.1018463371054199e-05, |
| "loss": 0.2746, |
| "step": 1497 |
| }, |
| { |
| "epoch": 2.404819277108434, |
| "grad_norm": 0.19963469527589364, |
| "learning_rate": 1.098868374032162e-05, |
| "loss": 0.2784, |
| "step": 1498 |
| }, |
| { |
| "epoch": 2.406425702811245, |
| "grad_norm": 0.19452591890113338, |
| "learning_rate": 1.0958904109589042e-05, |
| "loss": 0.278, |
| "step": 1499 |
| }, |
| { |
| "epoch": 2.4080321285140562, |
| "grad_norm": 0.18801090615789337, |
| "learning_rate": 1.0929124478856462e-05, |
| "loss": 0.2758, |
| "step": 1500 |
| }, |
| { |
| "epoch": 2.4096385542168672, |
| "grad_norm": 0.20654038553918738, |
| "learning_rate": 1.0899344848123885e-05, |
| "loss": 0.2949, |
| "step": 1501 |
| }, |
| { |
| "epoch": 2.4112449799196787, |
| "grad_norm": 0.18834258722356456, |
| "learning_rate": 1.0869565217391305e-05, |
| "loss": 0.271, |
| "step": 1502 |
| }, |
| { |
| "epoch": 2.41285140562249, |
| "grad_norm": 0.19237206886647512, |
| "learning_rate": 1.0839785586658726e-05, |
| "loss": 0.2702, |
| "step": 1503 |
| }, |
| { |
| "epoch": 2.414457831325301, |
| "grad_norm": 0.18993808278326732, |
| "learning_rate": 1.0810005955926148e-05, |
| "loss": 0.2784, |
| "step": 1504 |
| }, |
| { |
| "epoch": 2.4160642570281126, |
| "grad_norm": 0.20480903011469814, |
| "learning_rate": 1.0780226325193568e-05, |
| "loss": 0.2995, |
| "step": 1505 |
| }, |
| { |
| "epoch": 2.4176706827309236, |
| "grad_norm": 0.208136543604581, |
| "learning_rate": 1.0750446694460989e-05, |
| "loss": 0.3076, |
| "step": 1506 |
| }, |
| { |
| "epoch": 2.419277108433735, |
| "grad_norm": 0.18244357017251567, |
| "learning_rate": 1.0720667063728411e-05, |
| "loss": 0.2682, |
| "step": 1507 |
| }, |
| { |
| "epoch": 2.420883534136546, |
| "grad_norm": 0.16971261958781741, |
| "learning_rate": 1.0690887432995832e-05, |
| "loss": 0.2579, |
| "step": 1508 |
| }, |
| { |
| "epoch": 2.4224899598393574, |
| "grad_norm": 0.18274700243771452, |
| "learning_rate": 1.0661107802263252e-05, |
| "loss": 0.2746, |
| "step": 1509 |
| }, |
| { |
| "epoch": 2.4240963855421684, |
| "grad_norm": 0.19118813600451662, |
| "learning_rate": 1.0631328171530673e-05, |
| "loss": 0.2797, |
| "step": 1510 |
| }, |
| { |
| "epoch": 2.42570281124498, |
| "grad_norm": 0.18303626521117353, |
| "learning_rate": 1.0601548540798095e-05, |
| "loss": 0.2896, |
| "step": 1511 |
| }, |
| { |
| "epoch": 2.4273092369477913, |
| "grad_norm": 0.18305566120895966, |
| "learning_rate": 1.0571768910065516e-05, |
| "loss": 0.2675, |
| "step": 1512 |
| }, |
| { |
| "epoch": 2.4289156626506023, |
| "grad_norm": 0.1893884338871087, |
| "learning_rate": 1.0541989279332936e-05, |
| "loss": 0.2714, |
| "step": 1513 |
| }, |
| { |
| "epoch": 2.4305220883534138, |
| "grad_norm": 0.18491595866432944, |
| "learning_rate": 1.0512209648600357e-05, |
| "loss": 0.2917, |
| "step": 1514 |
| }, |
| { |
| "epoch": 2.4321285140562248, |
| "grad_norm": 0.18918777323863992, |
| "learning_rate": 1.0482430017867779e-05, |
| "loss": 0.2888, |
| "step": 1515 |
| }, |
| { |
| "epoch": 2.433734939759036, |
| "grad_norm": 0.1850411963758118, |
| "learning_rate": 1.04526503871352e-05, |
| "loss": 0.286, |
| "step": 1516 |
| }, |
| { |
| "epoch": 2.435341365461847, |
| "grad_norm": 0.1848750327219421, |
| "learning_rate": 1.042287075640262e-05, |
| "loss": 0.274, |
| "step": 1517 |
| }, |
| { |
| "epoch": 2.4369477911646586, |
| "grad_norm": 0.2004762828981936, |
| "learning_rate": 1.0393091125670042e-05, |
| "loss": 0.2819, |
| "step": 1518 |
| }, |
| { |
| "epoch": 2.4385542168674696, |
| "grad_norm": 0.18728371181745848, |
| "learning_rate": 1.0363311494937463e-05, |
| "loss": 0.2845, |
| "step": 1519 |
| }, |
| { |
| "epoch": 2.440160642570281, |
| "grad_norm": 0.18628971274859396, |
| "learning_rate": 1.0333531864204885e-05, |
| "loss": 0.291, |
| "step": 1520 |
| }, |
| { |
| "epoch": 2.4417670682730925, |
| "grad_norm": 0.1897329118843444, |
| "learning_rate": 1.0303752233472306e-05, |
| "loss": 0.3044, |
| "step": 1521 |
| }, |
| { |
| "epoch": 2.4433734939759035, |
| "grad_norm": 0.18472072022984715, |
| "learning_rate": 1.0273972602739726e-05, |
| "loss": 0.2896, |
| "step": 1522 |
| }, |
| { |
| "epoch": 2.444979919678715, |
| "grad_norm": 0.1956552598842094, |
| "learning_rate": 1.0244192972007149e-05, |
| "loss": 0.2855, |
| "step": 1523 |
| }, |
| { |
| "epoch": 2.446586345381526, |
| "grad_norm": 0.1958651207777498, |
| "learning_rate": 1.0214413341274569e-05, |
| "loss": 0.2951, |
| "step": 1524 |
| }, |
| { |
| "epoch": 2.4481927710843374, |
| "grad_norm": 0.19318712685154302, |
| "learning_rate": 1.018463371054199e-05, |
| "loss": 0.281, |
| "step": 1525 |
| }, |
| { |
| "epoch": 2.4497991967871484, |
| "grad_norm": 0.17614917193183735, |
| "learning_rate": 1.015485407980941e-05, |
| "loss": 0.268, |
| "step": 1526 |
| }, |
| { |
| "epoch": 2.45140562248996, |
| "grad_norm": 0.1955853333253292, |
| "learning_rate": 1.0125074449076832e-05, |
| "loss": 0.2775, |
| "step": 1527 |
| }, |
| { |
| "epoch": 2.453012048192771, |
| "grad_norm": 0.19608910484702638, |
| "learning_rate": 1.0095294818344253e-05, |
| "loss": 0.2811, |
| "step": 1528 |
| }, |
| { |
| "epoch": 2.4546184738955823, |
| "grad_norm": 0.20332125720916808, |
| "learning_rate": 1.0065515187611674e-05, |
| "loss": 0.2936, |
| "step": 1529 |
| }, |
| { |
| "epoch": 2.4562248995983937, |
| "grad_norm": 0.19947230185866924, |
| "learning_rate": 1.0035735556879094e-05, |
| "loss": 0.2694, |
| "step": 1530 |
| }, |
| { |
| "epoch": 2.4578313253012047, |
| "grad_norm": 0.1879744700611198, |
| "learning_rate": 1.0005955926146516e-05, |
| "loss": 0.2822, |
| "step": 1531 |
| }, |
| { |
| "epoch": 2.459437751004016, |
| "grad_norm": 0.19580220039910437, |
| "learning_rate": 9.976176295413937e-06, |
| "loss": 0.2879, |
| "step": 1532 |
| }, |
| { |
| "epoch": 2.461044176706827, |
| "grad_norm": 0.19902210658108496, |
| "learning_rate": 9.946396664681357e-06, |
| "loss": 0.2744, |
| "step": 1533 |
| }, |
| { |
| "epoch": 2.4626506024096386, |
| "grad_norm": 0.19067665389503385, |
| "learning_rate": 9.91661703394878e-06, |
| "loss": 0.2764, |
| "step": 1534 |
| }, |
| { |
| "epoch": 2.4642570281124496, |
| "grad_norm": 0.1872003159605713, |
| "learning_rate": 9.8868374032162e-06, |
| "loss": 0.2758, |
| "step": 1535 |
| }, |
| { |
| "epoch": 2.465863453815261, |
| "grad_norm": 0.18018037288858052, |
| "learning_rate": 9.85705777248362e-06, |
| "loss": 0.2716, |
| "step": 1536 |
| }, |
| { |
| "epoch": 2.467469879518072, |
| "grad_norm": 0.20592796772981525, |
| "learning_rate": 9.827278141751043e-06, |
| "loss": 0.2901, |
| "step": 1537 |
| }, |
| { |
| "epoch": 2.4690763052208835, |
| "grad_norm": 0.19330384391872885, |
| "learning_rate": 9.797498511018464e-06, |
| "loss": 0.2878, |
| "step": 1538 |
| }, |
| { |
| "epoch": 2.470682730923695, |
| "grad_norm": 0.18564767070391436, |
| "learning_rate": 9.767718880285886e-06, |
| "loss": 0.2743, |
| "step": 1539 |
| }, |
| { |
| "epoch": 2.472289156626506, |
| "grad_norm": 0.1948652825643195, |
| "learning_rate": 9.737939249553306e-06, |
| "loss": 0.2895, |
| "step": 1540 |
| }, |
| { |
| "epoch": 2.4738955823293174, |
| "grad_norm": 0.19144249683042172, |
| "learning_rate": 9.708159618820727e-06, |
| "loss": 0.2806, |
| "step": 1541 |
| }, |
| { |
| "epoch": 2.4755020080321284, |
| "grad_norm": 0.2029879781027749, |
| "learning_rate": 9.67837998808815e-06, |
| "loss": 0.2903, |
| "step": 1542 |
| }, |
| { |
| "epoch": 2.47710843373494, |
| "grad_norm": 0.18251041716955976, |
| "learning_rate": 9.64860035735557e-06, |
| "loss": 0.2733, |
| "step": 1543 |
| }, |
| { |
| "epoch": 2.478714859437751, |
| "grad_norm": 0.18821726631570496, |
| "learning_rate": 9.61882072662299e-06, |
| "loss": 0.2811, |
| "step": 1544 |
| }, |
| { |
| "epoch": 2.4803212851405623, |
| "grad_norm": 0.1890188257297131, |
| "learning_rate": 9.589041095890411e-06, |
| "loss": 0.2848, |
| "step": 1545 |
| }, |
| { |
| "epoch": 2.4819277108433733, |
| "grad_norm": 0.18495983065658275, |
| "learning_rate": 9.559261465157833e-06, |
| "loss": 0.2776, |
| "step": 1546 |
| }, |
| { |
| "epoch": 2.4835341365461847, |
| "grad_norm": 0.18760223528588468, |
| "learning_rate": 9.529481834425254e-06, |
| "loss": 0.288, |
| "step": 1547 |
| }, |
| { |
| "epoch": 2.485140562248996, |
| "grad_norm": 0.18121608702361014, |
| "learning_rate": 9.499702203692674e-06, |
| "loss": 0.279, |
| "step": 1548 |
| }, |
| { |
| "epoch": 2.486746987951807, |
| "grad_norm": 0.1851818964587047, |
| "learning_rate": 9.469922572960095e-06, |
| "loss": 0.2834, |
| "step": 1549 |
| }, |
| { |
| "epoch": 2.4883534136546186, |
| "grad_norm": 0.20877119041333475, |
| "learning_rate": 9.440142942227517e-06, |
| "loss": 0.2799, |
| "step": 1550 |
| }, |
| { |
| "epoch": 2.4899598393574296, |
| "grad_norm": 0.18641603386738315, |
| "learning_rate": 9.410363311494938e-06, |
| "loss": 0.2857, |
| "step": 1551 |
| }, |
| { |
| "epoch": 2.491566265060241, |
| "grad_norm": 0.17930127671625415, |
| "learning_rate": 9.380583680762358e-06, |
| "loss": 0.2809, |
| "step": 1552 |
| }, |
| { |
| "epoch": 2.493172690763052, |
| "grad_norm": 0.19095375958840477, |
| "learning_rate": 9.350804050029779e-06, |
| "loss": 0.2892, |
| "step": 1553 |
| }, |
| { |
| "epoch": 2.4947791164658635, |
| "grad_norm": 0.2208696008778948, |
| "learning_rate": 9.321024419297201e-06, |
| "loss": 0.29, |
| "step": 1554 |
| }, |
| { |
| "epoch": 2.4963855421686745, |
| "grad_norm": 0.17527580643972474, |
| "learning_rate": 9.291244788564621e-06, |
| "loss": 0.267, |
| "step": 1555 |
| }, |
| { |
| "epoch": 2.497991967871486, |
| "grad_norm": 0.17976690381454716, |
| "learning_rate": 9.261465157832044e-06, |
| "loss": 0.2735, |
| "step": 1556 |
| }, |
| { |
| "epoch": 2.4995983935742974, |
| "grad_norm": 0.20219422951098998, |
| "learning_rate": 9.231685527099464e-06, |
| "loss": 0.2809, |
| "step": 1557 |
| }, |
| { |
| "epoch": 2.5012048192771084, |
| "grad_norm": 0.18319632216230614, |
| "learning_rate": 9.201905896366887e-06, |
| "loss": 0.2847, |
| "step": 1558 |
| }, |
| { |
| "epoch": 2.50281124497992, |
| "grad_norm": 0.19027327482072806, |
| "learning_rate": 9.172126265634307e-06, |
| "loss": 0.2798, |
| "step": 1559 |
| }, |
| { |
| "epoch": 2.504417670682731, |
| "grad_norm": 0.17500345115805288, |
| "learning_rate": 9.142346634901728e-06, |
| "loss": 0.2822, |
| "step": 1560 |
| }, |
| { |
| "epoch": 2.5060240963855422, |
| "grad_norm": 0.19553868356390833, |
| "learning_rate": 9.11256700416915e-06, |
| "loss": 0.2881, |
| "step": 1561 |
| }, |
| { |
| "epoch": 2.5076305220883537, |
| "grad_norm": 0.1982913757337272, |
| "learning_rate": 9.08278737343657e-06, |
| "loss": 0.2689, |
| "step": 1562 |
| }, |
| { |
| "epoch": 2.5092369477911647, |
| "grad_norm": 0.17241622331403286, |
| "learning_rate": 9.053007742703991e-06, |
| "loss": 0.265, |
| "step": 1563 |
| }, |
| { |
| "epoch": 2.5108433734939757, |
| "grad_norm": 0.20400008665717287, |
| "learning_rate": 9.023228111971412e-06, |
| "loss": 0.2861, |
| "step": 1564 |
| }, |
| { |
| "epoch": 2.512449799196787, |
| "grad_norm": 0.18947077833730427, |
| "learning_rate": 8.993448481238834e-06, |
| "loss": 0.29, |
| "step": 1565 |
| }, |
| { |
| "epoch": 2.5140562248995986, |
| "grad_norm": 0.1931313038797478, |
| "learning_rate": 8.963668850506254e-06, |
| "loss": 0.2825, |
| "step": 1566 |
| }, |
| { |
| "epoch": 2.5156626506024096, |
| "grad_norm": 0.1924361737065017, |
| "learning_rate": 8.933889219773675e-06, |
| "loss": 0.2914, |
| "step": 1567 |
| }, |
| { |
| "epoch": 2.517269076305221, |
| "grad_norm": 0.20161485896708153, |
| "learning_rate": 8.904109589041095e-06, |
| "loss": 0.2844, |
| "step": 1568 |
| }, |
| { |
| "epoch": 2.518875502008032, |
| "grad_norm": 0.1938947469685224, |
| "learning_rate": 8.874329958308518e-06, |
| "loss": 0.2809, |
| "step": 1569 |
| }, |
| { |
| "epoch": 2.5204819277108435, |
| "grad_norm": 0.1981657020185443, |
| "learning_rate": 8.844550327575938e-06, |
| "loss": 0.2776, |
| "step": 1570 |
| }, |
| { |
| "epoch": 2.522088353413655, |
| "grad_norm": 0.1852544079685128, |
| "learning_rate": 8.814770696843359e-06, |
| "loss": 0.283, |
| "step": 1571 |
| }, |
| { |
| "epoch": 2.523694779116466, |
| "grad_norm": 0.18257009414972816, |
| "learning_rate": 8.78499106611078e-06, |
| "loss": 0.2777, |
| "step": 1572 |
| }, |
| { |
| "epoch": 2.525301204819277, |
| "grad_norm": 0.20960854561551556, |
| "learning_rate": 8.755211435378202e-06, |
| "loss": 0.2774, |
| "step": 1573 |
| }, |
| { |
| "epoch": 2.5269076305220883, |
| "grad_norm": 0.21428861794421236, |
| "learning_rate": 8.725431804645622e-06, |
| "loss": 0.2798, |
| "step": 1574 |
| }, |
| { |
| "epoch": 2.5285140562248998, |
| "grad_norm": 0.2148320921752406, |
| "learning_rate": 8.695652173913044e-06, |
| "loss": 0.2922, |
| "step": 1575 |
| }, |
| { |
| "epoch": 2.5301204819277108, |
| "grad_norm": 0.18357213594521285, |
| "learning_rate": 8.665872543180465e-06, |
| "loss": 0.2868, |
| "step": 1576 |
| }, |
| { |
| "epoch": 2.531726907630522, |
| "grad_norm": 0.19535424046278613, |
| "learning_rate": 8.636092912447887e-06, |
| "loss": 0.2743, |
| "step": 1577 |
| }, |
| { |
| "epoch": 2.533333333333333, |
| "grad_norm": 0.20462130886223842, |
| "learning_rate": 8.606313281715308e-06, |
| "loss": 0.2815, |
| "step": 1578 |
| }, |
| { |
| "epoch": 2.5349397590361447, |
| "grad_norm": 0.19449775734044208, |
| "learning_rate": 8.576533650982728e-06, |
| "loss": 0.2885, |
| "step": 1579 |
| }, |
| { |
| "epoch": 2.536546184738956, |
| "grad_norm": 0.19910861376895686, |
| "learning_rate": 8.546754020250149e-06, |
| "loss": 0.2866, |
| "step": 1580 |
| }, |
| { |
| "epoch": 2.538152610441767, |
| "grad_norm": 0.19417866975855358, |
| "learning_rate": 8.516974389517571e-06, |
| "loss": 0.2781, |
| "step": 1581 |
| }, |
| { |
| "epoch": 2.539759036144578, |
| "grad_norm": 0.18478437068715128, |
| "learning_rate": 8.487194758784992e-06, |
| "loss": 0.2826, |
| "step": 1582 |
| }, |
| { |
| "epoch": 2.5413654618473895, |
| "grad_norm": 0.19700676917349086, |
| "learning_rate": 8.457415128052412e-06, |
| "loss": 0.2783, |
| "step": 1583 |
| }, |
| { |
| "epoch": 2.542971887550201, |
| "grad_norm": 0.19512337491230494, |
| "learning_rate": 8.427635497319834e-06, |
| "loss": 0.2708, |
| "step": 1584 |
| }, |
| { |
| "epoch": 2.544578313253012, |
| "grad_norm": 0.1875799539925133, |
| "learning_rate": 8.397855866587255e-06, |
| "loss": 0.2773, |
| "step": 1585 |
| }, |
| { |
| "epoch": 2.5461847389558234, |
| "grad_norm": 0.18076364858709523, |
| "learning_rate": 8.368076235854676e-06, |
| "loss": 0.288, |
| "step": 1586 |
| }, |
| { |
| "epoch": 2.5477911646586344, |
| "grad_norm": 0.18428627299969846, |
| "learning_rate": 8.338296605122096e-06, |
| "loss": 0.2973, |
| "step": 1587 |
| }, |
| { |
| "epoch": 2.549397590361446, |
| "grad_norm": 0.18854751285290158, |
| "learning_rate": 8.308516974389518e-06, |
| "loss": 0.2815, |
| "step": 1588 |
| }, |
| { |
| "epoch": 2.551004016064257, |
| "grad_norm": 0.18930031139579048, |
| "learning_rate": 8.278737343656939e-06, |
| "loss": 0.2879, |
| "step": 1589 |
| }, |
| { |
| "epoch": 2.5526104417670683, |
| "grad_norm": 0.18859917440773347, |
| "learning_rate": 8.24895771292436e-06, |
| "loss": 0.282, |
| "step": 1590 |
| }, |
| { |
| "epoch": 2.5542168674698793, |
| "grad_norm": 0.18547151485453509, |
| "learning_rate": 8.21917808219178e-06, |
| "loss": 0.2921, |
| "step": 1591 |
| }, |
| { |
| "epoch": 2.5558232931726907, |
| "grad_norm": 0.18825341883711819, |
| "learning_rate": 8.189398451459202e-06, |
| "loss": 0.2694, |
| "step": 1592 |
| }, |
| { |
| "epoch": 2.557429718875502, |
| "grad_norm": 0.1804570932899742, |
| "learning_rate": 8.159618820726623e-06, |
| "loss": 0.2758, |
| "step": 1593 |
| }, |
| { |
| "epoch": 2.559036144578313, |
| "grad_norm": 0.19094162633691125, |
| "learning_rate": 8.129839189994045e-06, |
| "loss": 0.2676, |
| "step": 1594 |
| }, |
| { |
| "epoch": 2.5606425702811246, |
| "grad_norm": 0.19021680588033746, |
| "learning_rate": 8.100059559261466e-06, |
| "loss": 0.2768, |
| "step": 1595 |
| }, |
| { |
| "epoch": 2.5622489959839356, |
| "grad_norm": 0.21083832609587919, |
| "learning_rate": 8.070279928528888e-06, |
| "loss": 0.2983, |
| "step": 1596 |
| }, |
| { |
| "epoch": 2.563855421686747, |
| "grad_norm": 0.1830825332075518, |
| "learning_rate": 8.040500297796308e-06, |
| "loss": 0.2692, |
| "step": 1597 |
| }, |
| { |
| "epoch": 2.565461847389558, |
| "grad_norm": 0.18269990559890056, |
| "learning_rate": 8.010720667063729e-06, |
| "loss": 0.2925, |
| "step": 1598 |
| }, |
| { |
| "epoch": 2.5670682730923695, |
| "grad_norm": 0.19413648799393457, |
| "learning_rate": 7.98094103633115e-06, |
| "loss": 0.2847, |
| "step": 1599 |
| }, |
| { |
| "epoch": 2.5686746987951805, |
| "grad_norm": 0.19049420255904484, |
| "learning_rate": 7.951161405598572e-06, |
| "loss": 0.2784, |
| "step": 1600 |
| }, |
| { |
| "epoch": 2.570281124497992, |
| "grad_norm": 0.18580716065517364, |
| "learning_rate": 7.921381774865992e-06, |
| "loss": 0.2775, |
| "step": 1601 |
| }, |
| { |
| "epoch": 2.5718875502008034, |
| "grad_norm": 0.18486961647225106, |
| "learning_rate": 7.891602144133413e-06, |
| "loss": 0.2781, |
| "step": 1602 |
| }, |
| { |
| "epoch": 2.5734939759036144, |
| "grad_norm": 0.18132379562904336, |
| "learning_rate": 7.861822513400833e-06, |
| "loss": 0.2792, |
| "step": 1603 |
| }, |
| { |
| "epoch": 2.575100401606426, |
| "grad_norm": 0.18415948796334825, |
| "learning_rate": 7.832042882668256e-06, |
| "loss": 0.2769, |
| "step": 1604 |
| }, |
| { |
| "epoch": 2.576706827309237, |
| "grad_norm": 0.19136470915187226, |
| "learning_rate": 7.802263251935676e-06, |
| "loss": 0.2783, |
| "step": 1605 |
| }, |
| { |
| "epoch": 2.5783132530120483, |
| "grad_norm": 0.20649143995929636, |
| "learning_rate": 7.772483621203097e-06, |
| "loss": 0.2873, |
| "step": 1606 |
| }, |
| { |
| "epoch": 2.5799196787148593, |
| "grad_norm": 0.19068669901482754, |
| "learning_rate": 7.742703990470517e-06, |
| "loss": 0.2809, |
| "step": 1607 |
| }, |
| { |
| "epoch": 2.5815261044176707, |
| "grad_norm": 0.18357700581539366, |
| "learning_rate": 7.71292435973794e-06, |
| "loss": 0.2902, |
| "step": 1608 |
| }, |
| { |
| "epoch": 2.5831325301204817, |
| "grad_norm": 0.18669030669393102, |
| "learning_rate": 7.68314472900536e-06, |
| "loss": 0.292, |
| "step": 1609 |
| }, |
| { |
| "epoch": 2.584738955823293, |
| "grad_norm": 0.22347819315091055, |
| "learning_rate": 7.65336509827278e-06, |
| "loss": 0.2946, |
| "step": 1610 |
| }, |
| { |
| "epoch": 2.5863453815261046, |
| "grad_norm": 0.18607762756921192, |
| "learning_rate": 7.623585467540204e-06, |
| "loss": 0.2705, |
| "step": 1611 |
| }, |
| { |
| "epoch": 2.5879518072289156, |
| "grad_norm": 0.1958821500543021, |
| "learning_rate": 7.593805836807624e-06, |
| "loss": 0.2966, |
| "step": 1612 |
| }, |
| { |
| "epoch": 2.589558232931727, |
| "grad_norm": 0.1696731750041806, |
| "learning_rate": 7.564026206075045e-06, |
| "loss": 0.2552, |
| "step": 1613 |
| }, |
| { |
| "epoch": 2.591164658634538, |
| "grad_norm": 0.18225337475718914, |
| "learning_rate": 7.5342465753424655e-06, |
| "loss": 0.2722, |
| "step": 1614 |
| }, |
| { |
| "epoch": 2.5927710843373495, |
| "grad_norm": 0.19685522891076973, |
| "learning_rate": 7.504466944609888e-06, |
| "loss": 0.2832, |
| "step": 1615 |
| }, |
| { |
| "epoch": 2.5943775100401605, |
| "grad_norm": 0.17949581001712991, |
| "learning_rate": 7.474687313877308e-06, |
| "loss": 0.2733, |
| "step": 1616 |
| }, |
| { |
| "epoch": 2.595983935742972, |
| "grad_norm": 0.18660771363916165, |
| "learning_rate": 7.44490768314473e-06, |
| "loss": 0.2745, |
| "step": 1617 |
| }, |
| { |
| "epoch": 2.597590361445783, |
| "grad_norm": 0.18638889562090943, |
| "learning_rate": 7.41512805241215e-06, |
| "loss": 0.2689, |
| "step": 1618 |
| }, |
| { |
| "epoch": 2.5991967871485944, |
| "grad_norm": 0.18704008147122786, |
| "learning_rate": 7.3853484216795725e-06, |
| "loss": 0.2759, |
| "step": 1619 |
| }, |
| { |
| "epoch": 2.600803212851406, |
| "grad_norm": 0.18443469365017764, |
| "learning_rate": 7.355568790946993e-06, |
| "loss": 0.2658, |
| "step": 1620 |
| }, |
| { |
| "epoch": 2.602409638554217, |
| "grad_norm": 0.19062879189349496, |
| "learning_rate": 7.3257891602144136e-06, |
| "loss": 0.2788, |
| "step": 1621 |
| }, |
| { |
| "epoch": 2.6040160642570283, |
| "grad_norm": 0.1899150420248041, |
| "learning_rate": 7.296009529481834e-06, |
| "loss": 0.278, |
| "step": 1622 |
| }, |
| { |
| "epoch": 2.6056224899598392, |
| "grad_norm": 0.1949125724322077, |
| "learning_rate": 7.266229898749256e-06, |
| "loss": 0.2899, |
| "step": 1623 |
| }, |
| { |
| "epoch": 2.6072289156626507, |
| "grad_norm": 0.1699098074378404, |
| "learning_rate": 7.236450268016677e-06, |
| "loss": 0.2794, |
| "step": 1624 |
| }, |
| { |
| "epoch": 2.6088353413654617, |
| "grad_norm": 0.1915104653537304, |
| "learning_rate": 7.2066706372840975e-06, |
| "loss": 0.2787, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.610441767068273, |
| "grad_norm": 0.197531787181883, |
| "learning_rate": 7.176891006551519e-06, |
| "loss": 0.277, |
| "step": 1626 |
| }, |
| { |
| "epoch": 2.612048192771084, |
| "grad_norm": 0.1791632253109545, |
| "learning_rate": 7.14711137581894e-06, |
| "loss": 0.284, |
| "step": 1627 |
| }, |
| { |
| "epoch": 2.6136546184738956, |
| "grad_norm": 0.1882752270203241, |
| "learning_rate": 7.117331745086362e-06, |
| "loss": 0.2852, |
| "step": 1628 |
| }, |
| { |
| "epoch": 2.615261044176707, |
| "grad_norm": 0.17840672102386784, |
| "learning_rate": 7.087552114353782e-06, |
| "loss": 0.2613, |
| "step": 1629 |
| }, |
| { |
| "epoch": 2.616867469879518, |
| "grad_norm": 0.18391588960980307, |
| "learning_rate": 7.057772483621203e-06, |
| "loss": 0.2797, |
| "step": 1630 |
| }, |
| { |
| "epoch": 2.6184738955823295, |
| "grad_norm": 0.18819826058200362, |
| "learning_rate": 7.027992852888625e-06, |
| "loss": 0.2874, |
| "step": 1631 |
| }, |
| { |
| "epoch": 2.6200803212851405, |
| "grad_norm": 0.18523122078571658, |
| "learning_rate": 6.9982132221560456e-06, |
| "loss": 0.282, |
| "step": 1632 |
| }, |
| { |
| "epoch": 2.621686746987952, |
| "grad_norm": 0.17922978175589946, |
| "learning_rate": 6.968433591423466e-06, |
| "loss": 0.2735, |
| "step": 1633 |
| }, |
| { |
| "epoch": 2.623293172690763, |
| "grad_norm": 0.18372374618188894, |
| "learning_rate": 6.938653960690887e-06, |
| "loss": 0.2808, |
| "step": 1634 |
| }, |
| { |
| "epoch": 2.6248995983935743, |
| "grad_norm": 0.1990798931781952, |
| "learning_rate": 6.908874329958309e-06, |
| "loss": 0.2864, |
| "step": 1635 |
| }, |
| { |
| "epoch": 2.6265060240963853, |
| "grad_norm": 0.18455806479532721, |
| "learning_rate": 6.87909469922573e-06, |
| "loss": 0.2774, |
| "step": 1636 |
| }, |
| { |
| "epoch": 2.628112449799197, |
| "grad_norm": 0.1807836176874832, |
| "learning_rate": 6.849315068493151e-06, |
| "loss": 0.2879, |
| "step": 1637 |
| }, |
| { |
| "epoch": 2.6297188755020082, |
| "grad_norm": 0.19383858435079449, |
| "learning_rate": 6.819535437760573e-06, |
| "loss": 0.2858, |
| "step": 1638 |
| }, |
| { |
| "epoch": 2.6313253012048192, |
| "grad_norm": 0.19245116237163992, |
| "learning_rate": 6.789755807027994e-06, |
| "loss": 0.2831, |
| "step": 1639 |
| }, |
| { |
| "epoch": 2.6329317269076307, |
| "grad_norm": 0.17990454268365383, |
| "learning_rate": 6.759976176295414e-06, |
| "loss": 0.2803, |
| "step": 1640 |
| }, |
| { |
| "epoch": 2.6345381526104417, |
| "grad_norm": 0.17767290721583814, |
| "learning_rate": 6.730196545562835e-06, |
| "loss": 0.281, |
| "step": 1641 |
| }, |
| { |
| "epoch": 2.636144578313253, |
| "grad_norm": 0.19991589531881362, |
| "learning_rate": 6.700416914830257e-06, |
| "loss": 0.2835, |
| "step": 1642 |
| }, |
| { |
| "epoch": 2.637751004016064, |
| "grad_norm": 0.18699080009874672, |
| "learning_rate": 6.670637284097678e-06, |
| "loss": 0.284, |
| "step": 1643 |
| }, |
| { |
| "epoch": 2.6393574297188755, |
| "grad_norm": 0.17930928117500708, |
| "learning_rate": 6.640857653365098e-06, |
| "loss": 0.2786, |
| "step": 1644 |
| }, |
| { |
| "epoch": 2.6409638554216865, |
| "grad_norm": 0.19453857949440811, |
| "learning_rate": 6.6110780226325195e-06, |
| "loss": 0.2734, |
| "step": 1645 |
| }, |
| { |
| "epoch": 2.642570281124498, |
| "grad_norm": 0.17684358944960415, |
| "learning_rate": 6.581298391899941e-06, |
| "loss": 0.2666, |
| "step": 1646 |
| }, |
| { |
| "epoch": 2.6441767068273094, |
| "grad_norm": 0.17025227550260216, |
| "learning_rate": 6.551518761167362e-06, |
| "loss": 0.2845, |
| "step": 1647 |
| }, |
| { |
| "epoch": 2.6457831325301204, |
| "grad_norm": 0.17834204727479083, |
| "learning_rate": 6.521739130434783e-06, |
| "loss": 0.2673, |
| "step": 1648 |
| }, |
| { |
| "epoch": 2.647389558232932, |
| "grad_norm": 0.18912658642714128, |
| "learning_rate": 6.4919594997022034e-06, |
| "loss": 0.2819, |
| "step": 1649 |
| }, |
| { |
| "epoch": 2.648995983935743, |
| "grad_norm": 0.1737681616774698, |
| "learning_rate": 6.462179868969626e-06, |
| "loss": 0.2811, |
| "step": 1650 |
| }, |
| { |
| "epoch": 2.6506024096385543, |
| "grad_norm": 0.17286827101324814, |
| "learning_rate": 6.432400238237046e-06, |
| "loss": 0.2758, |
| "step": 1651 |
| }, |
| { |
| "epoch": 2.6522088353413653, |
| "grad_norm": 0.19212478696833551, |
| "learning_rate": 6.402620607504467e-06, |
| "loss": 0.2969, |
| "step": 1652 |
| }, |
| { |
| "epoch": 2.6538152610441768, |
| "grad_norm": 0.17906924074942082, |
| "learning_rate": 6.372840976771887e-06, |
| "loss": 0.2749, |
| "step": 1653 |
| }, |
| { |
| "epoch": 2.6554216867469878, |
| "grad_norm": 0.18947641365558332, |
| "learning_rate": 6.34306134603931e-06, |
| "loss": 0.2822, |
| "step": 1654 |
| }, |
| { |
| "epoch": 2.657028112449799, |
| "grad_norm": 0.17356368064329025, |
| "learning_rate": 6.313281715306731e-06, |
| "loss": 0.2825, |
| "step": 1655 |
| }, |
| { |
| "epoch": 2.6586345381526106, |
| "grad_norm": 0.18057458726583847, |
| "learning_rate": 6.2835020845741515e-06, |
| "loss": 0.2825, |
| "step": 1656 |
| }, |
| { |
| "epoch": 2.6602409638554216, |
| "grad_norm": 0.1877542933350748, |
| "learning_rate": 6.253722453841572e-06, |
| "loss": 0.2914, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.661847389558233, |
| "grad_norm": 0.17969877942256923, |
| "learning_rate": 6.2239428231089935e-06, |
| "loss": 0.2829, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.663453815261044, |
| "grad_norm": 0.17870162644623386, |
| "learning_rate": 6.194163192376415e-06, |
| "loss": 0.2639, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.6650602409638555, |
| "grad_norm": 0.17385453495340514, |
| "learning_rate": 6.1643835616438354e-06, |
| "loss": 0.2822, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.17484147276070983, |
| "learning_rate": 6.134603930911257e-06, |
| "loss": 0.268, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.668273092369478, |
| "grad_norm": 0.1825438106082261, |
| "learning_rate": 6.104824300178678e-06, |
| "loss": 0.2854, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.669879518072289, |
| "grad_norm": 0.1836659740160302, |
| "learning_rate": 6.075044669446099e-06, |
| "loss": 0.2762, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.6714859437751004, |
| "grad_norm": 0.19695563144999662, |
| "learning_rate": 6.04526503871352e-06, |
| "loss": 0.2793, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.673092369477912, |
| "grad_norm": 0.1791319681791505, |
| "learning_rate": 6.015485407980942e-06, |
| "loss": 0.2773, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.674698795180723, |
| "grad_norm": 0.18117849182348353, |
| "learning_rate": 5.985705777248363e-06, |
| "loss": 0.2782, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.6763052208835343, |
| "grad_norm": 0.18394861678586363, |
| "learning_rate": 5.9559261465157836e-06, |
| "loss": 0.2724, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.6779116465863453, |
| "grad_norm": 0.17558775366985108, |
| "learning_rate": 5.926146515783205e-06, |
| "loss": 0.2763, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.6795180722891567, |
| "grad_norm": 0.1817180623536754, |
| "learning_rate": 5.8963668850506255e-06, |
| "loss": 0.2812, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.6811244979919677, |
| "grad_norm": 0.1794961501282779, |
| "learning_rate": 5.866587254318047e-06, |
| "loss": 0.2808, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.682730923694779, |
| "grad_norm": 0.17953298862907435, |
| "learning_rate": 5.8368076235854675e-06, |
| "loss": 0.2897, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.68433734939759, |
| "grad_norm": 0.1781021623495441, |
| "learning_rate": 5.807027992852889e-06, |
| "loss": 0.2774, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.6859437751004016, |
| "grad_norm": 0.19580053060440614, |
| "learning_rate": 5.777248362120309e-06, |
| "loss": 0.2828, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.687550200803213, |
| "grad_norm": 0.18017972515436526, |
| "learning_rate": 5.747468731387731e-06, |
| "loss": 0.282, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.689156626506024, |
| "grad_norm": 0.17822514747203339, |
| "learning_rate": 5.717689100655152e-06, |
| "loss": 0.2773, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.6907630522088355, |
| "grad_norm": 0.18518154759023206, |
| "learning_rate": 5.687909469922574e-06, |
| "loss": 0.283, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.6923694779116465, |
| "grad_norm": 0.19759436455167156, |
| "learning_rate": 5.658129839189994e-06, |
| "loss": 0.2893, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.693975903614458, |
| "grad_norm": 0.17831832185569604, |
| "learning_rate": 5.6283502084574156e-06, |
| "loss": 0.2788, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.695582329317269, |
| "grad_norm": 0.1822729335393522, |
| "learning_rate": 5.598570577724836e-06, |
| "loss": 0.2815, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.6971887550200804, |
| "grad_norm": 0.18637284782160773, |
| "learning_rate": 5.5687909469922575e-06, |
| "loss": 0.2754, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.6987951807228914, |
| "grad_norm": 0.21541418445055555, |
| "learning_rate": 5.539011316259678e-06, |
| "loss": 0.2699, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.700401606425703, |
| "grad_norm": 0.184036491711288, |
| "learning_rate": 5.5092316855270995e-06, |
| "loss": 0.2719, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.7020080321285143, |
| "grad_norm": 0.18589778378254496, |
| "learning_rate": 5.479452054794521e-06, |
| "loss": 0.2707, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.7036144578313253, |
| "grad_norm": 0.19434906672779567, |
| "learning_rate": 5.449672424061942e-06, |
| "loss": 0.3102, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.7052208835341367, |
| "grad_norm": 0.19696554447531195, |
| "learning_rate": 5.419892793329363e-06, |
| "loss": 0.2913, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.7068273092369477, |
| "grad_norm": 0.1872426169089665, |
| "learning_rate": 5.390113162596784e-06, |
| "loss": 0.2812, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.708433734939759, |
| "grad_norm": 0.18592737019395528, |
| "learning_rate": 5.360333531864206e-06, |
| "loss": 0.2693, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.71004016064257, |
| "grad_norm": 0.19133953951267615, |
| "learning_rate": 5.330553901131626e-06, |
| "loss": 0.2874, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.7116465863453816, |
| "grad_norm": 0.1910105265401887, |
| "learning_rate": 5.3007742703990476e-06, |
| "loss": 0.2839, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.7132530120481926, |
| "grad_norm": 0.20107523425093335, |
| "learning_rate": 5.270994639666468e-06, |
| "loss": 0.2848, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.714859437751004, |
| "grad_norm": 0.18131315076806906, |
| "learning_rate": 5.2412150089338895e-06, |
| "loss": 0.2936, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.7164658634538155, |
| "grad_norm": 0.17998179499764488, |
| "learning_rate": 5.21143537820131e-06, |
| "loss": 0.2726, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.7180722891566265, |
| "grad_norm": 0.17958592327392905, |
| "learning_rate": 5.1816557474687315e-06, |
| "loss": 0.2852, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.719678714859438, |
| "grad_norm": 0.183308753969173, |
| "learning_rate": 5.151876116736153e-06, |
| "loss": 0.2844, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.721285140562249, |
| "grad_norm": 0.19652975978403583, |
| "learning_rate": 5.122096486003574e-06, |
| "loss": 0.276, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.7228915662650603, |
| "grad_norm": 0.1862766900571352, |
| "learning_rate": 5.092316855270995e-06, |
| "loss": 0.291, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.7244979919678713, |
| "grad_norm": 0.1789975019779833, |
| "learning_rate": 5.062537224538416e-06, |
| "loss": 0.2678, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.726104417670683, |
| "grad_norm": 0.1804473631478777, |
| "learning_rate": 5.032757593805837e-06, |
| "loss": 0.2768, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.727710843373494, |
| "grad_norm": 0.17484370027170906, |
| "learning_rate": 5.002977963073258e-06, |
| "loss": 0.2745, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.7293172690763052, |
| "grad_norm": 0.18840033620754235, |
| "learning_rate": 4.973198332340679e-06, |
| "loss": 0.2935, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.7309236947791167, |
| "grad_norm": 0.1774236454463638, |
| "learning_rate": 4.9434187016081e-06, |
| "loss": 0.2861, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.7325301204819277, |
| "grad_norm": 0.174204170118579, |
| "learning_rate": 4.9136390708755215e-06, |
| "loss": 0.2874, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.734136546184739, |
| "grad_norm": 0.18746916237309122, |
| "learning_rate": 4.883859440142943e-06, |
| "loss": 0.2988, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.73574297188755, |
| "grad_norm": 0.18760654275484376, |
| "learning_rate": 4.8540798094103635e-06, |
| "loss": 0.3052, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.7373493975903616, |
| "grad_norm": 0.18517560114201603, |
| "learning_rate": 4.824300178677785e-06, |
| "loss": 0.2854, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.7389558232931726, |
| "grad_norm": 0.1876520550357728, |
| "learning_rate": 4.7945205479452054e-06, |
| "loss": 0.2932, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.740562248995984, |
| "grad_norm": 0.17966388420192841, |
| "learning_rate": 4.764740917212627e-06, |
| "loss": 0.28, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.742168674698795, |
| "grad_norm": 0.179020115591498, |
| "learning_rate": 4.734961286480047e-06, |
| "loss": 0.2864, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.7437751004016064, |
| "grad_norm": 0.1817568004265401, |
| "learning_rate": 4.705181655747469e-06, |
| "loss": 0.2923, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.745381526104418, |
| "grad_norm": 0.177238610054722, |
| "learning_rate": 4.675402025014889e-06, |
| "loss": 0.2762, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.746987951807229, |
| "grad_norm": 0.1896915852118844, |
| "learning_rate": 4.645622394282311e-06, |
| "loss": 0.2784, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.7485943775100403, |
| "grad_norm": 0.1942534377604623, |
| "learning_rate": 4.615842763549732e-06, |
| "loss": 0.2856, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.7502008032128513, |
| "grad_norm": 0.18421086652363167, |
| "learning_rate": 4.5860631328171535e-06, |
| "loss": 0.2779, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.7518072289156628, |
| "grad_norm": 0.17539043641917704, |
| "learning_rate": 4.556283502084575e-06, |
| "loss": 0.2826, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.7534136546184738, |
| "grad_norm": 0.1744055331592788, |
| "learning_rate": 4.5265038713519955e-06, |
| "loss": 0.2731, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.755020080321285, |
| "grad_norm": 0.1724485729429514, |
| "learning_rate": 4.496724240619417e-06, |
| "loss": 0.2882, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.756626506024096, |
| "grad_norm": 0.18273987004893796, |
| "learning_rate": 4.4669446098868375e-06, |
| "loss": 0.2851, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.7582329317269076, |
| "grad_norm": 0.182839997686776, |
| "learning_rate": 4.437164979154259e-06, |
| "loss": 0.2718, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.759839357429719, |
| "grad_norm": 0.18031944958924873, |
| "learning_rate": 4.407385348421679e-06, |
| "loss": 0.2852, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.76144578313253, |
| "grad_norm": 0.18304611053583184, |
| "learning_rate": 4.377605717689101e-06, |
| "loss": 0.2753, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.7630522088353415, |
| "grad_norm": 0.18653131767935718, |
| "learning_rate": 4.347826086956522e-06, |
| "loss": 0.2912, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.7646586345381525, |
| "grad_norm": 0.18280709251927277, |
| "learning_rate": 4.318046456223944e-06, |
| "loss": 0.2814, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.766265060240964, |
| "grad_norm": 0.1730903954482395, |
| "learning_rate": 4.288266825491364e-06, |
| "loss": 0.2782, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.767871485943775, |
| "grad_norm": 0.1860699548269679, |
| "learning_rate": 4.2584871947587856e-06, |
| "loss": 0.273, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.7694779116465864, |
| "grad_norm": 0.1781795487696371, |
| "learning_rate": 4.228707564026206e-06, |
| "loss": 0.2807, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.7710843373493974, |
| "grad_norm": 0.18497222231295918, |
| "learning_rate": 4.1989279332936275e-06, |
| "loss": 0.2904, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.772690763052209, |
| "grad_norm": 0.20870406694142798, |
| "learning_rate": 4.169148302561048e-06, |
| "loss": 0.2924, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.7742971887550203, |
| "grad_norm": 0.18032970445124402, |
| "learning_rate": 4.1393686718284695e-06, |
| "loss": 0.2752, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.7759036144578313, |
| "grad_norm": 0.17508172159384913, |
| "learning_rate": 4.10958904109589e-06, |
| "loss": 0.2726, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.7775100401606427, |
| "grad_norm": 0.17555459903201376, |
| "learning_rate": 4.079809410363311e-06, |
| "loss": 0.2855, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.7791164658634537, |
| "grad_norm": 0.17205832766295667, |
| "learning_rate": 4.050029779630733e-06, |
| "loss": 0.2657, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.780722891566265, |
| "grad_norm": 0.1829609417989265, |
| "learning_rate": 4.020250148898154e-06, |
| "loss": 0.2855, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.782329317269076, |
| "grad_norm": 0.17878692408711844, |
| "learning_rate": 3.990470518165575e-06, |
| "loss": 0.2793, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.7839357429718876, |
| "grad_norm": 0.19508318768702912, |
| "learning_rate": 3.960690887432996e-06, |
| "loss": 0.2842, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.7855421686746986, |
| "grad_norm": 0.17987705205522847, |
| "learning_rate": 3.930911256700417e-06, |
| "loss": 0.2763, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.78714859437751, |
| "grad_norm": 0.17185264560334, |
| "learning_rate": 3.901131625967838e-06, |
| "loss": 0.2793, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.7887550200803215, |
| "grad_norm": 0.1798915631146029, |
| "learning_rate": 3.871351995235259e-06, |
| "loss": 0.2823, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.7903614457831325, |
| "grad_norm": 0.17936783365240172, |
| "learning_rate": 3.84157236450268e-06, |
| "loss": 0.2923, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.7919678714859435, |
| "grad_norm": 0.17881359287676532, |
| "learning_rate": 3.811792733770102e-06, |
| "loss": 0.283, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.793574297188755, |
| "grad_norm": 0.17750412281273836, |
| "learning_rate": 3.7820131030375225e-06, |
| "loss": 0.2774, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.7951807228915664, |
| "grad_norm": 0.17978912748152973, |
| "learning_rate": 3.752233472304944e-06, |
| "loss": 0.3089, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.7967871485943774, |
| "grad_norm": 0.1826655687491039, |
| "learning_rate": 3.722453841572365e-06, |
| "loss": 0.2709, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.798393574297189, |
| "grad_norm": 0.18001177870589455, |
| "learning_rate": 3.6926742108397862e-06, |
| "loss": 0.2737, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.17713047219916325, |
| "learning_rate": 3.6628945801072068e-06, |
| "loss": 0.2752, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.8016064257028113, |
| "grad_norm": 0.18298092591553738, |
| "learning_rate": 3.633114949374628e-06, |
| "loss": 0.2873, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.8032128514056227, |
| "grad_norm": 0.17757847821734857, |
| "learning_rate": 3.6033353186420487e-06, |
| "loss": 0.2846, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.8048192771084337, |
| "grad_norm": 0.18958642415753324, |
| "learning_rate": 3.57355568790947e-06, |
| "loss": 0.2926, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.8064257028112447, |
| "grad_norm": 0.18146180673271486, |
| "learning_rate": 3.543776057176891e-06, |
| "loss": 0.2791, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.808032128514056, |
| "grad_norm": 0.17453241970228875, |
| "learning_rate": 3.5139964264443125e-06, |
| "loss": 0.2724, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.8096385542168676, |
| "grad_norm": 0.17966834865117448, |
| "learning_rate": 3.484216795711733e-06, |
| "loss": 0.2787, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.8112449799196786, |
| "grad_norm": 0.1784681684891437, |
| "learning_rate": 3.4544371649791545e-06, |
| "loss": 0.2764, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.81285140562249, |
| "grad_norm": 0.18572317206096217, |
| "learning_rate": 3.4246575342465754e-06, |
| "loss": 0.2709, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.814457831325301, |
| "grad_norm": 0.1759488290957744, |
| "learning_rate": 3.394877903513997e-06, |
| "loss": 0.2815, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.8160642570281125, |
| "grad_norm": 0.19336096846491638, |
| "learning_rate": 3.3650982727814174e-06, |
| "loss": 0.2744, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.817670682730924, |
| "grad_norm": 0.18283745315959676, |
| "learning_rate": 3.335318642048839e-06, |
| "loss": 0.2921, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.819277108433735, |
| "grad_norm": 0.17472330034447986, |
| "learning_rate": 3.3055390113162598e-06, |
| "loss": 0.2756, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.820883534136546, |
| "grad_norm": 0.1800313759664309, |
| "learning_rate": 3.275759380583681e-06, |
| "loss": 0.2812, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.8224899598393574, |
| "grad_norm": 0.1756785630336977, |
| "learning_rate": 3.2459797498511017e-06, |
| "loss": 0.2587, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.824096385542169, |
| "grad_norm": 0.1932923197044465, |
| "learning_rate": 3.216200119118523e-06, |
| "loss": 0.2707, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.82570281124498, |
| "grad_norm": 0.1840245526187382, |
| "learning_rate": 3.1864204883859437e-06, |
| "loss": 0.2837, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.8273092369477912, |
| "grad_norm": 0.1766729183541744, |
| "learning_rate": 3.1566408576533655e-06, |
| "loss": 0.2845, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.8289156626506022, |
| "grad_norm": 0.1841631130200818, |
| "learning_rate": 3.126861226920786e-06, |
| "loss": 0.2809, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.8305220883534137, |
| "grad_norm": 0.17038920533561577, |
| "learning_rate": 3.0970815961882074e-06, |
| "loss": 0.2872, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.832128514056225, |
| "grad_norm": 0.18490513608282008, |
| "learning_rate": 3.0673019654556284e-06, |
| "loss": 0.2827, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.833734939759036, |
| "grad_norm": 0.1793068608455851, |
| "learning_rate": 3.0375223347230494e-06, |
| "loss": 0.2915, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.835341365461847, |
| "grad_norm": 0.18910291135979634, |
| "learning_rate": 3.007742703990471e-06, |
| "loss": 0.2954, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.8369477911646586, |
| "grad_norm": 0.1794649230471941, |
| "learning_rate": 2.9779630732578918e-06, |
| "loss": 0.282, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.83855421686747, |
| "grad_norm": 0.17996525722863402, |
| "learning_rate": 2.9481834425253128e-06, |
| "loss": 0.2905, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.840160642570281, |
| "grad_norm": 0.16999154285816043, |
| "learning_rate": 2.9184038117927337e-06, |
| "loss": 0.2645, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.8417670682730924, |
| "grad_norm": 0.1739829332617308, |
| "learning_rate": 2.8886241810601547e-06, |
| "loss": 0.2778, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.8433734939759034, |
| "grad_norm": 0.1862630294313089, |
| "learning_rate": 2.858844550327576e-06, |
| "loss": 0.2734, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.844979919678715, |
| "grad_norm": 0.1833390723721438, |
| "learning_rate": 2.829064919594997e-06, |
| "loss": 0.3016, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.8465863453815263, |
| "grad_norm": 0.17222764961420253, |
| "learning_rate": 2.799285288862418e-06, |
| "loss": 0.2616, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.8481927710843373, |
| "grad_norm": 0.17723986699960997, |
| "learning_rate": 2.769505658129839e-06, |
| "loss": 0.2816, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.8497991967871483, |
| "grad_norm": 0.17550492152209196, |
| "learning_rate": 2.7397260273972604e-06, |
| "loss": 0.2999, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.8514056224899598, |
| "grad_norm": 0.17567847530277153, |
| "learning_rate": 2.7099463966646814e-06, |
| "loss": 0.2903, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.853012048192771, |
| "grad_norm": 0.1825234418627062, |
| "learning_rate": 2.680166765932103e-06, |
| "loss": 0.2873, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.854618473895582, |
| "grad_norm": 0.17878324566094148, |
| "learning_rate": 2.6503871351995238e-06, |
| "loss": 0.2885, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.8562248995983937, |
| "grad_norm": 0.18249488651514392, |
| "learning_rate": 2.6206075044669448e-06, |
| "loss": 0.2871, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.8578313253012047, |
| "grad_norm": 0.17760052228984852, |
| "learning_rate": 2.5908278737343657e-06, |
| "loss": 0.2717, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.859437751004016, |
| "grad_norm": 0.18808062189817043, |
| "learning_rate": 2.561048243001787e-06, |
| "loss": 0.2876, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.8610441767068275, |
| "grad_norm": 0.1823780795570633, |
| "learning_rate": 2.531268612269208e-06, |
| "loss": 0.2867, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.8626506024096385, |
| "grad_norm": 0.1885706803388553, |
| "learning_rate": 2.501488981536629e-06, |
| "loss": 0.2897, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.8642570281124495, |
| "grad_norm": 0.17633385450538436, |
| "learning_rate": 2.47170935080405e-06, |
| "loss": 0.2786, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.865863453815261, |
| "grad_norm": 0.18185187408978562, |
| "learning_rate": 2.4419297200714715e-06, |
| "loss": 0.2918, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.8674698795180724, |
| "grad_norm": 0.18061017522143138, |
| "learning_rate": 2.4121500893388924e-06, |
| "loss": 0.282, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.8690763052208834, |
| "grad_norm": 0.1703671192800184, |
| "learning_rate": 2.3823704586063134e-06, |
| "loss": 0.2802, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.870682730923695, |
| "grad_norm": 0.18089410922349705, |
| "learning_rate": 2.3525908278737344e-06, |
| "loss": 0.2762, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.872289156626506, |
| "grad_norm": 0.18344826515624643, |
| "learning_rate": 2.3228111971411554e-06, |
| "loss": 0.2847, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.8738955823293173, |
| "grad_norm": 0.18790690808141583, |
| "learning_rate": 2.2930315664085768e-06, |
| "loss": 0.2832, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.8755020080321287, |
| "grad_norm": 0.19268300685339843, |
| "learning_rate": 2.2632519356759978e-06, |
| "loss": 0.2924, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.8771084337349397, |
| "grad_norm": 0.18108361744769685, |
| "learning_rate": 2.2334723049434187e-06, |
| "loss": 0.2666, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.8787148594377507, |
| "grad_norm": 0.1964453513606187, |
| "learning_rate": 2.2036926742108397e-06, |
| "loss": 0.2834, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.880321285140562, |
| "grad_norm": 0.1788510567768747, |
| "learning_rate": 2.173913043478261e-06, |
| "loss": 0.2884, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.8819277108433736, |
| "grad_norm": 0.17295138712409047, |
| "learning_rate": 2.144133412745682e-06, |
| "loss": 0.2711, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.8835341365461846, |
| "grad_norm": 0.17567031763593471, |
| "learning_rate": 2.114353782013103e-06, |
| "loss": 0.2717, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.885140562248996, |
| "grad_norm": 0.16678222696781173, |
| "learning_rate": 2.084574151280524e-06, |
| "loss": 0.2706, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.886746987951807, |
| "grad_norm": 0.17470142569680275, |
| "learning_rate": 2.054794520547945e-06, |
| "loss": 0.2843, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.8883534136546185, |
| "grad_norm": 0.1829261532971885, |
| "learning_rate": 2.0250148898153664e-06, |
| "loss": 0.2782, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.88995983935743, |
| "grad_norm": 0.1722757025350079, |
| "learning_rate": 1.9952352590827874e-06, |
| "loss": 0.2774, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.891566265060241, |
| "grad_norm": 0.18349308042009962, |
| "learning_rate": 1.9654556283502084e-06, |
| "loss": 0.2832, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.893172690763052, |
| "grad_norm": 0.1754837052446264, |
| "learning_rate": 1.9356759976176293e-06, |
| "loss": 0.2822, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.8947791164658634, |
| "grad_norm": 0.1792463120774883, |
| "learning_rate": 1.905896366885051e-06, |
| "loss": 0.295, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.896385542168675, |
| "grad_norm": 0.17685710971676047, |
| "learning_rate": 1.876116736152472e-06, |
| "loss": 0.2888, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.897991967871486, |
| "grad_norm": 0.17535217611636308, |
| "learning_rate": 1.8463371054198931e-06, |
| "loss": 0.2783, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.8995983935742973, |
| "grad_norm": 0.18039479385992338, |
| "learning_rate": 1.816557474687314e-06, |
| "loss": 0.272, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.9012048192771083, |
| "grad_norm": 0.17519948862975526, |
| "learning_rate": 1.786777843954735e-06, |
| "loss": 0.2683, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.9028112449799197, |
| "grad_norm": 0.17420513363314138, |
| "learning_rate": 1.7569982132221563e-06, |
| "loss": 0.2727, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.904417670682731, |
| "grad_norm": 0.1744433206516152, |
| "learning_rate": 1.7272185824895772e-06, |
| "loss": 0.2783, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.906024096385542, |
| "grad_norm": 0.1680639127482644, |
| "learning_rate": 1.6974389517569984e-06, |
| "loss": 0.2862, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.907630522088353, |
| "grad_norm": 0.17920150463037057, |
| "learning_rate": 1.6676593210244194e-06, |
| "loss": 0.2869, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.9092369477911646, |
| "grad_norm": 0.17136726709539002, |
| "learning_rate": 1.6378796902918406e-06, |
| "loss": 0.2847, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.910843373493976, |
| "grad_norm": 0.17658479204211133, |
| "learning_rate": 1.6081000595592616e-06, |
| "loss": 0.2788, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.912449799196787, |
| "grad_norm": 0.17962465801415045, |
| "learning_rate": 1.5783204288266827e-06, |
| "loss": 0.277, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.9140562248995985, |
| "grad_norm": 0.17585752402402058, |
| "learning_rate": 1.5485407980941037e-06, |
| "loss": 0.2822, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.9156626506024095, |
| "grad_norm": 0.20014161600135602, |
| "learning_rate": 1.5187611673615247e-06, |
| "loss": 0.27, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.917269076305221, |
| "grad_norm": 0.18031244115450326, |
| "learning_rate": 1.4889815366289459e-06, |
| "loss": 0.2776, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.9188755020080324, |
| "grad_norm": 0.17203231141150208, |
| "learning_rate": 1.4592019058963669e-06, |
| "loss": 0.2709, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.9204819277108434, |
| "grad_norm": 0.17632883689576834, |
| "learning_rate": 1.429422275163788e-06, |
| "loss": 0.2862, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.9220883534136544, |
| "grad_norm": 0.17158335200283492, |
| "learning_rate": 1.399642644431209e-06, |
| "loss": 0.2805, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.923694779116466, |
| "grad_norm": 0.1727754387032493, |
| "learning_rate": 1.3698630136986302e-06, |
| "loss": 0.2862, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.9253012048192772, |
| "grad_norm": 0.17308901484014633, |
| "learning_rate": 1.3400833829660514e-06, |
| "loss": 0.2757, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.9269076305220882, |
| "grad_norm": 0.17421930479432718, |
| "learning_rate": 1.3103037522334724e-06, |
| "loss": 0.2834, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.9285140562248997, |
| "grad_norm": 0.17882219625729937, |
| "learning_rate": 1.2805241215008936e-06, |
| "loss": 0.2643, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.9301204819277107, |
| "grad_norm": 0.1698144392762095, |
| "learning_rate": 1.2507444907683145e-06, |
| "loss": 0.2791, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.931726907630522, |
| "grad_norm": 0.176433464798481, |
| "learning_rate": 1.2209648600357357e-06, |
| "loss": 0.2885, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.9333333333333336, |
| "grad_norm": 0.17291091781017842, |
| "learning_rate": 1.1911852293031567e-06, |
| "loss": 0.2876, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.9349397590361446, |
| "grad_norm": 0.1745399160440178, |
| "learning_rate": 1.1614055985705777e-06, |
| "loss": 0.2804, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.9365461847389556, |
| "grad_norm": 0.17469125289305582, |
| "learning_rate": 1.1316259678379989e-06, |
| "loss": 0.2886, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.938152610441767, |
| "grad_norm": 0.17188888134373598, |
| "learning_rate": 1.1018463371054199e-06, |
| "loss": 0.2747, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.9397590361445785, |
| "grad_norm": 0.17315914977583619, |
| "learning_rate": 1.072066706372841e-06, |
| "loss": 0.2718, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.9413654618473895, |
| "grad_norm": 0.18424998554829677, |
| "learning_rate": 1.042287075640262e-06, |
| "loss": 0.2927, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.942971887550201, |
| "grad_norm": 0.17405613312679283, |
| "learning_rate": 1.0125074449076832e-06, |
| "loss": 0.2801, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.944578313253012, |
| "grad_norm": 0.17392389279259265, |
| "learning_rate": 9.827278141751042e-07, |
| "loss": 0.2672, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.9461847389558233, |
| "grad_norm": 0.1750803761886037, |
| "learning_rate": 9.529481834425255e-07, |
| "loss": 0.2753, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.9477911646586348, |
| "grad_norm": 0.17516743336571347, |
| "learning_rate": 9.231685527099466e-07, |
| "loss": 0.2898, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.9493975903614458, |
| "grad_norm": 0.17815036030448045, |
| "learning_rate": 8.933889219773675e-07, |
| "loss": 0.2843, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.9510040160642568, |
| "grad_norm": 0.17553406497898083, |
| "learning_rate": 8.636092912447886e-07, |
| "loss": 0.286, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.952610441767068, |
| "grad_norm": 0.17395921166402753, |
| "learning_rate": 8.338296605122097e-07, |
| "loss": 0.28, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.9542168674698797, |
| "grad_norm": 0.18520083979603033, |
| "learning_rate": 8.040500297796308e-07, |
| "loss": 0.3018, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.9558232931726907, |
| "grad_norm": 0.17913590766346535, |
| "learning_rate": 7.742703990470519e-07, |
| "loss": 0.2882, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.957429718875502, |
| "grad_norm": 0.177731379147974, |
| "learning_rate": 7.444907683144729e-07, |
| "loss": 0.291, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.959036144578313, |
| "grad_norm": 0.17932734702480063, |
| "learning_rate": 7.14711137581894e-07, |
| "loss": 0.2796, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.9606425702811245, |
| "grad_norm": 0.17871304503420032, |
| "learning_rate": 6.849315068493151e-07, |
| "loss": 0.2838, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.962248995983936, |
| "grad_norm": 0.17164070830208178, |
| "learning_rate": 6.551518761167362e-07, |
| "loss": 0.2827, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.963855421686747, |
| "grad_norm": 0.16835947713847804, |
| "learning_rate": 6.253722453841573e-07, |
| "loss": 0.2802, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.965461847389558, |
| "grad_norm": 0.1693631561465372, |
| "learning_rate": 5.955926146515784e-07, |
| "loss": 0.2768, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.9670682730923694, |
| "grad_norm": 0.17243093099494994, |
| "learning_rate": 5.658129839189994e-07, |
| "loss": 0.2757, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.968674698795181, |
| "grad_norm": 0.17605861774049406, |
| "learning_rate": 5.360333531864205e-07, |
| "loss": 0.2673, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.970281124497992, |
| "grad_norm": 0.1726383252083538, |
| "learning_rate": 5.062537224538416e-07, |
| "loss": 0.2873, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.9718875502008033, |
| "grad_norm": 0.1726403649864526, |
| "learning_rate": 4.7647409172126274e-07, |
| "loss": 0.2686, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.9734939759036143, |
| "grad_norm": 0.1809245764248369, |
| "learning_rate": 4.4669446098868377e-07, |
| "loss": 0.2838, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.9751004016064257, |
| "grad_norm": 0.17092605908352648, |
| "learning_rate": 4.1691483025610485e-07, |
| "loss": 0.2716, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.976706827309237, |
| "grad_norm": 0.17322511760116535, |
| "learning_rate": 3.8713519952352593e-07, |
| "loss": 0.2777, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.978313253012048, |
| "grad_norm": 0.18375085828597734, |
| "learning_rate": 3.57355568790947e-07, |
| "loss": 0.2794, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.979919678714859, |
| "grad_norm": 0.179038088679026, |
| "learning_rate": 3.275759380583681e-07, |
| "loss": 0.279, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.9815261044176706, |
| "grad_norm": 0.17767349005638372, |
| "learning_rate": 2.977963073257892e-07, |
| "loss": 0.2852, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.983132530120482, |
| "grad_norm": 0.18020745119137233, |
| "learning_rate": 2.6801667659321026e-07, |
| "loss": 0.3029, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.984738955823293, |
| "grad_norm": 0.17012300307211053, |
| "learning_rate": 2.3823704586063137e-07, |
| "loss": 0.2752, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.9863453815261045, |
| "grad_norm": 0.17381821055232685, |
| "learning_rate": 2.0845741512805242e-07, |
| "loss": 0.2859, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.9879518072289155, |
| "grad_norm": 0.17160169606492132, |
| "learning_rate": 1.786777843954735e-07, |
| "loss": 0.2707, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.989558232931727, |
| "grad_norm": 0.17490271990116033, |
| "learning_rate": 1.488981536628946e-07, |
| "loss": 0.289, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.9911646586345384, |
| "grad_norm": 0.17694062193658128, |
| "learning_rate": 1.1911852293031568e-07, |
| "loss": 0.2818, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.9927710843373494, |
| "grad_norm": 0.17511488153591193, |
| "learning_rate": 8.933889219773675e-08, |
| "loss": 0.2849, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.9943775100401604, |
| "grad_norm": 0.18167443886988274, |
| "learning_rate": 5.955926146515784e-08, |
| "loss": 0.2883, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.995983935742972, |
| "grad_norm": 0.1638730430540001, |
| "learning_rate": 2.977963073257892e-08, |
| "loss": 0.271, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.995983935742972, |
| "step": 1866, |
| "total_flos": 2.074186974392379e+19, |
| "train_loss": 0.0, |
| "train_runtime": 1.4956, |
| "train_samples_per_second": 19965.005, |
| "train_steps_per_second": 1247.687 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1866, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.074186974392379e+19, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |