| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9943851768669285, |
| "eval_steps": 500, |
| "global_step": 333, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.008983717012914094, |
| "grad_norm": 5.839033078230819, |
| "learning_rate": 6.655117647058823e-06, |
| "loss": 0.851, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.017967434025828188, |
| "grad_norm": 5.796609591178738, |
| "learning_rate": 1.3310235294117646e-05, |
| "loss": 0.8589, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.02695115103874228, |
| "grad_norm": 4.022069478985531, |
| "learning_rate": 1.996535294117647e-05, |
| "loss": 0.8091, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.035934868051656375, |
| "grad_norm": 5.021116871190826, |
| "learning_rate": 2.6620470588235293e-05, |
| "loss": 0.8296, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.044918585064570464, |
| "grad_norm": 6.766543105088873, |
| "learning_rate": 3.3275588235294115e-05, |
| "loss": 0.8406, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.05390230207748456, |
| "grad_norm": 3.7488525352605198, |
| "learning_rate": 3.993070588235294e-05, |
| "loss": 0.7848, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.06288601909039865, |
| "grad_norm": 3.232161490369813, |
| "learning_rate": 4.658582352941176e-05, |
| "loss": 0.7269, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.07186973610331275, |
| "grad_norm": 3.5299607181225734, |
| "learning_rate": 5.3240941176470586e-05, |
| "loss": 0.7032, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.08085345311622684, |
| "grad_norm": 2.4635657277606833, |
| "learning_rate": 5.989605882352941e-05, |
| "loss": 0.686, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.08983717012914093, |
| "grad_norm": 3.5184074662375378, |
| "learning_rate": 6.655117647058823e-05, |
| "loss": 0.6731, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.09882088714205503, |
| "grad_norm": 2.034739373521881, |
| "learning_rate": 7.320629411764706e-05, |
| "loss": 0.6436, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.10780460415496912, |
| "grad_norm": 3.484093426894694, |
| "learning_rate": 7.986141176470588e-05, |
| "loss": 0.6546, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.11678832116788321, |
| "grad_norm": 1.7458659318282235, |
| "learning_rate": 8.65165294117647e-05, |
| "loss": 0.6243, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.1257720381807973, |
| "grad_norm": 2.696049020472868, |
| "learning_rate": 9.317164705882352e-05, |
| "loss": 0.6431, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.13475575519371139, |
| "grad_norm": 2.7403846964581335, |
| "learning_rate": 9.982676470588235e-05, |
| "loss": 0.6337, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.1437394722066255, |
| "grad_norm": 1.9984598625107888, |
| "learning_rate": 0.00010648188235294117, |
| "loss": 0.6219, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.1527231892195396, |
| "grad_norm": 1.8055549976045306, |
| "learning_rate": 0.000113137, |
| "loss": 0.6009, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.16170690623245368, |
| "grad_norm": 1.5503606010043973, |
| "learning_rate": 0.00011979211764705882, |
| "loss": 0.5964, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.17069062324536777, |
| "grad_norm": 2.35861768584361, |
| "learning_rate": 0.00012644723529411765, |
| "loss": 0.6148, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.17967434025828186, |
| "grad_norm": 1.5396308193214678, |
| "learning_rate": 0.00013310235294117646, |
| "loss": 0.6018, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.18865805727119594, |
| "grad_norm": 1.6143825069098845, |
| "learning_rate": 0.0001397574705882353, |
| "loss": 0.5954, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.19764177428411006, |
| "grad_norm": 1.6144901661060405, |
| "learning_rate": 0.0001464125882352941, |
| "loss": 0.6004, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.20662549129702415, |
| "grad_norm": 3.3171185005484856, |
| "learning_rate": 0.00015306770588235295, |
| "loss": 0.6111, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.21560920830993824, |
| "grad_norm": 2.2411018078854936, |
| "learning_rate": 0.00015972282352941176, |
| "loss": 0.6119, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.22459292532285233, |
| "grad_norm": 8.161797640058992, |
| "learning_rate": 0.0001663779411764706, |
| "loss": 0.593, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.23357664233576642, |
| "grad_norm": 846.7701836327712, |
| "learning_rate": 0.0001730330588235294, |
| "loss": 5.673, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.2425603593486805, |
| "grad_norm": 4.629361945191611, |
| "learning_rate": 0.00017968817647058823, |
| "loss": 0.7795, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.2515440763615946, |
| "grad_norm": 85.87349809516256, |
| "learning_rate": 0.00018634329411764704, |
| "loss": 0.7672, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.2605277933745087, |
| "grad_norm": 2.3423299747514212, |
| "learning_rate": 0.00019299841176470588, |
| "loss": 0.6615, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.26951151038742277, |
| "grad_norm": 2.629813380478519, |
| "learning_rate": 0.0001996535294117647, |
| "loss": 0.6385, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.2784952274003369, |
| "grad_norm": 1.2741888860891866, |
| "learning_rate": 0.00020630864705882353, |
| "loss": 0.612, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.287478944413251, |
| "grad_norm": 31.843678922273796, |
| "learning_rate": 0.00021296376470588234, |
| "loss": 0.6641, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.29646266142616506, |
| "grad_norm": 5.150042714870819, |
| "learning_rate": 0.00021961888235294118, |
| "loss": 0.9157, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.3054463784390792, |
| "grad_norm": 610.734122596688, |
| "learning_rate": 0.000226274, |
| "loss": 1.7373, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.31443009545199324, |
| "grad_norm": 46.58948260555198, |
| "learning_rate": 0.00022626775506336304, |
| "loss": 0.8771, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.32341381246490736, |
| "grad_norm": 6.962976785617738, |
| "learning_rate": 0.00022624902094286824, |
| "loss": 0.9851, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.3323975294778215, |
| "grad_norm": 21.673571852019826, |
| "learning_rate": 0.00022621779970668783, |
| "loss": 0.9069, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.34138124649073553, |
| "grad_norm": 2.892556097499312, |
| "learning_rate": 0.00022617409480152153, |
| "loss": 0.8182, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.35036496350364965, |
| "grad_norm": 15.60543009302285, |
| "learning_rate": 0.00022611791105221654, |
| "loss": 0.9486, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.3593486805165637, |
| "grad_norm": 11.538473656686941, |
| "learning_rate": 0.00022604925466123451, |
| "loss": 0.8591, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.36833239752947783, |
| "grad_norm": 2.5218994606008907, |
| "learning_rate": 0.00022596813320796707, |
| "loss": 0.7781, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.3773161145423919, |
| "grad_norm": 4.662799540456364, |
| "learning_rate": 0.00022587455564789883, |
| "loss": 0.8526, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.386299831555306, |
| "grad_norm": 565.0656362499724, |
| "learning_rate": 0.00022576853231161902, |
| "loss": 1.738, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.3952835485682201, |
| "grad_norm": 7.6600755456980325, |
| "learning_rate": 0.00022565007490368076, |
| "loss": 0.8999, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.4042672655811342, |
| "grad_norm": 287.49302909066023, |
| "learning_rate": 0.00022551919650130918, |
| "loss": 7.1473, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.4132509825940483, |
| "grad_norm": 13.25846612995419, |
| "learning_rate": 0.00022537591155295756, |
| "loss": 1.5197, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.42223469960696236, |
| "grad_norm": 6.573021527190113, |
| "learning_rate": 0.00022522023587671233, |
| "loss": 1.0434, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.4312184166198765, |
| "grad_norm": 2.561584699671566, |
| "learning_rate": 0.0002250521866585469, |
| "loss": 0.8122, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.4402021336327906, |
| "grad_norm": 12.292027687573608, |
| "learning_rate": 0.00022487178245042422, |
| "loss": 0.9547, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.44918585064570465, |
| "grad_norm": 14.460331630727685, |
| "learning_rate": 0.0002246790431682489, |
| "loss": 1.026, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.45816956765861877, |
| "grad_norm": 4.017308882255896, |
| "learning_rate": 0.00022447399008966853, |
| "loss": 0.8958, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.46715328467153283, |
| "grad_norm": 1.8162087381759398, |
| "learning_rate": 0.0002242566458517245, |
| "loss": 0.7584, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.47613700168444695, |
| "grad_norm": 2.229007766044214, |
| "learning_rate": 0.0002240270344483534, |
| "loss": 0.7519, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.485120718697361, |
| "grad_norm": 1.275167700368963, |
| "learning_rate": 0.00022378518122773768, |
| "loss": 0.7169, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.4941044357102751, |
| "grad_norm": 1.2052448804014857, |
| "learning_rate": 0.00022353111288950776, |
| "loss": 0.7069, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.5030881527231892, |
| "grad_norm": 2.809114390406924, |
| "learning_rate": 0.00022326485748179416, |
| "loss": 0.6934, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.5120718697361033, |
| "grad_norm": 1.7295819486463446, |
| "learning_rate": 0.00022298644439813125, |
| "loss": 0.6997, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.5210555867490174, |
| "grad_norm": 1.176047538360094, |
| "learning_rate": 0.00022269590437421234, |
| "loss": 0.6587, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.5300393037619315, |
| "grad_norm": 0.9770345550489453, |
| "learning_rate": 0.0002223932694844966, |
| "loss": 0.6444, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.5390230207748455, |
| "grad_norm": 1.496641892403964, |
| "learning_rate": 0.00022207857313866796, |
| "loss": 0.6558, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.5480067377877597, |
| "grad_norm": 1.7254473728830027, |
| "learning_rate": 0.00022175185007794712, |
| "loss": 0.6647, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.5569904548006738, |
| "grad_norm": 1.148244331773896, |
| "learning_rate": 0.000221413136371256, |
| "loss": 0.6244, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.5659741718135879, |
| "grad_norm": 1.082240763317917, |
| "learning_rate": 0.00022106246941123604, |
| "loss": 0.616, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.574957888826502, |
| "grad_norm": 1.0463382313968703, |
| "learning_rate": 0.0002206998879101201, |
| "loss": 0.6097, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.583941605839416, |
| "grad_norm": 1.5314621991483217, |
| "learning_rate": 0.00022032543189545893, |
| "loss": 0.6261, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.5929253228523301, |
| "grad_norm": 0.8440358318436397, |
| "learning_rate": 0.00021993914270570204, |
| "loss": 0.5992, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.6019090398652442, |
| "grad_norm": 1.1721068603539129, |
| "learning_rate": 0.0002195410629856343, |
| "loss": 0.6003, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.6108927568781584, |
| "grad_norm": 1.0225694078862206, |
| "learning_rate": 0.00021913123668166815, |
| "loss": 0.5952, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.6198764738910725, |
| "grad_norm": 1.223954503187751, |
| "learning_rate": 0.00021870970903699184, |
| "loss": 0.5844, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.6288601909039865, |
| "grad_norm": 0.7517434069071857, |
| "learning_rate": 0.00021827652658657518, |
| "loss": 0.5736, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.6378439079169006, |
| "grad_norm": 0.542782981372059, |
| "learning_rate": 0.00021783173715203175, |
| "loss": 0.5677, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.6468276249298147, |
| "grad_norm": 0.8491645074764822, |
| "learning_rate": 0.00021737538983634003, |
| "loss": 0.5567, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.6558113419427288, |
| "grad_norm": 0.9512722570962481, |
| "learning_rate": 0.00021690753501842243, |
| "loss": 0.5652, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.664795058955643, |
| "grad_norm": 0.7420499518682178, |
| "learning_rate": 0.00021642822434758365, |
| "loss": 0.5521, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.673778775968557, |
| "grad_norm": 0.6023655209710193, |
| "learning_rate": 0.0002159375107378088, |
| "loss": 0.5484, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.6827624929814711, |
| "grad_norm": 0.6704929086374853, |
| "learning_rate": 0.00021543544836192198, |
| "loss": 0.5629, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.6917462099943852, |
| "grad_norm": 0.8511116323519582, |
| "learning_rate": 0.00021492209264560583, |
| "loss": 0.5477, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.7007299270072993, |
| "grad_norm": 0.8293067237834507, |
| "learning_rate": 0.00021439750026128255, |
| "loss": 0.5486, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.7097136440202133, |
| "grad_norm": 0.7951812792060676, |
| "learning_rate": 0.00021386172912185786, |
| "loss": 0.5386, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.7186973610331274, |
| "grad_norm": 0.5942026077567073, |
| "learning_rate": 0.0002133148383743272, |
| "loss": 0.5526, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.7276810780460415, |
| "grad_norm": 0.7074314677815655, |
| "learning_rate": 0.00021275688839324653, |
| "loss": 0.5355, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.7366647950589557, |
| "grad_norm": 1.0064759251882633, |
| "learning_rate": 0.00021218794077406699, |
| "loss": 0.5214, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.7456485120718698, |
| "grad_norm": 0.5936960428857333, |
| "learning_rate": 0.00021160805832633507, |
| "loss": 0.5334, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.7546322290847838, |
| "grad_norm": 0.5587606186465214, |
| "learning_rate": 0.00021101730506675872, |
| "loss": 0.5246, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.7636159460976979, |
| "grad_norm": 0.7563054405639342, |
| "learning_rate": 0.00021041574621214016, |
| "loss": 0.5211, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.772599663110612, |
| "grad_norm": 0.4650706352210191, |
| "learning_rate": 0.00020980344817217607, |
| "loss": 0.52, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.7815833801235261, |
| "grad_norm": 0.46292748233999875, |
| "learning_rate": 0.00020918047854212644, |
| "loss": 0.5086, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.7905670971364402, |
| "grad_norm": 0.45822776585768843, |
| "learning_rate": 0.00020854690609535217, |
| "loss": 0.5205, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.7995508141493542, |
| "grad_norm": 0.3680018488886641, |
| "learning_rate": 0.0002079028007757228, |
| "loss": 0.4995, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.8085345311622684, |
| "grad_norm": 0.3651907315130092, |
| "learning_rate": 0.000207248233689895, |
| "loss": 0.5108, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.8175182481751825, |
| "grad_norm": 0.38739099960496165, |
| "learning_rate": 0.00020658327709946274, |
| "loss": 0.5105, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.8265019651880966, |
| "grad_norm": 0.37587537297348633, |
| "learning_rate": 0.00020590800441297976, |
| "loss": 0.5043, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.8354856822010107, |
| "grad_norm": 0.5725177706736702, |
| "learning_rate": 0.00020522249017785566, |
| "loss": 0.5056, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.8444693992139247, |
| "grad_norm": 1.0428309894412633, |
| "learning_rate": 0.00020452681007212612, |
| "loss": 0.5189, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.8534531162268388, |
| "grad_norm": 1.1604159616874143, |
| "learning_rate": 0.0002038210408960984, |
| "loss": 0.5167, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.862436833239753, |
| "grad_norm": 0.5208537644478249, |
| "learning_rate": 0.0002031052605638728, |
| "loss": 0.5145, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.8714205502526671, |
| "grad_norm": 0.83804291321802, |
| "learning_rate": 0.00020237954809474134, |
| "loss": 0.5158, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.8804042672655812, |
| "grad_norm": 0.7064273962256518, |
| "learning_rate": 0.00020164398360446436, |
| "loss": 0.4931, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.8893879842784952, |
| "grad_norm": 0.5057800234712763, |
| "learning_rate": 0.00020089864829642596, |
| "loss": 0.5075, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.8983717012914093, |
| "grad_norm": 0.5940667934373025, |
| "learning_rate": 0.0002001436244526695, |
| "loss": 0.4907, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.9073554183043234, |
| "grad_norm": 0.5019525607511636, |
| "learning_rate": 0.00019937899542481408, |
| "loss": 0.5041, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.9163391353172375, |
| "grad_norm": 0.4491147515500972, |
| "learning_rate": 0.00019860484562485276, |
| "loss": 0.488, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.9253228523301515, |
| "grad_norm": 0.5412361814676229, |
| "learning_rate": 0.00019782126051583386, |
| "loss": 0.5023, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.9343065693430657, |
| "grad_norm": 0.3383672378854014, |
| "learning_rate": 0.00019702832660242615, |
| "loss": 0.489, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.9432902863559798, |
| "grad_norm": 0.36276664892975297, |
| "learning_rate": 0.0001962261314213691, |
| "loss": 0.4996, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.9522740033688939, |
| "grad_norm": 0.3454995598586516, |
| "learning_rate": 0.00019541476353180916, |
| "loss": 0.4856, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.961257720381808, |
| "grad_norm": 0.311134046456636, |
| "learning_rate": 0.00019459431250552317, |
| "loss": 0.4855, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.970241437394722, |
| "grad_norm": 0.3425949299888489, |
| "learning_rate": 0.0001937648689170301, |
| "loss": 0.4762, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.9792251544076361, |
| "grad_norm": 0.3496537132478162, |
| "learning_rate": 0.00019292652433359177, |
| "loss": 0.4834, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.9882088714205502, |
| "grad_norm": 0.4269583108093911, |
| "learning_rate": 0.00019207937130510442, |
| "loss": 0.4946, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.9971925884334644, |
| "grad_norm": 0.4147252598563634, |
| "learning_rate": 0.0001912235033538814, |
| "loss": 0.4846, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.0075800112296462, |
| "grad_norm": 0.8355251290758435, |
| "learning_rate": 0.00019035901496432887, |
| "loss": 0.8602, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.0165637282425604, |
| "grad_norm": 1.6156052899406323, |
| "learning_rate": 0.0001894860015725148, |
| "loss": 0.4935, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.0255474452554745, |
| "grad_norm": 0.46493272524584955, |
| "learning_rate": 0.0001886045595556335, |
| "loss": 0.4655, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.0345311622683886, |
| "grad_norm": 0.979120652286332, |
| "learning_rate": 0.00018771478622136586, |
| "loss": 0.4778, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.0435148792813027, |
| "grad_norm": 0.7656350182275576, |
| "learning_rate": 0.0001868167797971369, |
| "loss": 0.4786, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.0524985962942168, |
| "grad_norm": 0.5200988865180658, |
| "learning_rate": 0.00018591063941927197, |
| "loss": 0.4667, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.0614823133071307, |
| "grad_norm": 0.5373218112082903, |
| "learning_rate": 0.00018499646512205238, |
| "loss": 0.4698, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.0704660303200448, |
| "grad_norm": 0.3981583489798308, |
| "learning_rate": 0.00018407435782667207, |
| "loss": 0.4573, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.079449747332959, |
| "grad_norm": 0.4729814422997812, |
| "learning_rate": 0.0001831444193300964, |
| "loss": 0.4537, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.088433464345873, |
| "grad_norm": 0.40160292629773175, |
| "learning_rate": 0.00018220675229382407, |
| "loss": 0.4607, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.0974171813587872, |
| "grad_norm": 0.34191199159101826, |
| "learning_rate": 0.0001812614602325536, |
| "loss": 0.4491, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.1064008983717013, |
| "grad_norm": 0.38115499724656887, |
| "learning_rate": 0.00018030864750275598, |
| "loss": 0.4649, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.1153846153846154, |
| "grad_norm": 0.2939815268385644, |
| "learning_rate": 0.0001793484192911539, |
| "loss": 0.4405, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.1243683323975295, |
| "grad_norm": 0.38182282790459066, |
| "learning_rate": 0.00017838088160310975, |
| "loss": 0.4673, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.1333520494104437, |
| "grad_norm": 0.26951517499854655, |
| "learning_rate": 0.00017740614125092276, |
| "loss": 0.4366, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.1423357664233578, |
| "grad_norm": 0.32986266756899896, |
| "learning_rate": 0.00017642430584203776, |
| "loss": 0.4506, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.1513194834362717, |
| "grad_norm": 0.29552314624204684, |
| "learning_rate": 0.0001754354837671654, |
| "loss": 0.4409, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.1603032004491858, |
| "grad_norm": 0.2715172818791699, |
| "learning_rate": 0.0001744397841883164, |
| "loss": 0.455, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.1692869174621, |
| "grad_norm": 0.30375726886135684, |
| "learning_rate": 0.00017343731702675036, |
| "loss": 0.4453, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.178270634475014, |
| "grad_norm": 0.20896293317807582, |
| "learning_rate": 0.00017242819295084123, |
| "loss": 0.4447, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.1872543514879281, |
| "grad_norm": 0.26828433742607094, |
| "learning_rate": 0.0001714125233638595, |
| "loss": 0.444, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.1962380685008422, |
| "grad_norm": 0.21271544817391416, |
| "learning_rate": 0.00017039042039167416, |
| "loss": 0.4434, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.2052217855137564, |
| "grad_norm": 0.22231269773708437, |
| "learning_rate": 0.00016936199687037412, |
| "loss": 0.4392, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.2142055025266705, |
| "grad_norm": 0.21858395274334466, |
| "learning_rate": 0.00016832736633381174, |
| "loss": 0.44, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.2231892195395846, |
| "grad_norm": 0.2359281892515482, |
| "learning_rate": 0.00016728664300106905, |
| "loss": 0.4395, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.2321729365524985, |
| "grad_norm": 0.21457493342859105, |
| "learning_rate": 0.0001662399417638485, |
| "loss": 0.442, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.2411566535654126, |
| "grad_norm": 0.17832258654038546, |
| "learning_rate": 0.00016518737817378936, |
| "loss": 0.4379, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.2501403705783267, |
| "grad_norm": 0.20100339820701033, |
| "learning_rate": 0.0001641290684297112, |
| "loss": 0.4371, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.2591240875912408, |
| "grad_norm": 0.23303786372370514, |
| "learning_rate": 0.00016306512936478605, |
| "loss": 0.4353, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.268107804604155, |
| "grad_norm": 0.25687763850096407, |
| "learning_rate": 0.00016199567843364066, |
| "loss": 0.4379, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.277091521617069, |
| "grad_norm": 0.23974642432643137, |
| "learning_rate": 0.00016092083369938952, |
| "loss": 0.4364, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.2860752386299832, |
| "grad_norm": 0.20652369486142858, |
| "learning_rate": 0.00015984071382060174, |
| "loss": 0.4385, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.2950589556428973, |
| "grad_norm": 0.20585656880887795, |
| "learning_rate": 0.00015875543803820115, |
| "loss": 0.4425, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.3040426726558114, |
| "grad_norm": 0.22079211853825997, |
| "learning_rate": 0.00015766512616230286, |
| "loss": 0.4251, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.3130263896687255, |
| "grad_norm": 0.19534999766663985, |
| "learning_rate": 0.00015656989855898653, |
| "loss": 0.4374, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.3220101066816397, |
| "grad_norm": 0.19699056774469947, |
| "learning_rate": 0.00015546987613700858, |
| "loss": 0.4385, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.3309938236945535, |
| "grad_norm": 0.25025460629960233, |
| "learning_rate": 0.00015436518033445427, |
| "loss": 0.4414, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.3399775407074677, |
| "grad_norm": 0.22783106244726478, |
| "learning_rate": 0.00015325593310533135, |
| "loss": 0.4292, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.3489612577203818, |
| "grad_norm": 0.24800972153045137, |
| "learning_rate": 0.00015214225690610695, |
| "loss": 0.4367, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.357944974733296, |
| "grad_norm": 0.2755781135491422, |
| "learning_rate": 0.00015102427468218884, |
| "loss": 0.4343, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.36692869174621, |
| "grad_norm": 0.3302080679814042, |
| "learning_rate": 0.00014990210985435274, |
| "loss": 0.4225, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.3759124087591241, |
| "grad_norm": 0.3884646633170279, |
| "learning_rate": 0.00014877588630511702, |
| "loss": 0.4291, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.3848961257720382, |
| "grad_norm": 0.4487151204051441, |
| "learning_rate": 0.0001476457283650669, |
| "loss": 0.4314, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.3938798427849521, |
| "grad_norm": 0.4142847149164689, |
| "learning_rate": 0.00014651176079912844, |
| "loss": 0.4248, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.4028635597978663, |
| "grad_norm": 0.3852901369815094, |
| "learning_rate": 0.00014537410879279536, |
| "loss": 0.4233, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.4118472768107804, |
| "grad_norm": 0.364518339635796, |
| "learning_rate": 0.00014423289793830883, |
| "loss": 0.4362, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.4208309938236945, |
| "grad_norm": 0.35417387758842084, |
| "learning_rate": 0.00014308825422079265, |
| "loss": 0.4252, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.4298147108366086, |
| "grad_norm": 0.3557663368174752, |
| "learning_rate": 0.00014194030400434496, |
| "loss": 0.4395, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.4387984278495227, |
| "grad_norm": 0.34124968887701046, |
| "learning_rate": 0.00014078917401808824, |
| "loss": 0.4355, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.4477821448624368, |
| "grad_norm": 0.253304289924485, |
| "learning_rate": 0.0001396349913421788, |
| "loss": 0.4355, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.456765861875351, |
| "grad_norm": 0.2979898627616292, |
| "learning_rate": 0.00013847788339377788, |
| "loss": 0.4411, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.465749578888265, |
| "grad_norm": 0.2927807074804572, |
| "learning_rate": 0.0001373179779129849, |
| "loss": 0.434, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.4747332959011792, |
| "grad_norm": 0.23083501058351574, |
| "learning_rate": 0.00013615540294873585, |
| "loss": 0.4296, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.4837170129140933, |
| "grad_norm": 0.25440061192162694, |
| "learning_rate": 0.00013499028684466692, |
| "loss": 0.4316, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.4927007299270074, |
| "grad_norm": 0.3151406421517585, |
| "learning_rate": 0.00013382275822494612, |
| "loss": 0.4268, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.5016844469399215, |
| "grad_norm": 0.28632589462844177, |
| "learning_rate": 0.00013265294598007347, |
| "loss": 0.4338, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.5106681639528357, |
| "grad_norm": 0.20398795355439606, |
| "learning_rate": 0.00013148097925265212, |
| "loss": 0.4335, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.5196518809657495, |
| "grad_norm": 0.3190354573542016, |
| "learning_rate": 0.00013030698742313152, |
| "loss": 0.4206, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.5286355979786637, |
| "grad_norm": 0.3741573095309751, |
| "learning_rate": 0.00012913110009552428, |
| "loss": 0.4322, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.5376193149915778, |
| "grad_norm": 0.2617731827572085, |
| "learning_rate": 0.0001279534470830984, |
| "loss": 0.4282, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.546603032004492, |
| "grad_norm": 0.31625614240914174, |
| "learning_rate": 0.00012677415839404646, |
| "loss": 0.4315, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.5555867490174058, |
| "grad_norm": 0.3559962927646376, |
| "learning_rate": 0.00012559336421713333, |
| "loss": 0.4255, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.56457046603032, |
| "grad_norm": 0.23926556769719626, |
| "learning_rate": 0.00012441119490732357, |
| "loss": 0.4197, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.573554183043234, |
| "grad_norm": 0.19009337870934856, |
| "learning_rate": 0.00012322778097139102, |
| "loss": 0.4333, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.5825379000561481, |
| "grad_norm": 0.2467283593538461, |
| "learning_rate": 0.00012204325305351117, |
| "loss": 0.4327, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.5915216170690623, |
| "grad_norm": 0.20155153552210148, |
| "learning_rate": 0.00012085774192083878, |
| "loss": 0.424, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.6005053340819764, |
| "grad_norm": 0.2055516806210948, |
| "learning_rate": 0.00011967137844907157, |
| "loss": 0.4198, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.6094890510948905, |
| "grad_norm": 0.21888134195759792, |
| "learning_rate": 0.00011848429360800205, |
| "loss": 0.4279, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.6184727681078046, |
| "grad_norm": 0.18186305811122733, |
| "learning_rate": 0.00011729661844705912, |
| "loss": 0.4249, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.6274564851207187, |
| "grad_norm": 0.13875458201438734, |
| "learning_rate": 0.00011610848408084054, |
| "loss": 0.4275, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.6364402021336328, |
| "grad_norm": 0.1717023475061491, |
| "learning_rate": 0.0001149200216746385, |
| "loss": 0.4225, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.645423919146547, |
| "grad_norm": 0.15952136716834545, |
| "learning_rate": 0.00011373136242995958, |
| "loss": 0.4153, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.654407636159461, |
| "grad_norm": 0.15537668646212147, |
| "learning_rate": 0.00011254263757004044, |
| "loss": 0.4164, |
| "step": 184 |
| }, |
| { |
| "epoch": 1.6633913531723752, |
| "grad_norm": 0.160282534660321, |
| "learning_rate": 0.00011135397832536151, |
| "loss": 0.4253, |
| "step": 185 |
| }, |
| { |
| "epoch": 1.6723750701852893, |
| "grad_norm": 0.16123326071336702, |
| "learning_rate": 0.00011016551591915949, |
| "loss": 0.4239, |
| "step": 186 |
| }, |
| { |
| "epoch": 1.6813587871982034, |
| "grad_norm": 0.148970328863511, |
| "learning_rate": 0.0001089773815529409, |
| "loss": 0.4245, |
| "step": 187 |
| }, |
| { |
| "epoch": 1.6903425042111173, |
| "grad_norm": 0.16045971509449883, |
| "learning_rate": 0.00010778970639199795, |
| "loss": 0.4198, |
| "step": 188 |
| }, |
| { |
| "epoch": 1.6993262212240314, |
| "grad_norm": 0.16138201760304008, |
| "learning_rate": 0.00010660262155092845, |
| "loss": 0.4275, |
| "step": 189 |
| }, |
| { |
| "epoch": 1.7083099382369455, |
| "grad_norm": 0.16720529564166428, |
| "learning_rate": 0.00010541625807916123, |
| "loss": 0.4175, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.7172936552498597, |
| "grad_norm": 0.16645688605969786, |
| "learning_rate": 0.00010423074694648884, |
| "loss": 0.4272, |
| "step": 191 |
| }, |
| { |
| "epoch": 1.7262773722627736, |
| "grad_norm": 0.23842852394974162, |
| "learning_rate": 0.00010304621902860899, |
| "loss": 0.4274, |
| "step": 192 |
| }, |
| { |
| "epoch": 1.7352610892756877, |
| "grad_norm": 0.23233366002775901, |
| "learning_rate": 0.00010186280509267643, |
| "loss": 0.4275, |
| "step": 193 |
| }, |
| { |
| "epoch": 1.7442448062886018, |
| "grad_norm": 0.16088479076799017, |
| "learning_rate": 0.00010068063578286667, |
| "loss": 0.4172, |
| "step": 194 |
| }, |
| { |
| "epoch": 1.753228523301516, |
| "grad_norm": 0.1990526955294185, |
| "learning_rate": 9.949984160595352e-05, |
| "loss": 0.4279, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.76221224031443, |
| "grad_norm": 0.2080695351190495, |
| "learning_rate": 9.832055291690164e-05, |
| "loss": 0.4315, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.7711959573273441, |
| "grad_norm": 0.1890634090210766, |
| "learning_rate": 9.714289990447578e-05, |
| "loss": 0.4182, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.7801796743402583, |
| "grad_norm": 0.20016634340121803, |
| "learning_rate": 9.596701257686851e-05, |
| "loss": 0.4119, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.7891633913531724, |
| "grad_norm": 0.23107873036017035, |
| "learning_rate": 9.479302074734792e-05, |
| "loss": 0.4162, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.7981471083660865, |
| "grad_norm": 0.1830895281774673, |
| "learning_rate": 9.362105401992656e-05, |
| "loss": 0.4177, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.8071308253790006, |
| "grad_norm": 0.16770209709045605, |
| "learning_rate": 9.245124177505392e-05, |
| "loss": 0.421, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.8161145423919147, |
| "grad_norm": 0.2297597515397976, |
| "learning_rate": 9.12837131553331e-05, |
| "loss": 0.434, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.8250982594048288, |
| "grad_norm": 0.16824933783531398, |
| "learning_rate": 9.011859705126419e-05, |
| "loss": 0.418, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.834081976417743, |
| "grad_norm": 0.16377185436749506, |
| "learning_rate": 8.895602208701511e-05, |
| "loss": 0.4113, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.843065693430657, |
| "grad_norm": 0.22604619484876676, |
| "learning_rate": 8.779611660622215e-05, |
| "loss": 0.4239, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.8520494104435712, |
| "grad_norm": 0.2215922382832308, |
| "learning_rate": 8.66390086578212e-05, |
| "loss": 0.4121, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.861033127456485, |
| "grad_norm": 0.20334095034573255, |
| "learning_rate": 8.548482598191181e-05, |
| "loss": 0.4193, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.8700168444693992, |
| "grad_norm": 0.1829539632348045, |
| "learning_rate": 8.433369599565508e-05, |
| "loss": 0.4296, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.8790005614823133, |
| "grad_norm": 0.15716250429615242, |
| "learning_rate": 8.318574577920736e-05, |
| "loss": 0.4164, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.8879842784952274, |
| "grad_norm": 0.23307948623719757, |
| "learning_rate": 8.204110206169114e-05, |
| "loss": 0.4172, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.8969679955081415, |
| "grad_norm": 0.1761132573267027, |
| "learning_rate": 8.08998912072046e-05, |
| "loss": 0.4167, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.9059517125210554, |
| "grad_norm": 0.1690942036116379, |
| "learning_rate": 7.976223920087153e-05, |
| "loss": 0.4247, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.9149354295339696, |
| "grad_norm": 0.17381964913986805, |
| "learning_rate": 7.86282716349331e-05, |
| "loss": 0.4076, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.9239191465468837, |
| "grad_norm": 0.1558189761050885, |
| "learning_rate": 7.749811369488296e-05, |
| "loss": 0.425, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.9329028635597978, |
| "grad_norm": 0.16691148084521792, |
| "learning_rate": 7.637189014564727e-05, |
| "loss": 0.4152, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.941886580572712, |
| "grad_norm": 0.14084306621184936, |
| "learning_rate": 7.524972531781114e-05, |
| "loss": 0.4201, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.950870297585626, |
| "grad_norm": 0.1673195421371356, |
| "learning_rate": 7.413174309389303e-05, |
| "loss": 0.4084, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.9598540145985401, |
| "grad_norm": 0.16765695738584874, |
| "learning_rate": 7.301806689466864e-05, |
| "loss": 0.4275, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.9688377316114543, |
| "grad_norm": 0.16869078852623143, |
| "learning_rate": 7.190881966554573e-05, |
| "loss": 0.418, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.9778214486243684, |
| "grad_norm": 0.16808033042759418, |
| "learning_rate": 7.080412386299138e-05, |
| "loss": 0.4141, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.9868051656372825, |
| "grad_norm": 0.19910528453140844, |
| "learning_rate": 6.970410144101348e-05, |
| "loss": 0.4029, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.9957888826501966, |
| "grad_norm": 0.20801296591215268, |
| "learning_rate": 6.860887383769717e-05, |
| "loss": 0.4266, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.0061763054463784, |
| "grad_norm": 0.3587587352425674, |
| "learning_rate": 6.751856196179887e-05, |
| "loss": 0.7214, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.0151600224592925, |
| "grad_norm": 0.387007013989843, |
| "learning_rate": 6.643328617939827e-05, |
| "loss": 0.3541, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.0241437394722066, |
| "grad_norm": 0.27250154771422735, |
| "learning_rate": 6.53531663006105e-05, |
| "loss": 0.3741, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.0331274564851207, |
| "grad_norm": 0.31369626134171097, |
| "learning_rate": 6.427832156635937e-05, |
| "loss": 0.3623, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.042111173498035, |
| "grad_norm": 0.3899920698764709, |
| "learning_rate": 6.320887063521393e-05, |
| "loss": 0.3666, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.051094890510949, |
| "grad_norm": 0.22822552402245974, |
| "learning_rate": 6.214493157028881e-05, |
| "loss": 0.3588, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.060078607523863, |
| "grad_norm": 0.3578216864292053, |
| "learning_rate": 6.108662182621064e-05, |
| "loss": 0.3734, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.069062324536777, |
| "grad_norm": 0.25875022892169425, |
| "learning_rate": 6.003405823615149e-05, |
| "loss": 0.3595, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.0780460415496913, |
| "grad_norm": 0.2595984420837872, |
| "learning_rate": 5.898735699893096e-05, |
| "loss": 0.3605, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.0870297585626054, |
| "grad_norm": 0.298435693379116, |
| "learning_rate": 5.794663366618828e-05, |
| "loss": 0.3632, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.0960134755755195, |
| "grad_norm": 0.2237606329851956, |
| "learning_rate": 5.691200312962588e-05, |
| "loss": 0.3498, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.1049971925884337, |
| "grad_norm": 0.26971282475418673, |
| "learning_rate": 5.5883579608325826e-05, |
| "loss": 0.3629, |
| "step": 234 |
| }, |
| { |
| "epoch": 2.1139809096013478, |
| "grad_norm": 0.2272539853787037, |
| "learning_rate": 5.486147663614048e-05, |
| "loss": 0.3616, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.1229646266142614, |
| "grad_norm": 0.2772404901897913, |
| "learning_rate": 5.3845807049158815e-05, |
| "loss": 0.3568, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.1319483436271756, |
| "grad_norm": 0.18005249012493324, |
| "learning_rate": 5.2836682973249665e-05, |
| "loss": 0.3655, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.1409320606400897, |
| "grad_norm": 0.25329620107734796, |
| "learning_rate": 5.1834215811683654e-05, |
| "loss": 0.3549, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.149915777653004, |
| "grad_norm": 0.16701885107466424, |
| "learning_rate": 5.0838516232834614e-05, |
| "loss": 0.3566, |
| "step": 239 |
| }, |
| { |
| "epoch": 2.158899494665918, |
| "grad_norm": 0.2641235688031744, |
| "learning_rate": 4.9849694157962234e-05, |
| "loss": 0.3588, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.167883211678832, |
| "grad_norm": 0.15619382389891984, |
| "learning_rate": 4.886785874907724e-05, |
| "loss": 0.3488, |
| "step": 241 |
| }, |
| { |
| "epoch": 2.176866928691746, |
| "grad_norm": 0.2501699730380541, |
| "learning_rate": 4.7893118396890284e-05, |
| "loss": 0.3715, |
| "step": 242 |
| }, |
| { |
| "epoch": 2.1858506457046603, |
| "grad_norm": 0.16211610093557965, |
| "learning_rate": 4.6925580708846104e-05, |
| "loss": 0.3765, |
| "step": 243 |
| }, |
| { |
| "epoch": 2.1948343627175744, |
| "grad_norm": 0.21430975519576806, |
| "learning_rate": 4.596535249724404e-05, |
| "loss": 0.3519, |
| "step": 244 |
| }, |
| { |
| "epoch": 2.2038180797304885, |
| "grad_norm": 0.1482343492785817, |
| "learning_rate": 4.501253976744641e-05, |
| "loss": 0.3561, |
| "step": 245 |
| }, |
| { |
| "epoch": 2.2128017967434026, |
| "grad_norm": 0.18600390573451364, |
| "learning_rate": 4.406724770617595e-05, |
| "loss": 0.3515, |
| "step": 246 |
| }, |
| { |
| "epoch": 2.2217855137563167, |
| "grad_norm": 0.13270293221384985, |
| "learning_rate": 4.3129580669903586e-05, |
| "loss": 0.3612, |
| "step": 247 |
| }, |
| { |
| "epoch": 2.230769230769231, |
| "grad_norm": 0.15896036552877565, |
| "learning_rate": 4.2199642173327955e-05, |
| "loss": 0.3482, |
| "step": 248 |
| }, |
| { |
| "epoch": 2.239752947782145, |
| "grad_norm": 0.13144953626310965, |
| "learning_rate": 4.127753487794768e-05, |
| "loss": 0.3559, |
| "step": 249 |
| }, |
| { |
| "epoch": 2.248736664795059, |
| "grad_norm": 0.14893603771330527, |
| "learning_rate": 4.036336058072806e-05, |
| "loss": 0.356, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.257720381807973, |
| "grad_norm": 0.13694686666323366, |
| "learning_rate": 3.9457220202863104e-05, |
| "loss": 0.3606, |
| "step": 251 |
| }, |
| { |
| "epoch": 2.2667040988208873, |
| "grad_norm": 0.14835982621693597, |
| "learning_rate": 3.855921377863414e-05, |
| "loss": 0.3667, |
| "step": 252 |
| }, |
| { |
| "epoch": 2.2756878158338014, |
| "grad_norm": 0.13903851563190525, |
| "learning_rate": 3.766944044436649e-05, |
| "loss": 0.3548, |
| "step": 253 |
| }, |
| { |
| "epoch": 2.2846715328467155, |
| "grad_norm": 0.13419589581861444, |
| "learning_rate": 3.678799842748521e-05, |
| "loss": 0.3716, |
| "step": 254 |
| }, |
| { |
| "epoch": 2.293655249859629, |
| "grad_norm": 0.14685606142797603, |
| "learning_rate": 3.5914985035671156e-05, |
| "loss": 0.3571, |
| "step": 255 |
| }, |
| { |
| "epoch": 2.3026389668725433, |
| "grad_norm": 0.1304570595276937, |
| "learning_rate": 3.5050496646118584e-05, |
| "loss": 0.3655, |
| "step": 256 |
| }, |
| { |
| "epoch": 2.3116226838854574, |
| "grad_norm": 0.12826171716327225, |
| "learning_rate": 3.4194628694895594e-05, |
| "loss": 0.364, |
| "step": 257 |
| }, |
| { |
| "epoch": 2.3206064008983716, |
| "grad_norm": 0.11075309460103316, |
| "learning_rate": 3.334747566640824e-05, |
| "loss": 0.351, |
| "step": 258 |
| }, |
| { |
| "epoch": 2.3295901179112857, |
| "grad_norm": 0.1312845965125087, |
| "learning_rate": 3.2509131082969915e-05, |
| "loss": 0.3687, |
| "step": 259 |
| }, |
| { |
| "epoch": 2.3385738349242, |
| "grad_norm": 0.11983646002909432, |
| "learning_rate": 3.167968749447683e-05, |
| "loss": 0.3454, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.347557551937114, |
| "grad_norm": 0.11479008167106183, |
| "learning_rate": 3.0859236468190844e-05, |
| "loss": 0.3654, |
| "step": 261 |
| }, |
| { |
| "epoch": 2.356541268950028, |
| "grad_norm": 0.11632197031117864, |
| "learning_rate": 3.00478685786309e-05, |
| "loss": 0.3617, |
| "step": 262 |
| }, |
| { |
| "epoch": 2.365524985962942, |
| "grad_norm": 0.12594569973876463, |
| "learning_rate": 2.9245673397573843e-05, |
| "loss": 0.3582, |
| "step": 263 |
| }, |
| { |
| "epoch": 2.3745087029758563, |
| "grad_norm": 0.124698262560256, |
| "learning_rate": 2.8452739484166123e-05, |
| "loss": 0.3628, |
| "step": 264 |
| }, |
| { |
| "epoch": 2.3834924199887704, |
| "grad_norm": 0.11918130322626423, |
| "learning_rate": 2.7669154375147227e-05, |
| "loss": 0.3617, |
| "step": 265 |
| }, |
| { |
| "epoch": 2.3924761370016845, |
| "grad_norm": 0.11920547285141371, |
| "learning_rate": 2.6895004575185922e-05, |
| "loss": 0.3657, |
| "step": 266 |
| }, |
| { |
| "epoch": 2.4014598540145986, |
| "grad_norm": 0.10937163911147071, |
| "learning_rate": 2.6130375547330496e-05, |
| "loss": 0.3519, |
| "step": 267 |
| }, |
| { |
| "epoch": 2.4104435710275127, |
| "grad_norm": 0.14631208309450952, |
| "learning_rate": 2.5375351703574044e-05, |
| "loss": 0.3631, |
| "step": 268 |
| }, |
| { |
| "epoch": 2.419427288040427, |
| "grad_norm": 0.1265003307705914, |
| "learning_rate": 2.4630016395535618e-05, |
| "loss": 0.3679, |
| "step": 269 |
| }, |
| { |
| "epoch": 2.428411005053341, |
| "grad_norm": 0.11607388860612512, |
| "learning_rate": 2.389445190525866e-05, |
| "loss": 0.3475, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.437394722066255, |
| "grad_norm": 0.13926049695780746, |
| "learning_rate": 2.316873943612722e-05, |
| "loss": 0.3509, |
| "step": 271 |
| }, |
| { |
| "epoch": 2.446378439079169, |
| "grad_norm": 0.12235594276612768, |
| "learning_rate": 2.2452959103901613e-05, |
| "loss": 0.3597, |
| "step": 272 |
| }, |
| { |
| "epoch": 2.4553621560920833, |
| "grad_norm": 0.11752333623641517, |
| "learning_rate": 2.1747189927873877e-05, |
| "loss": 0.3515, |
| "step": 273 |
| }, |
| { |
| "epoch": 2.464345873104997, |
| "grad_norm": 0.11683542380499232, |
| "learning_rate": 2.1051509822144332e-05, |
| "loss": 0.3695, |
| "step": 274 |
| }, |
| { |
| "epoch": 2.473329590117911, |
| "grad_norm": 0.12084518265976886, |
| "learning_rate": 2.0365995587020234e-05, |
| "loss": 0.3592, |
| "step": 275 |
| }, |
| { |
| "epoch": 2.482313307130825, |
| "grad_norm": 0.10536387180642533, |
| "learning_rate": 1.969072290053725e-05, |
| "loss": 0.3634, |
| "step": 276 |
| }, |
| { |
| "epoch": 2.4912970241437393, |
| "grad_norm": 0.12459678439311449, |
| "learning_rate": 1.902576631010499e-05, |
| "loss": 0.3444, |
| "step": 277 |
| }, |
| { |
| "epoch": 2.5002807411566534, |
| "grad_norm": 0.10423725455797198, |
| "learning_rate": 1.8371199224277213e-05, |
| "loss": 0.3446, |
| "step": 278 |
| }, |
| { |
| "epoch": 2.5092644581695676, |
| "grad_norm": 0.09677902356707294, |
| "learning_rate": 1.772709390464784e-05, |
| "loss": 0.3565, |
| "step": 279 |
| }, |
| { |
| "epoch": 2.5182481751824817, |
| "grad_norm": 0.1056308438167975, |
| "learning_rate": 1.7093521457873555e-05, |
| "loss": 0.3667, |
| "step": 280 |
| }, |
| { |
| "epoch": 2.527231892195396, |
| "grad_norm": 0.09014603021736677, |
| "learning_rate": 1.647055182782392e-05, |
| "loss": 0.3486, |
| "step": 281 |
| }, |
| { |
| "epoch": 2.53621560920831, |
| "grad_norm": 0.10287065883652714, |
| "learning_rate": 1.5858253787859857e-05, |
| "loss": 0.3525, |
| "step": 282 |
| }, |
| { |
| "epoch": 2.545199326221224, |
| "grad_norm": 0.09914950751521003, |
| "learning_rate": 1.5256694933241261e-05, |
| "loss": 0.3675, |
| "step": 283 |
| }, |
| { |
| "epoch": 2.554183043234138, |
| "grad_norm": 0.09453069458816908, |
| "learning_rate": 1.466594167366493e-05, |
| "loss": 0.3443, |
| "step": 284 |
| }, |
| { |
| "epoch": 2.5631667602470523, |
| "grad_norm": 0.10390944505822122, |
| "learning_rate": 1.4086059225933016e-05, |
| "loss": 0.3719, |
| "step": 285 |
| }, |
| { |
| "epoch": 2.5721504772599664, |
| "grad_norm": 0.10438780375230042, |
| "learning_rate": 1.3517111606753471e-05, |
| "loss": 0.3605, |
| "step": 286 |
| }, |
| { |
| "epoch": 2.5811341942728805, |
| "grad_norm": 0.09859350856353293, |
| "learning_rate": 1.2959161625672802e-05, |
| "loss": 0.3616, |
| "step": 287 |
| }, |
| { |
| "epoch": 2.5901179112857946, |
| "grad_norm": 0.09622498419041688, |
| "learning_rate": 1.2412270878142156e-05, |
| "loss": 0.3501, |
| "step": 288 |
| }, |
| { |
| "epoch": 2.5991016282987087, |
| "grad_norm": 0.09113584600196091, |
| "learning_rate": 1.1876499738717436e-05, |
| "loss": 0.3503, |
| "step": 289 |
| }, |
| { |
| "epoch": 2.608085345311623, |
| "grad_norm": 0.09741692104467439, |
| "learning_rate": 1.1351907354394194e-05, |
| "loss": 0.3657, |
| "step": 290 |
| }, |
| { |
| "epoch": 2.6170690623245365, |
| "grad_norm": 0.09206757777787074, |
| "learning_rate": 1.0838551638078013e-05, |
| "loss": 0.3603, |
| "step": 291 |
| }, |
| { |
| "epoch": 2.626052779337451, |
| "grad_norm": 0.08188402631450145, |
| "learning_rate": 1.0336489262191212e-05, |
| "loss": 0.3462, |
| "step": 292 |
| }, |
| { |
| "epoch": 2.6350364963503647, |
| "grad_norm": 0.08808466701218914, |
| "learning_rate": 9.845775652416357e-06, |
| "loss": 0.3632, |
| "step": 293 |
| }, |
| { |
| "epoch": 2.6440202133632793, |
| "grad_norm": 0.0943618643568796, |
| "learning_rate": 9.366464981577584e-06, |
| "loss": 0.3594, |
| "step": 294 |
| }, |
| { |
| "epoch": 2.653003930376193, |
| "grad_norm": 0.09593739977292585, |
| "learning_rate": 8.89861016365997e-06, |
| "loss": 0.3515, |
| "step": 295 |
| }, |
| { |
| "epoch": 2.661987647389107, |
| "grad_norm": 0.08248015664645339, |
| "learning_rate": 8.442262847968263e-06, |
| "loss": 0.3608, |
| "step": 296 |
| }, |
| { |
| "epoch": 2.670971364402021, |
| "grad_norm": 0.08411868089198224, |
| "learning_rate": 7.997473413424846e-06, |
| "loss": 0.3592, |
| "step": 297 |
| }, |
| { |
| "epoch": 2.6799550814149353, |
| "grad_norm": 0.09490876979791782, |
| "learning_rate": 7.564290963008149e-06, |
| "loss": 0.358, |
| "step": 298 |
| }, |
| { |
| "epoch": 2.6889387984278494, |
| "grad_norm": 0.08867586845979594, |
| "learning_rate": 7.142763318331872e-06, |
| "loss": 0.351, |
| "step": 299 |
| }, |
| { |
| "epoch": 2.6979225154407636, |
| "grad_norm": 0.08779362567358494, |
| "learning_rate": 6.732937014365695e-06, |
| "loss": 0.3494, |
| "step": 300 |
| }, |
| { |
| "epoch": 2.7069062324536777, |
| "grad_norm": 0.08301900637144422, |
| "learning_rate": 6.3348572942979654e-06, |
| "loss": 0.3587, |
| "step": 301 |
| }, |
| { |
| "epoch": 2.715889949466592, |
| "grad_norm": 0.08891924464679533, |
| "learning_rate": 5.948568104541074e-06, |
| "loss": 0.3651, |
| "step": 302 |
| }, |
| { |
| "epoch": 2.724873666479506, |
| "grad_norm": 0.08730964097529133, |
| "learning_rate": 5.574112089879872e-06, |
| "loss": 0.3523, |
| "step": 303 |
| }, |
| { |
| "epoch": 2.73385738349242, |
| "grad_norm": 0.08450880465007451, |
| "learning_rate": 5.211530588763962e-06, |
| "loss": 0.3482, |
| "step": 304 |
| }, |
| { |
| "epoch": 2.742841100505334, |
| "grad_norm": 0.08342255207562929, |
| "learning_rate": 4.860863628744007e-06, |
| "loss": 0.3564, |
| "step": 305 |
| }, |
| { |
| "epoch": 2.7518248175182483, |
| "grad_norm": 0.08975850015832335, |
| "learning_rate": 4.522149922052897e-06, |
| "loss": 0.3491, |
| "step": 306 |
| }, |
| { |
| "epoch": 2.7608085345311624, |
| "grad_norm": 0.08240771555091773, |
| "learning_rate": 4.195426861332049e-06, |
| "loss": 0.3588, |
| "step": 307 |
| }, |
| { |
| "epoch": 2.7697922515440765, |
| "grad_norm": 0.08798778487887464, |
| "learning_rate": 3.880730515503412e-06, |
| "loss": 0.3427, |
| "step": 308 |
| }, |
| { |
| "epoch": 2.7787759685569906, |
| "grad_norm": 0.08113053094229192, |
| "learning_rate": 3.57809562578763e-06, |
| "loss": 0.3728, |
| "step": 309 |
| }, |
| { |
| "epoch": 2.7877596855699043, |
| "grad_norm": 0.08279617250380948, |
| "learning_rate": 3.2875556018687533e-06, |
| "loss": 0.3418, |
| "step": 310 |
| }, |
| { |
| "epoch": 2.796743402582819, |
| "grad_norm": 0.07729029098691793, |
| "learning_rate": 3.0091425182058514e-06, |
| "loss": 0.3617, |
| "step": 311 |
| }, |
| { |
| "epoch": 2.8057271195957325, |
| "grad_norm": 0.08856029522706829, |
| "learning_rate": 2.742887110492231e-06, |
| "loss": 0.3602, |
| "step": 312 |
| }, |
| { |
| "epoch": 2.814710836608647, |
| "grad_norm": 0.09170437352580232, |
| "learning_rate": 2.4888187722622945e-06, |
| "loss": 0.3532, |
| "step": 313 |
| }, |
| { |
| "epoch": 2.8236945536215607, |
| "grad_norm": 0.08713999383289339, |
| "learning_rate": 2.2469655516466e-06, |
| "loss": 0.3579, |
| "step": 314 |
| }, |
| { |
| "epoch": 2.832678270634475, |
| "grad_norm": 0.08158645722200215, |
| "learning_rate": 2.017354148275491e-06, |
| "loss": 0.3527, |
| "step": 315 |
| }, |
| { |
| "epoch": 2.841661987647389, |
| "grad_norm": 0.0775962718555909, |
| "learning_rate": 1.8000099103314957e-06, |
| "loss": 0.357, |
| "step": 316 |
| }, |
| { |
| "epoch": 2.850645704660303, |
| "grad_norm": 0.07417615515514962, |
| "learning_rate": 1.5949568317510827e-06, |
| "loss": 0.3633, |
| "step": 317 |
| }, |
| { |
| "epoch": 2.859629421673217, |
| "grad_norm": 0.07076814193057895, |
| "learning_rate": 1.402217549575769e-06, |
| "loss": 0.357, |
| "step": 318 |
| }, |
| { |
| "epoch": 2.8686131386861313, |
| "grad_norm": 0.08051085296339765, |
| "learning_rate": 1.2218133414530984e-06, |
| "loss": 0.3528, |
| "step": 319 |
| }, |
| { |
| "epoch": 2.8775968556990454, |
| "grad_norm": 0.07853549988975052, |
| "learning_rate": 1.0537641232876473e-06, |
| "loss": 0.3614, |
| "step": 320 |
| }, |
| { |
| "epoch": 2.8865805727119596, |
| "grad_norm": 0.07073622608834755, |
| "learning_rate": 8.980884470424321e-07, |
| "loss": 0.3551, |
| "step": 321 |
| }, |
| { |
| "epoch": 2.8955642897248737, |
| "grad_norm": 0.07143610759857762, |
| "learning_rate": 7.548034986908066e-07, |
| "loss": 0.3545, |
| "step": 322 |
| }, |
| { |
| "epoch": 2.904548006737788, |
| "grad_norm": 0.0749903587104429, |
| "learning_rate": 6.239250963192269e-07, |
| "loss": 0.3583, |
| "step": 323 |
| }, |
| { |
| "epoch": 2.913531723750702, |
| "grad_norm": 0.07746634488254509, |
| "learning_rate": 5.054676883809827e-07, |
| "loss": 0.3533, |
| "step": 324 |
| }, |
| { |
| "epoch": 2.922515440763616, |
| "grad_norm": 0.07444763137173055, |
| "learning_rate": 3.994443521011485e-07, |
| "loss": 0.3627, |
| "step": 325 |
| }, |
| { |
| "epoch": 2.93149915777653, |
| "grad_norm": 0.06987734521185171, |
| "learning_rate": 3.058667920329281e-07, |
| "loss": 0.348, |
| "step": 326 |
| }, |
| { |
| "epoch": 2.9404828747894443, |
| "grad_norm": 0.07171007945104789, |
| "learning_rate": 2.2474533876546995e-07, |
| "loss": 0.3455, |
| "step": 327 |
| }, |
| { |
| "epoch": 2.9494665918023584, |
| "grad_norm": 0.0727402518849161, |
| "learning_rate": 1.560889477834654e-07, |
| "loss": 0.3579, |
| "step": 328 |
| }, |
| { |
| "epoch": 2.958450308815272, |
| "grad_norm": 0.07322322016212719, |
| "learning_rate": 9.99051984784689e-08, |
| "loss": 0.3528, |
| "step": 329 |
| }, |
| { |
| "epoch": 2.9674340258281866, |
| "grad_norm": 0.07567114973675214, |
| "learning_rate": 5.620029331218986e-08, |
| "loss": 0.3564, |
| "step": 330 |
| }, |
| { |
| "epoch": 2.9764177428411003, |
| "grad_norm": 0.07625451494420618, |
| "learning_rate": 2.4979057131732006e-08, |
| "loss": 0.3594, |
| "step": 331 |
| }, |
| { |
| "epoch": 2.985401459854015, |
| "grad_norm": 0.07133050968488629, |
| "learning_rate": 6.2449366369555176e-09, |
| "loss": 0.3521, |
| "step": 332 |
| }, |
| { |
| "epoch": 2.9943851768669285, |
| "grad_norm": 0.07662216533129715, |
| "learning_rate": 0.0, |
| "loss": 0.3558, |
| "step": 333 |
| }, |
| { |
| "epoch": 2.9943851768669285, |
| "step": 333, |
| "total_flos": 4607052149424128.0, |
| "train_loss": 0.5244213240640657, |
| "train_runtime": 115493.2352, |
| "train_samples_per_second": 2.96, |
| "train_steps_per_second": 0.003 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 333, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4607052149424128.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |